1#[cfg(feature = "gc-drc")]
13pub mod drc;
14
15#[cfg(feature = "gc-null")]
16pub mod null;
17
18use crate::{
19 WasmArrayType, WasmCompositeInnerType, WasmCompositeType, WasmExnType, WasmStorageType,
20 WasmStructType, WasmValType, collections, error::OutOfMemory, prelude::*,
21};
22use alloc::sync::Arc;
23use core::alloc::Layout;
24
/// Discriminant used to tag `i31` references.
// NOTE(review): presumably stored in the low bit(s) of a GC reference; the
// `VMGcRef` encoding is not visible in this file — confirm there.
pub const I31_DISCRIMINANT: u32 = 1;

/// The size (in bytes) of the common GC object header that precedes every
/// GC-managed object.
pub const VM_GC_HEADER_SIZE: u32 = 8;

/// The required alignment (in bytes) of the common GC object header.
pub const VM_GC_HEADER_ALIGN: u32 = 8;

/// Byte offset of the `VMGcKind` field within the GC header.
pub const VM_GC_HEADER_KIND_OFFSET: u32 = 0;

/// Byte offset of the type-index field within the GC header (directly after
/// the 4-byte kind field; see `VM_GC_KIND_SIZE`).
pub const VM_GC_HEADER_TYPE_INDEX_OFFSET: u32 = 4;
40pub fn byte_size_of_wasm_ty_in_gc_heap(ty: &WasmStorageType) -> u32 {
43 match ty {
44 WasmStorageType::I8 => 1,
45 WasmStorageType::I16 => 2,
46 WasmStorageType::Val(ty) => match ty {
47 WasmValType::I32 | WasmValType::F32 | WasmValType::Ref(_) => 4,
48 WasmValType::I64 | WasmValType::F64 => 8,
49 WasmValType::V128 => 16,
50 },
51 }
52}
53
54#[cfg(any(feature = "gc-drc", feature = "gc-null"))]
/// Round `*offset` up to a multiple of `align` (which must be a power of
/// two), raise `*max_align` to at least `align`, and return the rounded
/// offset.
fn align_up(offset: &mut u32, max_align: &mut u32, align: u32) -> u32 {
    debug_assert!(max_align.is_power_of_two());
    debug_assert!(align.is_power_of_two());
    let mask = align - 1;
    // Overflow here would be a bug in layout computation, so a panic (via
    // `unwrap`) is the right response.
    let rounded = offset.checked_add(mask).unwrap() & !mask;
    *offset = rounded;
    if align > *max_align {
        *max_align = align;
    }
    rounded
}
64
65#[cfg(any(feature = "gc-drc", feature = "gc-null"))]
69fn field(size: &mut u32, align: &mut u32, bytes: u32) -> u32 {
70 let offset = align_up(size, align, bytes);
71 *size += bytes;
72 offset
73}
74
/// Compute a `GcArrayLayout` for the given array type, given a
/// collector-specific object header of `header_size` bytes with
/// `header_align` alignment.
///
/// The resulting layout is: object header, then a `u32` length field
/// (whose offset must equal `expected_array_length_offset`), then the
/// elements themselves.
#[cfg(any(feature = "gc-drc", feature = "gc-null"))]
fn common_array_layout(
    ty: &WasmArrayType,
    header_size: u32,
    header_align: u32,
    expected_array_length_offset: u32,
) -> GcArrayLayout {
    use core::mem;

    // Collector-specific headers must be at least as large and aligned as
    // the common GC header they extend.
    assert!(header_size >= crate::VM_GC_HEADER_SIZE);
    assert!(header_align >= crate::VM_GC_HEADER_ALIGN);

    let mut size = header_size;
    let mut align = header_align;

    // The array's `u32` length field comes directly after the header; the
    // caller tells us where it expects that field to land.
    let length_field_size = u32::try_from(mem::size_of::<u32>()).unwrap();
    let length_field_offset = field(&mut size, &mut align, length_field_size);
    assert_eq!(length_field_offset, expected_array_length_offset);

    // Elements start at the next offset that is aligned for the element
    // type (element sizes are 1/2/4/8/16, all powers of two).
    let elem_size = byte_size_of_wasm_ty_in_gc_heap(&ty.0.element_type);
    let elems_offset = align_up(&mut size, &mut align, elem_size);
    assert_eq!(elems_offset, size);

    let elems_are_gc_refs = ty.0.element_type.is_vmgcref_type_and_not_i31();
    if elems_are_gc_refs {
        debug_assert_eq!(
            length_field_offset + length_field_size,
            elems_offset,
            "DRC collector relies on GC ref elements appearing directly after the length field, without any padding",
        );
    }

    GcArrayLayout {
        base_size: size,
        align,
        elem_size,
        elems_are_gc_refs,
    }
}
116
/// Common field-layout code shared by struct and exception objects, given
/// a collector-specific header size and alignment.
///
/// Returns the total (alignment-rounded) object size, the object
/// alignment, and the per-field layouts in declaration order.
#[cfg(any(feature = "gc-null", feature = "gc-drc"))]
fn common_struct_or_exn_layout(
    fields: &[crate::WasmFieldType],
    header_size: u32,
    header_align: u32,
) -> (u32, u32, collections::Vec<GcStructLayoutField>) {
    use crate::PanicOnOom as _;

    let mut size = header_size;
    let mut align = header_align;

    // Lay out each field in declaration order, naturally aligned to its
    // own size. The closure mutates `size`/`align` as it goes, so the
    // iteration order matters.
    let fields = fields
        .iter()
        .map(|f| {
            let field_size = byte_size_of_wasm_ty_in_gc_heap(&f.element_type);
            let offset = field(&mut size, &mut align, field_size);
            let is_gc_ref = f.element_type.is_vmgcref_type_and_not_i31();
            GcStructLayoutField { offset, is_gc_ref }
        })
        .try_collect::<collections::Vec<_>, _>()
        .panic_on_oom();

    // Round the final size up to the object's alignment so that arrays of
    // such objects (or successive heap allocations) stay aligned.
    let align_size_to = align;
    align_up(&mut size, &mut align, align_size_to);

    (size, align, fields)
}
158
159#[cfg(any(feature = "gc-null", feature = "gc-drc"))]
162fn common_struct_layout(
163 ty: &WasmStructType,
164 header_size: u32,
165 header_align: u32,
166) -> GcStructLayout {
167 assert!(header_size >= crate::VM_GC_HEADER_SIZE);
168 assert!(header_align >= crate::VM_GC_HEADER_ALIGN);
169
170 let (size, align, fields) = common_struct_or_exn_layout(&ty.fields, header_size, header_align);
171
172 GcStructLayout {
173 size,
174 align,
175 fields,
176 is_exception: false,
177 }
178}
179
180#[cfg(any(feature = "gc-null", feature = "gc-drc"))]
184fn common_exn_layout(ty: &WasmExnType, header_size: u32, header_align: u32) -> GcStructLayout {
185 assert!(header_size >= crate::VM_GC_HEADER_SIZE);
186 assert!(header_align >= crate::VM_GC_HEADER_ALIGN);
187
188 assert!(header_align >= 8);
191 let header_size = header_size + 2 * u32::try_from(core::mem::size_of::<u32>()).unwrap();
192
193 let (size, align, fields) = common_struct_or_exn_layout(&ty.fields, header_size, header_align);
194
195 GcStructLayout {
196 size,
197 align,
198 fields,
199 is_exception: true,
200 }
201}
202
/// A trait for computing the layout of GC-managed objects for a particular
/// collector.
pub trait GcTypeLayouts {
    /// The byte offset of an array object's `u32` length field.
    fn array_length_field_offset(&self) -> u32;

    /// The byte offset of an exception object's tag-instance field.
    // NOTE(review): presumably one of the two `u32` words reserved by
    // `common_exn_layout` — confirm with the collector implementations.
    fn exception_tag_instance_offset(&self) -> u32;

    /// The byte offset of an exception object's tag-defined field.
    // NOTE(review): presumably one of the two `u32` words reserved by
    // `common_exn_layout` — confirm with the collector implementations.
    fn exception_tag_defined_offset(&self) -> u32;

    /// Get this collector's layout for the given composite type, or `None`
    /// if the type has no GC-heap representation (functions).
    ///
    /// # Panics
    ///
    /// Panics if `ty` is shared, and on continuation types (not yet
    /// implemented).
    fn gc_layout(&self, ty: &WasmCompositeType) -> Option<GcLayout> {
        assert!(!ty.shared);
        match &ty.inner {
            WasmCompositeInnerType::Array(ty) => Some(self.array_layout(ty).into()),
            WasmCompositeInnerType::Struct(ty) => Some(Arc::new(self.struct_layout(ty)).into()),
            WasmCompositeInnerType::Func(_) => None,
            WasmCompositeInnerType::Cont(_) => {
                unimplemented!("Stack switching feature not compatible with GC, yet")
            }
            WasmCompositeInnerType::Exn(ty) => Some(Arc::new(self.exn_layout(ty)).into()),
        }
    }

    /// Get this collector's layout for the given array type.
    fn array_layout(&self, ty: &WasmArrayType) -> GcArrayLayout;

    /// Get this collector's layout for the given struct type.
    fn struct_layout(&self, ty: &WasmStructType) -> GcStructLayout;

    /// Get this collector's layout for the given exception type.
    fn exn_layout(&self, ty: &WasmExnType) -> GcStructLayout;
}
252
/// The layout of a GC-managed object: either an array or a struct
/// (exception objects reuse the struct representation; see
/// `GcStructLayout::is_exception`).
#[derive(Clone, Debug)]
pub enum GcLayout {
    /// The layout of a GC-managed array.
    Array(GcArrayLayout),

    /// The layout of a GC-managed struct, behind an `Arc` so that clones
    /// only bump a refcount rather than copy the field vector.
    Struct(Arc<GcStructLayout>),
}
262
263impl From<GcArrayLayout> for GcLayout {
264 fn from(layout: GcArrayLayout) -> Self {
265 Self::Array(layout)
266 }
267}
268
269impl From<Arc<GcStructLayout>> for GcLayout {
270 fn from(layout: Arc<GcStructLayout>) -> Self {
271 Self::Struct(layout)
272 }
273}
274
275impl TryClone for GcLayout {
276 fn try_clone(&self) -> core::result::Result<Self, wasmtime_core::error::OutOfMemory> {
277 Ok(self.clone())
278 }
279}
280
281impl GcLayout {
282 #[track_caller]
284 pub fn unwrap_struct(&self) -> &Arc<GcStructLayout> {
285 match self {
286 Self::Struct(s) => s,
287 _ => panic!("GcLayout::unwrap_struct on non-struct GC layout"),
288 }
289 }
290
291 #[track_caller]
293 pub fn unwrap_array(&self) -> &GcArrayLayout {
294 match self {
295 Self::Array(a) => a,
296 _ => panic!("GcLayout::unwrap_array on non-array GC layout"),
297 }
298 }
299}
300
/// The layout of a GC-managed array object.
#[derive(Clone, Debug)]
pub struct GcArrayLayout {
    /// The size of this array object ignoring its elements, i.e. the
    /// object header plus the length field. The full size for `len`
    /// elements is `GcArrayLayout::size_for_len(len)`.
    pub base_size: u32,

    /// The alignment (in bytes) of this array object.
    pub align: u32,

    /// The size (in bytes) of each element.
    pub elem_size: u32,

    /// Whether the elements are GC references (and not `i31`s).
    pub elems_are_gc_refs: bool,
}
331
332impl GcArrayLayout {
333 #[inline]
335 pub fn size_for_len(&self, len: u32) -> u32 {
336 self.elem_offset(len)
337 }
338
339 #[inline]
341 pub fn elem_offset(&self, i: u32) -> u32 {
342 self.base_size + i * self.elem_size
343 }
344
345 pub fn layout(&self, len: u32) -> Layout {
348 let size = self.size_for_len(len);
349 let size = usize::try_from(size).unwrap();
350 let align = usize::try_from(self.align).unwrap();
351 Layout::from_size_align(size, align).unwrap()
352 }
353}
354
/// The layout of a GC-managed struct object (also used for exception
/// objects; see `is_exception`).
#[derive(Debug)]
pub struct GcStructLayout {
    /// The total size (in bytes) of this object, including the header.
    pub size: u32,

    /// The alignment (in bytes) of this object.
    pub align: u32,

    /// The layout of each of this struct's fields, in declaration order.
    pub fields: collections::Vec<GcStructLayoutField>,

    /// Whether this layout describes an exception object rather than a
    /// Wasm struct.
    pub is_exception: bool,
}
385
386impl TryClone for GcStructLayout {
387 fn try_clone(&self) -> Result<Self, OutOfMemory> {
388 Ok(GcStructLayout {
389 size: self.size,
390 align: self.align,
391 fields: self.fields.try_clone()?,
392 is_exception: self.is_exception,
393 })
394 }
395}
396
397impl GcStructLayout {
398 pub fn layout(&self) -> Layout {
400 let size = usize::try_from(self.size).unwrap();
401 let align = usize::try_from(self.align).unwrap();
402 Layout::from_size_align(size, align).unwrap()
403 }
404}
405
/// The layout of a single field within a `GcStructLayout`.
#[derive(Clone, Copy, Debug)]
pub struct GcStructLayoutField {
    /// The byte offset of this field from the start of the object.
    pub offset: u32,

    /// Whether this field holds a GC reference (and not an `i31`).
    // NOTE(review): collectors presumably use this to find the object's
    // outgoing edges — confirm with the `drc`/`null` implementations.
    pub is_gc_ref: bool,
}
419
420impl TryClone for GcStructLayoutField {
421 fn try_clone(&self) -> Result<Self, OutOfMemory> {
422 Ok(*self)
423 }
424}
425
/// The kind of a GC-managed object, stored in the high bits of the
/// header's kind word (at `VM_GC_HEADER_KIND_OFFSET`).
//
// The kinds are encoded as a bitset in the top 6 bits (`VMGcKind::MASK`)
// such that a supertype's bits are a subset of each of its subtypes' bits,
// e.g. `ArrayRef` (0b101010) contains all of `EqRef`'s (0b101000) and
// `AnyRef`'s (0b100000) bits. `VMGcKind::matches` relies on this, and the
// `kind_matches` test at the bottom of this file checks the hierarchy.
#[repr(u32)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[rustfmt::skip]
#[expect(missing_docs, reason = "self-describing variants")]
pub enum VMGcKind {
    ExternRef = 0b010000 << 26,
    AnyRef = 0b100000 << 26,
    EqRef = 0b101000 << 26,
    ArrayRef = 0b101010 << 26,
    StructRef = 0b101100 << 26,
    ExnRef = 0b000001 << 26,
}
463
/// The size (in bytes) of a `VMGcKind` as stored in the GC header.
pub const VM_GC_KIND_SIZE: u8 = 4;

// Compile-time check that the constant agrees with the actual size of the
// `#[repr(u32)]` enum.
const _: () = assert!(VM_GC_KIND_SIZE as usize == core::mem::size_of::<VMGcKind>());
468
469impl VMGcKind {
470 pub const MASK: u32 = 0b111111 << 26;
472
473 pub const UNUSED_MASK: u32 = !Self::MASK;
476
477 #[inline]
479 pub fn value_fits_in_unused_bits(value: u32) -> bool {
480 (value & Self::UNUSED_MASK) == value
481 }
482
483 #[inline]
486 pub fn from_high_bits_of_u32(val: u32) -> VMGcKind {
487 let masked = val & Self::MASK;
488 match masked {
489 x if x == Self::ExternRef.as_u32() => Self::ExternRef,
490 x if x == Self::AnyRef.as_u32() => Self::AnyRef,
491 x if x == Self::EqRef.as_u32() => Self::EqRef,
492 x if x == Self::ArrayRef.as_u32() => Self::ArrayRef,
493 x if x == Self::StructRef.as_u32() => Self::StructRef,
494 x if x == Self::ExnRef.as_u32() => Self::ExnRef,
495 _ => panic!("invalid `VMGcKind`: {masked:#032b}"),
496 }
497 }
498
499 #[inline]
503 pub fn matches(self, other: Self) -> bool {
504 (self.as_u32() & other.as_u32()) == other.as_u32()
505 }
506
507 #[inline]
509 pub fn as_u32(self) -> u32 {
510 self as u32
511 }
512}
513
#[cfg(test)]
mod tests {
    use super::VMGcKind::*;
    use crate::prelude::*;

    /// Check that `VMGcKind::matches` reflects exactly the intended
    /// subtype hierarchy encoded in the kind bits.
    #[test]
    fn kind_matches() {
        let all = [ExternRef, AnyRef, EqRef, ArrayRef, StructRef, ExnRef];

        // Each entry pairs a kind with the set of its strict subtypes.
        for (sup, subs) in [
            (ExternRef, vec![]),
            (AnyRef, vec![EqRef, ArrayRef, StructRef]),
            (EqRef, vec![ArrayRef, StructRef]),
            (ArrayRef, vec![]),
            (StructRef, vec![]),
            (ExnRef, vec![]),
        ] {
            // Every kind matches itself...
            assert!(sup.matches(sup));
            // ...every subtype matches its supertype...
            for sub in &subs {
                assert!(sub.matches(sup));
            }
            // ...and no unrelated kind matches it.
            for kind in all.iter().filter(|k| **k != sup && !subs.contains(k)) {
                assert!(!kind.matches(sup));
            }
        }
    }
}