1#[cfg(feature = "gc-drc")]
13pub mod drc;
14
15#[cfg(feature = "gc-null")]
16pub mod null;
17
18use crate::{
19 WasmArrayType, WasmCompositeInnerType, WasmCompositeType, WasmStorageType, WasmStructType,
20 WasmValType,
21};
22use crate::{WasmExnType, prelude::*};
23use core::alloc::Layout;
24
/// Discriminant value associated with `i31` GC references.
// NOTE(review): presumably a low-bit tag distinguishing `i31` values from
// actual heap allocations — confirm against the GC reference implementation.
pub const I31_DISCRIMINANT: u32 = 1;

/// The size, in bytes, of the common header that every GC object begins with:
/// a 4-byte kind word at offset 0 followed by a 4-byte type index at offset 4.
pub const VM_GC_HEADER_SIZE: u32 = 8;

/// The required alignment, in bytes, of the common GC object header.
pub const VM_GC_HEADER_ALIGN: u32 = 8;

/// Byte offset of the `VMGcKind` word within the common GC header.
pub const VM_GC_HEADER_KIND_OFFSET: u32 = 0;

/// Byte offset of the type index within the common GC header.
pub const VM_GC_HEADER_TYPE_INDEX_OFFSET: u32 = 4;
40pub fn byte_size_of_wasm_ty_in_gc_heap(ty: &WasmStorageType) -> u32 {
43 use crate::{WasmHeapType::*, WasmRefType};
44 match ty {
45 WasmStorageType::I8 => 1,
46 WasmStorageType::I16 => 2,
47 WasmStorageType::Val(ty) => match ty {
48 WasmValType::Ref(WasmRefType {
49 nullable: _,
50 heap_type: ConcreteCont(_) | Cont,
51 }) => unimplemented!("Stack switching feature not compatbile with GC, yet"),
52 WasmValType::I32 | WasmValType::F32 | WasmValType::Ref(_) => 4,
53 WasmValType::I64 | WasmValType::F64 => 8,
54 WasmValType::V128 => 16,
55 },
56 }
57}
58
59#[cfg(any(feature = "gc-drc", feature = "gc-null"))]
/// Round `*offset` up to `align` (which must be a power of two), recording
/// the largest alignment seen so far in `*max_align`.
///
/// Returns the rounded-up offset.
fn align_up(offset: &mut u32, max_align: &mut u32, align: u32) -> u32 {
    debug_assert!(max_align.is_power_of_two());
    debug_assert!(align.is_power_of_two());
    // Classic round-up-to-power-of-two: add `align - 1`, then clear the low
    // bits. `checked_add` guards against `u32` overflow of the offset.
    let bumped = offset.checked_add(align - 1).unwrap();
    let aligned = bumped & !(align - 1);
    *offset = aligned;
    if align > *max_align {
        *max_align = align;
    }
    aligned
}
69
70#[cfg(any(feature = "gc-drc", feature = "gc-null"))]
74fn field(size: &mut u32, align: &mut u32, bytes: u32) -> u32 {
75 let offset = align_up(size, align, bytes);
76 *size += bytes;
77 offset
78}
79
80#[cfg(any(feature = "gc-drc", feature = "gc-null"))]
83fn common_array_layout(
84 ty: &WasmArrayType,
85 header_size: u32,
86 header_align: u32,
87 expected_array_length_offset: u32,
88) -> GcArrayLayout {
89 use core::mem;
90
91 assert!(header_size >= crate::VM_GC_HEADER_SIZE);
92 assert!(header_align >= crate::VM_GC_HEADER_ALIGN);
93
94 let mut size = header_size;
95 let mut align = header_align;
96
97 let length_field_size = u32::try_from(mem::size_of::<u32>()).unwrap();
98 let length_field_offset = field(&mut size, &mut align, length_field_size);
99 assert_eq!(length_field_offset, expected_array_length_offset);
100
101 let elem_size = byte_size_of_wasm_ty_in_gc_heap(&ty.0.element_type);
102 let elems_offset = align_up(&mut size, &mut align, elem_size);
103 assert_eq!(elems_offset, size);
104
105 let elems_are_gc_refs = ty.0.element_type.is_vmgcref_type_and_not_i31();
106 if elems_are_gc_refs {
107 debug_assert_eq!(
108 length_field_offset + length_field_size,
109 elems_offset,
110 "DRC collector relies on GC ref elements appearing directly after the length field, without any padding",
111 );
112 }
113
114 GcArrayLayout {
115 base_size: size,
116 align,
117 elem_size,
118 elems_are_gc_refs,
119 }
120}
121
122#[cfg(any(feature = "gc-null", feature = "gc-drc"))]
126fn common_struct_or_exn_layout(
127 fields: &[crate::WasmFieldType],
128 header_size: u32,
129 header_align: u32,
130) -> (u32, u32, Vec<GcStructLayoutField>) {
131 let mut size = header_size;
141 let mut align = header_align;
142
143 let fields = fields
144 .iter()
145 .map(|f| {
146 let field_size = byte_size_of_wasm_ty_in_gc_heap(&f.element_type);
147 let offset = field(&mut size, &mut align, field_size);
148 let is_gc_ref = f.element_type.is_vmgcref_type_and_not_i31();
149 GcStructLayoutField { offset, is_gc_ref }
150 })
151 .collect();
152
153 let align_size_to = align;
156 align_up(&mut size, &mut align, align_size_to);
157
158 (size, align, fields)
159}
160
161#[cfg(any(feature = "gc-null", feature = "gc-drc"))]
164fn common_struct_layout(
165 ty: &WasmStructType,
166 header_size: u32,
167 header_align: u32,
168) -> GcStructLayout {
169 assert!(header_size >= crate::VM_GC_HEADER_SIZE);
170 assert!(header_align >= crate::VM_GC_HEADER_ALIGN);
171
172 let (size, align, fields) = common_struct_or_exn_layout(&ty.fields, header_size, header_align);
173
174 GcStructLayout {
175 size,
176 align,
177 fields,
178 }
179}
180
181#[cfg(any(feature = "gc-null", feature = "gc-drc"))]
185fn common_exn_layout(ty: &WasmExnType, header_size: u32, header_align: u32) -> GcExceptionLayout {
186 assert!(header_size >= crate::VM_GC_HEADER_SIZE);
187 assert!(header_align >= crate::VM_GC_HEADER_ALIGN);
188
189 let tag_offset = header_size;
192 assert!(header_align >= 8);
193 let header_size = header_size + 2 * u32::try_from(core::mem::size_of::<u32>()).unwrap();
194
195 let (size, align, fields) = common_struct_or_exn_layout(&ty.fields, header_size, header_align);
196
197 GcExceptionLayout {
198 size,
199 align,
200 tag_offset,
201 fields,
202 }
203}
204
205pub trait GcTypeLayouts {
208 fn array_length_field_offset(&self) -> u32;
213
214 fn gc_layout(&self, ty: &WasmCompositeType) -> Option<GcLayout> {
219 assert!(!ty.shared);
220 match &ty.inner {
221 WasmCompositeInnerType::Array(ty) => Some(self.array_layout(ty).into()),
222 WasmCompositeInnerType::Struct(ty) => Some(self.struct_layout(ty).into()),
223 WasmCompositeInnerType::Func(_) => None,
224 WasmCompositeInnerType::Cont(_) => {
225 unimplemented!("Stack switching feature not compatbile with GC, yet")
226 }
227 WasmCompositeInnerType::Exn(ty) => Some(self.exn_layout(ty).into()),
228 }
229 }
230
231 fn array_layout(&self, ty: &WasmArrayType) -> GcArrayLayout;
233
234 fn struct_layout(&self, ty: &WasmStructType) -> GcStructLayout;
236
237 fn exn_layout(&self, ty: &WasmExnType) -> GcExceptionLayout;
239}
240
/// The layout of a GC-managed object: one variant per kind of composite
/// type that has a heap representation.
#[derive(Clone, Debug)]
pub enum GcLayout {
    /// The layout of a GC-managed array object.
    Array(GcArrayLayout),

    /// The layout of a GC-managed struct object.
    Struct(GcStructLayout),

    /// The layout of a GC-managed exception object.
    Exception(GcExceptionLayout),
}
253
254impl From<GcArrayLayout> for GcLayout {
255 fn from(layout: GcArrayLayout) -> Self {
256 Self::Array(layout)
257 }
258}
259
260impl From<GcStructLayout> for GcLayout {
261 fn from(layout: GcStructLayout) -> Self {
262 Self::Struct(layout)
263 }
264}
265
266impl From<GcExceptionLayout> for GcLayout {
267 fn from(layout: GcExceptionLayout) -> Self {
268 Self::Exception(layout)
269 }
270}
271
272impl GcLayout {
273 #[track_caller]
275 pub fn unwrap_struct(&self) -> &GcStructLayout {
276 match self {
277 Self::Struct(s) => s,
278 _ => panic!("GcLayout::unwrap_struct on non-struct GC layout"),
279 }
280 }
281
282 #[track_caller]
284 pub fn unwrap_array(&self) -> &GcArrayLayout {
285 match self {
286 Self::Array(a) => a,
287 _ => panic!("GcLayout::unwrap_array on non-array GC layout"),
288 }
289 }
290
291 #[track_caller]
293 pub fn unwrap_exception(&self) -> &GcExceptionLayout {
294 match self {
295 Self::Exception(e) => e,
296 _ => panic!("GcLayout::unwrap_exception on a non-exception GC layout"),
297 }
298 }
299}
300
/// The layout of a GC-managed array object.
#[derive(Clone, Debug)]
pub struct GcArrayLayout {
    /// Bytes occupied before the first element (header plus length field).
    pub base_size: u32,

    /// Required alignment, in bytes, of the whole object.
    pub align: u32,

    /// Size, in bytes, of each element.
    pub elem_size: u32,

    /// Whether the elements are GC references (and not `i31`s).
    pub elems_are_gc_refs: bool,
}

impl GcArrayLayout {
    /// Total object size, in bytes, for an array of `len` elements.
    #[inline]
    pub fn size_for_len(&self, len: u32) -> u32 {
        self.elem_offset(len)
    }

    /// Byte offset of the `i`th element from the start of the object.
    // NOTE(review): unchecked `u32` arithmetic — presumably callers bound
    // `i` so this cannot overflow; confirm at call sites.
    #[inline]
    pub fn elem_offset(&self, i: u32) -> u32 {
        i * self.elem_size + self.base_size
    }

    /// A `core::alloc::Layout` describing an allocation holding an array of
    /// `len` elements with this layout.
    pub fn layout(&self, len: u32) -> Layout {
        let size = usize::try_from(self.size_for_len(len)).unwrap();
        let align = usize::try_from(self.align).unwrap();
        Layout::from_size_align(size, align).unwrap()
    }
}
354
/// The layout of a GC-managed struct object.
#[derive(Clone, Debug)]
pub struct GcStructLayout {
    /// Total size, in bytes, of this struct object, padding included.
    pub size: u32,

    /// Required alignment, in bytes, of the whole object.
    pub align: u32,

    /// Offset and GC-ref info for each field, in declaration order.
    pub fields: Vec<GcStructLayoutField>,
}

impl GcStructLayout {
    /// A `core::alloc::Layout` describing an allocation for this struct.
    pub fn layout(&self) -> Layout {
        Layout::from_size_align(
            usize::try_from(self.size).unwrap(),
            usize::try_from(self.align).unwrap(),
        )
        .unwrap()
    }
}

/// The layout of a single field inside a GC-managed struct (or exception).
#[derive(Clone, Copy, Debug)]
pub struct GcStructLayoutField {
    /// Byte offset of this field from the start of the object.
    pub offset: u32,

    /// Whether this field is a GC reference (and not an `i31`).
    pub is_gc_ref: bool,
}
399
400#[derive(Clone, Debug)]
410pub struct GcExceptionLayout {
411 pub size: u32,
413
414 pub align: u32,
416
417 pub tag_offset: u32,
419
420 pub fields: Vec<GcStructLayoutField>,
424}
425
426impl GcExceptionLayout {
427 pub fn layout(&self) -> Layout {
429 let size = usize::try_from(self.size).unwrap();
430 let align = usize::try_from(self.align).unwrap();
431 Layout::from_size_align(size, align).unwrap()
432 }
433}
434
/// The kind of a GC-managed object, stored in the high six bits of its
/// 32-bit header word.
///
/// The bit patterns are chosen so that subtyping checks are simple mask
/// tests: `sub.matches(sup)` holds exactly when all of `sup`'s bits are set
/// in `sub` (see `matches` below and the `kind_matches` test).
#[repr(u32)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[rustfmt::skip]
#[expect(missing_docs, reason = "self-describing variants")]
pub enum VMGcKind {
    ExternRef = 0b010000 << 26,
    AnyRef    = 0b100000 << 26,
    EqRef     = 0b101000 << 26,
    ArrayRef  = 0b101010 << 26,
    StructRef = 0b101100 << 26,
    ExnRef    = 0b000001 << 26,
}

/// The size, in bytes, of a `VMGcKind` (checked below against its `repr`).
pub const VM_GC_KIND_SIZE: u8 = 4;

const _: () = assert!(VM_GC_KIND_SIZE as usize == core::mem::size_of::<VMGcKind>());

impl VMGcKind {
    /// Mask covering the header bits used by `VMGcKind`.
    pub const MASK: u32 = 0b111111 << 26;

    /// Mask covering the header bits *not* used by `VMGcKind`.
    pub const UNUSED_MASK: u32 = !Self::MASK;

    /// Does the given value fit entirely within the bits unused by
    /// `VMGcKind`?
    #[inline]
    pub fn value_fits_in_unused_bits(value: u32) -> bool {
        (value & Self::UNUSED_MASK) == value
    }

    /// Extract the `VMGcKind` from the high bits of `val`, ignoring the
    /// unused (low) bits.
    ///
    /// # Panics
    ///
    /// Panics if the high bits are not a known kind.
    #[inline]
    pub fn from_high_bits_of_u32(val: u32) -> VMGcKind {
        let masked = val & Self::MASK;
        if masked == Self::ExternRef.as_u32() {
            Self::ExternRef
        } else if masked == Self::AnyRef.as_u32() {
            Self::AnyRef
        } else if masked == Self::EqRef.as_u32() {
            Self::EqRef
        } else if masked == Self::ArrayRef.as_u32() {
            Self::ArrayRef
        } else if masked == Self::StructRef.as_u32() {
            Self::StructRef
        } else if masked == Self::ExnRef.as_u32() {
            Self::ExnRef
        } else {
            panic!("invalid `VMGcKind`: {masked:#032b}")
        }
    }

    /// Is `self` an instance of (i.e. a subtype of) `other`?
    #[inline]
    pub fn matches(self, other: Self) -> bool {
        (self.as_u32() & other.as_u32()) == other.as_u32()
    }

    /// This kind as a raw `u32`.
    #[inline]
    pub fn as_u32(self) -> u32 {
        self as u32
    }
}
522
#[cfg(test)]
mod tests {
    use super::VMGcKind::*;
    use crate::prelude::*;

    /// Check that `VMGcKind::matches` implements exactly the intended
    /// subtype relationships and nothing more.
    #[test]
    fn kind_matches() {
        let all = [ExternRef, AnyRef, EqRef, ArrayRef, StructRef, ExnRef];

        // Each entry pairs a supertype with all of its proper subtypes.
        for (sup, subs) in [
            (ExternRef, vec![]),
            (AnyRef, vec![EqRef, ArrayRef, StructRef]),
            (EqRef, vec![ArrayRef, StructRef]),
            (ArrayRef, vec![]),
            (StructRef, vec![]),
            (ExnRef, vec![]),
        ] {
            // Every kind matches itself...
            assert!(sup.matches(sup));
            // ...each declared subtype matches its supertype...
            for sub in &subs {
                assert!(sub.matches(sup));
            }
            // ...and no other kind matches it.
            for kind in all.iter().filter(|k| **k != sup && !subs.contains(k)) {
                assert!(!kind.matches(sup));
            }
        }
    }
}