mmtk/util/metadata/vo_bit/mod.rs

//! Valid object bit (VO bit)
//!
//! The valid object bit, or "VO bit" for short, is a global per-address metadata.  It is set at
//! the address of the `ObjectReference` of an object when the object is allocated, and cleared
//! when the object is determined to be dead by the GC.
//!
//! The VO bit metadata serves multiple purposes, including but not limited to:
//!
//! | purpose                                     | happens when                                  |
//! |---------------------------------------------|-----------------------------------------------|
//! | conservative stack scanning                 | stack scanning                                |
//! | conservative object scanning                | tracing                                       |
//! | supporting interior pointers                | tracing                                       |
//! | heap dumping (by tracing)                   | tracing                                       |
//! | heap dumping (by iteration)                 | before or after tracing                       |
//! | heap iteration (for GC algorithm)           | depending on algorithm                        |
//! | heap iteration (for VM API, e.g. JVM-TI)    | during mutator time                           |
//! | sanity checking                             | any time in GC                                |
//!
//! Among the listed purposes, conservative stack scanning and conservative object scanning are
//! visible to the VM binding.  By default, if the "vo_bit" cargo feature is enabled, the VO bit
//! metadata will be available to the VM binding during stack scanning.  The VM binding can
//! further require the VO bits to be available during tracing (for object scanning) by setting
//! [`crate::vm::ObjectModel::NEED_VO_BITS_DURING_TRACING`] to `true`.  mmtk-core does not
//! guarantee that the VO bits are available to the VM binding at other times.
//!
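//! The sketch below illustrates how conservative stack scanning can make use of the VO bit.
//! It calls this module's crate-internal helpers directly for brevity; the stack bounds
//! (`stack_start`, `stack_end`) are hypothetical and assumed to be word-aligned, and a real
//! VM binding would go through the public API instead.
//!
//! ```ignore
//! use crate::util::metadata::vo_bit;
//! use crate::util::{Address, ObjectReference};
//!
//! fn scan_stack_conservatively(stack_start: Address, stack_end: Address) -> Vec<ObjectReference> {
//!     let mut roots = Vec::new();
//!     let mut cursor = stack_start;
//!     while cursor < stack_end {
//!         // Treat the word at `cursor` as a potential object reference.
//!         let candidate: Address = unsafe { cursor.load() };
//!         if !candidate.is_zero() && candidate.is_aligned_to(ObjectReference::ALIGNMENT) {
//!             // The VO bit tells us whether `candidate` really refers to a live object.
//!             if let Some(object) = vo_bit::is_vo_bit_set_for_addr(candidate) {
//!                 roots.push(object);
//!             }
//!         }
//!         cursor = cursor + ObjectReference::ALIGNMENT;
//!     }
//!     roots
//! }
//! ```
//!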
//! Internally, mmtk-core will also make the VO bits available when it needs them to implement
//! its own features.
//!
//! When the VO bits are available during tracing, if a plan uses evacuation to reclaim space, then
//! both the from-space copy and the to-space copy of an object will have the VO bit set.
//!
//! *(Note: There are several reasons behind this semantics.  One reason is that a slot may be
//! visited multiple times during GC.  If a slot is visited twice, we will see the object reference
//! in the slot pointing to the from-space copy during the first visit, but pointing to the to-space
//! copy during the second visit.  We consider an object reference valid if it points to either the
//! from-space or the to-space copy.  If each slot is visited only once, and we see a slot happen to
//! hold a pointer into the to-space during its only visit, that must be a dangling pointer, and an
//! error should be reported.  However, it is hard to guarantee that each slot is visited only once
//! during tracing because both the VM and the GC algorithm may break this guarantee.  See:
//! [`crate::plan::PlanConstraints::may_trace_duplicate_edges`])*

// FIXME: The entire vo_bit module should only be available if the "vo_bit" feature is enabled.
// However, the malloc-based MarkSweepSpace and the MarkCompactSpace depend on the VO bits
// regardless of the "vo_bit" feature.
#[cfg(feature = "vo_bit")]
pub(crate) mod helper;

use atomic::Ordering;

use crate::util::metadata::side_metadata::SideMetadataSpec;
use crate::util::Address;
use crate::util::ObjectReference;
use crate::vm::object_model::ObjectModel;
use crate::vm::VMBinding;

cfg_if::cfg_if! {
    if #[cfg(feature = "vo_bit_access")] {
        /// A VO bit is required per min-object-size aligned address, rather than per object, and can only exist as side metadata.
        /// This is only publicly available when the feature "vo_bit_access" is enabled.
        /// Check the comments on "vo_bit_access" in `Cargo.toml` before use. Use at your own risk.
        pub const VO_BIT_SIDE_METADATA_SPEC: SideMetadataSpec =
            crate::util::metadata::side_metadata::spec_defs::VO_BIT;
    } else {
        /// A VO bit is required per min-object-size aligned address, rather than per object, and can only exist as side metadata.
        pub(crate) const VO_BIT_SIDE_METADATA_SPEC: SideMetadataSpec =
            crate::util::metadata::side_metadata::spec_defs::VO_BIT;
    }
}

/// The base address for VO bit side metadata on 64-bit platforms.
#[cfg(target_pointer_width = "64")]
pub const VO_BIT_SIDE_METADATA_ADDR: Address = VO_BIT_SIDE_METADATA_SPEC.get_absolute_offset();

/// Atomically set the VO bit for an object.
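///
/// A minimal sketch of the intended call site (`on_object_allocated` is a hypothetical
/// allocation hook used here for illustration, not an mmtk-core API):
///
/// ```ignore
/// fn on_object_allocated(object: ObjectReference) {
///     // Publish the freshly allocated object to VO-bit-based queries.
///     vo_bit::set_vo_bit(object);
/// }
/// ```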
pub(crate) fn set_vo_bit(object: ObjectReference) {
    debug_assert!(!is_vo_bit_set(object), "{:x}: VO bit already set", object);
    VO_BIT_SIDE_METADATA_SPEC.store_atomic::<u8>(object.to_raw_address(), 1, Ordering::SeqCst);
}

/// Atomically unset the VO bit for an object.
pub(crate) fn unset_vo_bit(object: ObjectReference) {
    debug_assert!(is_vo_bit_set(object), "{:x}: VO bit not set", object);
    VO_BIT_SIDE_METADATA_SPEC.store_atomic::<u8>(object.to_raw_address(), 0, Ordering::SeqCst);
}

/// Atomically unset the VO bit for an object, regardless of whether the bit is set or not.
pub(crate) fn unset_vo_bit_nocheck(object: ObjectReference) {
    VO_BIT_SIDE_METADATA_SPEC.store_atomic::<u8>(object.to_raw_address(), 0, Ordering::SeqCst);
}

/// Non-atomically unset the VO bit for an object. The caller needs to ensure that the side
/// metadata for the object's VO bit is accessed by only one thread.
///
/// # Safety
///
/// This is unsafe: check the comment on `side_metadata::store`
pub(crate) unsafe fn unset_vo_bit_unsafe(object: ObjectReference) {
    debug_assert!(is_vo_bit_set(object), "{:x}: VO bit not set", object);
    VO_BIT_SIDE_METADATA_SPEC.store::<u8>(object.to_raw_address(), 0);
}

/// Check if the VO bit is set for an object.
pub(crate) fn is_vo_bit_set(object: ObjectReference) -> bool {
    VO_BIT_SIDE_METADATA_SPEC.load_atomic::<u8>(object.to_raw_address(), Ordering::SeqCst) == 1
}

/// Check if an address can be turned directly into an object reference using the VO bit.
/// If so, return `Some(object)`. Otherwise return `None`.
///
/// The `address` must be word-aligned.
pub(crate) fn is_vo_bit_set_for_addr(address: Address) -> Option<ObjectReference> {
    is_vo_bit_set_inner::<true>(address)
}

/// Check if an address can be turned directly into an object reference using the VO bit.
/// If so, return `Some(object)`. Otherwise return `None`. The caller needs to ensure that the
/// side metadata for the VO bit at the address is accessed by only one thread.
///
/// The `address` must be word-aligned.
///
/// # Safety
///
/// This is unsafe: check the comment on `side_metadata::load`
pub(crate) unsafe fn is_vo_bit_set_unsafe(address: Address) -> Option<ObjectReference> {
    is_vo_bit_set_inner::<false>(address)
}

fn is_vo_bit_set_inner<const ATOMIC: bool>(addr: Address) -> Option<ObjectReference> {
    debug_assert!(
        addr.is_aligned_to(ObjectReference::ALIGNMENT),
        "Address is not word-aligned: {addr}"
    );

    // If we haven't mapped VO bit for the address, it cannot be an object
    if !VO_BIT_SIDE_METADATA_SPEC.is_mapped(addr) {
        return None;
    }

    let vo_bit = if ATOMIC {
        VO_BIT_SIDE_METADATA_SPEC.load_atomic::<u8>(addr, Ordering::SeqCst)
    } else {
        unsafe { VO_BIT_SIDE_METADATA_SPEC.load::<u8>(addr) }
    };

    (vo_bit == 1).then(|| get_object_ref_for_vo_addr(addr))
}

/// Bulk zero the VO bit.
pub(crate) fn bzero_vo_bit(start: Address, size: usize) {
    VO_BIT_SIDE_METADATA_SPEC.bzero_metadata(start, size);
}

/// Bulk copy VO bits from side mark bits.
/// Some VMs require the VO bits to be available during tracing.
/// However, some GC algorithms (such as Immix) cannot clear only the VO bits of dead objects.
/// As an alternative, this function copies the mark bit metadata over the VO bits.
/// The caller needs to ensure the mark bits are set exactly wherever VO bits need to be set before
/// calling this function.
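///
/// A minimal usage sketch (`region_start` and `region_size` are hypothetical values naming a
/// contiguous region whose mark bits have just been finalized, and `MyVM` stands for the
/// binding's `VMBinding` type):
///
/// ```ignore
/// // After marking, rebuild the VO bits of the region from the mark bits in bulk.
/// vo_bit::bcopy_vo_bit_from_mark_bit::<MyVM>(region_start, region_size);
/// ```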
pub(crate) fn bcopy_vo_bit_from_mark_bit<VM: VMBinding>(start: Address, size: usize) {
    let mark_bit_spec = VM::VMObjectModel::LOCAL_MARK_BIT_SPEC;
    debug_assert!(
        mark_bit_spec.is_on_side(),
        "bcopy_vo_bit_from_mark_bit can only be used with on-the-side mark bits."
    );
    let side_mark_bit_spec = mark_bit_spec.extract_side_spec();
    VO_BIT_SIDE_METADATA_SPEC.bcopy_metadata_contiguous(start, size, side_mark_bit_spec);
}

use crate::util::constants::{LOG_BITS_IN_BYTE, LOG_BYTES_IN_ADDRESS};

/// How many data memory bytes does 1 word in the VO bit side metadata represent?
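///
/// For example, assuming the VO bit uses 1 bit per 8-byte region on a 64-bit platform
/// (`log_bytes_in_region = 3`, `log_num_of_bits = 0`, `LOG_BITS_IN_BYTE = 3`,
/// `LOG_BYTES_IN_ADDRESS = 3`), one metadata word covers
/// `1 << (3 + 3 + 3 - 0) = 512` bytes of data.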
pub(crate) const VO_BIT_WORD_TO_REGION: usize = 1
    << (VO_BIT_SIDE_METADATA_SPEC.log_bytes_in_region
        + LOG_BITS_IN_BYTE as usize
        + LOG_BYTES_IN_ADDRESS as usize
        - VO_BIT_SIDE_METADATA_SPEC.log_num_of_bits);

/// Bulk load a raw word of VO bits. The result is non-zero if any VO bit in the word is set.
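///
/// Together with [`VO_BIT_WORD_TO_REGION`], this allows skipping over empty regions a word of
/// VO bits at a time. A minimal sketch (`region_start` and `region_end` are hypothetical bounds,
/// assumed to be aligned to `VO_BIT_WORD_TO_REGION`):
///
/// ```ignore
/// let mut cursor = region_start;
/// while cursor < region_end {
///     if get_raw_vo_bit_word(cursor) != 0 {
///         // At least one VO bit is set in [cursor, cursor + VO_BIT_WORD_TO_REGION).
///         // Inspect this region more closely, e.g. with `is_vo_bit_set_for_addr`.
///     }
///     cursor = cursor + VO_BIT_WORD_TO_REGION;
/// }
/// ```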
pub(crate) fn get_raw_vo_bit_word(addr: Address) -> usize {
    unsafe { VO_BIT_SIDE_METADATA_SPEC.load_raw_word(addr) }
}

/// Find the base reference to the object from a potential internal pointer.
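///
/// A minimal usage sketch (`maybe_interior` is a hypothetical word found by conservative
/// scanning, `SEARCH_LIMIT` is an arbitrary bound on how far back to search, and `MyVM` stands
/// for the binding's `VMBinding` type):
///
/// ```ignore
/// const SEARCH_LIMIT: usize = 1 << 20;
/// if let Some(object) =
///     vo_bit::find_object_from_internal_pointer::<MyVM>(maybe_interior, SEARCH_LIMIT)
/// {
///     // `maybe_interior` points inside `object`, whose VO bit is the closest one set at or
///     // before `maybe_interior`.
/// }
/// ```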
pub(crate) fn find_object_from_internal_pointer<VM: VMBinding>(
    start: Address,
    search_limit_bytes: usize,
) -> Option<ObjectReference> {
    if !start.is_mapped() {
        return None;
    }

    if let Some(vo_addr) = unsafe {
        VO_BIT_SIDE_METADATA_SPEC.find_prev_non_zero_value::<u8>(start, search_limit_bytes)
    } {
        is_internal_ptr_from_vo_bit::<VM>(vo_addr, start)
    } else {
        None
    }
}

/// Get the object reference from an aligned address where the VO bit is set.
pub(crate) fn get_object_ref_for_vo_addr(vo_addr: Address) -> ObjectReference {
    // The VO bit should be set on the address.
    debug_assert!(vo_addr.is_aligned_to(ObjectReference::ALIGNMENT));
    debug_assert!(unsafe { is_vo_addr(vo_addr) });
    unsafe { ObjectReference::from_raw_address_unchecked(vo_addr) }
}

/// Check if the address could be an internal pointer in the object.
fn is_internal_ptr<VM: VMBinding>(obj: ObjectReference, internal_ptr: Address) -> bool {
    let obj_start = obj.to_object_start::<VM>();
    let obj_size = VM::VMObjectModel::get_current_size(obj);
    internal_ptr < obj_start + obj_size
}

/// Check if the address could be an internal pointer based on where the VO bit is set.
pub(crate) fn is_internal_ptr_from_vo_bit<VM: VMBinding>(
    vo_addr: Address,
    internal_ptr: Address,
) -> Option<ObjectReference> {
    let obj = get_object_ref_for_vo_addr(vo_addr);
    if is_internal_ptr::<VM>(obj, internal_ptr) {
        Some(obj)
    } else {
        None
    }
}

/// Non-atomically check if the VO bit is set for this address.
///
/// # Safety
///
/// The caller needs to make sure that no one is modifying the VO bit.
pub(crate) unsafe fn is_vo_addr(addr: Address) -> bool {
    VO_BIT_SIDE_METADATA_SPEC.load::<u8>(addr) != 0
}