mmtk/policy/compressor/
forwarding.rs

1use crate::util::constants::BYTES_IN_WORD;
2use crate::util::linear_scan::{Region, RegionIterator};
3use crate::util::metadata::side_metadata::spec_defs::{COMPRESSOR_MARK, COMPRESSOR_OFFSET_VECTOR};
4use crate::util::metadata::side_metadata::SideMetadataSpec;
5use crate::util::{Address, ObjectReference};
6use crate::vm::object_model::ObjectModel;
7use crate::vm::VMBinding;
8use atomic::Ordering;
9use std::marker::PhantomData;
10use std::sync::atomic::AtomicBool;
11
12/// A [`CompressorRegion`] is the granularity at which [`super::CompressorSpace`]
13/// compacts the heap. Objects are allocated inside one region, and are only ever
14/// moved *within* that region.
15#[derive(Copy, Clone, PartialEq, PartialOrd)]
16pub(crate) struct CompressorRegion(Address);
17impl Region for CompressorRegion {
18    const LOG_BYTES: usize = 20; // 1 MiB
19    fn from_aligned_address(address: Address) -> Self {
20        assert!(
21            address.is_aligned_to(Self::BYTES),
22            "{address} is not aligned"
23        );
24        CompressorRegion(address)
25    }
26    fn start(&self) -> Address {
27        self.0
28    }
29}
30
/// A finite-state machine which visits the positions of marked bits in
/// the mark bitmap, and accumulates the size of live data that it has
/// seen between marked bits.
///
/// The Compressor caches the state of the transducer at the start of
/// each block by serialising the state using [`Transducer::encode`], and
/// then deserialises the state whilst computing forwarding pointers
/// using [`Transducer::decode`].
///
/// The serialised form packs [`Transducer::in_object`] into the low bit
/// of [`Transducer::to`] (see `encode`/`decode`), which relies on `to`
/// always being word-aligned.
#[derive(Debug)]
struct Transducer {
    /// The address for the next object to be copied to, following preceding
    /// objects which were visited by the transducer.
    to: Address,
    /// The address of the last mark bit which the transducer visited.
    /// Starts at [`Address::ZERO`] before any bit has been seen.
    last_bit_visited: Address,
    /// Whether or not the transducer is currently inside an object
    /// (i.e. if it has seen a first bit but no matching last bit yet).
    in_object: bool,
}
50impl Transducer {
51    pub fn new(to: Address) -> Self {
52        Self {
53            to,
54            last_bit_visited: Address::ZERO,
55            in_object: false,
56        }
57    }
58    pub fn visit_mark_bit(&mut self, address: Address) {
59        if self.in_object {
60            // The size of an object is the distance between the end and
61            // start of the object, and the last word of the object is one
62            // word prior to the end of the object. Thus we must add an
63            // extra word, in order to compute the size of the object based
64            // on the distance between its first and last words.
65            let first_word = self.last_bit_visited;
66            let last_word = address;
67            let size = last_word - first_word + BYTES_IN_WORD;
68            self.to += size;
69        }
70        self.in_object = !self.in_object;
71        self.last_bit_visited = address;
72    }
73
74    pub fn encode(&self, current_position: Address) -> usize {
75        if self.in_object {
76            // We count the space between the last mark bit and
77            // the current address as live when we stop in the
78            // middle of an object.
79            self.to.as_usize() + (current_position - self.last_bit_visited) + 1
80        } else {
81            self.to.as_usize()
82        }
83    }
84
85    pub fn decode(offset: usize, current_position: Address) -> Self {
86        Transducer {
87            to: unsafe { Address::from_usize(offset & !1) },
88            last_bit_visited: current_position,
89            in_object: (offset & 1) == 1,
90        }
91    }
92}
93
/// Forwarding state for [`super::CompressorSpace`]: tracks whether the
/// per-block offset vector has been calculated, from which forwarding
/// addresses are derived on demand via [`ForwardingMetadata::forward`].
pub struct ForwardingMetadata<VM: VMBinding> {
    // Set by `calculate_offset_vector` and cleared by `release`; `forward`
    // debug-asserts on this flag since it is only meaningful once the
    // offset vector exists.
    calculated: AtomicBool,
    // Zero-sized marker tying this metadata to a particular VM binding.
    vm: PhantomData<VM>,
}
98
99// A block in the Compressor is the granularity at which we cache
100// the amount of live data preceding an address. We set it to 512 bytes
101// following the paper.
102#[derive(Copy, Clone, PartialEq, PartialOrd)]
103pub(crate) struct Block(Address);
104impl Region for Block {
105    const LOG_BYTES: usize = 9;
106    fn from_aligned_address(address: Address) -> Self {
107        assert!(address.is_aligned_to(Self::BYTES));
108        Block(address)
109    }
110    fn start(&self) -> Address {
111        self.0
112    }
113}
114
/// Side-metadata bitmap in which the positions of the first and last words
/// of marked objects are flagged (see `mark_last_word_of_object`).
pub(crate) const MARK_SPEC: SideMetadataSpec = COMPRESSOR_MARK;
/// Side-metadata table caching, per [`Block`], the encoded [`Transducer`]
/// state used to compute forwarding addresses (see `calculate_offset_vector`).
pub(crate) const OFFSET_VECTOR_SPEC: SideMetadataSpec = COMPRESSOR_OFFSET_VECTOR;
117
118impl<VM: VMBinding> ForwardingMetadata<VM> {
119    pub fn new() -> ForwardingMetadata<VM> {
120        ForwardingMetadata {
121            calculated: AtomicBool::new(false),
122            vm: PhantomData,
123        }
124    }
125
126    pub fn mark_last_word_of_object(&self, object: ObjectReference) {
127        let last_word_of_object = object.to_object_start::<VM>()
128            + VM::VMObjectModel::get_current_size(object)
129            - BYTES_IN_WORD;
130        #[cfg(debug_assertions)]
131        {
132            // We require to be able to iterate upon first and last bits in the
133            // same bitmap. Therefore the first and last bits cannot be the
134            // same, else we would only encounter one of the two bits.
135            // This requirement implies that objects must be at least two words
136            // large.
137            debug_assert!(
138                MARK_SPEC.are_different_metadata_bits(
139                    object.to_object_start::<VM>(),
140                    last_word_of_object
141                ),
142                "The first and last mark bits should be different bits."
143            );
144        }
145
146        // We only mark the last word as input to computing forwarding
147        // information, so relaxed consistency is okay.
148        MARK_SPEC.fetch_or_atomic::<u8>(last_word_of_object, 1, Ordering::Relaxed);
149    }
150
151    pub fn calculate_offset_vector(&self, region: CompressorRegion, cursor: Address) {
152        let mut state = Transducer::new(region.start());
153        let first_block = Block::from_aligned_address(region.start());
154        let last_block = Block::from_aligned_address(cursor);
155        for block in RegionIterator::<Block>::new(first_block, last_block) {
156            OFFSET_VECTOR_SPEC.store_atomic::<usize>(
157                block.start(),
158                state.encode(block.start()),
159                Ordering::Relaxed,
160            );
161            MARK_SPEC.scan_non_zero_values::<u8>(
162                block.start(),
163                block.end(),
164                &mut |addr: Address| {
165                    state.visit_mark_bit(addr);
166                },
167            );
168        }
169        self.calculated.store(true, Ordering::Relaxed);
170    }
171
172    pub fn release(&self) {
173        self.calculated.store(false, Ordering::Relaxed);
174    }
175
176    pub fn forward(&self, address: Address) -> Address {
177        debug_assert!(
178            self.calculated.load(Ordering::Relaxed),
179            "forward() should only be called when we have calculated an offset vector"
180        );
181        let block = Block::from_unaligned_address(address);
182        let mut state = Transducer::decode(
183            OFFSET_VECTOR_SPEC.load_atomic::<usize>(block.start(), Ordering::Relaxed),
184            block.start(),
185        );
186        // The transducer in this implementation computes the final
187        // address of an object; whereas Total-Live-Data in the paper computes
188        // the distance of the object from the start of the block.
189        MARK_SPEC.scan_non_zero_values::<u8>(block.start(), address, &mut |addr: Address| {
190            state.visit_mark_bit(addr)
191        });
192        state.to
193    }
194
195    pub fn scan_marked_objects(
196        &self,
197        start: Address,
198        end: Address,
199        f: &mut impl FnMut(ObjectReference),
200    ) {
201        let mut in_object = false;
202        MARK_SPEC.scan_non_zero_values::<u8>(start, end, &mut |addr: Address| {
203            if !in_object {
204                let object = ObjectReference::from_raw_address(addr).unwrap();
205                f(object);
206            }
207            in_object = !in_object;
208        });
209    }
210
211    pub fn has_calculated_forwarding_addresses(&self) -> bool {
212        self.calculated.load(Ordering::Relaxed)
213    }
214}