mmtk/policy/compressor/forwarding.rs
use crate::util::constants::BYTES_IN_WORD;
use crate::util::linear_scan::{Region, RegionIterator};
use crate::util::metadata::side_metadata::spec_defs::{COMPRESSOR_MARK, COMPRESSOR_OFFSET_VECTOR};
use crate::util::metadata::side_metadata::SideMetadataSpec;
use crate::util::{Address, ObjectReference};
use crate::vm::object_model::ObjectModel;
use crate::vm::VMBinding;
use atomic::Ordering;
use std::marker::PhantomData;
use std::sync::atomic::AtomicBool;
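
/// An aligned 2^20-byte (1 MiB) region of the Compressor space; offset
/// vectors are calculated one region at a time.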
#[derive(Copy, Clone, PartialEq, PartialOrd)]
pub(crate) struct CompressorRegion(Address);
impl Region for CompressorRegion {
    const LOG_BYTES: usize = 20;
    fn from_aligned_address(address: Address) -> Self {
        assert!(
            address.is_aligned_to(Self::BYTES),
            "{address} is not aligned"
        );
        CompressorRegion(address)
    }
    fn start(&self) -> Address {
        self.0
    }
}
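
/// A finite-state transducer which walks the mark bit vector, in which
/// the first and last words of every live object are marked. Each bit
/// visited either enters or exits an object; on exit, the object's size
/// is added to `to`. Since `to` starts at the region's start address,
/// whenever the walk reaches the first word of a live object, `to` holds
/// exactly the address that object will occupy after compaction. The
/// state can be packed into a single word (see `encode` and `decode`),
/// which is what the offset vector stores for the start of each block.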
#[derive(Debug)]
struct Transducer {
    /// The address to which the object being walked (or the next object
    /// encountered) will be forwarded.
    to: Address,
    /// The address of the last mark bit the transducer visited.
    last_bit_visited: Address,
    /// Whether the transducer is currently inside an object.
    in_object: bool,
}
impl Transducer {
    pub fn new(to: Address) -> Self {
        Self {
            to,
            last_bit_visited: Address::ZERO,
            in_object: false,
        }
    }
    pub fn visit_mark_bit(&mut self, address: Address) {
        if self.in_object {
            // This bit marks the last word of an object: advance the
            // forwarding pointer past the object, which spans from the
            // first marked word to this word inclusive.
            let first_word = self.last_bit_visited;
            let last_word = address;
            let size = last_word - first_word + BYTES_IN_WORD;
            self.to += size;
        }
        self.in_object = !self.in_object;
        self.last_bit_visited = address;
    }

    pub fn encode(&self, current_position: Address) -> usize {
        if self.in_object {
            // Record how far into the current object the walk has come,
            // and tag the low bit (otherwise zero, since addresses are
            // word-aligned) to remember that we are inside an object.
            self.to.as_usize() + (current_position - self.last_bit_visited) + 1
        } else {
            self.to.as_usize()
        }
    }

    pub fn decode(offset: usize, current_position: Address) -> Self {
        Transducer {
            to: unsafe { Address::from_usize(offset & !1) },
            last_bit_visited: current_position,
            in_object: (offset & 1) == 1,
        }
    }
}
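
/// The per-space state used to compute and query forwarding addresses:
/// the mark bit and offset vectors live in side metadata, and
/// `calculated` records whether the offset vector is valid for the
/// current collection.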
pub struct ForwardingMetadata<VM: VMBinding> {
    calculated: AtomicBool,
    vm: PhantomData<VM>,
}
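
/// The granularity of the offset vector: one encoded `Transducer` state
/// is stored for the start of each 2^9-byte (512-byte) block.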
#[derive(Copy, Clone, PartialEq, PartialOrd)]
pub(crate) struct Block(Address);
impl Region for Block {
    const LOG_BYTES: usize = 9;
    fn from_aligned_address(address: Address) -> Self {
        assert!(address.is_aligned_to(Self::BYTES));
        Block(address)
    }
    fn start(&self) -> Address {
        self.0
    }
}

// Side metadata specs for the mark bit vector and the per-block offset vector.
pub(crate) const MARK_SPEC: SideMetadataSpec = COMPRESSOR_MARK;
pub(crate) const OFFSET_VECTOR_SPEC: SideMetadataSpec = COMPRESSOR_OFFSET_VECTOR;

impl<VM: VMBinding> ForwardingMetadata<VM> {
    pub fn new() -> ForwardingMetadata<VM> {
        ForwardingMetadata {
            calculated: AtomicBool::new(false),
            vm: PhantomData,
        }
    }

    /// Mark the last word of `object` in the mark bit vector; the bit for
    /// the object's first word is set separately, when the object itself
    /// is marked.
    pub fn mark_last_word_of_object(&self, object: ObjectReference) {
        let last_word_of_object = object.to_object_start::<VM>()
            + VM::VMObjectModel::get_current_size(object)
            - BYTES_IN_WORD;
        #[cfg(debug_assertions)]
        {
            // The transducer requires the first and last words of each
            // object to map to two distinct bits in the mark bit vector.
            debug_assert!(
                MARK_SPEC.are_different_metadata_bits(
                    object.to_object_start::<VM>(),
                    last_word_of_object
                ),
                "The first and last mark bits should be different bits."
            );
        }

        MARK_SPEC.fetch_or_atomic::<u8>(last_word_of_object, 1, Ordering::Relaxed);
    }

    /// Walk the mark bit vector of `region` (up to `cursor`) and record,
    /// for the start of each block, the encoded state of the transducer
    /// at that point. `forward` later resumes the walk from these
    /// snapshots.
    pub fn calculate_offset_vector(&self, region: CompressorRegion, cursor: Address) {
        let mut state = Transducer::new(region.start());
        let first_block = Block::from_aligned_address(region.start());
        let last_block = Block::from_aligned_address(cursor);
        for block in RegionIterator::<Block>::new(first_block, last_block) {
            // Snapshot the transducer state at the start of the block...
            OFFSET_VECTOR_SPEC.store_atomic::<usize>(
                block.start(),
                state.encode(block.start()),
                Ordering::Relaxed,
            );
            // ...then advance it over the mark bits within the block.
            MARK_SPEC.scan_non_zero_values::<u8>(
                block.start(),
                block.end(),
                &mut |addr: Address| {
                    state.visit_mark_bit(addr);
                },
            );
        }
        self.calculated.store(true, Ordering::Relaxed);
    }

    pub fn release(&self) {
        self.calculated.store(false, Ordering::Relaxed);
    }

    /// Compute the forwarding address of the object at `address`, by
    /// restoring the transducer state snapshotted for the enclosing block
    /// and replaying the mark bits between the block start and `address`.
    pub fn forward(&self, address: Address) -> Address {
        debug_assert!(
            self.calculated.load(Ordering::Relaxed),
            "forward() should only be called when we have calculated an offset vector"
        );
        let block = Block::from_unaligned_address(address);
        let mut state = Transducer::decode(
            OFFSET_VECTOR_SPEC.load_atomic::<usize>(block.start(), Ordering::Relaxed),
            block.start(),
        );
        // Replay the mark bits preceding `address` within the block; the
        // transducer then holds the forwarding address for `address`.
        MARK_SPEC.scan_non_zero_values::<u8>(block.start(), address, &mut |addr: Address| {
            state.visit_mark_bit(addr)
        });
        state.to
    }

    /// Invoke `f` on each marked object between `start` and `end`.
    pub fn scan_marked_objects(
        &self,
        start: Address,
        end: Address,
        f: &mut impl FnMut(ObjectReference),
    ) {
        let mut in_object = false;
        MARK_SPEC.scan_non_zero_values::<u8>(start, end, &mut |addr: Address| {
            // Mark bits come in pairs; the first bit of each pair sits at
            // the first word of a live object.
            if !in_object {
                let object = ObjectReference::from_raw_address(addr).unwrap();
                f(object);
            }
            in_object = !in_object;
        });
    }

    pub fn has_calculated_forwarding_addresses(&self) -> bool {
        self.calculated.load(Ordering::Relaxed)
    }
}
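
// A minimal sketch (not part of the original file) of the Transducer
// encoding: the in-object flag rides in the low bit of the otherwise
// word-aligned `to` address. The addresses below are hypothetical.
#[cfg(test)]
mod transducer_sketch {
    use super::*;

    #[test]
    fn encode_decode_round_trip() {
        // Outside an object, the encoding is just the `to` address.
        let to = unsafe { Address::from_usize(0x1_0000) };
        let pos = unsafe { Address::from_usize(0x2_0000) };
        let state = Transducer::new(to);
        let decoded = Transducer::decode(state.encode(pos), pos);
        assert!(!decoded.in_object);
        assert_eq!(decoded.to, to);

        // Inside an object, the low bit is set and `to` is advanced by the
        // distance already covered within the object, so that replaying the
        // remaining mark bits still yields the correct forwarding address.
        let mut state = Transducer::new(to);
        state.visit_mark_bit(pos); // first-word mark bit: enter an object
        let mid = pos + 2 * BYTES_IN_WORD;
        let decoded = Transducer::decode(state.encode(mid), mid);
        assert!(decoded.in_object);
        assert_eq!(decoded.to, to + 2 * BYTES_IN_WORD);
    }
}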