use crate::plan::generational::global::GenerationalPlan;
use crate::plan::global::CommonPlan;
use crate::plan::global::CreateGeneralPlanArgs;
use crate::plan::global::CreateSpecificPlanArgs;
use crate::plan::immix;
use crate::plan::PlanConstraints;
use crate::policy::gc_work::TraceKind;
use crate::policy::gc_work::TRACE_KIND_TRANSITIVE_PIN;
use crate::policy::immix::defrag::StatsForDefrag;
use crate::policy::immix::ImmixSpace;
use crate::policy::immix::TRACE_KIND_FAST;
use crate::policy::sft::SFT;
use crate::policy::space::Space;
use crate::util::copy::CopyConfig;
use crate::util::copy::CopySelector;
use crate::util::copy::CopySemantics;
use crate::util::heap::gc_trigger::SpaceStats;
use crate::util::metadata::log_bit::UnlogBitsOperation;
use crate::util::metadata::side_metadata::SideMetadataContext;
use crate::util::statistics::counter::EventCounter;
use crate::vm::ObjectModel;
use crate::vm::VMBinding;
use crate::Plan;

use atomic::Ordering;
use std::sync::atomic::AtomicBool;
use std::sync::{Arc, Mutex};

use mmtk_macros::{HasSpaces, PlanTraceObject};

use super::gc_work::StickyImmixMatureGCWorkContext;
use super::gc_work::StickyImmixNurseryGCWorkContext;

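/// Sticky Immix: a generational plan layered on a single Immix space.
/// Instead of a separate nursery space, unmarked objects in the Immix space
/// are treated as the nursery; the unlog bit identifies mature objects.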
#[derive(HasSpaces, PlanTraceObject)]
pub struct StickyImmix<VM: VMBinding> {
    #[parent]
    immix: immix::Immix<VM>,
    /// Whether the current GC is a full-heap GC.
    gc_full_heap: AtomicBool,
    /// Whether the next GC should be a full-heap GC.
    next_gc_full_heap: AtomicBool,
    /// Counts full-heap collections for statistics (the "majorGC" event counter).
    full_heap_gc_count: Arc<Mutex<EventCounter>>,
}

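/// Constraints for Sticky Immix: the base Immix constraints plus the
/// generational properties (log bit, object barrier) the plan relies on.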
pub const STICKY_IMMIX_CONSTRAINTS: PlanConstraints = PlanConstraints {
    // Sticky Immix moves objects unless the Immix space is built as non-moving.
    moves_objects: !cfg!(feature = "immix_non_moving"),
    // The log bit distinguishes mature (unlogged) objects from nursery objects.
    needs_log_bit: true,
    barrier: crate::plan::BarrierSelector::ObjectBarrier,
    // The object-remembering barrier may remember the same edge more than once.
    may_trace_duplicate_edges: true,
    generational: true,
    ..immix::IMMIX_CONSTRAINTS
};

impl<VM: VMBinding> Plan for StickyImmix<VM> {
    fn constraints(&self) -> &'static crate::plan::PlanConstraints {
        &STICKY_IMMIX_CONSTRAINTS
    }

    fn create_copy_config(&'static self) -> CopyConfig<Self::VM> {
        use enum_map::enum_map;
        CopyConfig {
            copy_mapping: enum_map! {
                CopySemantics::DefaultCopy => CopySelector::Immix(0),
                _ => CopySelector::Unused,
            },
            space_mapping: vec![(CopySelector::Immix(0), &self.immix.immix_space)],
            constraints: &STICKY_IMMIX_CONSTRAINTS,
        }
    }

    fn base(&self) -> &crate::plan::global::BasePlan<Self::VM> {
        self.immix.base()
    }

    fn base_mut(&mut self) -> &mut crate::plan::global::BasePlan<Self::VM> {
        self.immix.base_mut()
    }

    fn generational(
        &self,
    ) -> Option<&dyn crate::plan::generational::global::GenerationalPlan<VM = Self::VM>> {
        Some(self)
    }

    fn common(&self) -> &CommonPlan<Self::VM> {
        self.immix.common()
    }

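    /// Decide whether this GC is a nursery or a full-heap collection, then
    /// schedule the matching work context. Full-heap GCs reuse the regular
    /// Immix scheduling, which chooses between the fast and defrag traces.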
    fn schedule_collection(&'static self, scheduler: &crate::scheduler::GCWorkScheduler<Self::VM>) {
        let is_full_heap = self.requires_full_heap_collection();
        self.gc_full_heap.store(is_full_heap, Ordering::SeqCst);
        probe!(mmtk, gen_full_heap, is_full_heap);

        if !is_full_heap {
            info!("Nursery GC");
            scheduler.schedule_common_work::<StickyImmixNurseryGCWorkContext<VM>>(self);
        } else {
            info!("Full heap GC");
            use crate::plan::immix::Immix;
            use crate::policy::immix::TRACE_KIND_DEFRAG;
            Immix::schedule_immix_full_heap_collection::<
                StickyImmix<VM>,
                StickyImmixMatureGCWorkContext<VM, TRACE_KIND_FAST>,
                StickyImmixMatureGCWorkContext<VM, TRACE_KIND_DEFRAG>,
            >(self, &self.immix.immix_space, scheduler);
        }
    }

    fn get_allocator_mapping(
        &self,
    ) -> &'static enum_map::EnumMap<crate::AllocationSemantics, crate::util::alloc::AllocatorSelector>
    {
        &super::mutator::ALLOCATOR_MAPPING
    }

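    /// Prepare spaces for GC. A nursery GC prepares the spaces as a minor
    /// collection and leaves the unlog (mature) bits alone; a full-heap GC
    /// defers to the underlying Immix plan and bulk-clears the unlog bits.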
    fn prepare(&mut self, tls: crate::util::VMWorkerThread) {
        if self.is_current_gc_nursery() {
            // Prepare the Immix space and the LOS as a minor collection.
            self.immix.immix_space.prepare(
                false,
                Some(StatsForDefrag::new(self)),
                UnlogBitsOperation::NoOp,
            );
            self.immix.common.los.prepare(false);
        } else {
            self.full_heap_gc_count.lock().unwrap().inc();
            self.immix.prepare_inner(tls, UnlogBitsOperation::BulkClear);
        }
    }

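    /// Release spaces after GC, mirroring `prepare`: a nursery GC releases
    /// only the Immix space and the LOS as a minor collection; a full-heap GC
    /// defers to the underlying Immix plan.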
    fn release(&mut self, tls: crate::util::VMWorkerThread) {
        if self.is_current_gc_nursery() {
            self.immix.immix_space.release(false, UnlogBitsOperation::NoOp);
            self.immix.common.los.release(false);
        } else {
            self.immix.release_inner(tls, UnlogBitsOperation::NoOp);
        }
    }

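    /// At the end of GC, record whether the next collection should be
    /// full-heap and whether this collection performed defragmentation.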
    fn end_of_gc(&mut self, tls: crate::util::opaque_pointer::VMWorkerThread) {
        let next_gc_full_heap =
            crate::plan::generational::global::CommonGenPlan::should_next_gc_be_full_heap(self);
        self.next_gc_full_heap
            .store(next_gc_full_heap, Ordering::Relaxed);

        let was_defrag = self.immix.immix_space.end_of_gc();
        self.immix
            .set_last_gc_was_defrag(was_defrag, Ordering::Relaxed);

        self.immix.common.end_of_gc(tls);
    }

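    /// A collection is required when the base Immix plan requires one, or
    /// when the nursery (pages allocated since the last GC) exceeds its
    /// limit. If a space other than the Immix space is full, a nursery GC
    /// cannot help, so the next GC is forced to be full-heap.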
    fn collection_required(&self, space_full: bool, space: Option<SpaceStats<Self::VM>>) -> bool {
        let nursery_full = self.immix.immix_space.get_pages_allocated()
            > self.base().gc_trigger.get_max_nursery_pages();
        if space_full
            && space.is_some()
            && space.as_ref().unwrap().0.name() != self.immix.immix_space.name()
        {
            // Another space is full. A nursery GC cannot reclaim it: force a full-heap GC next.
            self.next_gc_full_heap.store(true, Ordering::SeqCst);
        }
        self.immix.collection_required(space_full, space) || nursery_full
    }

    fn last_collection_was_exhaustive(&self) -> bool {
        self.gc_full_heap.load(Ordering::Relaxed) && self.immix.last_collection_was_exhaustive()
    }

    fn current_gc_may_move_object(&self) -> bool {
        if self.is_current_gc_nursery() {
            self.get_immix_space().prefer_copy_on_nursery_gc()
        } else {
            self.get_immix_space().in_defrag()
        }
    }

    fn get_collection_reserved_pages(&self) -> usize {
        self.immix.get_collection_reserved_pages()
    }

    fn get_used_pages(&self) -> usize {
        self.immix.get_used_pages()
    }

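    /// Sanity checks for a nursery GC: every traced object must be unlogged
    /// (i.e. promoted to mature) and, if it lives in the Immix space or the
    /// LOS, it must be marked/live.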
    fn sanity_check_object(&self, object: crate::util::ObjectReference) -> bool {
        if self.is_current_gc_nursery() {
            if !VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.is_unlogged::<VM>(object, Ordering::SeqCst) {
                error!("Object {} is not unlogged (all objects that have been traced should be unlogged/mature)", object);
                return false;
            }

            if self.immix.immix_space.in_space(object) && !self.immix.immix_space.is_marked(object)
            {
                error!(
                    "Object {} is not marked (all objects that have been traced should be marked)",
                    object
                );
                return false;
            } else if self.immix.common.los.in_space(object)
                && !self.immix.common.los.is_live(object)
            {
                error!("LOS Object {} is not marked", object);
                return false;
            }
        }
        true
    }
}

impl<VM: VMBinding> GenerationalPlan for StickyImmix<VM> {
    fn is_current_gc_nursery(&self) -> bool {
        !self.gc_full_heap.load(Ordering::SeqCst)
    }

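    /// An object is in the nursery if it is in the Immix space but not yet
    /// marked: marked objects survived a previous trace and are mature.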
    fn is_object_in_nursery(&self, object: crate::util::ObjectReference) -> bool {
        self.immix.immix_space.in_space(object) && !self.immix.immix_space.is_marked(object)
    }

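    /// Sticky Immix has no separate nursery address range: nursery objects are
    /// interleaved with mature objects, so an address alone cannot identify a
    /// nursery object. Conservatively answer `false`.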
    fn is_address_in_nursery(&self, _addr: crate::util::Address) -> bool {
        false
    }

    fn get_mature_physical_pages_available(&self) -> usize {
        self.immix.immix_space.available_physical_pages()
    }

    fn get_mature_reserved_pages(&self) -> usize {
        self.immix.immix_space.reserved_pages()
    }

    fn force_full_heap_collection(&self) {
        self.next_gc_full_heap.store(true, Ordering::SeqCst);
    }

    fn last_collection_full_heap(&self) -> bool {
        self.gc_full_heap.load(Ordering::SeqCst)
    }
}

impl<VM: VMBinding> crate::plan::generational::global::GenerationalPlanExt<VM> for StickyImmix<VM> {
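    /// Trace an object during a nursery GC. Mature objects are skipped;
    /// nursery objects in the Immix space are marked in place (for pinning or
    /// fast traces) or copied opportunistically, and LOS objects are traced by
    /// the large object space.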
    fn trace_object_nursery<Q: crate::ObjectQueue, const KIND: TraceKind>(
        &self,
        queue: &mut Q,
        object: crate::util::ObjectReference,
        worker: &mut crate::scheduler::GCWorker<VM>,
    ) -> crate::util::ObjectReference {
        if self.immix.immix_space.in_space(object) {
            if !self.is_object_in_nursery(object) {
                // Mature objects are not traced further in a nursery GC.
                trace!("Immix mature object {}, skip", object);
                return object;
            } else {
                let object = if KIND == TRACE_KIND_TRANSITIVE_PIN || KIND == TRACE_KIND_FAST {
                    trace!(
                        "Immix nursery object {} is being traced without moving",
                        object
                    );
                    self.immix
                        .immix_space
                        .trace_object_without_moving(queue, object)
                } else if self.immix.immix_space.prefer_copy_on_nursery_gc() {
                    let ret = self.immix.immix_space.trace_object_with_opportunistic_copy(
                        queue,
                        object,
                        CopySemantics::DefaultCopy,
                        worker,
                        // This is a nursery collection.
                        true,
                    );
                    trace!(
                        "Immix nursery object {} is being traced with opportunistic copy {}",
                        object,
                        if ret == object {
                            "".to_string()
                        } else {
                            format!("-> new object {}", ret)
                        }
                    );
                    ret
                } else {
                    trace!(
                        "Immix nursery object {} is being traced without moving",
                        object
                    );
                    self.immix
                        .immix_space
                        .trace_object_without_moving(queue, object)
                };

                return object;
            }
        }

        if self.immix.common().get_los().in_space(object) {
            return self
                .immix
                .common()
                .get_los()
                .trace_object::<Q>(queue, object);
        }

        object
    }
}

impl<VM: VMBinding> StickyImmix<VM> {
    pub fn new(args: CreateGeneralPlanArgs<VM>) -> Self {
        let full_heap_gc_count = args.stats.new_event_counter("majorGC", true, true);
        let plan_args = CreateSpecificPlanArgs {
            global_args: args,
            constraints: &STICKY_IMMIX_CONSTRAINTS,
            global_side_metadata_specs: SideMetadataContext::new_global_specs(
                &crate::plan::generational::new_generational_global_metadata_specs::<VM>(),
            ),
        };

        let immix = immix::Immix::new_with_args(
            plan_args,
            crate::policy::immix::ImmixSpaceArgs {
                // Nursery and mature objects share the same Immix space.
                mixed_age: true,
                never_move_objects: false,
            },
        );
        Self {
            immix,
            gc_full_heap: AtomicBool::new(false),
            next_gc_full_heap: AtomicBool::new(false),
            full_heap_gc_count,
        }
    }

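    /// A full-heap collection is required when nursery GCs are statically
    /// disabled, when the user triggered a GC with `full_heap_system_gc` set,
    /// when a previous GC requested it, or when a collection is being retried.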
    fn requires_full_heap_collection(&self) -> bool {
        // Each condition is kept as a separate branch for clarity.
        #[allow(clippy::if_same_then_else, clippy::needless_bool)]
        if crate::plan::generational::FULL_NURSERY_GC {
            // Nursery GCs are disabled: always collect the full heap.
            trace!("full heap: forced full heap");
            true
        } else if self
            .immix
            .common
            .base
            .global_state
            .user_triggered_collection
            .load(Ordering::SeqCst)
            && *self.immix.common.base.options.full_heap_system_gc
        {
            // The user triggered this GC and asked for full-heap system GCs.
            true
        } else if self.next_gc_full_heap.load(Ordering::SeqCst)
            || self
                .immix
                .common
                .base
                .global_state
                .cur_collection_attempts
                .load(Ordering::SeqCst)
                > 1
        {
            // A full-heap GC was requested, or we are retrying a collection.
            true
        } else {
            false
        }
    }

    pub fn get_immix_space(&self) -> &ImmixSpace<VM> {
        &self.immix.immix_space
    }
}