// mmtk/plan/generational/immix/global.rs
use super::gc_work::GenImmixMatureGCWorkContext;
2use super::gc_work::GenImmixNurseryGCWorkContext;
3use crate::plan::generational::global::CommonGenPlan;
4use crate::plan::generational::global::GenerationalPlan;
5use crate::plan::global::BasePlan;
6use crate::plan::global::CommonPlan;
7use crate::plan::global::CreateGeneralPlanArgs;
8use crate::plan::global::CreateSpecificPlanArgs;
9use crate::plan::AllocationSemantics;
10use crate::plan::Plan;
11use crate::plan::PlanConstraints;
12use crate::policy::gc_work::TraceKind;
13use crate::policy::immix::defrag::StatsForDefrag;
14use crate::policy::immix::ImmixSpace;
15use crate::policy::immix::ImmixSpaceArgs;
16use crate::policy::immix::{TRACE_KIND_DEFRAG, TRACE_KIND_FAST};
17use crate::policy::space::Space;
18use crate::scheduler::GCWorkScheduler;
19use crate::scheduler::GCWorker;
20use crate::util::alloc::allocators::AllocatorSelector;
21use crate::util::copy::*;
22use crate::util::heap::gc_trigger::SpaceStats;
23use crate::util::heap::VMRequest;
24use crate::util::metadata::log_bit::UnlogBitsOperation;
25use crate::util::Address;
26use crate::util::ObjectReference;
27use crate::util::VMWorkerThread;
28use crate::vm::*;
29use crate::ObjectQueue;
30
31use enum_map::EnumMap;
32use std::sync::atomic::AtomicBool;
33use std::sync::atomic::Ordering;
34
35use mmtk_macros::{HasSpaces, PlanTraceObject};
36
/// A generational immix plan: a copying nursery (held in [`CommonGenPlan`])
/// plus an immix space as the mature generation. The derive macros generate
/// the space-enumeration and trace-object plumbing from the field attributes.
#[derive(HasSpaces, PlanTraceObject)]
pub struct GenImmix<VM: VMBinding> {
    /// Shared generational plan state, including the nursery space.
    #[parent]
    pub gen: CommonGenPlan<VM>,
    /// The mature space. Objects copied into it use `CopySemantics::Mature`.
    #[post_scan]
    #[space]
    #[copy_semantics(CopySemantics::Mature)]
    pub immix_space: ImmixSpace<VM>,
    /// Whether the last GC defragmented the immix space.
    // Written in end_of_gc, read by last_collection_was_exhaustive.
    pub last_gc_was_defrag: AtomicBool,
    /// Whether the last GC collected the full heap (vs. nursery only).
    // Written in release, read by last_collection_was_exhaustive.
    pub last_gc_was_full_heap: AtomicBool,
}
56
/// Plan constraints for [`GenImmix`]: the shared generational constraints,
/// with the non-LOS allocation cap further limited by the immix policy's
/// maximum object size (whichever of the two is smaller wins).
pub const GENIMMIX_CONSTRAINTS: PlanConstraints = PlanConstraints {
    max_non_los_default_alloc_bytes: crate::util::rust_util::min_of_usize(
        crate::policy::immix::MAX_IMMIX_OBJECT_SIZE,
        crate::plan::generational::GEN_CONSTRAINTS.max_non_los_default_alloc_bytes,
    ),
    ..crate::plan::generational::GEN_CONSTRAINTS
};
71
72impl<VM: VMBinding> Plan for GenImmix<VM> {
73 fn constraints(&self) -> &'static PlanConstraints {
74 &GENIMMIX_CONSTRAINTS
75 }
76
77 fn create_copy_config(&'static self) -> CopyConfig<Self::VM> {
78 use enum_map::enum_map;
79 CopyConfig {
80 copy_mapping: enum_map! {
81 CopySemantics::PromoteToMature => CopySelector::ImmixHybrid(0),
82 CopySemantics::Mature => CopySelector::ImmixHybrid(0),
83 _ => CopySelector::Unused,
84 },
85 space_mapping: vec![(CopySelector::ImmixHybrid(0), &self.immix_space)],
86 constraints: &GENIMMIX_CONSTRAINTS,
87 }
88 }
89
90 fn last_collection_was_exhaustive(&self) -> bool {
91 self.last_gc_was_full_heap.load(Ordering::Relaxed)
92 && self
93 .immix_space
94 .is_last_gc_exhaustive(self.last_gc_was_defrag.load(Ordering::Relaxed))
95 }
96
97 fn collection_required(&self, space_full: bool, space: Option<SpaceStats<Self::VM>>) -> bool
98 where
99 Self: Sized,
100 {
101 self.gen.collection_required(self, space_full, space)
102 }
103
104 #[allow(clippy::if_same_then_else)]
108 #[allow(clippy::branches_sharing_code)]
109 fn schedule_collection(&'static self, scheduler: &GCWorkScheduler<Self::VM>) {
110 let is_full_heap = self.requires_full_heap_collection();
111 probe!(mmtk, gen_full_heap, is_full_heap);
112
113 if !is_full_heap {
114 info!("Nursery GC");
115 scheduler.schedule_common_work::<GenImmixNurseryGCWorkContext<VM>>(self);
116 } else {
117 info!("Full heap GC");
118 crate::plan::immix::Immix::schedule_immix_full_heap_collection::<
119 GenImmix<VM>,
120 GenImmixMatureGCWorkContext<VM, TRACE_KIND_FAST>,
121 GenImmixMatureGCWorkContext<VM, TRACE_KIND_DEFRAG>,
122 >(self, &self.immix_space, scheduler);
123 }
124 }
125
126 fn get_allocator_mapping(&self) -> &'static EnumMap<AllocationSemantics, AllocatorSelector> {
127 &super::mutator::ALLOCATOR_MAPPING
128 }
129
130 fn prepare(&mut self, tls: VMWorkerThread) {
131 let full_heap = !self.gen.is_current_gc_nursery();
132 self.gen.prepare(tls);
133 if full_heap {
134 self.immix_space.prepare(
135 full_heap,
136 Some(StatsForDefrag::new(self)),
137 UnlogBitsOperation::BulkClear,
139 );
140 } else {
141 }
144 }
145
146 fn release(&mut self, tls: VMWorkerThread) {
147 let full_heap = !self.gen.is_current_gc_nursery();
148 self.gen.release(tls);
149 if full_heap {
150 self.immix_space.release(
151 full_heap,
152 UnlogBitsOperation::NoOp,
154 );
155 } else {
156 }
159
160 self.last_gc_was_full_heap
161 .store(full_heap, Ordering::Relaxed);
162 }
163
164 fn end_of_gc(&mut self, tls: VMWorkerThread) {
165 let next_gc_full_heap = CommonGenPlan::should_next_gc_be_full_heap(self);
166 self.gen.end_of_gc(tls, next_gc_full_heap);
167
168 let did_defrag = self.immix_space.end_of_gc();
169 self.last_gc_was_defrag.store(did_defrag, Ordering::Relaxed);
170 }
171
172 fn current_gc_may_move_object(&self) -> bool {
173 if self.is_current_gc_nursery() {
174 true
175 } else {
176 self.immix_space.in_defrag()
177 }
178 }
179
180 fn get_collection_reserved_pages(&self) -> usize {
181 self.gen.get_collection_reserved_pages() + self.immix_space.defrag_headroom_pages()
182 }
183
184 fn get_used_pages(&self) -> usize {
185 self.gen.get_used_pages() + self.immix_space.reserved_pages()
186 }
187
188 fn get_available_pages(&self) -> usize {
190 (self
192 .get_total_pages()
193 .saturating_sub(self.get_reserved_pages()))
194 >> 1
195 }
196
197 fn base(&self) -> &BasePlan<VM> {
198 &self.gen.common.base
199 }
200
201 fn base_mut(&mut self) -> &mut BasePlan<Self::VM> {
202 &mut self.gen.common.base
203 }
204
205 fn common(&self) -> &CommonPlan<VM> {
206 &self.gen.common
207 }
208
209 fn generational(&self) -> Option<&dyn GenerationalPlan<VM = VM>> {
210 Some(self)
211 }
212}
213
214impl<VM: VMBinding> GenerationalPlan for GenImmix<VM> {
215 fn is_current_gc_nursery(&self) -> bool {
216 self.gen.is_current_gc_nursery()
217 }
218
219 fn is_object_in_nursery(&self, object: ObjectReference) -> bool {
220 self.gen.nursery.in_space(object)
221 }
222
223 fn is_address_in_nursery(&self, addr: Address) -> bool {
224 self.gen.nursery.address_in_space(addr)
225 }
226
227 fn get_mature_physical_pages_available(&self) -> usize {
228 self.immix_space.available_physical_pages()
229 }
230
231 fn get_mature_reserved_pages(&self) -> usize {
232 self.immix_space.reserved_pages()
233 }
234
235 fn force_full_heap_collection(&self) {
236 self.gen.force_full_heap_collection()
237 }
238
239 fn last_collection_full_heap(&self) -> bool {
240 self.gen.last_collection_full_heap()
241 }
242}
243
impl<VM: VMBinding> crate::plan::generational::global::GenerationalPlanExt<VM> for GenImmix<VM> {
    /// Trace `object` during a nursery GC, delegating to the common
    /// generational plan (which copies/forwards nursery objects as needed)
    /// and returning the possibly-moved reference.
    fn trace_object_nursery<Q: ObjectQueue, const KIND: TraceKind>(
        &self,
        queue: &mut Q,
        object: ObjectReference,
        worker: &mut GCWorker<VM>,
    ) -> ObjectReference {
        self.gen
            .trace_object_nursery::<Q, KIND>(queue, object, worker)
    }
}
255
256impl<VM: VMBinding> GenImmix<VM> {
257 pub fn new(args: CreateGeneralPlanArgs<VM>) -> Self {
258 let mut plan_args = CreateSpecificPlanArgs {
259 global_args: args,
260 constraints: &GENIMMIX_CONSTRAINTS,
261 global_side_metadata_specs:
262 crate::plan::generational::new_generational_global_metadata_specs::<VM>(),
263 };
264 let immix_space = ImmixSpace::new(
265 plan_args.get_mature_space_args(
266 "immix_mature",
267 true,
268 false,
269 VMRequest::discontiguous(),
270 ),
271 ImmixSpaceArgs {
272 mixed_age: false,
274 never_move_objects: false,
275 },
276 );
277
278 let genimmix = GenImmix {
279 gen: CommonGenPlan::new(plan_args),
280 immix_space,
281 last_gc_was_defrag: AtomicBool::new(false),
282 last_gc_was_full_heap: AtomicBool::new(false),
283 };
284
285 genimmix.verify_side_metadata_sanity();
286
287 genimmix
288 }
289
290 fn requires_full_heap_collection(&self) -> bool {
291 self.gen.requires_full_heap_collection(self)
292 }
293}