mmtk/plan/generational/global.rs

use crate::plan::global::CommonPlan;
use crate::plan::global::CreateSpecificPlanArgs;
use crate::plan::ObjectQueue;
use crate::plan::Plan;
use crate::policy::copyspace::CopySpace;
use crate::policy::gc_work::{TraceKind, TRACE_KIND_TRANSITIVE_PIN};
use crate::policy::space::Space;
use crate::scheduler::*;
use crate::util::copy::CopySemantics;
use crate::util::heap::gc_trigger::SpaceStats;
use crate::util::heap::VMRequest;
use crate::util::statistics::counter::EventCounter;
use crate::util::Address;
use crate::util::ObjectReference;
use crate::util::VMWorkerThread;
use crate::vm::{ObjectModel, VMBinding};
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::sync::{Arc, Mutex};

use mmtk_macros::{HasSpaces, PlanTraceObject};

/// Common implementation for generational plans. Each generational plan
/// embeds this struct and forwards calls to it where possible.
#[derive(HasSpaces, PlanTraceObject)]
pub struct CommonGenPlan<VM: VMBinding> {
    /// The nursery space.
    #[space]
    #[copy_semantics(CopySemantics::PromoteToMature)]
    pub nursery: CopySpace<VM>,
    /// The common plan, including spaces shared by all plans.
    #[parent]
    pub common: CommonPlan<VM>,
    /// Whether the current GC is a full-heap GC.
    pub gc_full_heap: AtomicBool,
    /// Whether the next GC should be a full-heap GC.
    pub next_gc_full_heap: AtomicBool,
    /// Counter for full-heap ("major") collections.
    pub full_heap_gc_count: Arc<Mutex<EventCounter>>,
}

impl<VM: VMBinding> CommonGenPlan<VM> {
    pub fn new(mut args: CreateSpecificPlanArgs<VM>) -> Self {
        // The nursery is a discontiguous copy space; survivors are copied out
        // of it (promoted to the mature space) on every GC.
        let nursery = CopySpace::new(
            args.get_space_args("nursery", true, false, VMRequest::discontiguous()),
            true,
        );
        let full_heap_gc_count = args
            .global_args
            .stats
            .new_event_counter("majorGC", true, true);
        let common = CommonPlan::new(args);

        CommonGenPlan {
            nursery,
            common,
            gc_full_heap: AtomicBool::default(),
            next_gc_full_heap: AtomicBool::new(false),
            full_heap_gc_count,
        }
    }

    /// Prepare the generational parts of the plan for a GC. This should be
    /// called by a single thread during GC preparation.
    pub fn prepare(&mut self, tls: VMWorkerThread) {
        let full_heap = !self.is_current_gc_nursery();
        if full_heap {
            self.full_heap_gc_count.lock().unwrap().inc();
        }
        self.common.prepare(tls, full_heap);
        self.nursery.prepare(true);
        self.nursery
            .set_copy_for_sft_trace(Some(CopySemantics::PromoteToMature));
    }

    /// Release the generational parts of the plan at the end of a GC. This
    /// should be called by a single thread during GC release.
    pub fn release(&mut self, tls: VMWorkerThread) {
        let full_heap = !self.is_current_gc_nursery();
        self.common.release(tls, full_heap);
        self.nursery.release();
    }

    /// Called at the end of a GC. Records whether the next GC should be a
    /// full-heap GC, and forwards to the common plan.
    pub fn end_of_gc(&mut self, tls: VMWorkerThread, next_gc_full_heap: bool) {
        self.set_next_gc_full_heap(next_gc_full_heap);
        self.common.end_of_gc(tls);
    }

    /// Independent of how many pages remain in the heap budget, we must never
    /// let the nursery grow to the point that it cannot be copied into the
    /// mature space.
    ///
    /// Returns `true` if the nursery may no longer fit in the mature space,
    /// accounting for the VM's worst-case copy expansion.
    fn virtual_memory_exhausted(plan: &dyn GenerationalPlan<VM = VM>) -> bool {
        ((plan.get_collection_reserved_pages() as f64
            * VM::VMObjectModel::VM_WORST_CASE_COPY_EXPANSION) as usize)
            > plan.get_mature_physical_pages_available()
    }
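    // Worked example with hypothetical numbers (not from the source): if the
    // nursery currently reserves 1000 pages and the VM declares a worst-case
    // copy expansion of 1.5, promotion may need up to 1000 * 1.5 = 1500 mature
    // pages. With only 1200 mature pages available, 1500 > 1200, so the check
    // above reports virtual memory as exhausted and a full-heap GC is forced.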

    /// Check if a collection is required, based on nursery usage and mature
    /// space exhaustion. This method may also mark the *next* GC as a
    /// full-heap GC.
    pub fn collection_required<P: Plan<VM = VM>>(
        &self,
        plan: &P,
        space_full: bool,
        space: Option<SpaceStats<VM>>,
    ) -> bool {
        let cur_nursery = self.nursery.reserved_pages();
        let max_nursery = self.common.base.gc_trigger.get_max_nursery_pages();
        let nursery_full = cur_nursery >= max_nursery;
        trace!(
            "nursery_full = {:?} (nursery = {}, max_nursery = {})",
            nursery_full,
            cur_nursery,
            max_nursery,
        );
        if nursery_full {
            return true;
        }
        if Self::virtual_memory_exhausted(plan.generational().unwrap()) {
            return true;
        }

        // Is the GC triggered by the nursery? If `space` is `None`, it is
        // not; otherwise it is triggered by the nursery iff `space` is the
        // nursery space.
        let is_triggered_by_nursery =
            space.is_some_and(|s| s.0.common().descriptor == self.nursery.common().descriptor);
        // If a space other than the nursery is full, the next GC must be a
        // full-heap GC.
        if space_full && !is_triggered_by_nursery {
            self.next_gc_full_heap.store(true, Ordering::SeqCst);
        }

        self.common.base.collection_required(plan, space_full)
    }
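    // A minimal sketch (assumed shape, not from this file) of how a concrete
    // generational plan typically forwards `Plan::collection_required` to the
    // helper above, assuming the plan stores this struct in a field named
    // `gen`:
    //
    //     fn collection_required(&self, space_full: bool,
    //                            space: Option<SpaceStats<Self::VM>>) -> bool {
    //         self.gen.collection_required(self, space_full, space)
    //     }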

    /// Force the next GC to be a full-heap GC.
    pub fn force_full_heap_collection(&self) {
        self.next_gc_full_heap.store(true, Ordering::SeqCst);
    }

    /// Was the last GC a full-heap GC?
    pub fn last_collection_full_heap(&self) -> bool {
        self.gc_full_heap.load(Ordering::Relaxed)
    }

    /// Decide whether the current GC should be a full-heap GC, and record the
    /// decision in `gc_full_heap`. Returns `true` for a full-heap GC.
    pub fn requires_full_heap_collection<P: Plan<VM = VM>>(&self, plan: &P) -> bool {
        // The conditions are complex; separate `if` blocks are easier to
        // read, even though several of them return the same value.
        #[allow(clippy::if_same_then_else, clippy::needless_bool)]
        let is_full_heap = if crate::plan::generational::FULL_NURSERY_GC {
            // The plan is configured to always run full-heap GCs.
            trace!("full heap: forced full heap");
            true
        } else if self
            .common
            .base
            .global_state
            .user_triggered_collection
            .load(Ordering::SeqCst)
            && *self.common.base.options.full_heap_system_gc
        {
            // The user triggered this GC, and the options request full-heap
            // GCs for user-triggered collections.
            trace!("full heap: user triggered");
            true
        } else if self.next_gc_full_heap.load(Ordering::SeqCst)
            || self
                .common
                .base
                .global_state
                .cur_collection_attempts
                .load(Ordering::SeqCst)
                > 1
        {
            // A full-heap GC was requested earlier, or a previous collection
            // attempt did not free enough memory.
            trace!(
                "full heap: next_gc_full_heap = {}, cur_collection_attempts = {}",
                self.next_gc_full_heap.load(Ordering::SeqCst),
                self.common
                    .base
                    .global_state
                    .cur_collection_attempts
                    .load(Ordering::SeqCst)
            );
            true
        } else if Self::virtual_memory_exhausted(plan.generational().unwrap()) {
            trace!("full heap: virtual memory exhausted");
            true
        } else {
            // Otherwise a nursery GC is sufficient.
            false
        };

        self.gc_full_heap.store(is_full_heap, Ordering::SeqCst);

        info!(
            "{}",
            if is_full_heap {
                "Full heap GC"
            } else {
                "Nursery GC"
            }
        );

        is_full_heap
    }

    /// Trace an object during a nursery GC: nursery objects are copied
    /// (promoted) to the mature space, and nursery objects that were
    /// allocated into the large object space are traced there.
    pub fn trace_object_nursery<Q: ObjectQueue, const KIND: TraceKind>(
        &self,
        queue: &mut Q,
        object: ObjectReference,
        worker: &mut GCWorker<VM>,
    ) -> ObjectReference {
        assert!(
            KIND != TRACE_KIND_TRANSITIVE_PIN,
            "A copying nursery cannot pin objects"
        );

        // Evacuate nursery objects, promoting them to the mature space.
        if self.nursery.in_space(object) {
            return self.nursery.trace_object::<Q>(
                queue,
                object,
                Some(CopySemantics::PromoteToMature),
                worker,
            );
        }
        // Large objects may be allocated into the LOS as nursery objects;
        // trace them here.
        if self.common.get_los().in_space(object) {
            return self.common.get_los().trace_object::<Q>(queue, object);
        }

        object
    }

    /// Is the current GC a nursery GC?
    pub fn is_current_gc_nursery(&self) -> bool {
        !self.gc_full_heap.load(Ordering::SeqCst)
    }

    /// Check a plan to see if the next GC should be a full-heap GC.
    ///
    /// Note that this should be called after all spaces have been released;
    /// it checks the available pages in the heap, so calling it earlier may
    /// give an incorrect result.
    pub fn should_next_gc_be_full_heap(plan: &dyn Plan<VM = VM>) -> bool {
        let available = plan.get_available_pages();
        let min_nursery = plan.base().gc_trigger.get_min_nursery_pages();
        let next_gc_full_heap = available < min_nursery;
        trace!(
            "next gc will be full heap? {}, available pages = {}, min nursery = {}",
            next_gc_full_heap,
            available,
            min_nursery
        );
        next_gc_full_heap
    }

    /// Set whether the next GC should be a full-heap GC.
    pub fn set_next_gc_full_heap(&self, next_gc_full_heap: bool) {
        self.next_gc_full_heap
            .store(next_gc_full_heap, Ordering::SeqCst);
    }

    /// Get the pages reserved for the collection by the generational parts of
    /// the plan. This is used together with the common plan's collection
    /// reserve.
    pub fn get_collection_reserved_pages(&self) -> usize {
        self.nursery.reserved_pages()
    }

    /// Get the pages used by the generational parts of the plan. This is used
    /// together with the common plan's used pages.
    pub fn get_used_pages(&self) -> usize {
        self.nursery.reserved_pages() + self.common.get_used_pages()
    }
}

/// Methods specific to generational plans. This trait needs to be object
/// safe.
pub trait GenerationalPlan: Plan {
    /// Is the current GC a nursery GC? If not, it is a full-heap GC. This
    /// should only be called during GC.
    fn is_current_gc_nursery(&self) -> bool;

    /// Is the object in the nursery?
    fn is_object_in_nursery(&self, object: ObjectReference) -> bool;

    /// Is the address in the nursery? As we only have an address rather than
    /// an object reference, the implementation cannot consult per-object
    /// metadata. If the plan cannot tell (e.g. nursery and mature objects
    /// share a space), it should conservatively return `false`.
    fn is_address_in_nursery(&self, addr: Address) -> bool;

    /// Return the number of pages available for allocation into the mature
    /// space.
    fn get_mature_physical_pages_available(&self) -> usize;

    /// Return the number of pages reserved in the mature space.
    fn get_mature_reserved_pages(&self) -> usize;

    /// Was the last GC a full-heap GC?
    fn last_collection_full_heap(&self) -> bool;

    /// Force the next GC to be a full-heap GC.
    fn force_full_heap_collection(&self);
}

/// An extension trait for [`GenerationalPlan`], containing methods that are
/// specific to generational plans but are not object safe (the generic
/// `trace_object_nursery` cannot appear in a trait object).
pub trait GenerationalPlanExt<VM: VMBinding>: GenerationalPlan<VM = VM> {
    /// Trace an object during a nursery GC. If the object is in the nursery,
    /// `trace_object` should be called on the nursery space; otherwise the
    /// call is forwarded to the appropriate space.
    fn trace_object_nursery<Q: ObjectQueue, const KIND: TraceKind>(
        &self,
        queue: &mut Q,
        object: ObjectReference,
        worker: &mut GCWorker<VM>,
    ) -> ObjectReference;
}

/// Is the current GC only collecting objects allocated since the last GC?
/// Returns `false` if the plan is not a generational plan (and hence never
/// runs nursery GCs).
pub fn is_nursery_gc<VM: VMBinding>(plan: &dyn Plan<VM = VM>) -> bool {
    plan.generational()
        .is_some_and(|plan| plan.is_current_gc_nursery())
}
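
// A minimal usage sketch (hypothetical call site, not part of this file):
// GC work packets can consult `is_nursery_gc` to decide how to treat
// remembered sets, e.g.:
//
//     if is_nursery_gc(mmtk.get_plan()) {
//         // Nursery GC: remembered-set entries (mature -> nursery edges)
//         // serve as additional roots.
//     } else {
//         // Full-heap GC: all live objects are traced, so remembered sets
//         // can be cleared instead.
//     }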