//! mmtk/plan/generational/copying/global.rs

1use super::gc_work::GenCopyGCWorkContext;
2use super::gc_work::GenCopyNurseryGCWorkContext;
3use super::mutator::ALLOCATOR_MAPPING;
4use crate::plan::generational::global::CommonGenPlan;
5use crate::plan::generational::global::GenerationalPlan;
6use crate::plan::generational::global::GenerationalPlanExt;
7use crate::plan::global::BasePlan;
8use crate::plan::global::CommonPlan;
9use crate::plan::global::CreateGeneralPlanArgs;
10use crate::plan::global::CreateSpecificPlanArgs;
11use crate::plan::AllocationSemantics;
12use crate::plan::Plan;
13use crate::plan::PlanConstraints;
14use crate::policy::copyspace::CopySpace;
15use crate::policy::gc_work::TraceKind;
16use crate::policy::space::Space;
17use crate::scheduler::*;
18use crate::util::alloc::allocators::AllocatorSelector;
19use crate::util::copy::*;
20use crate::util::heap::gc_trigger::SpaceStats;
21use crate::util::heap::VMRequest;
22use crate::util::Address;
23use crate::util::ObjectReference;
24use crate::util::VMWorkerThread;
25use crate::vm::*;
26use crate::ObjectQueue;
27use enum_map::EnumMap;
28use std::sync::atomic::{AtomicBool, Ordering};
29
30use mmtk_macros::{HasSpaces, PlanTraceObject};
31
/// The generational copying (GenCopy) plan: a nursery (held in the common
/// generational part) plus two mature semi-spaces that are flipped on each
/// full-heap collection.
#[derive(HasSpaces, PlanTraceObject)]
pub struct GenCopy<VM: VMBinding> {
    /// The common generational part of the plan (nursery space and shared generational state).
    #[parent]
    pub gen: CommonGenPlan<VM>,
    /// Selects the current mature tospace: `false` => `copyspace0`, `true` => `copyspace1`
    /// (see `tospace()`/`fromspace()` below). Flipped on every full-heap GC in `prepare()`.
    pub hi: AtomicBool,
    /// One of the two mature semi-spaces; objects copied into it use mature copy semantics.
    #[space]
    #[copy_semantics(CopySemantics::Mature)]
    pub copyspace0: CopySpace<VM>,
    /// The other mature semi-space.
    #[space]
    #[copy_semantics(CopySemantics::Mature)]
    pub copyspace1: CopySpace<VM>,
}
44
/// The plan constraints for the generational copying plan.
/// GenCopy adds no constraints of its own; it reuses the shared constraints
/// defined for all generational plans.
pub const GENCOPY_CONSTRAINTS: PlanConstraints = crate::plan::generational::GEN_CONSTRAINTS;
47
48impl<VM: VMBinding> Plan for GenCopy<VM> {
49    fn constraints(&self) -> &'static PlanConstraints {
50        &GENCOPY_CONSTRAINTS
51    }
52
53    fn create_copy_config(&'static self) -> CopyConfig<Self::VM> {
54        use enum_map::enum_map;
55        CopyConfig {
56            copy_mapping: enum_map! {
57                CopySemantics::Mature => CopySelector::CopySpace(0),
58                CopySemantics::PromoteToMature => CopySelector::CopySpace(0),
59                _ => CopySelector::Unused,
60            },
61            space_mapping: vec![
62                // The tospace argument doesn't matter, we will rebind before a GC anyway.
63                (CopySelector::CopySpace(0), self.tospace()),
64            ],
65            constraints: &GENCOPY_CONSTRAINTS,
66        }
67    }
68
69    fn collection_required(&self, space_full: bool, space: Option<SpaceStats<Self::VM>>) -> bool
70    where
71        Self: Sized,
72    {
73        self.gen.collection_required(self, space_full, space)
74    }
75
76    fn schedule_collection(&'static self, scheduler: &GCWorkScheduler<VM>) {
77        let is_full_heap = self.requires_full_heap_collection();
78        if is_full_heap {
79            scheduler.schedule_common_work::<GenCopyGCWorkContext<VM>>(self);
80        } else {
81            scheduler.schedule_common_work::<GenCopyNurseryGCWorkContext<VM>>(self);
82        }
83    }
84
85    fn get_allocator_mapping(&self) -> &'static EnumMap<AllocationSemantics, AllocatorSelector> {
86        &ALLOCATOR_MAPPING
87    }
88
89    fn prepare(&mut self, tls: VMWorkerThread) {
90        let full_heap = !self.gen.is_current_gc_nursery();
91        self.gen.prepare(tls);
92        if full_heap {
93            self.hi
94                .store(!self.hi.load(Ordering::SeqCst), Ordering::SeqCst); // flip the semi-spaces
95        }
96        let hi = self.hi.load(Ordering::SeqCst);
97        self.copyspace0.prepare(hi);
98        self.copyspace1.prepare(!hi);
99
100        self.fromspace_mut()
101            .set_copy_for_sft_trace(Some(CopySemantics::Mature));
102        self.tospace_mut().set_copy_for_sft_trace(None);
103    }
104
105    fn prepare_worker(&self, worker: &mut GCWorker<Self::VM>) {
106        unsafe { worker.get_copy_context_mut().copy[0].assume_init_mut() }.rebind(self.tospace());
107    }
108
109    fn release(&mut self, tls: VMWorkerThread) {
110        let full_heap = !self.gen.is_current_gc_nursery();
111        self.gen.release(tls);
112        if full_heap {
113            if VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.is_on_side() {
114                self.fromspace().clear_side_log_bits();
115            }
116            self.fromspace().release();
117        }
118    }
119
120    fn end_of_gc(&mut self, tls: VMWorkerThread) {
121        let next_gc_full_heap = CommonGenPlan::should_next_gc_be_full_heap(self);
122        self.gen.end_of_gc(tls, next_gc_full_heap);
123    }
124
125    fn get_collection_reserved_pages(&self) -> usize {
126        self.gen.get_collection_reserved_pages() + self.tospace().reserved_pages()
127    }
128
129    fn get_used_pages(&self) -> usize {
130        self.gen.get_used_pages() + self.tospace().reserved_pages()
131    }
132
133    fn current_gc_may_move_object(&self) -> bool {
134        true
135    }
136
137    /// Return the number of pages available for allocation. Assuming all future allocations goes to nursery.
138    fn get_available_pages(&self) -> usize {
139        // super.get_available_pages() / 2 to reserve pages for copying
140        (self
141            .get_total_pages()
142            .saturating_sub(self.get_reserved_pages()))
143            >> 1
144    }
145
146    fn base(&self) -> &BasePlan<VM> {
147        &self.gen.common.base
148    }
149
150    fn base_mut(&mut self) -> &mut BasePlan<Self::VM> {
151        &mut self.gen.common.base
152    }
153
154    fn common(&self) -> &CommonPlan<VM> {
155        &self.gen.common
156    }
157
158    fn generational(&self) -> Option<&dyn GenerationalPlan<VM = Self::VM>> {
159        Some(self)
160    }
161}
162
163impl<VM: VMBinding> GenerationalPlan for GenCopy<VM> {
164    fn is_current_gc_nursery(&self) -> bool {
165        self.gen.is_current_gc_nursery()
166    }
167
168    fn is_object_in_nursery(&self, object: ObjectReference) -> bool {
169        self.gen.nursery.in_space(object)
170    }
171
172    fn is_address_in_nursery(&self, addr: Address) -> bool {
173        self.gen.nursery.address_in_space(addr)
174    }
175
176    fn get_mature_physical_pages_available(&self) -> usize {
177        self.tospace().available_physical_pages()
178    }
179
180    fn get_mature_reserved_pages(&self) -> usize {
181        self.tospace().reserved_pages()
182    }
183
184    fn force_full_heap_collection(&self) {
185        self.gen.force_full_heap_collection()
186    }
187
188    fn last_collection_full_heap(&self) -> bool {
189        self.gen.last_collection_full_heap()
190    }
191}
192
193impl<VM: VMBinding> GenerationalPlanExt<VM> for GenCopy<VM> {
194    fn trace_object_nursery<Q: ObjectQueue, const KIND: TraceKind>(
195        &self,
196        queue: &mut Q,
197        object: ObjectReference,
198        worker: &mut GCWorker<VM>,
199    ) -> ObjectReference {
200        self.gen
201            .trace_object_nursery::<Q, KIND>(queue, object, worker)
202    }
203}
204
205impl<VM: VMBinding> GenCopy<VM> {
206    pub fn new(args: CreateGeneralPlanArgs<VM>) -> Self {
207        let mut plan_args = CreateSpecificPlanArgs {
208            global_args: args,
209            constraints: &GENCOPY_CONSTRAINTS,
210            global_side_metadata_specs:
211                crate::plan::generational::new_generational_global_metadata_specs::<VM>(),
212        };
213
214        let copyspace0 = CopySpace::new(
215            plan_args.get_mature_space_args("copyspace0", true, false, VMRequest::discontiguous()),
216            false,
217        );
218        let copyspace1 = CopySpace::new(
219            plan_args.get_mature_space_args("copyspace1", true, false, VMRequest::discontiguous()),
220            true,
221        );
222
223        let res = GenCopy {
224            gen: CommonGenPlan::new(plan_args),
225            hi: AtomicBool::new(false),
226            copyspace0,
227            copyspace1,
228        };
229
230        res.verify_side_metadata_sanity();
231
232        res
233    }
234
235    fn requires_full_heap_collection(&self) -> bool {
236        self.gen.requires_full_heap_collection(self)
237    }
238
239    pub fn tospace(&self) -> &CopySpace<VM> {
240        if self.hi.load(Ordering::SeqCst) {
241            &self.copyspace1
242        } else {
243            &self.copyspace0
244        }
245    }
246
247    pub fn tospace_mut(&mut self) -> &mut CopySpace<VM> {
248        if self.hi.load(Ordering::SeqCst) {
249            &mut self.copyspace1
250        } else {
251            &mut self.copyspace0
252        }
253    }
254
255    pub fn fromspace(&self) -> &CopySpace<VM> {
256        if self.hi.load(Ordering::SeqCst) {
257            &self.copyspace0
258        } else {
259            &self.copyspace1
260        }
261    }
262
263    pub fn fromspace_mut(&mut self) -> &mut CopySpace<VM> {
264        if self.hi.load(Ordering::SeqCst) {
265            &mut self.copyspace0
266        } else {
267            &mut self.copyspace1
268        }
269    }
270}