// mmtk/plan/semispace/global.rs

1use super::gc_work::SSGCWorkContext;
2use crate::plan::global::CommonPlan;
3use crate::plan::global::CreateGeneralPlanArgs;
4use crate::plan::global::CreateSpecificPlanArgs;
5use crate::plan::semispace::mutator::ALLOCATOR_MAPPING;
6use crate::plan::AllocationSemantics;
7use crate::plan::Plan;
8use crate::plan::PlanConstraints;
9use crate::policy::copyspace::CopySpace;
10use crate::policy::space::Space;
11use crate::scheduler::*;
12use crate::util::alloc::allocators::AllocatorSelector;
13use crate::util::copy::*;
14use crate::util::heap::gc_trigger::SpaceStats;
15use crate::util::heap::VMRequest;
16use crate::util::metadata::side_metadata::SideMetadataContext;
17use crate::util::opaque_pointer::VMWorkerThread;
18use crate::{plan::global::BasePlan, vm::VMBinding};
19use std::sync::atomic::{AtomicBool, Ordering};
20
21use mmtk_macros::{HasSpaces, PlanTraceObject};
22
23use enum_map::EnumMap;
24
#[derive(HasSpaces, PlanTraceObject)]
pub struct SemiSpace<VM: VMBinding> {
    /// Selects which copyspace is the current to-space: `false` means
    /// `copyspace0`, `true` means `copyspace1`. Flipped in `Plan::prepare`
    /// at the start of every collection.
    pub hi: AtomicBool,
    /// One of the two semispaces; it is the to-space when `hi` is `false`.
    #[space]
    #[copy_semantics(CopySemantics::DefaultCopy)]
    pub copyspace0: CopySpace<VM>,
    /// One of the two semispaces; it is the to-space when `hi` is `true`.
    #[space]
    #[copy_semantics(CopySemantics::DefaultCopy)]
    pub copyspace1: CopySpace<VM>,
    /// Plan state shared with other plans (contains the `BasePlan`).
    #[parent]
    pub common: CommonPlan<VM>,
}
37
/// The plan constraints for the semispace plan.
pub const SS_CONSTRAINTS: PlanConstraints = PlanConstraints {
    // Semispace is a copying collector: live objects are evacuated every GC.
    moves_objects: true,
    // Cap for non-LOS default allocations; uses the shared constant for
    // copying plans.
    max_non_los_default_alloc_bytes:
        crate::plan::plan_constraints::MAX_NON_LOS_ALLOC_BYTES_COPYING_PLAN,
    // All remaining constraints take their default values.
    ..PlanConstraints::default()
};
45
impl<VM: VMBinding> Plan for SemiSpace<VM> {
    fn constraints(&self) -> &'static PlanConstraints {
        &SS_CONSTRAINTS
    }

    fn create_copy_config(&'static self) -> CopyConfig<Self::VM> {
        use enum_map::enum_map;
        CopyConfig {
            // Route all default-copy allocations through copy-context slot 0.
            copy_mapping: enum_map! {
                CopySemantics::DefaultCopy => CopySelector::CopySpace(0),
                _ => CopySelector::Unused,
            },
            space_mapping: vec![
                // The space bound here doesn't matter: `prepare_worker` rebinds
                // slot 0 to the current to-space before each GC anyway.
                (CopySelector::CopySpace(0), &self.copyspace0),
            ],
            constraints: &SS_CONSTRAINTS,
        }
    }

    fn schedule_collection(&'static self, scheduler: &GCWorkScheduler<VM>) {
        // Use the common work-packet schedule with the semispace trace context.
        scheduler.schedule_common_work::<SSGCWorkContext<VM>>(self);
    }

    fn get_allocator_mapping(&self) -> &'static EnumMap<AllocationSemantics, AllocatorSelector> {
        &ALLOCATOR_MAPPING
    }

    fn prepare(&mut self, tls: VMWorkerThread) {
        self.common.prepare(tls, true);

        self.hi
            .store(!self.hi.load(Ordering::SeqCst), Ordering::SeqCst); // flip the semi-spaces
        // Prepare each copyspace for the coming collection. `copyspace0` is the
        // from-space exactly when `hi` is true (see `fromspace`).
        let hi = self.hi.load(Ordering::SeqCst);
        self.copyspace0.prepare(hi);
        self.copyspace1.prepare(!hi);
        // During SFT-based tracing, objects found in the from-space are copied
        // with DefaultCopy semantics; to-space objects are not copied again.
        self.fromspace_mut()
            .set_copy_for_sft_trace(Some(CopySemantics::DefaultCopy));
        self.tospace_mut().set_copy_for_sft_trace(None);
    }

    fn prepare_worker(&self, worker: &mut GCWorker<VM>) {
        // Rebind copy-context slot 0 to the to-space chosen for this GC.
        // SAFETY: slot 0 is presumed initialized because `create_copy_config`
        // maps `CopySelector::CopySpace(0)` — TODO(review) confirm the copy
        // context initializes every mapped slot before workers run.
        unsafe { worker.get_copy_context_mut().copy[0].assume_init_mut() }.rebind(self.tospace());
    }

    fn release(&mut self, tls: VMWorkerThread) {
        self.common.release(tls, true);
        // release the collected region (this GC's from-space)
        self.fromspace().release();
    }

    fn end_of_gc(&mut self, tls: VMWorkerThread) {
        self.common.end_of_gc(tls)
    }

    fn collection_required(&self, space_full: bool, _space: Option<SpaceStats<Self::VM>>) -> bool {
        self.base().collection_required(self, space_full)
    }

    fn current_gc_may_move_object(&self) -> bool {
        // Every semispace collection evacuates live objects.
        true
    }

    fn get_collection_reserved_pages(&self) -> usize {
        // A collection must reserve enough to-space to copy all live objects.
        self.tospace().reserved_pages()
    }

    fn get_used_pages(&self) -> usize {
        self.tospace().reserved_pages() + self.common.get_used_pages()
    }

    fn get_available_pages(&self) -> usize {
        // Halved (`>> 1`): each page handed out from the to-space needs a
        // matching page available for copying at the next collection.
        (self
            .get_total_pages()
            .saturating_sub(self.get_reserved_pages()))
            >> 1
    }

    fn base(&self) -> &BasePlan<VM> {
        &self.common.base
    }

    fn base_mut(&mut self) -> &mut BasePlan<Self::VM> {
        &mut self.common.base
    }

    fn common(&self) -> &CommonPlan<VM> {
        &self.common
    }
}
137
138impl<VM: VMBinding> SemiSpace<VM> {
139    pub fn new(args: CreateGeneralPlanArgs<VM>) -> Self {
140        let mut plan_args = CreateSpecificPlanArgs {
141            global_args: args,
142            constraints: &SS_CONSTRAINTS,
143            global_side_metadata_specs: SideMetadataContext::new_global_specs(&[]),
144        };
145
146        let res = SemiSpace {
147            hi: AtomicBool::new(false),
148            copyspace0: CopySpace::new(
149                plan_args.get_normal_space_args(
150                    "copyspace0",
151                    true,
152                    false,
153                    VMRequest::discontiguous(),
154                ),
155                false,
156            ),
157            copyspace1: CopySpace::new(
158                plan_args.get_normal_space_args(
159                    "copyspace1",
160                    true,
161                    false,
162                    VMRequest::discontiguous(),
163                ),
164                true,
165            ),
166            common: CommonPlan::new(plan_args),
167        };
168
169        res.verify_side_metadata_sanity();
170
171        res
172    }
173
174    pub fn tospace(&self) -> &CopySpace<VM> {
175        if self.hi.load(Ordering::SeqCst) {
176            &self.copyspace1
177        } else {
178            &self.copyspace0
179        }
180    }
181
182    pub fn tospace_mut(&mut self) -> &mut CopySpace<VM> {
183        if self.hi.load(Ordering::SeqCst) {
184            &mut self.copyspace1
185        } else {
186            &mut self.copyspace0
187        }
188    }
189
190    pub fn fromspace(&self) -> &CopySpace<VM> {
191        if self.hi.load(Ordering::SeqCst) {
192            &self.copyspace0
193        } else {
194            &self.copyspace1
195        }
196    }
197
198    pub fn fromspace_mut(&mut self) -> &mut CopySpace<VM> {
199        if self.hi.load(Ordering::SeqCst) {
200            &mut self.copyspace0
201        } else {
202            &mut self.copyspace1
203        }
204    }
205}