// mmtk/plan/generational/barrier.rs
use crate::plan::barriers::BarrierSemantics;
use crate::plan::PlanTraceObject;
use crate::plan::VectorQueue;
use crate::policy::gc_work::DEFAULT_TRACE;
use crate::scheduler::WorkBucketStage;
use crate::util::*;
use crate::vm::slot::MemorySlice;
use crate::vm::VMBinding;
use crate::MMTK;

use super::gc_work::GenNurseryProcessEdges;
use super::gc_work::ProcessModBuf;
use super::gc_work::ProcessRegionModBuf;
use super::global::GenerationalPlanExt;

/// Object-remembering barrier semantics for generational plans.
///
/// Written objects are buffered in `modbuf` and bulk-copied destination
/// regions in `region_modbuf`; both are flushed to the GC scheduler as
/// `Closure`-stage work packets (see the inherent `flush_*` methods).
pub struct GenObjectBarrierSemantics<
    VM: VMBinding,
    P: GenerationalPlanExt<VM> + PlanTraceObject<VM>,
> {
    /// MMTk instance; used to reach the scheduler's work buckets on flush.
    mmtk: &'static MMTK<VM>,
    /// The generational plan; used to query nursery membership.
    plan: &'static P,
    /// Object-granularity buffer of written (remembered) objects.
    modbuf: VectorQueue<ObjectReference>,
    /// Region-granularity buffer of memory slices written by bulk copies.
    region_modbuf: VectorQueue<VM::VMMemorySlice>,
}
31
32impl<VM: VMBinding, P: GenerationalPlanExt<VM> + PlanTraceObject<VM>>
33 GenObjectBarrierSemantics<VM, P>
34{
35 pub fn new(mmtk: &'static MMTK<VM>, plan: &'static P) -> Self {
36 Self {
37 mmtk,
38 plan,
39 modbuf: VectorQueue::new(),
40 region_modbuf: VectorQueue::new(),
41 }
42 }
43
44 fn flush_modbuf(&mut self) {
45 let buf = self.modbuf.take();
46 if !buf.is_empty() {
47 self.mmtk.scheduler.work_buckets[WorkBucketStage::Closure]
48 .add(ProcessModBuf::<GenNurseryProcessEdges<VM, P, DEFAULT_TRACE>>::new(buf));
49 }
50 }
51
52 fn flush_region_modbuf(&mut self) {
53 let buf = self.region_modbuf.take();
54 if !buf.is_empty() {
55 debug_assert!(!buf.is_empty());
56 self.mmtk.scheduler.work_buckets[WorkBucketStage::Closure].add(ProcessRegionModBuf::<
57 GenNurseryProcessEdges<VM, P, DEFAULT_TRACE>,
58 >::new(buf));
59 }
60 }
61}
62
63impl<VM: VMBinding, P: GenerationalPlanExt<VM> + PlanTraceObject<VM>> BarrierSemantics
64 for GenObjectBarrierSemantics<VM, P>
65{
66 type VM = VM;
67
68 fn flush(&mut self) {
69 self.flush_modbuf();
70 self.flush_region_modbuf();
71 }
72
73 fn object_reference_write_slow(
74 &mut self,
75 src: ObjectReference,
76 _slot: VM::VMSlot,
77 _target: Option<ObjectReference>,
78 ) {
79 self.modbuf.push(src);
81 self.modbuf.is_full().then(|| self.flush_modbuf());
82 }
83
84 fn memory_region_copy_slow(&mut self, _src: VM::VMMemorySlice, dst: VM::VMMemorySlice) {
85 let dst_in_nursery = match dst.object() {
87 Some(obj) => self.plan.is_object_in_nursery(obj),
88 None => self.plan.is_address_in_nursery(dst.start()),
89 };
90 if !dst_in_nursery {
92 self.region_modbuf.push(dst);
94 self.region_modbuf
95 .is_full()
96 .then(|| self.flush_region_modbuf());
97 }
98 }
99
100 fn object_probable_write_slow(&mut self, obj: ObjectReference) {
101 self.modbuf.push(obj);
103 self.modbuf.is_full().then(|| self.flush_modbuf());
104 }
105}