// mmtk/src/plan/generational/barrier.rs

use crate::plan::barriers::BarrierSemantics;
use crate::plan::PlanTraceObject;
use crate::plan::VectorQueue;
use crate::policy::gc_work::DEFAULT_TRACE;
use crate::scheduler::WorkBucketStage;
use crate::util::constants::BYTES_IN_INT;
use crate::util::*;
use crate::vm::slot::MemorySlice;
use crate::vm::VMBinding;
use crate::MMTK;

use super::gc_work::GenNurseryProcessEdges;
use super::gc_work::ProcessModBuf;
use super::gc_work::ProcessRegionModBuf;
use super::global::GenerationalPlanExt;

/// Object-remembering barrier semantics for generational plans. Remembered
/// objects and bulk-copied memory regions are buffered in thread-local
/// queues and flushed to the GC scheduler as work packets.
pub struct GenObjectBarrierSemantics<
    VM: VMBinding,
    P: GenerationalPlanExt<VM> + PlanTraceObject<VM>,
> {
    /// The MMTk instance; used to reach the scheduler's work buckets on flush.
    mmtk: &'static MMTK<VM>,
    /// The generational plan; consulted for nursery membership tests.
    plan: &'static P,
    /// Object modbuf: objects remembered by the write barrier that may hold
    /// references into the nursery.
    modbuf: VectorQueue<ObjectReference>,
    /// Region modbuf: memory slices (e.g. array-copy destinations) that may
    /// hold references into the nursery.
    region_modbuf: VectorQueue<VM::VMMemorySlice>,
}
32
33impl<VM: VMBinding, P: GenerationalPlanExt<VM> + PlanTraceObject<VM>>
34 GenObjectBarrierSemantics<VM, P>
35{
36 pub fn new(mmtk: &'static MMTK<VM>, plan: &'static P) -> Self {
37 Self {
38 mmtk,
39 plan,
40 modbuf: VectorQueue::new(),
41 region_modbuf: VectorQueue::new(),
42 }
43 }
44
45 fn flush_modbuf(&mut self) {
46 let buf = self.modbuf.take();
47 if !buf.is_empty() {
48 self.mmtk.scheduler.work_buckets[WorkBucketStage::Closure]
49 .add(ProcessModBuf::<GenNurseryProcessEdges<VM, P, DEFAULT_TRACE>>::new(buf));
50 }
51 }
52
53 fn flush_region_modbuf(&mut self) {
54 let buf = self.region_modbuf.take();
55 if !buf.is_empty() {
56 debug_assert!(!buf.is_empty());
57 self.mmtk.scheduler.work_buckets[WorkBucketStage::Closure].add(ProcessRegionModBuf::<
58 GenNurseryProcessEdges<VM, P, DEFAULT_TRACE>,
59 >::new(buf));
60 }
61 }
62}
63
64impl<VM: VMBinding, P: GenerationalPlanExt<VM> + PlanTraceObject<VM>> BarrierSemantics
65 for GenObjectBarrierSemantics<VM, P>
66{
67 type VM = VM;
68
69 fn flush(&mut self) {
70 self.flush_modbuf();
71 self.flush_region_modbuf();
72 }
73
74 fn object_reference_write_slow(
75 &mut self,
76 src: ObjectReference,
77 _slot: VM::VMSlot,
78 _target: Option<ObjectReference>,
79 ) {
80 self.modbuf.push(src);
82 self.modbuf.is_full().then(|| self.flush_modbuf());
83 }
84
85 fn memory_region_copy_slow(&mut self, _src: VM::VMMemorySlice, dst: VM::VMMemorySlice) {
86 let dst_in_nursery = match dst.object() {
88 Some(obj) => self.plan.is_object_in_nursery(obj),
89 None => self.plan.is_address_in_nursery(dst.start()),
90 };
91 if !dst_in_nursery {
93 debug_assert_eq!(
95 dst.bytes() & (BYTES_IN_INT - 1),
96 0,
97 "bytes should be a multiple of 32-bit words"
98 );
99 self.region_modbuf.push(dst);
100 self.region_modbuf
101 .is_full()
102 .then(|| self.flush_region_modbuf());
103 }
104 }
105
106 fn object_probable_write_slow(&mut self, obj: ObjectReference) {
107 self.modbuf.push(obj);
109 self.modbuf.is_full().then(|| self.flush_modbuf());
110 }
111}