mmtk/util/heap/chunk_map.rs

use crate::scheduler::GCWork;
use crate::util::linear_scan::Region;
use crate::util::linear_scan::RegionIterator;
use crate::util::metadata::side_metadata::SideMetadataSpec;
use crate::util::Address;
use crate::vm::VMBinding;
use spin::Mutex;
use std::ops::Range;

/// A chunk of the address space, identified by its aligned start address.
#[repr(transparent)]
#[derive(Debug, Clone, Copy, PartialOrd, PartialEq, Eq)]
pub struct Chunk(Address);

impl Region for Chunk {
    const LOG_BYTES: usize = crate::util::heap::layout::vm_layout::LOG_BYTES_IN_CHUNK;

    fn from_aligned_address(address: Address) -> Self {
        debug_assert!(address.is_aligned_to(Self::BYTES));
        Self(address)
    }

    fn start(&self) -> Address {
        self.0
    }
}

impl Chunk {
    /// The chunk at the zero address, used as a sentinel for an empty chunk range.
    pub const ZERO: Self = Self(Address::ZERO);

    /// Get an iterator over the sub-regions of type `R` within this chunk.
    pub fn iter_region<R: Region>(&self) -> RegionIterator<R> {
        // The region must be smaller than a chunk, and the chunk boundaries
        // must be aligned to the region size.
        debug_assert!(R::LOG_BYTES < Self::LOG_BYTES);
        debug_assert!(R::is_aligned(self.start()));
        debug_assert!(R::is_aligned(self.end()));

        let start = R::from_aligned_address(self.start());
        let end = R::from_aligned_address(self.end());
        RegionIterator::<R>::new(start, end)
    }
}
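
// Illustrative sketch (not part of the original file): `iter_region` walks a
// chunk in fixed-size sub-regions. `DemoBlock` is a hypothetical 32 KB region
// defined here purely for the example; real policies supply their own region
// types. The count assumes `RegionIterator` is half-open over [start, end),
// as its use with `Range<Chunk>` elsewhere in this file suggests.
#[cfg(test)]
mod chunk_iter_sketch {
    use super::*;

    #[derive(Debug, Clone, Copy, PartialOrd, PartialEq, Eq)]
    struct DemoBlock(Address);

    impl Region for DemoBlock {
        const LOG_BYTES: usize = 15; // 32 KB, smaller than any chunk size
        fn from_aligned_address(address: Address) -> Self {
            debug_assert!(address.is_aligned_to(Self::BYTES));
            Self(address)
        }
        fn start(&self) -> Address {
            self.0
        }
    }

    #[test]
    fn blocks_per_chunk() {
        // Pure address arithmetic; no heap or side metadata needs to be mapped.
        let chunk = Chunk::from_aligned_address(Address::ZERO);
        let blocks = chunk.iter_region::<DemoBlock>().count();
        assert_eq!(blocks, 1 << (Chunk::LOG_BYTES - DemoBlock::LOG_BYTES));
    }
}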

/// Per-chunk state, encoded in a single byte: the high bit marks the chunk as
/// allocated, and the low nibble stores the index of the owning space.
#[repr(transparent)]
#[derive(PartialEq, Clone, Copy)]
pub struct ChunkState(u8);

impl ChunkState {
    const ALLOC_BIT_MASK: u8 = 0x80;
    const SPACE_INDEX_MASK: u8 = 0x0F;

    /// Create a state representing a chunk allocated by the given space.
    pub fn allocated(space_index: usize) -> ChunkState {
        debug_assert!(space_index < crate::util::heap::layout::heap_parameters::MAX_SPACES);
        let mut encode = space_index as u8;
        encode |= Self::ALLOC_BIT_MASK;
        ChunkState(encode)
    }
    /// Create a state representing a free chunk.
    pub fn free() -> ChunkState {
        ChunkState(0u8)
    }
    /// Is the chunk free?
    pub fn is_free(&self) -> bool {
        self.0 == 0
    }
    /// Is the chunk allocated?
    pub fn is_allocated(&self) -> bool {
        !self.is_free()
    }
    /// Get the index of the space that allocated the chunk.
    pub fn get_space_index(&self) -> usize {
        debug_assert!(self.is_allocated());
        let index = (self.0 & Self::SPACE_INDEX_MASK) as usize;
        debug_assert!(index < crate::util::heap::layout::heap_parameters::MAX_SPACES);
        index
    }
}

impl std::fmt::Debug for ChunkState {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if self.is_free() {
            write!(f, "Free")
        } else {
            write!(f, "Allocated({})", self.get_space_index())
        }
    }
}
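
// Illustrative sketch (not part of the original file): how the one-byte
// encoding above behaves. `allocated(3)` sets the high bit and keeps the space
// index in the low nibble (0x83), and the `Debug` impl prints it accordingly.
#[cfg(test)]
mod chunk_state_sketch {
    use super::*;

    #[test]
    fn encode_and_decode() {
        let state = ChunkState::allocated(3);
        assert!(state.is_allocated());
        assert_eq!(state.get_space_index(), 3);
        assert_eq!(format!("{:?}", state), "Allocated(3)");

        // A free chunk is simply the zero byte.
        let free = ChunkState::free();
        assert!(free.is_free());
        assert_eq!(format!("{:?}", free), "Free");
    }
}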

/// A side-metadata byte map recording which chunks a space has allocated.
/// A chunk map is created for one space (identified by `space_index`) and
/// only updates or lists chunks owned by that space.
pub struct ChunkMap {
    /// The index of the space that owns this chunk map.
    space_index: usize,
    /// The range of chunks this space has used so far, widened as chunks are allocated.
    chunk_range: Mutex<Range<Chunk>>,
}

impl ChunkMap {
    /// Side-metadata specification for the per-chunk allocation byte.
    pub const ALLOC_TABLE: SideMetadataSpec =
        crate::util::metadata::side_metadata::spec_defs::CHUNK_MARK;

    pub fn new(space_index: usize) -> Self {
        Self {
            space_index,
            chunk_range: Mutex::new(Chunk::ZERO..Chunk::ZERO),
        }
    }

    /// Mark a chunk as allocated by this space, or as free.
    pub fn set_allocated(&self, chunk: Chunk, allocated: bool) {
        let state = if allocated {
            ChunkState::allocated(self.space_index)
        } else {
            ChunkState::free()
        };
        // Nothing to do if the chunk is already in the requested state.
        if self.get_internal(chunk) == state {
            return;
        }
        #[cfg(debug_assertions)]
        {
            let old_state = self.get_internal(chunk);
            // A free chunk may be claimed by any space, but an allocated chunk
            // may only be updated by the space that owns it.
            assert!(
                old_state.is_free() || old_state.get_space_index() == self.space_index,
                "Chunk {:?}: old state {:?}, new state {:?}. Cannot set to new state.",
                chunk,
                old_state,
                state
            );
        }
        // Update the allocation byte in the side metadata.
        unsafe { Self::ALLOC_TABLE.store::<u8>(chunk.start(), state.0) };
        // For a newly allocated chunk, widen the recorded chunk range.
        if allocated {
            debug_assert!(!chunk.start().is_zero());
            let mut range = self.chunk_range.lock();
            if range.start == Chunk::ZERO {
                // First chunk recorded by this map.
                range.start = chunk;
                range.end = chunk.next();
            } else if chunk < range.start {
                range.start = chunk;
            } else if range.end <= chunk {
                range.end = chunk.next();
            }
        }
    }

    /// Get the state of a chunk. Returns `None` if the chunk is free or is
    /// owned by a different space.
    pub fn get(&self, chunk: Chunk) -> Option<ChunkState> {
        let state = self.get_internal(chunk);
        (state.is_allocated() && state.get_space_index() == self.space_index).then_some(state)
    }

    /// Read the raw chunk state, regardless of which space owns the chunk.
    fn get_internal(&self, chunk: Chunk) -> ChunkState {
        let byte = unsafe { Self::ALLOC_TABLE.load::<u8>(chunk.start()) };
        ChunkState(byte)
    }

    /// Iterate over all chunks currently allocated by this space.
    pub fn all_chunks(&self) -> impl Iterator<Item = Chunk> + '_ {
        let chunk_range = self.chunk_range.lock();
        RegionIterator::<Chunk>::new(chunk_range.start, chunk_range.end)
            .filter(|c| self.get(*c).is_some())
    }

    /// Create one work packet per allocated chunk, using the given factory function.
    pub fn generate_tasks<VM: VMBinding>(
        &self,
        func: impl Fn(Chunk) -> Box<dyn GCWork<VM>>,
    ) -> Vec<Box<dyn GCWork<VM>>> {
        let mut work_packets: Vec<Box<dyn GCWork<VM>>> = vec![];
        for chunk in self.all_chunks() {
            work_packets.push(func(chunk));
        }
        work_packets
    }
}
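
// Illustrative sketch (not part of the original file): the call pattern a
// space owning a `ChunkMap` might follow. It assumes `chunk` is a real,
// non-zero chunk whose `ALLOC_TABLE` side metadata has already been mapped
// (normally done when the space acquires the pages), so it is not written as
// a runnable test here. Sweep packets would then be built with
// `generate_tasks`, passing a closure that boxes a concrete `GCWork`
// implementation per chunk.
#[allow(dead_code)]
fn chunk_map_usage_sketch(map: &ChunkMap, chunk: Chunk) {
    // Record that this space now owns the chunk; the tracked chunk range is
    // widened lazily as chunks are added.
    map.set_allocated(chunk, true);
    debug_assert!(map.get(chunk).is_some());

    // Walk every chunk this space currently has allocated.
    for c in map.all_chunks() {
        let _ = c.start();
    }

    // Release the chunk again; `get` now reports it as unowned.
    map.set_allocated(chunk, false);
    debug_assert!(map.get(chunk).is_none());
}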