1use crate::plan::barriers::Barrier;
4use crate::plan::global::Plan;
5use crate::plan::AllocationSemantics;
6use crate::policy::space::Space;
7use crate::util::alloc::allocator::AllocationOptions;
8use crate::util::alloc::allocators::{AllocatorSelector, Allocators};
9use crate::util::alloc::Allocator;
10use crate::util::{Address, ObjectReference};
11use crate::util::{VMMutatorThread, VMWorkerThread};
12use crate::vm::VMBinding;
13use crate::MMTK;
14
15use enum_map::EnumMap;
16
17use super::barriers::NoBarrier;
18
19pub(crate) type SpaceMapping<VM> = Vec<(AllocatorSelector, &'static dyn Space<VM>)>;
20
/// A placeholder for [`MutatorConfig::prepare_func`] used by plans whose
/// mutators must never be prepared. Panics unconditionally if called.
pub(crate) fn unreachable_prepare_func<VM: VMBinding>(
    _mutator: &mut Mutator<VM>,
    _tls: VMWorkerThread,
) {
    unreachable!("`MutatorConfig::prepare_func` must not be called for the current plan.")
}
30
31#[allow(unused_variables)]
33pub(crate) fn common_prepare_func<VM: VMBinding>(mutator: &mut Mutator<VM>, _tls: VMWorkerThread) {
34 #[cfg(feature = "marksweep_as_nonmoving")]
36 unsafe {
37 mutator.allocator_impl_mut_for_semantic::<crate::util::alloc::FreeListAllocator<VM>>(
38 AllocationSemantics::NonMoving,
39 )
40 }
41 .prepare();
42}
43
/// A placeholder for [`MutatorConfig::release_func`] used by plans whose
/// mutators must never be released. Panics unconditionally if called.
pub(crate) fn unreachable_release_func<VM: VMBinding>(
    _mutator: &mut Mutator<VM>,
    _tls: VMWorkerThread,
) {
    unreachable!("`MutatorConfig::release_func` must not be called for the current plan.")
}
52
53#[allow(unused_variables)]
55pub(crate) fn common_release_func<VM: VMBinding>(mutator: &mut Mutator<VM>, _tls: VMWorkerThread) {
56 cfg_if::cfg_if! {
57 if #[cfg(feature = "marksweep_as_nonmoving")] {
58 unsafe { mutator.allocator_impl_mut_for_semantic::<crate::util::alloc::FreeListAllocator<VM>>(
60 AllocationSemantics::NonMoving,
61 )}.release();
62 } else if #[cfg(feature = "immortal_as_nonmoving")] {
63 } else {
65 unsafe { mutator.allocator_impl_mut_for_semantic::<crate::util::alloc::ImmixAllocator<VM>>(
67 AllocationSemantics::NonMoving,
68 )}.reset();
69 }
70 }
71}
72
/// A [`MutatorConfig::release_func`] that intentionally does nothing, for
/// plans whose mutators need no per-GC release work.
#[allow(dead_code)]
pub(crate) fn no_op_release_func<VM: VMBinding>(_mutator: &mut Mutator<VM>, _tls: VMWorkerThread) {}
76
/// Per-plan configuration of a mutator: how allocation semantics map to
/// allocators and spaces, and which prepare/release hooks run at GC time.
#[repr(C)]
pub struct MutatorConfig<VM: VMBinding> {
    /// Maps each allocation semantics to the allocator the mutator uses for it.
    pub allocator_mapping: &'static EnumMap<AllocationSemantics, AllocatorSelector>,
    /// Maps allocator selectors to the spaces those allocators allocate into.
    /// Boxed so this `#[repr(C)]` struct holds a single thin pointer here
    /// (presumably for a stable FFI-visible layout -- TODO confirm).
    #[allow(clippy::box_collection)]
    pub space_mapping: Box<SpaceMapping<VM>>,
    /// Invoked by `Mutator::prepare` at the start of a GC.
    pub prepare_func: &'static (dyn Fn(&mut Mutator<VM>, VMWorkerThread) + Send + Sync),
    /// Invoked by `Mutator::release` at the end of a GC.
    pub release_func: &'static (dyn Fn(&mut Mutator<VM>, VMWorkerThread) + Send + Sync),
}
92
93impl<VM: VMBinding> std::fmt::Debug for MutatorConfig<VM> {
94 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
95 f.write_str("MutatorConfig:\n")?;
96 f.write_str("Semantics mapping:\n")?;
97 for (semantic, selector) in self.allocator_mapping.iter() {
98 let space_name: &str = match self
99 .space_mapping
100 .iter()
101 .find(|(selector_to_find, _)| selector_to_find == selector)
102 {
103 Some((_, space)) => space.name(),
104 None => "!!!missing space here!!!",
105 };
106 f.write_fmt(format_args!(
107 "- {:?} = {:?} ({:?})\n",
108 semantic, selector, space_name
109 ))?;
110 }
111 f.write_str("Space mapping:\n")?;
112 for (selector, space) in self.space_mapping.iter() {
113 f.write_fmt(format_args!("- {:?} = {:?}\n", selector, space.name()))?;
114 }
115 Ok(())
116 }
117}
118
/// Builder for [`Mutator`]: collects the pieces a mutator needs and
/// constructs it (and its allocators) in `build`.
pub struct MutatorBuilder<VM: VMBinding> {
    // The write barrier to install; defaults to `NoBarrier` (see `new`).
    barrier: Box<dyn Barrier<VM>>,
    // The mutator thread this mutator will be bound to.
    mutator_tls: VMMutatorThread,
    mmtk: &'static MMTK<VM>,
    // Plan-specific mutator configuration.
    config: MutatorConfig<VM>,
}
127
128impl<VM: VMBinding> MutatorBuilder<VM> {
129 pub fn new(
130 mutator_tls: VMMutatorThread,
131 mmtk: &'static MMTK<VM>,
132 config: MutatorConfig<VM>,
133 ) -> Self {
134 MutatorBuilder {
135 barrier: Box::new(NoBarrier),
136 mutator_tls,
137 mmtk,
138 config,
139 }
140 }
141
142 pub fn barrier(mut self, barrier: Box<dyn Barrier<VM>>) -> Self {
143 self.barrier = barrier;
144 self
145 }
146
147 pub fn build(self) -> Mutator<VM> {
148 Mutator {
149 allocators: Allocators::<VM>::new(
150 self.mutator_tls,
151 self.mmtk,
152 &self.config.space_mapping,
153 ),
154 barrier: self.barrier,
155 mutator_tls: self.mutator_tls,
156 plan: self.mmtk.get_plan(),
157 config: self.config,
158 }
159 }
160}
161
/// A per-thread GC context: the allocators a mutator thread allocates with,
/// its write barrier, and the plan-specific configuration.
///
/// NOTE(review): this struct is `#[repr(C)]` and `get_allocator_base_offset`
/// computes field offsets into it with `offset_of!`, so the layout is relied
/// upon externally -- presumably by binding-generated fast paths; confirm
/// before reordering fields.
#[repr(C)]
pub struct Mutator<VM: VMBinding> {
    // All allocator instances owned by this mutator.
    pub(crate) allocators: Allocators<VM>,
    /// The write barrier this mutator applies.
    pub barrier: Box<dyn Barrier<VM>>,
    /// The mutator thread this context belongs to.
    pub mutator_tls: VMMutatorThread,
    // The plan this mutator allocates for.
    pub(crate) plan: &'static dyn Plan<VM = VM>,
    // Plan-specific mapping and GC hooks.
    pub(crate) config: MutatorConfig<VM>,
}
179
180impl<VM: VMBinding> MutatorContext<VM> for Mutator<VM> {
181 fn prepare(&mut self, tls: VMWorkerThread) {
182 (*self.config.prepare_func)(self, tls)
183 }
184 fn release(&mut self, tls: VMWorkerThread) {
185 (*self.config.release_func)(self, tls)
186 }
187
188 fn alloc(
190 &mut self,
191 size: usize,
192 align: usize,
193 offset: usize,
194 allocator: AllocationSemantics,
195 ) -> Address {
196 let allocator = unsafe {
197 self.allocators
198 .get_allocator_mut(self.config.allocator_mapping[allocator])
199 };
200 debug_assert!(allocator.get_context().get_alloc_options().is_default());
202 allocator.alloc(size, align, offset)
203 }
204
205 fn alloc_with_options(
206 &mut self,
207 size: usize,
208 align: usize,
209 offset: usize,
210 allocator: AllocationSemantics,
211 options: AllocationOptions,
212 ) -> Address {
213 let allocator = unsafe {
214 self.allocators
215 .get_allocator_mut(self.config.allocator_mapping[allocator])
216 };
217 debug_assert!(allocator.get_context().get_alloc_options().is_default());
219 allocator.alloc_with_options(size, align, offset, options)
220 }
221
222 fn alloc_slow(
223 &mut self,
224 size: usize,
225 align: usize,
226 offset: usize,
227 allocator: AllocationSemantics,
228 ) -> Address {
229 let allocator = unsafe {
230 self.allocators
231 .get_allocator_mut(self.config.allocator_mapping[allocator])
232 };
233 debug_assert!(allocator.get_context().get_alloc_options().is_default());
235 allocator.alloc_slow(size, align, offset)
236 }
237
238 fn alloc_slow_with_options(
239 &mut self,
240 size: usize,
241 align: usize,
242 offset: usize,
243 allocator: AllocationSemantics,
244 options: AllocationOptions,
245 ) -> Address {
246 let allocator = unsafe {
247 self.allocators
248 .get_allocator_mut(self.config.allocator_mapping[allocator])
249 };
250 debug_assert!(allocator.get_context().get_alloc_options().is_default());
252 allocator.alloc_slow_with_options(size, align, offset, options)
253 }
254
255 fn post_alloc(&mut self, refer: ObjectReference, bytes: usize, allocator: AllocationSemantics) {
257 unsafe {
258 self.allocators
259 .get_allocator_mut(self.config.allocator_mapping[allocator])
260 }
261 .get_space()
262 .initialize_object_metadata(refer, bytes)
263 }
264
265 fn get_tls(&self) -> VMMutatorThread {
266 self.mutator_tls
267 }
268
269 fn barrier(&mut self) -> &mut dyn Barrier<VM> {
270 &mut *self.barrier
271 }
272}
273
impl<VM: VMBinding> Mutator<VM> {
    /// Collect the distinct allocator selectors this mutator actually uses:
    /// the mapping's selectors, sorted, deduplicated, and with
    /// `AllocatorSelector::None` filtered out.
    fn get_all_allocator_selectors(&self) -> Vec<AllocatorSelector> {
        use itertools::Itertools;
        self.config
            .allocator_mapping
            .iter()
            .map(|(_, selector)| *selector)
            .sorted()
            .dedup()
            .filter(|selector| *selector != AllocatorSelector::None)
            .collect()
    }

    /// Notify every allocator this mutator uses that the mutator is being
    /// destroyed, so each can run its `on_mutator_destroy` cleanup.
    pub fn on_destroy(&mut self) {
        for selector in self.get_all_allocator_selectors() {
            // SAFETY assumption: selectors returned by
            // `get_all_allocator_selectors` are valid for this mutator.
            unsafe { self.allocators.get_allocator_mut(selector) }.on_mutator_destroy();
        }
    }

    /// Get the allocator for `selector` as a `&dyn Allocator`.
    ///
    /// # Safety
    /// The caller must ensure `selector` refers to an allocator that exists
    /// for this mutator (e.g. one taken from its allocator mapping) -- the
    /// lookup is presumably unchecked; TODO confirm against `Allocators`.
    pub unsafe fn allocator(&self, selector: AllocatorSelector) -> &dyn Allocator<VM> {
        self.allocators.get_allocator(selector)
    }

    /// Mutable variant of [`Mutator::allocator`].
    ///
    /// # Safety
    /// Same contract as [`Mutator::allocator`].
    pub unsafe fn allocator_mut(&mut self, selector: AllocatorSelector) -> &mut dyn Allocator<VM> {
        self.allocators.get_allocator_mut(selector)
    }

    /// Get the allocator for `selector` as its concrete type `T`.
    ///
    /// # Safety
    /// In addition to the contract of [`Mutator::allocator`], the caller must
    /// ensure `T` is the actual allocator type behind `selector`.
    pub unsafe fn allocator_impl<T: Allocator<VM>>(&self, selector: AllocatorSelector) -> &T {
        self.allocators.get_typed_allocator(selector)
    }

    /// Mutable variant of [`Mutator::allocator_impl`].
    ///
    /// # Safety
    /// Same contract as [`Mutator::allocator_impl`].
    pub unsafe fn allocator_impl_mut<T: Allocator<VM>>(
        &mut self,
        selector: AllocatorSelector,
    ) -> &mut T {
        self.allocators.get_typed_allocator_mut(selector)
    }

    /// Get the concrete allocator for an allocation semantics, via this
    /// mutator's allocator mapping.
    ///
    /// # Safety
    /// The caller must ensure `T` is the actual allocator type the plan maps
    /// `semantic` to.
    pub unsafe fn allocator_impl_for_semantic<T: Allocator<VM>>(
        &self,
        semantic: AllocationSemantics,
    ) -> &T {
        self.allocator_impl::<T>(self.config.allocator_mapping[semantic])
    }

    /// Mutable variant of [`Mutator::allocator_impl_for_semantic`].
    ///
    /// # Safety
    /// Same contract as [`Mutator::allocator_impl_for_semantic`].
    pub unsafe fn allocator_impl_mut_for_semantic<T: Allocator<VM>>(
        &mut self,
        semantic: AllocationSemantics,
    ) -> &mut T {
        self.allocator_impl_mut::<T>(self.config.allocator_mapping[semantic])
    }

    /// Compute the byte offset of the selected allocator from the start of a
    /// `Mutator`: the offset of the `allocators` field plus the offset of the
    /// allocator instance inside `Allocators` (array base + index * element
    /// size). The `#[repr(C)]` layout makes these offsets well-defined;
    /// presumably used by bindings to emit fast-path allocation sequences --
    /// confirm against callers.
    ///
    /// Panics if `selector` is `AllocatorSelector::None`.
    pub fn get_allocator_base_offset(selector: AllocatorSelector) -> usize {
        use crate::util::alloc::*;
        use memoffset::offset_of;
        use std::mem::size_of;
        offset_of!(Mutator<VM>, allocators)
            + match selector {
                AllocatorSelector::BumpPointer(index) => {
                    offset_of!(Allocators<VM>, bump_pointer)
                        + size_of::<BumpAllocator<VM>>() * index as usize
                }
                AllocatorSelector::FreeList(index) => {
                    offset_of!(Allocators<VM>, free_list)
                        + size_of::<FreeListAllocator<VM>>() * index as usize
                }
                AllocatorSelector::Immix(index) => {
                    offset_of!(Allocators<VM>, immix)
                        + size_of::<ImmixAllocator<VM>>() * index as usize
                }
                AllocatorSelector::LargeObject(index) => {
                    offset_of!(Allocators<VM>, large_object)
                        + size_of::<LargeObjectAllocator<VM>>() * index as usize
                }
                AllocatorSelector::Malloc(index) => {
                    offset_of!(Allocators<VM>, malloc)
                        + size_of::<MallocAllocator<VM>>() * index as usize
                }
                AllocatorSelector::MarkCompact(index) => {
                    offset_of!(Allocators<VM>, markcompact)
                        + size_of::<MarkCompactAllocator<VM>>() * index as usize
                }
                AllocatorSelector::None => panic!("Expect a valid AllocatorSelector, found None"),
            }
    }
}
391
/// The interface a mutator context exposes to MMTk and the binding:
/// allocation entry points, GC-time hooks, and barrier access.
pub trait MutatorContext<VM: VMBinding>: Send + 'static {
    /// Called (with a GC worker thread) at the start of a GC to prepare this
    /// mutator.
    fn prepare(&mut self, tls: VMWorkerThread);
    /// Called (with a GC worker thread) at the end of a GC to release this
    /// mutator's per-GC resources.
    fn release(&mut self, tls: VMWorkerThread);
    /// Allocate `size` bytes with the given alignment and offset, using the
    /// allocator mapped to the given allocation semantics.
    fn alloc(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
    ) -> Address;
    /// Like [`MutatorContext::alloc`], but with explicit per-call
    /// allocation options.
    fn alloc_with_options(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
        options: AllocationOptions,
    ) -> Address;
    /// Take the mapped allocator's slow path directly (bypassing the fast
    /// path).
    fn alloc_slow(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
    ) -> Address;
    /// Like [`MutatorContext::alloc_slow`], but with explicit per-call
    /// allocation options.
    fn alloc_slow_with_options(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
        options: AllocationOptions,
    ) -> Address;
    /// Initialize object metadata for a freshly allocated object of `bytes`
    /// bytes under the given allocation semantics.
    fn post_alloc(&mut self, refer: ObjectReference, bytes: usize, allocator: AllocationSemantics);
    /// Flush the barrier's buffered state (e.g. remembered sets).
    fn flush_remembered_sets(&mut self) {
        self.barrier().flush();
    }
    /// Flush this mutator's per-thread buffered state; by default this just
    /// flushes the remembered sets.
    fn flush(&mut self) {
        self.flush_remembered_sets();
    }
    /// The mutator thread this context is bound to.
    fn get_tls(&self) -> VMMutatorThread;
    /// Mutable access to this mutator's write barrier.
    fn barrier(&mut self) -> &mut dyn Barrier<VM>;
}
478
/// Counts of allocators a plan has reserved for its own spaces, per allocator
/// kind. Used by `create_allocator_mapping` / `create_space_mapping` to hand
/// out allocator indices for the common/base spaces on top of the plan's own.
#[allow(dead_code)]
#[derive(Default)]
pub(crate) struct ReservedAllocators {
    pub n_bump_pointer: u8,
    pub n_large_object: u8,
    pub n_malloc: u8,
    pub n_immix: u8,
    pub n_mark_compact: u8,
    pub n_free_list: u8,
}
495
496impl ReservedAllocators {
497 pub const DEFAULT: Self = ReservedAllocators {
498 n_bump_pointer: 0,
499 n_large_object: 0,
500 n_malloc: 0,
501 n_immix: 0,
502 n_mark_compact: 0,
503 n_free_list: 0,
504 };
505 fn validate(&self) {
507 use crate::util::alloc::allocators::*;
508 assert!(
509 self.n_bump_pointer as usize <= MAX_BUMP_ALLOCATORS,
510 "Allocator mapping declared more bump pointer allocators than the max allowed."
511 );
512 assert!(
513 self.n_large_object as usize <= MAX_LARGE_OBJECT_ALLOCATORS,
514 "Allocator mapping declared more large object allocators than the max allowed."
515 );
516 assert!(
517 self.n_malloc as usize <= MAX_MALLOC_ALLOCATORS,
518 "Allocator mapping declared more malloc allocators than the max allowed."
519 );
520 assert!(
521 self.n_immix as usize <= MAX_IMMIX_ALLOCATORS,
522 "Allocator mapping declared more immix allocators than the max allowed."
523 );
524 assert!(
525 self.n_mark_compact as usize <= MAX_MARK_COMPACT_ALLOCATORS,
526 "Allocator mapping declared more mark compact allocators than the max allowed."
527 );
528 assert!(
529 self.n_free_list as usize <= MAX_FREE_LIST_ALLOCATORS,
530 "Allocator mapping declared more free list allocators than the max allowed."
531 );
532 }
533
534 fn add_bump_pointer_allocator(&mut self) -> AllocatorSelector {
537 let selector = AllocatorSelector::BumpPointer(self.n_bump_pointer);
538 self.n_bump_pointer += 1;
539 selector
540 }
541 fn add_large_object_allocator(&mut self) -> AllocatorSelector {
542 let selector = AllocatorSelector::LargeObject(self.n_large_object);
543 self.n_large_object += 1;
544 selector
545 }
546 #[allow(dead_code)]
547 fn add_malloc_allocator(&mut self) -> AllocatorSelector {
548 let selector = AllocatorSelector::Malloc(self.n_malloc);
549 self.n_malloc += 1;
550 selector
551 }
552 #[allow(dead_code)]
553 fn add_immix_allocator(&mut self) -> AllocatorSelector {
554 let selector = AllocatorSelector::Immix(self.n_immix);
555 self.n_immix += 1;
556 selector
557 }
558 #[allow(dead_code)]
559 fn add_mark_compact_allocator(&mut self) -> AllocatorSelector {
560 let selector = AllocatorSelector::MarkCompact(self.n_mark_compact);
561 self.n_mark_compact += 1;
562 selector
563 }
564 #[allow(dead_code)]
565 fn add_free_list_allocator(&mut self) -> AllocatorSelector {
566 let selector = AllocatorSelector::FreeList(self.n_free_list);
567 self.n_free_list += 1;
568 selector
569 }
570}
571
572pub(crate) fn create_allocator_mapping(
578 mut reserved: ReservedAllocators,
579 include_common_plan: bool,
580) -> EnumMap<AllocationSemantics, AllocatorSelector> {
581 let mut map = EnumMap::<AllocationSemantics, AllocatorSelector>::default();
586
587 #[cfg(feature = "code_space")]
590 {
591 map[AllocationSemantics::Code] = reserved.add_bump_pointer_allocator();
592 map[AllocationSemantics::LargeCode] = reserved.add_bump_pointer_allocator();
593 }
594
595 #[cfg(feature = "ro_space")]
596 {
597 map[AllocationSemantics::ReadOnly] = reserved.add_bump_pointer_allocator();
598 }
599
600 if include_common_plan {
603 map[AllocationSemantics::Immortal] = reserved.add_bump_pointer_allocator();
604 map[AllocationSemantics::Los] = reserved.add_large_object_allocator();
605 map[AllocationSemantics::NonMoving] = if cfg!(feature = "marksweep_as_nonmoving") {
606 reserved.add_free_list_allocator()
607 } else if cfg!(feature = "immortal_as_nonmoving") {
608 reserved.add_bump_pointer_allocator()
609 } else {
610 reserved.add_immix_allocator()
611 };
612 }
613
614 reserved.validate();
615 map
616}
617
618pub(crate) fn create_space_mapping<VM: VMBinding>(
625 mut reserved: ReservedAllocators,
626 include_common_plan: bool,
627 plan: &'static dyn Plan<VM = VM>,
628) -> Vec<(AllocatorSelector, &'static dyn Space<VM>)> {
629 let mut vec: Vec<(AllocatorSelector, &'static dyn Space<VM>)> = vec![];
634
635 #[cfg(feature = "code_space")]
638 {
639 vec.push((
640 reserved.add_bump_pointer_allocator(),
641 &plan.base().code_space,
642 ));
643 vec.push((
644 reserved.add_bump_pointer_allocator(),
645 &plan.base().code_lo_space,
646 ));
647 }
648
649 #[cfg(feature = "ro_space")]
650 vec.push((reserved.add_bump_pointer_allocator(), &plan.base().ro_space));
651
652 if include_common_plan {
655 vec.push((
656 reserved.add_bump_pointer_allocator(),
657 plan.common().get_immortal(),
658 ));
659 vec.push((
660 reserved.add_large_object_allocator(),
661 plan.common().get_los(),
662 ));
663 vec.push((
664 if cfg!(feature = "marksweep_as_nonmoving") {
665 reserved.add_free_list_allocator()
666 } else if cfg!(feature = "immortal_as_nonmoving") {
667 reserved.add_bump_pointer_allocator()
668 } else {
669 reserved.add_immix_allocator()
670 },
671 plan.common().get_nonmoving(),
672 ));
673 }
674
675 reserved.validate();
676 vec
677}