1use crate::plan::barriers::Barrier;
4use crate::plan::global::Plan;
5use crate::plan::AllocationSemantics;
6use crate::policy::space::Space;
7use crate::util::alloc::allocator::AllocationOptions;
8use crate::util::alloc::allocators::{AllocatorSelector, Allocators};
9use crate::util::alloc::Allocator;
10use crate::util::{Address, ObjectReference};
11use crate::util::{VMMutatorThread, VMWorkerThread};
12use crate::vm::VMBinding;
13use crate::MMTK;
14
15use enum_map::EnumMap;
16
17use super::barriers::NoBarrier;
18
/// Pairs each allocator selector with the space that allocator allocates into.
pub(crate) type SpaceMapping<VM> = Vec<(AllocatorSelector, &'static dyn Space<VM>)>;
20
21pub(crate) fn unreachable_prepare_func<VM: VMBinding>(
25 _mutator: &mut Mutator<VM>,
26 _tls: VMWorkerThread,
27) {
28 unreachable!("`MutatorConfig::prepare_func` must not be called for the current plan.")
29}
30
/// Per-mutator prepare work shared by plans that use the common plan spaces.
///
/// Currently only the non-moving allocator may need preparing, and only when
/// it is backed by a free-list (mark-sweep) allocator.
#[allow(unused_variables)]
pub(crate) fn common_prepare_func<VM: VMBinding>(mutator: &mut Mutator<VM>, _tls: VMWorkerThread) {
    #[cfg(feature = "marksweep_as_nonmoving")]
    // SAFETY: presumably `AllocationSemantics::NonMoving` maps to a
    // `FreeListAllocator` under this feature (see `create_allocator_mapping`)
    // — TODO confirm.
    unsafe {
        mutator.allocator_impl_mut_for_semantic::<crate::util::alloc::FreeListAllocator<VM>>(
            AllocationSemantics::NonMoving,
        )
    }
    .prepare();
}
43
44pub(crate) fn unreachable_release_func<VM: VMBinding>(
47 _mutator: &mut Mutator<VM>,
48 _tls: VMWorkerThread,
49) {
50 unreachable!("`MutatorConfig::release_func` must not be called for the current plan.")
51}
52
/// Per-mutator release work shared by plans that use the common plan spaces.
///
/// Which non-moving allocator gets released/reset depends on the
/// `*_as_nonmoving` feature selection (mirrors `create_allocator_mapping`).
#[allow(unused_variables)]
pub(crate) fn common_release_func<VM: VMBinding>(mutator: &mut Mutator<VM>, _tls: VMWorkerThread) {
    cfg_if::cfg_if! {
        if #[cfg(feature = "marksweep_as_nonmoving")] {
            // SAFETY: presumably NonMoving maps to a `FreeListAllocator`
            // under this feature — TODO confirm.
            unsafe { mutator.allocator_impl_mut_for_semantic::<crate::util::alloc::FreeListAllocator<VM>>(
                AllocationSemantics::NonMoving,
            )}.release();
        } else if #[cfg(feature = "immortal_as_nonmoving")] {
            // Nothing to release for the bump-pointer allocator backing the
            // immortal non-moving space.
        } else {
            // SAFETY: presumably NonMoving maps to an `ImmixAllocator` by
            // default — TODO confirm.
            unsafe { mutator.allocator_impl_mut_for_semantic::<crate::util::alloc::ImmixAllocator<VM>>(
                AllocationSemantics::NonMoving,
            )}.reset();
        }
    }
}
72
/// A release function that intentionally does nothing, for plans whose
/// mutators need no per-mutator release work but may still be called.
#[allow(dead_code)]
pub(crate) fn no_op_release_func<VM: VMBinding>(_mutator: &mut Mutator<VM>, _tls: VMWorkerThread) {}
76
/// Plan-specific configuration shared by the mutators of a plan: how
/// allocation semantics map to allocators/spaces, and the plan's per-mutator
/// GC hooks.
#[repr(C)]
pub struct MutatorConfig<VM: VMBinding> {
    /// Maps each allocation semantics to the allocator that serves it.
    pub allocator_mapping: &'static EnumMap<AllocationSemantics, AllocatorSelector>,
    /// Maps each allocator selector to the space it allocates into.
    // Boxed Vec — presumably to keep this `#[repr(C)]` struct's inline size
    // small/stable; TODO confirm (hence the clippy allow).
    #[allow(clippy::box_collection)]
    pub space_mapping: Box<SpaceMapping<VM>>,
    /// Plan-specific prepare work, invoked from `Mutator::prepare`.
    pub prepare_func: &'static (dyn Fn(&mut Mutator<VM>, VMWorkerThread) + Send + Sync),
    /// Plan-specific release work, invoked from `Mutator::release`.
    pub release_func: &'static (dyn Fn(&mut Mutator<VM>, VMWorkerThread) + Send + Sync),
}
92
93impl<VM: VMBinding> std::fmt::Debug for MutatorConfig<VM> {
94 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
95 f.write_str("MutatorConfig:\n")?;
96 f.write_str("Semantics mapping:\n")?;
97 for (semantic, selector) in self.allocator_mapping.iter() {
98 let space_name: &str = match self
99 .space_mapping
100 .iter()
101 .find(|(selector_to_find, _)| selector_to_find == selector)
102 {
103 Some((_, space)) => space.name(),
104 None => "!!!missing space here!!!",
105 };
106 f.write_fmt(format_args!(
107 "- {:?} = {:?} ({:?})\n",
108 semantic, selector, space_name
109 ))?;
110 }
111 f.write_str("Space mapping:\n")?;
112 for (selector, space) in self.space_mapping.iter() {
113 f.write_fmt(format_args!("- {:?} = {:?}\n", selector, space.name()))?;
114 }
115 Ok(())
116 }
117}
118
/// A builder for `Mutator`: collects the pieces a plan supplies and lets
/// optional components (currently the barrier) be overridden before `build`.
pub struct MutatorBuilder<VM: VMBinding> {
    /// The write barrier to install; `new` defaults this to `NoBarrier`.
    barrier: Box<dyn Barrier<VM>>,
    /// The mutator thread the resulting mutator context is bound to.
    mutator_tls: VMMutatorThread,
    /// The MMTk instance the mutator belongs to.
    mmtk: &'static MMTK<VM>,
    /// The plan-specific mutator configuration.
    config: MutatorConfig<VM>,
}
127
128impl<VM: VMBinding> MutatorBuilder<VM> {
129 pub fn new(
130 mutator_tls: VMMutatorThread,
131 mmtk: &'static MMTK<VM>,
132 config: MutatorConfig<VM>,
133 ) -> Self {
134 MutatorBuilder {
135 barrier: Box::new(NoBarrier),
136 mutator_tls,
137 mmtk,
138 config,
139 }
140 }
141
142 pub fn barrier(mut self, barrier: Box<dyn Barrier<VM>>) -> Self {
143 self.barrier = barrier;
144 self
145 }
146
147 pub fn build(self) -> Mutator<VM> {
148 Mutator {
149 allocators: Allocators::<VM>::new(
150 self.mutator_tls,
151 self.mmtk,
152 &self.config.space_mapping,
153 ),
154 barrier: self.barrier,
155 mutator_tls: self.mutator_tls,
156 plan: self.mmtk.get_plan(),
157 config: self.config,
158 }
159 }
160}
161
/// A mutator context: the per-mutator-thread allocation and barrier state.
// NOTE(review): `#[repr(C)]` pins the field layout; `get_allocator_base_offset`
// computes allocator offsets from this struct with `offset_of!`, presumably so
// bindings can generate fast-path code — TODO confirm.
#[repr(C)]
pub struct Mutator<VM: VMBinding> {
    /// The set of allocators this mutator allocates from.
    pub(crate) allocators: Allocators<VM>,
    /// The write barrier for this mutator.
    pub barrier: Box<dyn Barrier<VM>>,
    /// The mutator thread this context is bound to.
    pub mutator_tls: VMMutatorThread,
    /// The plan this mutator belongs to.
    pub(crate) plan: &'static dyn Plan<VM = VM>,
    /// The plan-specific configuration (mappings and GC hooks).
    pub(crate) config: MutatorConfig<VM>,
}
179
impl<VM: VMBinding> MutatorContext<VM> for Mutator<VM> {
    // Delegate prepare/release to the plan-specific hooks in the config.
    fn prepare(&mut self, tls: VMWorkerThread) {
        (*self.config.prepare_func)(self, tls)
    }
    fn release(&mut self, tls: VMWorkerThread) {
        (*self.config.release_func)(self, tls)
    }

    // Allocate `size` bytes with the given alignment/offset using the
    // allocator mapped to the requested semantics.
    fn alloc(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
    ) -> Address {
        // SAFETY: the selector comes from this mutator's own
        // `allocator_mapping`, so presumably it identifies a valid allocator
        // in `self.allocators` — TODO confirm against plan construction.
        let allocator = unsafe {
            self.allocators
                .get_allocator_mut(self.config.allocator_mapping[allocator])
        };
        // The stored alloc options should be default at the start of a
        // request; presumably each request resets them — TODO confirm.
        debug_assert!(allocator.get_context().get_alloc_options().is_default());
        allocator.alloc(size, align, offset)
    }

    // Same as `alloc`, but with explicit allocation options.
    fn alloc_with_options(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
        options: AllocationOptions,
    ) -> Address {
        // SAFETY: selector obtained from this mutator's own mapping (see `alloc`).
        let allocator = unsafe {
            self.allocators
                .get_allocator_mut(self.config.allocator_mapping[allocator])
        };
        // Even on the with-options path, the stored options must start out default.
        debug_assert!(allocator.get_context().get_alloc_options().is_default());
        allocator.alloc_with_options(size, align, offset, options)
    }

    // Slow-path allocation with default options.
    fn alloc_slow(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
    ) -> Address {
        // SAFETY: selector obtained from this mutator's own mapping (see `alloc`).
        let allocator = unsafe {
            self.allocators
                .get_allocator_mut(self.config.allocator_mapping[allocator])
        };
        debug_assert!(allocator.get_context().get_alloc_options().is_default());
        allocator.alloc_slow(size, align, offset)
    }

    // Slow-path allocation with explicit options.
    fn alloc_slow_with_options(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
        options: AllocationOptions,
    ) -> Address {
        // SAFETY: selector obtained from this mutator's own mapping (see `alloc`).
        let allocator = unsafe {
            self.allocators
                .get_allocator_mut(self.config.allocator_mapping[allocator])
        };
        debug_assert!(allocator.get_context().get_alloc_options().is_default());
        allocator.alloc_slow_with_options(size, align, offset, options)
    }

    // Initialize the object's metadata in the space that allocated it.
    fn post_alloc(
        &mut self,
        refer: ObjectReference,
        _bytes: usize,
        allocator: AllocationSemantics,
    ) {
        // SAFETY: selector obtained from this mutator's own mapping (see `alloc`).
        unsafe {
            self.allocators
                .get_allocator_mut(self.config.allocator_mapping[allocator])
        }
        .get_space()
        .initialize_object_metadata(refer)
    }

    fn get_tls(&self) -> VMMutatorThread {
        self.mutator_tls
    }

    fn barrier(&mut self) -> &mut dyn Barrier<VM> {
        &mut *self.barrier
    }
}
278
impl<VM: VMBinding> Mutator<VM> {
    /// Collect every distinct, non-`None` allocator selector used by this
    /// mutator's allocator mapping.
    fn get_all_allocator_selectors(&self) -> Vec<AllocatorSelector> {
        use itertools::Itertools;
        self.config
            .allocator_mapping
            .iter()
            .map(|(_, selector)| *selector)
            .sorted()
            .dedup()
            .filter(|selector| *selector != AllocatorSelector::None)
            .collect()
    }

    /// Notify each in-use allocator that this mutator is being destroyed.
    pub fn on_destroy(&mut self) {
        for selector in self.get_all_allocator_selectors() {
            // SAFETY: the selector comes from this mutator's own mapping, so
            // presumably it refers to an allocator present in
            // `self.allocators` — TODO confirm.
            unsafe { self.allocators.get_allocator_mut(selector) }.on_mutator_destroy();
        }
    }

    /// Get the allocator for the given selector.
    ///
    /// # Safety
    /// The caller must pass a selector that identifies an allocator present
    /// in this mutator.
    pub unsafe fn allocator(&self, selector: AllocatorSelector) -> &dyn Allocator<VM> {
        self.allocators.get_allocator(selector)
    }

    /// Get a mutable reference to the allocator for the given selector.
    ///
    /// # Safety
    /// Same contract as [`Self::allocator`].
    pub unsafe fn allocator_mut(&mut self, selector: AllocatorSelector) -> &mut dyn Allocator<VM> {
        self.allocators.get_allocator_mut(selector)
    }

    /// Get the allocator for the given selector as its concrete type `T`.
    ///
    /// # Safety
    /// The selector must be valid for this mutator, and the allocator it
    /// identifies must actually be of type `T`.
    pub unsafe fn allocator_impl<T: Allocator<VM>>(&self, selector: AllocatorSelector) -> &T {
        self.allocators.get_typed_allocator(selector)
    }

    /// Mutable version of [`Self::allocator_impl`]; same safety contract.
    pub unsafe fn allocator_impl_mut<T: Allocator<VM>>(
        &mut self,
        selector: AllocatorSelector,
    ) -> &mut T {
        self.allocators.get_typed_allocator_mut(selector)
    }

    /// Get the concrete allocator serving the given allocation semantics.
    ///
    /// # Safety
    /// The allocator mapped to `semantic` must actually be of type `T`.
    pub unsafe fn allocator_impl_for_semantic<T: Allocator<VM>>(
        &self,
        semantic: AllocationSemantics,
    ) -> &T {
        self.allocator_impl::<T>(self.config.allocator_mapping[semantic])
    }

    /// Mutable version of [`Self::allocator_impl_for_semantic`]; same safety
    /// contract.
    pub unsafe fn allocator_impl_mut_for_semantic<T: Allocator<VM>>(
        &mut self,
        semantic: AllocationSemantics,
    ) -> &mut T {
        self.allocator_impl_mut::<T>(self.config.allocator_mapping[semantic])
    }

    /// Compute the byte offset of the selected allocator from the start of a
    /// `Mutator` struct, using the `#[repr(C)]` layout of `Mutator` and the
    /// field layout of `Allocators` — presumably for binding-generated
    /// fast-path code — TODO confirm.
    ///
    /// Panics if `selector` is `AllocatorSelector::None`.
    pub fn get_allocator_base_offset(selector: AllocatorSelector) -> usize {
        use crate::util::alloc::*;
        use memoffset::offset_of;
        use std::mem::size_of;
        // Offset of the allocator array field within `Allocators`, plus the
        // index-scaled offset of the selected element.
        offset_of!(Mutator<VM>, allocators)
            + match selector {
                AllocatorSelector::BumpPointer(index) => {
                    offset_of!(Allocators<VM>, bump_pointer)
                        + size_of::<BumpAllocator<VM>>() * index as usize
                }
                AllocatorSelector::FreeList(index) => {
                    offset_of!(Allocators<VM>, free_list)
                        + size_of::<FreeListAllocator<VM>>() * index as usize
                }
                AllocatorSelector::Immix(index) => {
                    offset_of!(Allocators<VM>, immix)
                        + size_of::<ImmixAllocator<VM>>() * index as usize
                }
                AllocatorSelector::LargeObject(index) => {
                    offset_of!(Allocators<VM>, large_object)
                        + size_of::<LargeObjectAllocator<VM>>() * index as usize
                }
                AllocatorSelector::Malloc(index) => {
                    offset_of!(Allocators<VM>, malloc)
                        + size_of::<MallocAllocator<VM>>() * index as usize
                }
                AllocatorSelector::MarkCompact(index) => {
                    offset_of!(Allocators<VM>, markcompact)
                        + size_of::<MarkCompactAllocator<VM>>() * index as usize
                }
                AllocatorSelector::None => panic!("Expect a valid AllocatorSelector, found None"),
            }
    }
}
396
/// The interface of a mutator context, through which a VM binding performs
/// allocation and per-mutator GC work.
pub trait MutatorContext<VM: VMBinding>: Send + 'static {
    /// Perform the per-mutator prepare work for a GC.
    fn prepare(&mut self, tls: VMWorkerThread);
    /// Perform the per-mutator release work for a GC.
    fn release(&mut self, tls: VMWorkerThread);
    /// Allocate memory with default allocation options.
    fn alloc(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
    ) -> Address;
    /// Allocate memory with the given allocation options.
    fn alloc_with_options(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
        options: AllocationOptions,
    ) -> Address;
    /// Slow-path allocation with default allocation options.
    fn alloc_slow(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
    ) -> Address;
    /// Slow-path allocation with the given allocation options.
    fn alloc_slow_with_options(
        &mut self,
        size: usize,
        align: usize,
        offset: usize,
        allocator: AllocationSemantics,
        options: AllocationOptions,
    ) -> Address;
    /// Perform post-allocation work (e.g. initializing object metadata, as
    /// `Mutator`'s implementation does).
    fn post_alloc(&mut self, refer: ObjectReference, bytes: usize, allocator: AllocationSemantics);
    /// Flush the barrier's remembered sets.
    fn flush_remembered_sets(&mut self) {
        self.barrier().flush();
    }
    /// Flush per-mutator state; by default just the remembered sets.
    fn flush(&mut self) {
        self.flush_remembered_sets();
    }
    /// Get the mutator thread this context is bound to.
    fn get_tls(&self) -> VMMutatorThread;
    /// Get this mutator's barrier.
    fn barrier(&mut self) -> &mut dyn Barrier<VM>;
}
483
/// Running counts of how many allocators of each kind a plan has reserved,
/// used to hand out unique indices for `AllocatorSelector`s.
#[allow(dead_code)]
#[derive(Default)]
pub(crate) struct ReservedAllocators {
    pub n_bump_pointer: u8,
    pub n_large_object: u8,
    pub n_malloc: u8,
    pub n_immix: u8,
    pub n_mark_compact: u8,
    pub n_free_list: u8,
}
500
501impl ReservedAllocators {
502 pub const DEFAULT: Self = ReservedAllocators {
503 n_bump_pointer: 0,
504 n_large_object: 0,
505 n_malloc: 0,
506 n_immix: 0,
507 n_mark_compact: 0,
508 n_free_list: 0,
509 };
510 fn validate(&self) {
512 use crate::util::alloc::allocators::*;
513 assert!(
514 self.n_bump_pointer as usize <= MAX_BUMP_ALLOCATORS,
515 "Allocator mapping declared more bump pointer allocators than the max allowed."
516 );
517 assert!(
518 self.n_large_object as usize <= MAX_LARGE_OBJECT_ALLOCATORS,
519 "Allocator mapping declared more large object allocators than the max allowed."
520 );
521 assert!(
522 self.n_malloc as usize <= MAX_MALLOC_ALLOCATORS,
523 "Allocator mapping declared more malloc allocators than the max allowed."
524 );
525 assert!(
526 self.n_immix as usize <= MAX_IMMIX_ALLOCATORS,
527 "Allocator mapping declared more immix allocators than the max allowed."
528 );
529 assert!(
530 self.n_mark_compact as usize <= MAX_MARK_COMPACT_ALLOCATORS,
531 "Allocator mapping declared more mark compact allocators than the max allowed."
532 );
533 assert!(
534 self.n_free_list as usize <= MAX_FREE_LIST_ALLOCATORS,
535 "Allocator mapping declared more free list allocators than the max allowed."
536 );
537 }
538
539 fn add_bump_pointer_allocator(&mut self) -> AllocatorSelector {
542 let selector = AllocatorSelector::BumpPointer(self.n_bump_pointer);
543 self.n_bump_pointer += 1;
544 selector
545 }
546 fn add_large_object_allocator(&mut self) -> AllocatorSelector {
547 let selector = AllocatorSelector::LargeObject(self.n_large_object);
548 self.n_large_object += 1;
549 selector
550 }
551 #[allow(dead_code)]
552 fn add_malloc_allocator(&mut self) -> AllocatorSelector {
553 let selector = AllocatorSelector::Malloc(self.n_malloc);
554 self.n_malloc += 1;
555 selector
556 }
557 #[allow(dead_code)]
558 fn add_immix_allocator(&mut self) -> AllocatorSelector {
559 let selector = AllocatorSelector::Immix(self.n_immix);
560 self.n_immix += 1;
561 selector
562 }
563 #[allow(dead_code)]
564 fn add_mark_compact_allocator(&mut self) -> AllocatorSelector {
565 let selector = AllocatorSelector::MarkCompact(self.n_mark_compact);
566 self.n_mark_compact += 1;
567 selector
568 }
569 #[allow(dead_code)]
570 fn add_free_list_allocator(&mut self) -> AllocatorSelector {
571 let selector = AllocatorSelector::FreeList(self.n_free_list);
572 self.n_free_list += 1;
573 selector
574 }
575}
576
577pub(crate) fn create_allocator_mapping(
583 mut reserved: ReservedAllocators,
584 include_common_plan: bool,
585) -> EnumMap<AllocationSemantics, AllocatorSelector> {
586 let mut map = EnumMap::<AllocationSemantics, AllocatorSelector>::default();
591
592 #[cfg(feature = "code_space")]
595 {
596 map[AllocationSemantics::Code] = reserved.add_bump_pointer_allocator();
597 map[AllocationSemantics::LargeCode] = reserved.add_bump_pointer_allocator();
598 }
599
600 #[cfg(feature = "ro_space")]
601 {
602 map[AllocationSemantics::ReadOnly] = reserved.add_bump_pointer_allocator();
603 }
604
605 if include_common_plan {
608 map[AllocationSemantics::Immortal] = reserved.add_bump_pointer_allocator();
609 map[AllocationSemantics::Los] = reserved.add_large_object_allocator();
610 map[AllocationSemantics::NonMoving] = if cfg!(feature = "marksweep_as_nonmoving") {
611 reserved.add_free_list_allocator()
612 } else if cfg!(feature = "immortal_as_nonmoving") {
613 reserved.add_bump_pointer_allocator()
614 } else {
615 reserved.add_immix_allocator()
616 };
617 }
618
619 reserved.validate();
620 map
621}
622
/// Build the selector→space mapping for a plan, reserving allocator slots in
/// the same order as `create_allocator_mapping` so the selectors line up.
pub(crate) fn create_space_mapping<VM: VMBinding>(
    mut reserved: ReservedAllocators,
    include_common_plan: bool,
    plan: &'static dyn Plan<VM = VM>,
) -> Vec<(AllocatorSelector, &'static dyn Space<VM>)> {
    let mut vec: Vec<(AllocatorSelector, &'static dyn Space<VM>)> = vec![];

    // Code spaces are served by bump-pointer allocators.
    #[cfg(feature = "code_space")]
    {
        vec.push((
            reserved.add_bump_pointer_allocator(),
            &plan.base().code_space,
        ));
        vec.push((
            reserved.add_bump_pointer_allocator(),
            &plan.base().code_lo_space,
        ));
    }

    #[cfg(feature = "ro_space")]
    vec.push((reserved.add_bump_pointer_allocator(), &plan.base().ro_space));

    if include_common_plan {
        vec.push((
            reserved.add_bump_pointer_allocator(),
            plan.common().get_immortal(),
        ));
        vec.push((
            reserved.add_large_object_allocator(),
            plan.common().get_los(),
        ));
        // The non-moving space's allocator kind is chosen by feature flags,
        // mirroring the selection in `create_allocator_mapping`.
        vec.push((
            if cfg!(feature = "marksweep_as_nonmoving") {
                reserved.add_free_list_allocator()
            } else if cfg!(feature = "immortal_as_nonmoving") {
                reserved.add_bump_pointer_allocator()
            } else {
                reserved.add_immix_allocator()
            },
            plan.common().get_nonmoving(),
        ));
    }

    // Ensure the total reservations fit the per-kind allocator capacity.
    reserved.validate();
    vec
}