mmtk/util/alloc/large_object_allocator.rs

use std::sync::Arc;

use crate::policy::largeobjectspace::LargeObjectSpace;
use crate::policy::space::Space;
use crate::util::alloc::{allocator, Allocator};
use crate::util::opaque_pointer::*;
use crate::util::Address;
use crate::vm::VMBinding;

use super::allocator::AllocatorContext;
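
/// An allocator that allocates large objects directly from a
/// [`LargeObjectSpace`], at page granularity and with no thread-local
/// buffering.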
#[repr(C)]
pub struct LargeObjectAllocator<VM: VMBinding> {
    /// The [`VMThread`] associated with this allocator instance.
    pub tls: VMThread,
    /// The large object space that this allocator allocates from.
    space: &'static LargeObjectSpace<VM>,
    /// The shared [`AllocatorContext`], which carries the allocation options
    /// used on the allocation paths below.
    context: Arc<AllocatorContext<VM>>,
}

impl<VM: VMBinding> Allocator<VM> for LargeObjectAllocator<VM> {
    fn get_tls(&self) -> VMThread {
        self.tls
    }

    fn get_context(&self) -> &AllocatorContext<VM> {
        &self.context
    }

    fn get_space(&self) -> &'static dyn Space<VM> {
        // Cast from the concrete &LargeObjectSpace<VM> to the Space trait object.
        self.space as &'static dyn Space<VM>
    }

    fn does_thread_local_allocation(&self) -> bool {
        // Large object allocation is never thread-local; every request goes
        // through the space.
        false
    }
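
    // Large objects always take the slow path: allocate directly from the
    // space, then align the result if the allocation succeeded.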
    fn alloc(&mut self, size: usize, align: usize, offset: usize) -> Address {
        let cell: Address = self.alloc_slow(size, align, offset);
        if !cell.is_zero() {
            // The allocation succeeded: align the start of the cell as requested.
            allocator::align_allocation::<VM>(cell, align, offset)
        } else {
            // The allocation failed: return the zero address unchanged.
            cell
        }
    }
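
    // Check for an obviously unsatisfiable request first, then round the
    // alignment-padded size up to whole pages and allocate them from the
    // large object space.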
    fn alloc_slow_once(&mut self, size: usize, align: usize, _offset: usize) -> Address {
        if self.space.handle_obvious_oom_request(
            self.tls,
            size,
            self.get_context().get_alloc_options(),
        ) {
            return Address::ZERO;
        }

        let maxbytes = allocator::get_maximum_aligned_size::<VM>(size, align);
        let pages = crate::util::conversions::bytes_to_pages_up(maxbytes);
        self.space
            .allocate_pages(self.tls, pages, self.get_context().get_alloc_options())
    }
}

impl<VM: VMBinding> LargeObjectAllocator<VM> {
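    /// Create a new large object allocator for the given thread, bound to a
    /// large object space and a shared allocator context.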
    pub(crate) fn new(
        tls: VMThread,
        space: &'static LargeObjectSpace<VM>,
        context: Arc<AllocatorContext<VM>>,
    ) -> Self {
        LargeObjectAllocator {
            tls,
            space,
            context,
        }
    }
}