use super::defrag::Histogram;
use super::line::Line;
use super::ImmixSpace;
use crate::util::constants::*;
use crate::util::heap::blockpageresource::BlockPool;
use crate::util::heap::chunk_map::Chunk;
use crate::util::linear_scan::{Region, RegionIterator};
use crate::util::metadata::side_metadata::{MetadataByteArrayRef, SideMetadataSpec};
#[cfg(feature = "vo_bit")]
use crate::util::metadata::vo_bit;
#[cfg(feature = "object_pinning")]
use crate::util::metadata::MetadataSpec;
use crate::util::object_enum::BlockMayHaveObjects;
use crate::util::Address;
use crate::vm::*;
use std::sync::atomic::Ordering;
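/// The allocation state of a block, encoded as one byte of side metadata per block.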
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum BlockState {
    /// The block is not allocated.
    Unallocated,
    /// The block is allocated but not yet marked in the current GC.
    Unmarked,
    /// The block is allocated and marked.
    Marked,
    /// The block is live but only partially used; `unavailable_lines` of its lines cannot be reused.
    Reusable { unavailable_lines: u8 },
}
impl BlockState {
    /// Mark byte value for an unallocated block.
    const MARK_UNALLOCATED: u8 = 0;
    /// Mark byte value for an allocated but unmarked block.
    const MARK_UNMARKED: u8 = u8::MAX;
    /// Mark byte value for a marked block. Any other non-zero value encodes a reusable block.
    const MARK_MARKED: u8 = u8::MAX - 1;
}
impl From<u8> for BlockState {
fn from(state: u8) -> Self {
match state {
Self::MARK_UNALLOCATED => BlockState::Unallocated,
Self::MARK_UNMARKED => BlockState::Unmarked,
Self::MARK_MARKED => BlockState::Marked,
unavailable_lines => BlockState::Reusable { unavailable_lines },
}
}
}
impl From<BlockState> for u8 {
fn from(state: BlockState) -> Self {
match state {
BlockState::Unallocated => BlockState::MARK_UNALLOCATED,
BlockState::Unmarked => BlockState::MARK_UNMARKED,
BlockState::Marked => BlockState::MARK_MARKED,
BlockState::Reusable { unavailable_lines } => unavailable_lines,
}
}
}
impl BlockState {
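    /// Return `true` if the block is in the `Reusable` state.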
pub const fn is_reusable(&self) -> bool {
matches!(self, BlockState::Reusable { .. })
}
}
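/// A block of the immix space: the coarse-grained unit handed to allocators and the unit of
/// defragmentation, subdivided into [`Line`]s for fine-grained marking and reuse.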
#[repr(transparent)]
#[derive(Debug, Clone, Copy, PartialOrd, PartialEq)]
pub struct Block(Address);
impl Region for Block {
    // Blocks are 32 KiB by default, or 8 KiB with the `immix_smaller_block` feature.
    #[cfg(not(feature = "immix_smaller_block"))]
    const LOG_BYTES: usize = 15;
    #[cfg(feature = "immix_smaller_block")]
    const LOG_BYTES: usize = 13;
fn from_aligned_address(address: Address) -> Self {
debug_assert!(address.is_aligned_to(Self::BYTES));
Self(address)
}
fn start(&self) -> Address {
self.0
}
}
impl BlockMayHaveObjects for Block {
fn may_have_objects(&self) -> bool {
self.get_state() != BlockState::Unallocated
}
}
impl Block {
    /// Log of the number of pages in a block.
    pub const LOG_PAGES: usize = Self::LOG_BYTES - LOG_BYTES_IN_PAGE as usize;
    /// Number of pages in a block.
    pub const PAGES: usize = 1 << Self::LOG_PAGES;
    /// Log of the number of lines in a block.
    pub const LOG_LINES: usize = Self::LOG_BYTES - Line::LOG_BYTES;
    /// Number of lines in a block.
    pub const LINES: usize = 1 << Self::LOG_LINES;
    /// Side metadata spec for the per-block defrag state byte.
    pub const DEFRAG_STATE_TABLE: SideMetadataSpec =
        crate::util::metadata::side_metadata::spec_defs::IX_BLOCK_DEFRAG;
    /// Side metadata spec for the per-block mark/state byte.
    pub const MARK_TABLE: SideMetadataSpec =
        crate::util::metadata::side_metadata::spec_defs::IX_BLOCK_MARK;
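    /// Get the chunk containing this block.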
pub fn chunk(&self) -> Chunk {
Chunk::from_unaligned_address(self.0)
}
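    /// Get a read-only view of the block's line mark table (one mark byte per line).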
#[allow(clippy::assertions_on_constants)]
pub fn line_mark_table(&self) -> MetadataByteArrayRef<{ Block::LINES }> {
debug_assert!(!super::BLOCK_ONLY);
MetadataByteArrayRef::<{ Block::LINES }>::new(&Line::MARK_TABLE, self.start(), Self::BYTES)
}
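    /// Load the block's allocation state from the per-block mark byte.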
pub fn get_state(&self) -> BlockState {
let byte = Self::MARK_TABLE.load_atomic::<u8>(self.start(), Ordering::SeqCst);
byte.into()
}
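    /// Store the block's allocation state to the per-block mark byte.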
pub fn set_state(&self, state: BlockState) {
let state = u8::from(state);
Self::MARK_TABLE.store_atomic::<u8>(self.start(), state, Ordering::SeqCst);
}
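    /// Sentinel value in the defrag state table that marks a block as a defrag source.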
const DEFRAG_SOURCE_STATE: u8 = u8::MAX;
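    /// Return `true` if the block is currently selected as a defrag source.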
pub fn is_defrag_source(&self) -> bool {
let byte = Self::DEFRAG_STATE_TABLE.load_atomic::<u8>(self.start(), Ordering::SeqCst);
byte == Self::DEFRAG_SOURCE_STATE
}
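    /// Mark or unmark the block as a defrag source.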
pub fn set_as_defrag_source(&self, defrag: bool) {
let byte = if defrag { Self::DEFRAG_SOURCE_STATE } else { 0 };
Self::DEFRAG_STATE_TABLE.store_atomic::<u8>(self.start(), byte, Ordering::SeqCst);
}
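    /// Record the number of holes (runs of unmarked lines) in the defrag state byte.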
pub fn set_holes(&self, holes: usize) {
Self::DEFRAG_STATE_TABLE.store_atomic::<u8>(self.start(), holes as u8, Ordering::SeqCst);
}
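    /// Get the number of holes recorded for the block. Not valid while the block is a defrag source.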
pub fn get_holes(&self) -> usize {
let byte = Self::DEFRAG_STATE_TABLE.load_atomic::<u8>(self.start(), Ordering::SeqCst);
debug_assert_ne!(byte, Self::DEFRAG_SOURCE_STATE);
byte as usize
}
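    /// Initialize the block when it is acquired for allocation: blocks acquired by a copy
    /// allocator start out `Marked`, others start out `Unmarked`; the defrag state is cleared.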
pub fn init(&self, copy: bool) {
self.set_state(if copy {
BlockState::Marked
} else {
BlockState::Unmarked
});
Self::DEFRAG_STATE_TABLE.store_atomic::<u8>(self.start(), 0, Ordering::SeqCst);
}
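    /// Reset the block state to `Unallocated` when the block is released.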
pub fn deinit(&self) {
self.set_state(BlockState::Unallocated);
}
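    /// Get the first line in the block.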
pub fn start_line(&self) -> Line {
Line::from_aligned_address(self.start())
}
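    /// Get the exclusive end line of the block (one past the last line).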
pub fn end_line(&self) -> Line {
Line::from_aligned_address(self.end())
}
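    /// Iterate over the lines in the block.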
#[allow(clippy::assertions_on_constants)]
pub fn lines(&self) -> RegionIterator<Line> {
debug_assert!(!super::BLOCK_ONLY);
RegionIterator::<Line>::new(self.start_line(), self.end_line())
}
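    /// Sweep the block after tracing. In block-only mode an unmarked block is released
    /// wholesale; otherwise the block's lines are scanned, fully unmarked blocks are released,
    /// partially marked blocks are flagged as reusable, and the hole count and mark histogram
    /// are updated. Returns `true` if the block is released back to the page resource.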
pub fn sweep<VM: VMBinding>(
&self,
space: &ImmixSpace<VM>,
mark_histogram: &mut Histogram,
line_mark_state: Option<u8>,
) -> bool {
if super::BLOCK_ONLY {
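            // Block-only mode: liveness is tracked per block; an allocated but unmarked block is released.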
match self.get_state() {
BlockState::Unallocated => false,
BlockState::Unmarked => {
#[cfg(feature = "vo_bit")]
vo_bit::helper::on_region_swept::<VM, _>(self, false);
#[cfg(feature = "object_pinning")]
if let MetadataSpec::OnSide(side) = *VM::VMObjectModel::LOCAL_PINNING_BIT_SPEC {
side.bzero_metadata(self.start(), Block::BYTES);
}
space.release_block(*self);
true
}
BlockState::Marked => {
#[cfg(feature = "vo_bit")]
vo_bit::helper::on_region_swept::<VM, _>(self, true);
false
}
_ => unreachable!(),
}
} else {
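            // Line-mark mode: count marked lines and holes (maximal runs of unmarked lines),
            // clearing freed lines' memory and pinning bits where the relevant features are enabled.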
let mut marked_lines = 0;
let mut holes = 0;
let mut prev_line_is_marked = true;
let line_mark_state = line_mark_state.unwrap();
for line in self.lines() {
if line.is_marked(line_mark_state) {
marked_lines += 1;
prev_line_is_marked = true;
} else {
if prev_line_is_marked {
holes += 1;
}
#[cfg(feature = "immix_zero_on_release")]
crate::util::memory::zero(line.start(), Line::BYTES);
#[cfg(feature = "object_pinning")]
if let MetadataSpec::OnSide(side) = *VM::VMObjectModel::LOCAL_PINNING_BIT_SPEC {
side.bzero_metadata(line.start(), Line::BYTES);
}
prev_line_is_marked = false;
}
}
if marked_lines == 0 {
#[cfg(feature = "vo_bit")]
vo_bit::helper::on_region_swept::<VM, _>(self, false);
space.release_block(*self);
true
} else {
if marked_lines != Block::LINES {
self.set_state(BlockState::Reusable {
unavailable_lines: marked_lines as _,
});
space.reusable_blocks.push(*self)
} else {
self.set_state(BlockState::Unmarked);
}
mark_histogram[holes] += marked_lines;
self.set_holes(holes);
#[cfg(feature = "vo_bit")]
vo_bit::helper::on_region_swept::<VM, _>(self, true);
false
}
}
}
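    /// Clear VO bits for regions that were not marked in this GC: the whole block when
    /// `line_mark_state` is `None` (block-only mode), or each unmarked line otherwise.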
#[cfg(feature = "vo_bit")]
pub fn clear_vo_bits_for_unmarked_regions(&self, line_mark_state: Option<u8>) {
match line_mark_state {
None => {
match self.get_state() {
BlockState::Unmarked => {
vo_bit::bzero_vo_bit(self.start(), Self::BYTES);
}
                    BlockState::Marked => {
                        // The block contains live objects; keep its VO bits.
                    }
_ => unreachable!(),
}
}
Some(state) => {
for line in self.lines() {
if !line.is_marked(state) {
vo_bit::bzero_vo_bit(line.start(), Line::BYTES);
}
}
}
}
}
}
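/// A pool of reusable (partially free) blocks for an immix space.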
pub struct ReusableBlockPool {
queue: BlockPool<Block>,
num_workers: usize,
}
impl ReusableBlockPool {
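    /// Create a new pool, sized for the given number of GC workers.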
pub fn new(num_workers: usize) -> Self {
Self {
queue: BlockPool::new(num_workers),
num_workers,
}
}
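    /// Get the number of blocks in the pool.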
pub fn len(&self) -> usize {
self.queue.len()
}
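    /// Add a block to the pool.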
pub fn push(&self, block: Block) {
self.queue.push(block)
}
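    /// Pop a block from the pool.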
pub fn pop(&self) -> Option<Block> {
self.queue.pop()
}
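    /// Clear the pool by replacing the underlying queue.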
pub fn reset(&mut self) {
self.queue = BlockPool::new(self.num_workers);
}
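    /// Visit every block currently in the pool.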
pub fn iterate_blocks(&self, mut f: impl FnMut(Block)) {
self.queue.iterate_blocks(&mut f);
}
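    /// Flush the underlying block queue.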
pub fn flush_all(&self) {
self.queue.flush_all();
}
}