|
| 1 | +use std::{ |
| 2 | + num::NonZeroUsize, |
| 3 | + ops::{Deref, DerefMut, Range}, |
| 4 | + sync::atomic::{AtomicU8, Ordering}, |
| 5 | +}; |
| 6 | + |
| 7 | +use mallockit::{space::page_resource::MemRegion, util::mem::size_class::SizeClass}; |
| 8 | + |
| 9 | +use crate::{pool::Pool, ImmixAllocator}; |
| 10 | + |
| 11 | +use super::Address; |
| 12 | + |
/// Per-block metadata stored at the start of each block's meta region.
///
/// `#[repr(C)]` pins the field layout, since this struct is overlaid on
/// raw block memory via `MemRegion::meta` / `meta_mut`.
#[repr(C)]
pub struct BlockMeta {
    /// Identifier of this block's owner, recorded by `Block::init`.
    /// (Exact semantics depend on the caller — presumably an allocator
    /// or pool index; TODO confirm against `ImmixAllocator`/`Pool` usage.)
    pub owner: usize,
    // bump_cursor: u32,
    // used_bytes: u32,
    // pub prev: Option<Block>,
    // pub next: Option<Block>,
    // pub size_class: SizeClass,
    // pub group: u8,
    // head_cell: Address,
    // pub owner: &'static Pool,
    /// Per-line mark bytes: 0 = line free, non-zero = line marked/in use
    /// (scanned by `Block::get_next_available_lines`).
    /// NOTE(review): this table has only 8 entries, but `Block::LINES`
    /// (`DATA_BYTES / Line::BYTES`) is far larger for a 32 KiB block with
    /// 256 B lines — confirm the table is intentionally this small.
    pub line_marks: [AtomicU8; 8],
}
| 26 | + |
/// A fixed-size, aligned block of memory in the immix space, represented
/// by its non-null start address.
///
/// `NonZeroUsize` provides a niche so `Option<Block>` is pointer-sized.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Block(NonZeroUsize);
| 29 | + |
impl MemRegion for Block {
    type Meta = BlockMeta;

    /// Block size: 2^15 = 32 KiB.
    const LOG_BYTES: usize = 15;

    /// The block's start address (the wrapped pointer value).
    fn start(&self) -> Address {
        Address::from(self.0.get())
    }

    /// Constructs a `Block` from its start address.
    ///
    /// `addr` must be non-zero and block-aligned; both are checked only
    /// in debug builds.
    fn from_address(addr: Address) -> Self {
        debug_assert!(!addr.is_zero());
        debug_assert!(Self::is_aligned(addr));
        // SAFETY: `addr` is non-zero — debug-asserted above; callers must
        // uphold this invariant in release builds as well.
        Self(unsafe { NonZeroUsize::new_unchecked(usize::from(addr)) })
    }
}
| 45 | + |
/// Dereferences to the block's metadata, so `block.owner`,
/// `block.line_marks` etc. read straight from the meta region.
impl Deref for Block {
    type Target = BlockMeta;

    fn deref(&self) -> &Self::Target {
        self.meta()
    }
}
| 53 | + |
/// Mutable access to the block's metadata (e.g. `block.owner = …`).
impl DerefMut for Block {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: delegated to `meta_mut`; the underlying meta region is
        // assumed exclusively borrowed via `&mut self` — TODO confirm the
        // `MemRegion::meta_mut` contract in mallockit.
        unsafe { self.meta_mut() }
    }
}
| 59 | + |
| 60 | +impl Block { |
| 61 | + pub const LINES: usize = Self::DATA_BYTES / Line::BYTES; |
| 62 | + |
| 63 | + pub fn init(mut self, owner: usize) { |
| 64 | + self.owner = owner; |
| 65 | + debug_assert_eq!(Self::META_BYTES, Address::BYTES * 8); |
| 66 | + // self.size_class = size_class; |
| 67 | + // let size = size_class.bytes(); |
| 68 | + // self.head_cell = Address::ZERO; |
| 69 | + // self.bump_cursor = (Address::ZERO + Self::META_BYTES).align_up(size).as_usize() as u32; |
| 70 | + // self.used_bytes = 0; |
| 71 | + } |
| 72 | + |
| 73 | + pub fn lines(self) -> Range<Line> { |
| 74 | + let start = Line::from_address(self.data_start()); |
| 75 | + let end = Line::from_address(self.end()); |
| 76 | + start..end |
| 77 | + } |
| 78 | + |
| 79 | + pub fn get_next_available_lines(self, search_start: Line) -> Option<Range<Line>> { |
| 80 | + let start_cursor = search_start.get_index_within_block(); |
| 81 | + let mut cursor = start_cursor; |
| 82 | + // Find start |
| 83 | + while cursor < self.line_marks.len() { |
| 84 | + let mark = self.line_marks[cursor].load(Ordering::SeqCst); |
| 85 | + if mark == 0 { |
| 86 | + break; |
| 87 | + } |
| 88 | + cursor += 1; |
| 89 | + } |
| 90 | + if cursor == self.line_marks.len() { |
| 91 | + return None; |
| 92 | + } |
| 93 | + let start = Line::from_address(self.data_start() + cursor * Line::BYTES); |
| 94 | + // Find limit |
| 95 | + while cursor < self.line_marks.len() { |
| 96 | + let mark = self.line_marks[cursor].load(Ordering::SeqCst); |
| 97 | + if mark != 0 { |
| 98 | + break; |
| 99 | + } |
| 100 | + cursor += 1; |
| 101 | + } |
| 102 | + let end = Line::from_address(self.data_start() + cursor * Line::BYTES); |
| 103 | + Some(start..end) |
| 104 | + } |
| 105 | +} |
| 106 | + |
/// A line (sub-block allocation granule) in the immix space, represented
/// by its non-null start address.
///
/// `NonZeroUsize` provides a niche so `Option<Line>` is pointer-sized.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Line(NonZeroUsize);
| 109 | + |
| 110 | +impl Line { |
| 111 | + pub fn block(self) -> Block { |
| 112 | + Block::containing(self.start()) |
| 113 | + } |
| 114 | + |
| 115 | + pub fn get_index_within_block(self) -> usize { |
| 116 | + (self.start() - self.block().data_start()) / Self::BYTES |
| 117 | + } |
| 118 | +} |
| 119 | + |
impl MemRegion for Line {
    /// Line size: 2^8 = 256 bytes.
    const LOG_BYTES: usize = 8;

    /// The line's start address (the wrapped pointer value).
    fn start(&self) -> Address {
        Address::from(self.0.get())
    }

    /// Constructs a `Line` from its start address.
    ///
    /// `addr` must be non-zero and line-aligned; both are checked only
    /// in debug builds.
    fn from_address(addr: Address) -> Self {
        debug_assert!(!addr.is_zero());
        debug_assert!(Self::is_aligned(addr));
        // SAFETY: `addr` is non-zero — debug-asserted above; callers must
        // uphold this invariant in release builds as well.
        Self(unsafe { NonZeroUsize::new_unchecked(usize::from(addr)) })
    }
}
0 commit comments