Skip to content

Instantly share code, notes, and snippets.

@playXE
Created August 17, 2020 09:29
Show Gist options
  • Save playXE/02c7d2f6baf606ea076c67a7a741d774 to your computer and use it in GitHub Desktop.
Save playXE/02c7d2f6baf606ea076c67a7a741d774 to your computer and use it in GitHub Desktop.
/// Default allocation alignment: two machine words.
pub const ALIGN: usize = 2 * core::mem::size_of::<usize>();
/// Rounds `value` up to the next multiple of `align`.
/// An `align` of zero leaves `value` untouched.
pub fn align_usize(value: usize, align: usize) -> usize {
    match align {
        0 => value,
        a => ((value + a - 1) / a) * a,
    }
}
use std::cell::UnsafeCell;
use std::sync::atomic::{AtomicUsize, Ordering};
pub const K: usize = 1024;
/// Number of distinct size classes (length of `SIZES`).
pub const SIZE_CLASSES: usize = 6;
pub const SIZE_CLASS_SMALLEST: SizeClass = SizeClass(0);
pub const SIZE_SMALLEST: usize = 16;
pub const SIZE_CLASS_TINY: SizeClass = SizeClass(1);
pub const SIZE_TINY: usize = 32;
pub const SIZE_CLASS_SMALL: SizeClass = SizeClass(2);
pub const SIZE_SMALL: usize = 128;
pub const SIZE_CLASS_MEDIUM: SizeClass = SizeClass(3);
pub const SIZE_MEDIUM: usize = 2 * K;
pub const SIZE_CLASS_LARGE: SizeClass = SizeClass(4);
pub const SIZE_LARGE: usize = 8 * K;
pub const SIZE_CLASS_HUGE: SizeClass = SizeClass(5);
pub const SIZE_HUGE: usize = 32 * K;
/// Index of a size class; doubles as an index into `SIZES`.
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct SizeClass(usize);
/// Slot sizes in bytes, ascending, one entry per size class.
pub const SIZES: [usize; SIZE_CLASSES] = [
    SIZE_SMALLEST,
    SIZE_TINY,
    SIZE_SMALL,
    SIZE_MEDIUM,
    SIZE_LARGE,
    SIZE_HUGE,
];
impl SizeClass {
    /// Smallest class whose slot size is at least `size` — the class an
    /// allocation request of `size` bytes should search first. Requests
    /// above `SIZE_LARGE` all map to the HUGE class.
    fn next_up(size: usize) -> SizeClass {
        assert!(size >= SIZE_SMALLEST);
        let idx = SIZES
            .iter()
            .position(|&class_size| size <= class_size)
            .unwrap_or(SIZE_CLASSES - 1);
        SizeClass(idx)
    }
    /// Largest class whose slot size does not exceed `size` — the class a
    /// free block of `size` bytes can safely serve.
    fn next_down(size: usize) -> SizeClass {
        assert!(size >= SIZE_SMALLEST);
        let idx = SIZES
            .iter()
            .rposition(|&class_size| class_size <= size)
            .expect("size is at least SIZE_SMALLEST, so class 0 matches");
        SizeClass(idx)
    }
    /// Index into `SIZES`.
    fn idx(self) -> usize {
        self.0
    }
    /// Slot size in bytes for this class.
    fn size(self) -> usize {
        SIZES[self.0]
    }
}
/// Returns `true` if the given `value` is already aligned
/// to `align`.
///
/// A zero `align` counts as "always aligned", matching `align_usize`,
/// which returns `value` unchanged when `align == 0`.
pub fn is_aligned(value: usize, align: usize) -> bool {
    // Direct remainder test instead of round-tripping through
    // `align_usize`: equivalent result, no multiply, and self-contained.
    align == 0 || value % align == 0
}
/// Simple free-list allocator (not thread safe).
///
/// `allocate` consults the free list first; when no recycled block fits,
/// it bump-allocates fresh memory from the mmap'd region until `limit`
/// is reached.
pub struct FreeListSpace {
    // Base address of the anonymous mmap region backing this space.
    pub mmap: *mut libc::c_void,
    // Recycled blocks grouped by size class. UnsafeCell because
    // allocate/free take `&self` but must mutate the list —
    // sound only under the documented single-threaded use.
    pub freelist: UnsafeCell<FreeList>,
    // Bump pointer: address of the next bump allocation.
    pub top: AtomicUsize,
    // One-past-the-end of the mapped region; bump allocation stops here.
    pub limit: AtomicUsize,
}
impl FreeListSpace {
    /// Creates a space backed by an anonymous read/write mapping of
    /// `size` bytes, rounded up to the 4096-byte page size.
    ///
    /// # Panics
    /// Panics on Windows (unsupported) and when `mmap` fails.
    pub fn new(mut size: usize) -> Self {
        if !is_aligned(size, 4096) {
            size = align_usize(size, 4096);
        }
        #[cfg(target_os = "windows")]
        {
            panic!("Woops windows sucks");
        }
        let mem = unsafe {
            libc::mmap(
                core::ptr::null_mut(),
                size,
                libc::PROT_READ | libc::PROT_WRITE,
                libc::MAP_ANONYMOUS | libc::MAP_PRIVATE,
                -1,
                0,
            )
        };
        // Bug fix: mmap signals failure with MAP_FAILED (-1), not null.
        // The original stored the result unchecked, silently poisoning
        // `top`/`limit` when the mapping failed.
        assert!(mem != libc::MAP_FAILED, "mmap of {} bytes failed", size);
        Self {
            mmap: mem,
            top: AtomicUsize::new(mem as _),
            limit: AtomicUsize::new(mem as usize + size),
            freelist: UnsafeCell::new(FreeList::new()),
        }
    }
    /// Returns `size` bytes starting at `addr` to the free list.
    /// Blocks smaller than `SIZE_SMALLEST` are silently dropped by `add`.
    pub fn free(&self, addr: *mut libc::c_void, size: usize) {
        self.freelist().add(addr, size);
    }
    /// Allocates `size` bytes (rounded up to `ALIGN`).
    ///
    /// Tries the free list first, then falls back to bump allocation.
    /// Returns a null pointer when the space is exhausted.
    pub fn allocate(&self, mut size: usize) -> *mut libc::c_void {
        if !is_aligned(size, ALIGN) {
            size = align_usize(size, ALIGN);
        }
        let free_space = self.freelist().alloc(size);
        unsafe {
            if free_space.is_non_null() {
                let object = free_space.addr().cast::<libc::c_void>();
                println!("Free list allocate {:p}", object);
                let free_size = free_space.size();
                assert!(size <= free_size);
                // Return the unused tail of the slot to the free list
                // (`add` ignores tails smaller than SIZE_SMALLEST).
                let free_start = object.offset(size as _);
                let free_end = object.offset(free_size as _);
                let new_free_size = free_end as usize - free_start as usize;
                self.freelist().add(free_start.cast(), new_free_size);
                return object.cast();
            }
        }
        // Bump-allocation fallback from the untouched tail of the mapping.
        let object = self.top.load(Ordering::Relaxed);
        let next_top = object + size;
        if next_top <= self.limit.load(Ordering::Relaxed) {
            println!("Bump allocate 0x{:x}->0x{:x}", object, next_top);
            self.top.store(next_top, Ordering::Relaxed);
            return object as *mut libc::c_void;
        }
        core::ptr::null_mut()
    }
    // Hands out `&mut FreeList` from `&self` via UnsafeCell. Sound only
    // because this type is documented single-threaded and the borrow
    // never overlaps another `freelist()` call.
    fn freelist(&self) -> &mut FreeList {
        unsafe { &mut *self.freelist.get() }
    }
}
/// Segregated free list: one intrusive singly linked list of free blocks
/// per size class (indexed by `SizeClass::idx`).
pub struct FreeList {
    classes: Vec<FreeListClass>,
}
impl FreeList {
    /// Creates a free list with one empty class per entry in `SIZES`.
    pub fn new() -> FreeList {
        let mut classes = Vec::with_capacity(SIZE_CLASSES);
        for _ in 0..SIZE_CLASSES {
            classes.push(FreeListClass::new());
        }
        FreeList { classes }
    }
    /// Records `size` bytes at `addr` as free.
    ///
    /// Blocks too small to hold a `FreeSlot` header are dropped (leaked).
    /// The block is filed under the largest class whose slot size does not
    /// exceed `size`, so a later pop from that class is guaranteed to fit.
    pub fn add(&mut self, addr: *mut libc::c_void, size: usize) {
        if size < SIZE_SMALLEST {
            // Too small to carry the intrusive header; leak it.
            return;
        }
        let szclass = SizeClass::next_down(size);
        unsafe {
            // Write the header, then let the class do the push-front.
            // Consistency fix: the original duplicated FreeListClass::add's
            // link logic inline; `add` below fills in the real `next`.
            addr.cast::<FreeSlot>().write(FreeSlot {
                size,
                next: core::ptr::null_mut(),
            });
        }
        self.classes[szclass.idx()].add(FreeSpace(addr.cast()));
    }
    /// Finds and unlinks a free block of at least `size` bytes; returns the
    /// null `FreeSpace` when nothing fits.
    ///
    /// Every non-HUGE class at or above the best-fit class can pop its head
    /// unconditionally: each slot there is >= its class size >= `size`.
    /// The HUGE class holds arbitrarily large slots, so it is scanned for
    /// an explicit size match instead.
    pub fn alloc(&mut self, size: usize) -> FreeSpace {
        let szclass = SizeClass::next_up(size).idx();
        let last = SIZE_CLASS_HUGE.idx();
        for class in szclass..last {
            let result = self.classes[class].first();
            if result.is_non_null() {
                assert!(result.size() >= size);
                return result;
            }
        }
        self.classes[SIZE_CLASS_HUGE.idx()].find(size)
    }
}
/// One size class: a push-front intrusive list of free blocks.
struct FreeListClass {
    head: FreeSpace,
}
impl FreeListClass {
    /// Creates an empty class.
    fn new() -> FreeListClass {
        FreeListClass {
            head: FreeSpace::null(),
        }
    }
    /// Pushes `addr` onto the front of this class's list.
    fn add(&mut self, addr: FreeSpace) {
        addr.set_next(self.head);
        self.head = addr;
    }
    /// Pops and returns the head slot; null when the list is empty.
    fn first(&mut self) -> FreeSpace {
        let head = self.head;
        if head.is_null() {
            return FreeSpace::null();
        }
        self.head = head.next();
        head
    }
    /// Unlinks and returns the first slot of at least `minimum_size`
    /// bytes; null when no slot is large enough.
    fn find(&mut self, minimum_size: usize) -> FreeSpace {
        let mut prev = FreeSpace::null();
        let mut curr = self.head;
        loop {
            if curr.is_null() {
                return FreeSpace::null();
            }
            if curr.size() >= minimum_size {
                let next = curr.next();
                if prev.is_null() {
                    self.head = next;
                } else {
                    prev.set_next(next);
                }
                return curr;
            }
            prev = curr;
            curr = curr.next();
        }
    }
}
/// Intrusive header written at the start of every free block.
#[repr(C)]
struct FreeSlot {
    // Total size in bytes of the free block this header lives in.
    size: usize,
    // Next free block in the same size class; null terminates the list.
    next: *mut FreeSlot,
}
/// Nullable handle to a `FreeSlot`, forming an intrusive singly linked
/// list of free blocks.
/// - `set_next` relinks this node's successor (used for push-front and
///   for unlinking during `find`).
#[derive(Copy, Clone)]
pub struct FreeSpace(*mut FreeSlot);
impl FreeSpace {
    /// The null (empty-list) handle.
    #[inline(always)]
    pub fn null() -> FreeSpace {
        FreeSpace(core::ptr::null_mut())
    }
    #[inline(always)]
    pub fn is_null(self) -> bool {
        self.addr().is_null()
    }
    #[inline(always)]
    pub fn is_non_null(self) -> bool {
        !self.is_null()
    }
    /// Raw pointer to the underlying slot header.
    #[inline(always)]
    fn addr(self) -> *mut FreeSlot {
        self.0
    }
    /// Returns the next node in the list.
    ///
    /// # Panics
    /// Panics if `self` is null.
    #[inline(always)]
    pub fn next(self) -> FreeSpace {
        assert!(self.is_non_null());
        unsafe { FreeSpace((*self.0).next) }
    }
    /// Sets the next node in the list.
    ///
    /// # Panics
    /// Panics if `self` is null.
    #[inline(always)]
    pub fn set_next(&self, next: FreeSpace) {
        assert!(self.is_non_null());
        unsafe {
            (*self.0).next = next.0;
        }
    }
    /// Size in bytes of the block this node heads.
    ///
    /// # Panics
    /// Panics if `self` is null. (Bug fix: previously dereferenced the
    /// pointer unchecked — UB on a null handle — unlike its siblings
    /// `next`/`set_next`, which assert.)
    #[inline(always)]
    pub fn size(self) -> usize {
        assert!(self.is_non_null());
        unsafe { (*self.0).size }
    }
}
/// Demo payload used by `main` to exercise the allocator.
#[derive(Debug)]
struct Foo {
    x: u64,
    y: f32,
}
/// Demo: allocates a `Foo` (served by bump allocation), frees it, then
/// allocates again so the freed block is recycled through the free list.
fn main() {
    let space = FreeListSpace::new(128 * 1024); // 128kb
    unsafe {
        let mem = space.allocate(core::mem::size_of::<Foo>()).cast::<Foo>();
        // Bug fix: allocate() returns null on exhaustion; writing through
        // it unchecked would be undefined behavior.
        assert!(!mem.is_null(), "first allocation failed");
        mem.write(Foo { x: 0, y: 42.42 });
        println!("{:?}", &*mem);
        space.free(mem.cast(), core::mem::size_of::<Foo>());
        // Consistency: use core::mem throughout (this call used std::mem).
        let mem = space.allocate(core::mem::size_of::<Foo>()).cast::<Foo>();
        assert!(!mem.is_null(), "second allocation failed");
        mem.write(Foo { x: 345, y: 42.42 });
        println!("{:?}", &*mem);
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment