no deps (riscv) + implemented LL heap
@@ -1,34 +1,11 @@
-use core::{alloc::GlobalAlloc, ops::Range, ptr::null_mut};
+use core::{alloc::GlobalAlloc, ops::Range};
 
-use crate::util::mutex::Mutex;
+use crate::{heap::Heap, util::mutex::Mutex};
 
 #[global_allocator]
-static ALLOCATOR: LockedHeap = LockedHeap::empty();
+pub static ALLOCATOR: LockedHeap = LockedHeap::empty();
 
-struct Heap {
-    cur: *mut u8,
-    end: *mut u8,
-}
-
-impl Heap {
-    pub const fn empty() -> Self {
-        Self {
-            cur: null_mut(),
-            end: null_mut(),
-        }
-    }
-
-    pub fn init(&mut self, start: *mut u8, end: *mut u8) {
-        self.cur = start;
-        self.end = end;
-    }
-}
-
-pub fn init_heap(range: Range<*mut u8>) {
-    ALLOCATOR.init(range.start, range.end);
-}
-
-struct LockedHeap(Mutex<Heap>);
+pub struct LockedHeap(Mutex<Heap>);
 
 // should look into why I need this, didn't see it in linked list alloc crate
 unsafe impl Sync for LockedHeap {}
@@ -37,24 +14,19 @@ impl LockedHeap {
     pub const fn empty() -> Self {
         Self(Mutex::new(Heap::empty()))
     }
-    pub fn init(&self, start: *mut u8, end: *mut u8) {
-        self.0.lock().init(start, end);
+    pub unsafe fn init(&self, range: Range<*mut u8>) {
+        self.0.lock().init(range);
     }
+    pub fn print(&self) {
+        self.0.lock().print();
+    }
 }
 
 unsafe impl GlobalAlloc for LockedHeap {
     unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
-        // blazing fast implementation :sunglasses:
-        // (gonna switch to my own linked list later)
-        let mut heap = self.0.lock();
-        let pointer = heap.cur;
-        heap.cur = heap.cur.add(layout.size());
-        if heap.cur >= heap.end {
-            return null_mut();
-        }
-        return pointer;
+        self.0.lock().alloc(layout)
     }
     unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) {
-        // bet ur impl is slower
+        self.0.lock().dealloc(ptr, layout)
     }
 }
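A note on the `unsafe impl Sync` kept above: a `#[global_allocator]` static must be `Sync`, and `Heap` holds raw pointers, which are `!Send` and `!Sync`. If the hand-rolled `Mutex` only promises `Sync` for `T: Send` (as std's does), the wrapper then needs its own impl. A minimal standalone sketch of that situation, where `Lock` is a stand-in and its bounds are an assumption, not the kernel's `util::mutex::Mutex`:

    use core::cell::UnsafeCell;
    use core::ptr::null_mut;

    struct Heap {
        cur: *mut u8, // raw pointers are !Send + !Sync, so Heap is !Send + !Sync too
    }

    // Stand-in for a hand-rolled spinlock; like std's Mutex it only promises Sync for T: Send.
    struct Lock<T>(UnsafeCell<T>);
    unsafe impl<T: Send> Sync for Lock<T> {}

    struct LockedHeap(Lock<Heap>);

    // Heap is !Send, so Lock<Heap> is not Sync and the static below would be rejected
    // without this. SAFETY: all access goes through the lock.
    unsafe impl Sync for LockedHeap {}

    static A: LockedHeap = LockedHeap(Lock(UnsafeCell::new(Heap { cur: null_mut() })));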
@@ -101,7 +101,7 @@ pub mod mcause {
 pub mod satp {
     use core::mem::transmute;
 
-    use crate::{arch::paging::Table, util::bits::get_bits};
+    use crate::{arch::paging::Table, util::bits::bits};
 
     #[derive(Debug)]
     #[repr(u64)]
@@ -131,9 +131,9 @@ pub mod satp {
     }
     pub fn read() -> Satp {
         let satp = unsafe { csrr!("satp") };
-        let mode = unsafe { transmute(get_bits!(satp[63,60])) };
-        let asid = get_bits!(satp[59, 44]);
-        let ppn = unsafe { transmute(get_bits!(satp[43, 0]) << 12) };
+        let mode = unsafe { transmute(bits!(satp;60,63)) };
+        let asid = bits!(satp;44,59);
+        let ppn = unsafe { transmute(bits!(satp;0,43) << 12) };
         Satp { mode, asid, ppn }
     }
     pub fn write(satp: Satp) {
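For reference, the bit ranges those `bits!` calls pull out of `satp` match the RV64 layout: MODE = bits 63..=60, ASID = 59..=44, PPN = 43..=0, with the root table at PPN << 12. A plain-shift sketch of the same decode (hypothetical helper, not the kernel's code):

    // Plain-shift version of the satp decode above.
    fn decode_satp(satp: u64) -> (u64, u64, u64) {
        let mode = (satp >> 60) & 0xf;                // bits 63..=60
        let asid = (satp >> 44) & 0xffff;             // bits 59..=44
        let root = (satp & ((1u64 << 44) - 1)) << 12; // PPN, shifted to a byte address
        (mode, asid, root)
    }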
@@ -1,6 +1,6 @@
 use crate::{
     arch::csr::{self, satp},
-    util::bits::get_bits,
+    util::bits::bits,
 };
 use core::slice;
 
@@ -36,7 +36,7 @@ impl Entry {
     }
     pub fn get_addr(&self) -> usize {
         let val = self.0;
-        get_bits!(val[53,10]) << 12
+        bits!(val;10,53) << 12
     }
     pub fn clear(&mut self) {
         self.0 = 0;
@@ -100,10 +100,10 @@ pub fn init(mem_end: *mut u8) -> *mut u8 {
 }
 
 pub fn virt_to_physical(table: &Table, addr: usize) -> usize {
-    let ppn2 = get_bits!(addr[38,30]);
-    let ppn1 = get_bits!(addr[29,21]);
-    let ppn0 = get_bits!(addr[20,12]);
-    let offset = get_bits!(addr[11,0]);
+    let ppn2 = bits!(addr;30,38);
+    let ppn1 = bits!(addr;21,29);
+    let ppn0 = bits!(addr;12,20);
+    let offset = bits!(addr;0,11);
     // let satp = csr::satp::read();
     unsafe {
         let lvl2 = table as *const Table;
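The four extractions above are the Sv39 split of a virtual address: VPN[2] = bits 38..=30, VPN[1] = 29..=21, VPN[0] = 20..=12, page offset = 11..=0, each VPN indexing a 512-entry table. A small sketch with plain shifts (hypothetical helper, for illustration only):

    // Same Sv39 split written with plain shifts and masks.
    fn split_sv39(va: usize) -> (usize, usize, usize, usize) {
        let vpn2 = (va >> 30) & 0x1ff;
        let vpn1 = (va >> 21) & 0x1ff;
        let vpn0 = (va >> 12) & 0x1ff;
        let offset = va & 0xfff;
        (vpn2, vpn1, vpn0, offset)
    }

    // e.g. split_sv39(0x8020_1234) == (2, 1, 1, 0x234)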
126  kernel/src/heap/block.rs  Normal file
@@ -0,0 +1,126 @@
use core::{
    mem::{size_of, transmute},
    ops::{Deref, DerefMut},
    ptr::null_mut,
};

pub const FREE_SIZE: usize = size_of::<FreeBlockInfo>() + size_of::<FreePointer>();
pub const PTR_SIZE: usize = size_of::<FreePointer>();
pub const USED_SIZE: usize = size_of::<BlockInfo>();

pub struct BlockInfo(usize);

impl BlockInfo {
    pub const fn new(prev_used: bool, size: usize) -> Self {
        Self(prev_used as usize | size)
    }
    pub fn prev_used(&self) -> bool {
        self.0 & 1 == 1
    }
    pub fn size(&self) -> usize {
        self.0 & !1
    }
}

pub struct UsedPointer(*mut BlockInfo);

impl Deref for UsedPointer {
    type Target = BlockInfo;
    fn deref(&self) -> &Self::Target {
        unsafe { transmute(self.0) }
    }
}

impl DerefMut for UsedPointer {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { transmute(self.0) }
    }
}

#[derive(Clone, Copy)]
pub struct FreeBlockInfo {
    pub size: usize,
    pub prev: FreePointer,
    pub next: FreePointer,
}

impl FreeBlockInfo {
    pub fn pointer(&mut self) -> FreePointer {
        FreePointer(self as *mut FreeBlockInfo)
    }
}

#[derive(Clone, Copy, PartialEq, Eq)]
pub struct FreePointer(*mut FreeBlockInfo);

impl FreePointer {
    pub const fn null() -> Self {
        Self(null_mut())
    }
    pub unsafe fn new(addr: *mut u8, info: FreeBlockInfo) -> Self {
        let ptr = Self(addr as *mut FreeBlockInfo);
        let len = info.size;
        *ptr.0 = info;
        let end = addr.byte_add(len).byte_sub(PTR_SIZE) as *mut FreePointer;
        *end = ptr;
        Self(addr as *mut FreeBlockInfo)
    }
    pub fn to_used(mut self) -> *mut BlockInfo {
        self.prev.next = self.next;
        self.next.prev = self.prev;
        self.0 as *mut BlockInfo
    }
    pub unsafe fn insert_new(&mut self, len: usize) -> *mut BlockInfo {
        let old = self.0;
        let new = old.byte_add(len);
        *new = *old;
        self.0 = new;
        self.size = self.size - len;

        self.prev.next = *self;
        self.next.prev = *self;

        old as *mut BlockInfo
    }
}

impl Deref for FreePointer {
    type Target = FreeBlockInfo;
    fn deref(&self) -> &Self::Target {
        unsafe { transmute(self.0) }
    }
}

impl DerefMut for FreePointer {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { transmute(self.0) }
    }
}

pub struct FreeBlockIter {
    end: FreePointer,
    prev: FreePointer,
}

impl Iterator for FreeBlockIter {
    // you know, I could've returned &'static mut FreeBlockInfo...
    // that feels too wrong though
    type Item = FreePointer;
    fn next(&mut self) -> Option<Self::Item> {
        self.prev = unsafe { (*self.prev).next };
        if self.prev == self.end {
            None
        } else {
            Some(self.prev)
        }
    }
}

impl FreeBlockIter {
    pub fn from(head: &mut FreeBlockInfo) -> Self {
        Self {
            end: FreePointer(head),
            prev: FreePointer(head),
        }
    }
}
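The `BlockInfo` header above (and its copy in heap/mod.rs) packs the "previous block is used" flag into bit 0 of the size word; block sizes are kept 8-byte aligned, so that bit is otherwise always zero. A tiny standalone sketch of the packing, not the kernel's code:

    // Same packing scheme as BlockInfo, written as free functions.
    fn pack(prev_used: bool, size: usize) -> usize {
        prev_used as usize | size // size is a multiple of 8, so bit 0 is free
    }
    fn block_size(header: usize) -> usize {
        header & !1
    }
    fn prev_used(header: usize) -> bool {
        header & 1 == 1
    }

    // pack(true, 0x40) == 0x41; block_size(0x41) == 0x40; prev_used(0x41) == true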
266  kernel/src/heap/mod.rs  Normal file
@@ -0,0 +1,266 @@
use core::{mem::size_of, ops::Range, ptr::null_mut};

// Heap stolen from my own riscv userspace implementation;
// uses a doubly linked list of free blocks
//
// I tried to do this with idiomatic rust (wrapper types)
// but it turned out to just be cursed and messy bc
// it's all unsafe raw pointers in the end :pensive:
//
// might try again later, but it'll probably just make it
// slower and not much safer so

use crate::println;

const ALIGN: usize = 0b1000;
const ALIGN_MASK: usize = !(ALIGN - 1);

struct BlockInfo(usize);

impl BlockInfo {
    pub const fn new(prev_used: bool, size: usize) -> Self {
        Self(prev_used as usize | size)
    }
    pub fn prev_used(&self) -> bool {
        self.0 & 1 == 1
    }
    pub fn set_prev_used(&mut self) {
        self.0 |= 1;
    }
    pub fn unset_prev_used(&mut self) {
        self.0 &= !1;
    }
    pub fn size(&self) -> usize {
        self.0 & !1
    }
}

type BlockPointer = *mut BlockInfo;

struct FreeBlockInfo {
    info: BlockInfo,
    prev: FreePointer,
    next: FreePointer,
}

impl FreeBlockInfo {
    pub fn prev_used(&self) -> bool {
        self.info.prev_used()
    }
    pub fn size(&self) -> usize {
        self.info.size()
    }
}

type FreePointer = *mut FreeBlockInfo;

const FREE_SIZE: usize = size_of::<FreeBlockInfo>() + size_of::<FreePointer>();
const PTR_SIZE: usize = size_of::<FreePointer>();
const USED_SIZE: usize = size_of::<BlockInfo>();

pub struct Heap {
    head: FreeBlockInfo,
    start: *mut u8,
    end: *mut u8,
}

impl Heap {
    pub const fn empty() -> Self {
        Self {
            head: FreeBlockInfo {
                info: BlockInfo(0),
                prev: null_mut(),
                next: null_mut(),
            },
            start: null_mut(),
            end: null_mut(),
        }
    }

    pub unsafe fn init(&mut self, range: Range<*mut u8>) {
        let head = self.head();
        let first = range.start as FreePointer;
        let size = range.end as usize - range.start as usize;
        create_free(
            first,
            FreeBlockInfo {
                info: BlockInfo::new(true, size),
                next: head,
                prev: head,
            },
        );
        self.head.next = first;
        self.head.prev = first;
        self.start = range.start;
        self.end = range.end;
    }

    pub unsafe fn alloc(&mut self, layout: core::alloc::Layout) -> *mut u8 {
        // calc aligned size
        let mut size = layout.size() + USED_SIZE;
        // does this matter? idr, copying from my riscv userspace impl
        size = ((size - 1) & ALIGN_MASK) + ALIGN;
        if size < FREE_SIZE {
            size = FREE_SIZE;
        }
        // search for free block w enough space
        for free in self.iter_free() {
            let free_size = (*free).info.size();
            // free block found
            if free_size >= size {
                // deal with leftover space
                let leftover = free_size - size;
                if leftover < FREE_SIZE {
                    size = free_size;
                    let mut next_used = free.byte_add(size) as BlockPointer;
                    if next_used as *mut u8 == self.end {
                        next_used = &mut self.head.info;
                    }
                    (*next_used).set_prev_used();
                    let prev = (*free).prev;
                    let next = (*free).next;
                    (*prev).next = next;
                    (*next).prev = prev;
                } else {
                    let new_free = free.byte_add(size);
                    let prev = (*free).prev;
                    let next = (*free).next;
                    create_free(
                        new_free,
                        FreeBlockInfo {
                            info: BlockInfo::new(true, leftover),
                            prev,
                            next,
                        },
                    );
                    (*prev).next = new_free;
                    (*next).prev = new_free;
                }
                // create block
                let used = free as BlockPointer;
                (*used) = BlockInfo::new(true, size);
                let data = used.byte_add(USED_SIZE) as *mut u8;
                return data;
            }
        }
        return null_mut();
    }

    pub unsafe fn dealloc(&mut self, ptr: *mut u8, layout: core::alloc::Layout) {
        let used = ptr.byte_sub(USED_SIZE) as BlockPointer;
        let mut size = (*used).size();
        let old_size = size;
        let mut addr = used as FreePointer;
        let mut prev = self.head();
        let mut next = self.head.next;
        if !(*used).prev_used() {
            let prev_free = *(used.byte_sub(PTR_SIZE) as *mut FreePointer);
            addr = prev_free;
            size += (*prev_free).info.size();
            prev = (*prev_free).prev;
            next = (*prev_free).next;
        }
        let mut n_block = used.byte_add(old_size);
        if n_block as *mut u8 != self.end {
            let mut nn_block = n_block.byte_add((*n_block).size());
            if nn_block as *mut u8 == self.end {
                nn_block = &mut self.head.info;
            }
            if !(*nn_block).prev_used() {
                size += (*n_block).size();
                next = (*next).next;
            }
        }
        create_free(
            addr,
            FreeBlockInfo {
                info: BlockInfo::new(true, size),
                prev,
                next,
            },
        );
        (*prev).next = addr;
        (*next).prev = addr;
        if n_block as *mut u8 == self.end {
            n_block = &mut self.head.info;
        }
        (*n_block).unset_prev_used();
    }

    fn iter_free(&mut self) -> FreeBlockIter {
        FreeBlockIter {
            prev: &mut self.head,
            end: &mut self.head,
        }
    }
    fn iter_block(&mut self) -> BlockIter {
        BlockIter {
            cur: self.start as BlockPointer,
            end: self.end,
        }
    }

    fn head(&mut self) -> FreePointer {
        &mut self.head as FreePointer
    }

    pub fn print(&mut self) {
        unsafe {
            println!("heap: {:?} -> {:?}", self.start, self.end);
            for block in self.iter_block() {
                let size = (*block).size();
                let mut n_block = block.byte_add(size);
                if n_block as *mut u8 == self.end {
                    n_block = &mut self.head.info;
                }
                let used = if (*n_block).prev_used() { "used" } else { "free" };
                println!(" - {:?}: {}, size 0x{:x}", block, used, size);
            }
            println!();
        }
    }
}

unsafe fn create_free(addr: FreePointer, info: FreeBlockInfo) {
    let len = info.info.size();
    *addr = info;
    let end = addr.byte_add(len).byte_sub(PTR_SIZE) as *mut FreePointer;
    *end = addr;
}

struct FreeBlockIter {
    end: FreePointer,
    prev: FreePointer,
}

impl Iterator for FreeBlockIter {
    type Item = FreePointer;
    fn next(&mut self) -> Option<Self::Item> {
        self.prev = unsafe { (*self.prev).next };
        if self.prev == self.end {
            None
        } else {
            Some(self.prev)
        }
    }
}

struct BlockIter {
    end: *mut u8,
    cur: BlockPointer,
}

impl Iterator for BlockIter {
    type Item = BlockPointer;
    fn next(&mut self) -> Option<Self::Item> {
        unsafe {
            let cur = self.cur;
            if cur as *mut u8 == self.end {
                return None;
            }
            let size = (*self.cur).size();
            self.cur = cur.byte_add(size);
            Some(cur)
        }
    }
}
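A worked example of the size rounding at the top of `alloc()`, assuming RV64 (8-byte `usize` and pointers): `FREE_SIZE = 24 + 8 = 32` and `USED_SIZE = 8`, and the `FREE_SIZE` floor is what guarantees a block can later hold a free-list header plus the trailing back-pointer that `create_free` writes. Sketch, not the kernel's code:

    const ALIGN: usize = 8;
    const ALIGN_MASK: usize = !(ALIGN - 1);

    // Mirrors the rounding in Heap::alloc: add the 8-byte used header, round up to
    // a multiple of ALIGN, and never go below FREE_SIZE (32 on RV64).
    fn rounded(request: usize) -> usize {
        let mut size = request + 8;               // + USED_SIZE
        size = ((size - 1) & ALIGN_MASK) + ALIGN; // round up to the next 8
        if size < 32 {                            // FREE_SIZE floor
            size = 32;
        }
        size
    }

    // rounded(10) == 32, rounded(24) == 32, rounded(60) == 72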
@@ -8,35 +8,52 @@ use core::ops::Range;
use fdt::FDT;

use crate::allocator::ALLOCATOR;

extern crate alloc;

pub mod allocator;
pub mod arch;
pub mod dev;
pub mod fdt;
pub mod heap;
pub mod log;
pub mod qemu;
pub mod util;
pub mod dev;

pub fn main(heap_mem: Range<*mut u8>, fdt: FDT) -> ! {
    println!("we out here vibin");
    println!("memory range: {:?}", fdt.mem_range());
    println!("heap range: {:?}", heap_mem);
    for node in &fdt {

    for node in &fdt {}
    unsafe {
        ALLOCATOR.init(heap_mem);
    }
    allocator::init_heap(heap_mem);
    ALLOCATOR.print();
    println!("----------- vec test:");
    let mut test = alloc::vec![1, 2, 3];
    test.push(3);
    let test2 = alloc::vec![-1, -2, -3, -4];
    ALLOCATOR.print();
    println!("{:?}", test);
    unsafe {
        let x = *(0x3000 as *const u8);
        println!("{}", x);
    println!("{:?}", test2);
    drop(test2);
    drop(test);
    ALLOCATOR.print();
    println!("----------- vec vec test:");
    let mut test = alloc::vec::Vec::new();
    for i in 0..4 {
        let n = i*4;
        test.push(alloc::vec![n, n+1, n+2, n+3]);
    }
    for i in 0..10000 {
        let test2: alloc::vec::Vec<i32> = alloc::vec::Vec::with_capacity(10_000_000);
        println!("{}", i);
    ALLOCATOR.print();
    println!("{:?}", test);
    drop(test);
    ALLOCATOR.print();
    println!("----------- dealloc test:");
    for i in 0..1000 {
        let test2: alloc::vec::Vec<i32> = alloc::vec::Vec::with_capacity(10_000_0000);
    }
    ALLOCATOR.print();
    // for _ in 0..40000000 {}
    // let x = unsafe { *(0x10000000000 as *mut u8) };
    // println!("we got {x}");
@@ -3,12 +3,12 @@ use core::{
     mem::transmute,
 };
 
-macro_rules! get_bits {
-    ($name:ident[$high:expr,$low:expr]) => {{
+macro_rules! bits {
+    ($name:expr;$low:expr,$high:expr) => {{
         ($name & ((($name - $name + 2).pow($high - $low + 1) - 1) << $low)) >> $low
     }};
 }
-pub(crate) use get_bits;
+pub(crate) use bits;
 
 pub trait BeRep {
     fn _from_be(&self) -> Self;
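The renamed `bits!` macro extracts the inclusive bit range `lo..=hi` of its argument; the `$name - $name + 2` term looks like a trick to get the constant 2 in the same integer type as the input so `.pow()` resolves without an annotation. An equivalent written with plain shifts, plus a sample value (hypothetical helper, for illustration only):

    // Plain-function equivalent of bits!(x; lo, hi) for a u64 input.
    fn bits_u64(x: u64, lo: u32, hi: u32) -> u64 {
        (x & ((2u64.pow(hi - lo + 1) - 1) << lo)) >> lo
    }

    // bits!(0xABCDu64; 4, 11) and bits_u64(0xABCD, 4, 11) should both give 0xBC.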