//! Bitmap-backed physical frame allocator built from the Limine memory map.
1use lancer_core::bitmap::bitmap_seal::Sealed;
2use lancer_core::bitmap::{BitmapAllocator as BitmapCore, BitmapBacking};
3use limine::memory_map::{Entry, EntryType};
4use x86_64::PhysAddr;
5use x86_64::structures::paging::{FrameAllocator, PhysFrame, Size4KiB};
6
7use super::RawSlice;
8use super::addr;
9use super::frame::OwnedFrame;
10use crate::sync::IrqMutex;
11
/// Size of one physical frame in bytes (x86_64 4 KiB pages).
const PAGE_SIZE: u64 = 4096;

/// Physical base address of the memory reserved for the bitmap's backing
/// storage. Written once by `BitmapFrameAllocator::init`; read by
/// `bitmap_region`. Zero until init runs.
static BITMAP_PHYS_BASE: core::sync::atomic::AtomicU64 = core::sync::atomic::AtomicU64::new(0);
/// Byte length of the reserved bitmap region (whole frames, i.e. the bitmap
/// byte count rounded up to a multiple of `PAGE_SIZE`). Written once by
/// `BitmapFrameAllocator::init`; read by `bitmap_region`.
static BITMAP_BYTE_COUNT: core::sync::atomic::AtomicU64 = core::sync::atomic::AtomicU64::new(0);
16
17pub fn bitmap_region() -> (u64, u64) {
18 (
19 BITMAP_PHYS_BASE.load(core::sync::atomic::Ordering::Relaxed),
20 BITMAP_BYTE_COUNT.load(core::sync::atomic::Ordering::Relaxed),
21 )
22}
23
// Opt `RawSlice<u64>` into the sealed `BitmapBacking` trait.
impl Sealed for RawSlice<u64> {}

/// Adapts `RawSlice<u64>` as storage for the core bitmap allocator; each
/// `u64` chunk tracks 64 frames (see the `div_ceil(64)` sizing in `init`).
impl BitmapBacking for RawSlice<u64> {
    fn chunks(&self) -> &[u64] {
        self.as_slice()
    }

    fn chunks_mut(&mut self) -> &mut [u64] {
        self.as_slice_mut()
    }
}
34
/// Global frame bitmap behind an IRQ-safe mutex. Starts with empty backing;
/// `BitmapFrameAllocator::init` swaps in real storage carved out of a usable
/// memory-map region.
static BITMAP: IrqMutex<BitmapCore<RawSlice<u64>>, 3> =
    IrqMutex::new(BitmapCore::from_backing(RawSlice::empty()));
37
/// Zero-sized handle exposing the global bitmap-based frame allocator.
pub struct BitmapFrameAllocator;
39
impl BitmapFrameAllocator {
    /// Builds the global frame bitmap from the bootloader memory map.
    ///
    /// Sizes the bitmap to cover every frame up to the highest USABLE
    /// address, carves its backing storage out of the first USABLE region
    /// large enough, marks all USABLE frames free, then re-reserves the
    /// bitmap's own frames and frame 0. Must run before any allocation.
    pub fn init(memory_map: &[&Entry]) {
        // Highest end address of any USABLE entry decides how many frames
        // the bitmap must be able to track.
        let max_addr = memory_map
            .iter()
            .filter(|entry| entry.entry_type == EntryType::USABLE)
            .map(|entry| entry.base + entry.length)
            .fold(0u64, u64::max);

        let total_frames = (max_addr / PAGE_SIZE) as usize;
        // One u64 chunk tracks 64 frames; round storage up to whole frames.
        let chunks_needed = total_frames.div_ceil(64);
        let bitmap_bytes = chunks_needed * 8;
        let bitmap_frames = bitmap_bytes.div_ceil(PAGE_SIZE as usize);

        // First USABLE region big enough to hold the bitmap itself.
        // NOTE(review): assumes entry.base is frame-aligned for USABLE
        // entries (Limine guarantees this for usable regions) — confirm.
        let bitmap_region = memory_map
            .iter()
            .filter(|entry| entry.entry_type == EntryType::USABLE)
            .find(|entry| entry.length >= (bitmap_frames as u64) * PAGE_SIZE)
            .expect("[phys] no usable region large enough for bitmap");

        let bitmap_phys_base = bitmap_region.base;
        // Publish the reserved region so `bitmap_region()` can report it
        // (e.g. so paging code can keep these frames mapped).
        BITMAP_PHYS_BASE.store(bitmap_phys_base, core::sync::atomic::Ordering::Relaxed);
        BITMAP_BYTE_COUNT.store((bitmap_frames as u64) * PAGE_SIZE, core::sync::atomic::Ordering::Relaxed);
        // Touch the bitmap through the direct physical mapping.
        let bitmap_virt = addr::phys_to_virt(PhysAddr::new(bitmap_phys_base));
        assert!(
            bitmap_virt.is_aligned(8u64),
            "bitmap backing memory not 8-byte aligned"
        );
        let bitmap_ptr = bitmap_virt.as_u64() as *mut u64;

        // Install the real backing under the lock, then size the bitmap.
        let mut bm = BITMAP.lock();
        let mut backing = RawSlice::empty();
        backing.init(bitmap_ptr, chunks_needed);
        *bm = BitmapCore::from_backing(backing);
        bm.init(total_frames);

        // Mark every USABLE frame free...
        memory_map
            .iter()
            .filter(|entry| entry.entry_type == EntryType::USABLE)
            .for_each(|entry| {
                let start_frame = (entry.base / PAGE_SIZE) as usize;
                let end_frame = ((entry.base + entry.length) / PAGE_SIZE) as usize;
                bm.mark_range_free(start_frame..end_frame);
            });

        // ...then take back the frames now occupied by the bitmap itself
        // (its region was USABLE, so the loop above just freed them).
        let bitmap_start_frame = (bitmap_phys_base / PAGE_SIZE) as usize;
        (bitmap_start_frame..bitmap_start_frame + bitmap_frames).for_each(|idx| bm.mark_used(idx));

        // Keep frame 0 reserved so no allocation ever hands out phys 0.
        bm.mark_used(0);
    }

    /// Allocates `count` physically contiguous frames and returns the base
    /// address of the run, or `None` if no such run exists.
    ///
    /// Returns a raw `PhysAddr` (no owning wrapper) — the caller is
    /// responsible for releasing the frames.
    pub fn allocate_contiguous(&self, count: usize) -> Option<PhysAddr> {
        BITMAP
            .lock()
            .allocate_contiguous(count)
            .map(|idx| PhysAddr::new((idx as u64) * PAGE_SIZE))
    }

    /// Allocates a single frame, wrapped in `OwnedFrame` for ownership
    /// tracking. Returns `None` when no frame is free.
    pub fn allocate(&self) -> Option<OwnedFrame> {
        let mut bm = BITMAP.lock();
        bm.allocate().map(|idx| {
            // Frame index -> physical byte address.
            let phys = PhysAddr::new((idx as u64) * PAGE_SIZE);
            OwnedFrame::new(PhysFrame::containing_address(phys))
        })
    }

    /// Releases the frame containing `phys` back to the bitmap.
    ///
    /// # Panics
    /// Panics if the frame is not currently allocated (double-free), since
    /// that indicates a broken ownership invariant elsewhere in the kernel.
    pub(crate) fn free_frame_by_addr(phys: PhysAddr) {
        let mut bm = BITMAP.lock();
        let frame_idx = (phys.as_u64() / PAGE_SIZE) as usize;
        bm.deallocate(frame_idx).unwrap_or_else(|_| {
            panic!(
                "[phys] invariant violation: double-free of frame {:#x}",
                phys.as_u64()
            )
        });
    }

    /// Consumes an `OwnedFrame` and returns its frame to the allocator.
    #[allow(dead_code)]
    pub fn deallocate(&self, frame: OwnedFrame) {
        Self::free_frame_by_addr(frame.inner().start_address());
    }

    /// Returns a raw `PhysFrame` (no ownership wrapper) to the allocator.
    pub fn deallocate_frame(&self, frame: PhysFrame<Size4KiB>) {
        Self::free_frame_by_addr(frame.start_address());
    }

    /// Marks frame `idx` as used without allocating it (e.g. to reserve a
    /// frame claimed by firmware or MMIO).
    #[allow(dead_code)]
    pub fn mark_used(idx: usize) {
        BITMAP.lock().mark_used(idx);
    }

    /// Total number of frames tracked by the bitmap.
    pub fn total_frames() -> usize {
        BITMAP.lock().total_items()
    }

    /// Number of frames currently marked used.
    #[allow(dead_code)]
    pub fn used_frames() -> usize {
        BITMAP.lock().used_items()
    }

    /// Number of frames currently free.
    pub fn free_frames() -> usize {
        BITMAP.lock().free_items()
    }
}
143
// SAFETY: frames come from the global bitmap, which marks each handed-out
// frame used until it is explicitly deallocated, so (per the bitmap
// allocator's contract) `allocate_frame` never returns the same frame twice
// while it is in use — the uniqueness requirement of `FrameAllocator`.
unsafe impl FrameAllocator<Size4KiB> for BitmapFrameAllocator {
    fn allocate_frame(&mut self) -> Option<PhysFrame<Size4KiB>> {
        // Unwrap the ownership tracker; the raw frame is handed to paging.
        self.allocate().map(|tf| tf.inner())
    }
}
149
150#[allow(dead_code)]
151pub fn log_memory_map(memory_map: &[&Entry]) {
152 crate::kprintln!(" Memory map ({} entries):", memory_map.len());
153 memory_map.iter().for_each(|entry| {
154 let entry_type_str = match entry.entry_type {
155 EntryType::USABLE => "Usable",
156 EntryType::RESERVED => "Reserved",
157 EntryType::ACPI_RECLAIMABLE => "ACPI Reclaimable",
158 EntryType::ACPI_NVS => "ACPI NVS",
159 EntryType::BAD_MEMORY => "Bad Memory",
160 EntryType::BOOTLOADER_RECLAIMABLE => "Bootloader Reclaimable",
161 EntryType::EXECUTABLE_AND_MODULES => "Kernel/Modules",
162 EntryType::FRAMEBUFFER => "Framebuffer",
163 _ => "Unknown",
164 };
165 let size_kb = entry.length / 1024;
166 crate::kprintln!(
167 " {:#016x} - {:#016x} ({:>8} KB) {}",
168 entry.base,
169 entry.base + entry.length,
170 size_kb,
171 entry_type_str
172 );
173 });
174}
175
176pub fn frame_stats() -> (usize, usize) {
177 (
178 BitmapFrameAllocator::total_frames(),
179 BitmapFrameAllocator::free_frames(),
180 )
181}
182
183#[allow(dead_code)]
184pub fn log_frame_stats() {
185 crate::kprintln!(" Frame allocator stats:");
186 crate::kprintln!(" Total frames: {}", BitmapFrameAllocator::total_frames());
187 crate::kprintln!(" Used frames: {}", BitmapFrameAllocator::used_frames());
188 crate::kprintln!(" Free frames: {}", BitmapFrameAllocator::free_frames());
189 crate::kprintln!(
190 " Free memory: {} MB",
191 BitmapFrameAllocator::free_frames() * 4096 / (1024 * 1024)
192 );
193}