···991010use alloc::alloc::{alloc, dealloc, Layout};
1111use core::ptr::NonNull;
1212+use crate::mana_pool::aslr;
12131314/// Default stack size: 64 KB
1415/// This is generous enough for most threads while being conservative with memory
/// A heap-allocated thread stack with a per-stack ASLR offset.
pub struct Stack {
    /// Lowest address of the allocation (the stack grows down toward this).
    bottom: NonNull<u8>,
    /// Total allocation size in bytes.
    size: usize,
    /// ASLR: Random offset from nominal stack pointer (0-64KB)
    /// This makes the actual stack pointer unpredictable
    aslr_offset: usize,
}
28322933// Safety: Stack pointers can be safely sent between threads
···4448 ///
4549 /// # Returns
4650 /// Some(Stack) if allocation succeeds, None otherwise
5151+ ///
5252+ /// # Security
5353+ /// ASLR: The stack pointer is randomized with up to 64KB offset to prevent
5454+ /// exploits that rely on predictable stack addresses.
4755 pub fn with_size(size: usize) -> Option<Self> {
4856 // Clamp size to valid range and align to 16 bytes
4957 let size = size.clamp(MIN_STACK_SIZE, MAX_STACK_SIZE);
···5563 // Allocate the stack
5664 let ptr = unsafe { alloc(layout) };
57655858- NonNull::new(ptr).map(|bottom| Stack { bottom, size })
6666+ // SECURITY: ASLR - Randomize stack pointer offset (0-64KB)
6767+ // This makes buffer overflow attacks much harder as the exact
6868+ // stack location is unpredictable between threads
6969+ let aslr_offset = aslr::randomize_offset(65536, 16);
7070+7171+ NonNull::new(ptr).map(|bottom| Stack {
7272+ bottom,
7373+ size,
7474+ aslr_offset,
7575+ })
5976 }
60776178 /// Get the bottom (low address) of the stack
    /// Get the top (high address) of the stack
    ///
    /// The stack grows downward, so this is bottom + size
    ///
    /// # Security
    /// ASLR: The actual stack top includes a random offset, making the
    /// stack pointer unpredictable to attackers.
    pub fn top(&self) -> u64 {
        // SECURITY: Include ASLR offset
        // The actual stack pointer starts at top - aslr_offset
        // This gives us up to 64KB of randomization
        // NOTE(review): `aslr_offset` is produced by
        // `aslr::randomize_offset(65536, 16)` and can be almost 64KB — as large
        // as the default stack. For small stacks the offset can consume most of
        // the usable range; the offset should be bounded relative to `size`
        // when the stack is created — confirm.
        self.bottom() + self.size as u64 - self.aslr_offset as u64
    }

    /// Get the ASLR offset applied to this stack
    /// (the amount subtracted from the nominal top in `top()`).
    pub fn aslr_offset(&self) -> usize {
        self.aslr_offset
    }
7210173102 /// Get the size of the stack in bytes
+266
heartwood/src/mana_pool/aslr.rs
···11+//! # ASLR - Address Space Layout Randomization
22+//!
33+//! Randomizes memory layout to prevent exploits that rely on predictable addresses.
44+//! Inspired by PaX ASLR.
55+//!
66+//! ## Security Benefits
77+//! - Makes ROP (Return-Oriented Programming) attacks harder
88+//! - Prevents buffer overflow exploits from jumping to known addresses
99+//! - Increases entropy in the system (harder to guess memory layout)
1010+//!
1111+//! ## Randomization Strategy
//! - Stack: 16 bits of entropy (256MB range at 4KB page granularity)
//! - Heap: 16 bits of entropy (256MB range at 4KB page granularity)
//! - Code: 12 bits of entropy (16MB range, page-aligned)
//! - Groves (libraries): 8 bits of entropy (1MB range, page-aligned)
1616+1717+use super::entropy::{HardwareRng, ChaCha8Rng};
/// Memory layout for a process/thread with ASLR
#[derive(Debug, Clone, Copy)]
pub struct RandomizedLayout {
    /// Randomized code base address
    pub code_base: usize,

    /// Randomized stack base address (top of stack)
    pub stack_base: usize,

    /// Randomized heap base address
    pub heap_base: usize,

    /// Randomized capability table location
    pub capability_table: usize,

    /// Entropy used for this layout (for auditing)
    // NOTE(review): this should equal the sum of log2(positions) over the
    // randomized regions; verify it matches what `from_rng` actually records.
    pub entropy_bits: u32,
}
3737+3838+impl RandomizedLayout {
3939+ /// Create a new randomized memory layout using hardware RNG
4040+ pub fn new() -> Self {
4141+ let mut rng = ChaCha8Rng::from_hardware();
4242+ Self::from_rng(&mut rng)
4343+ }
4444+4545+ /// Create a new randomized layout from a specific RNG (for testing)
4646+ pub fn from_rng(rng: &mut ChaCha8Rng) -> Self {
4747+ // x86_64 user space address ranges (simplified for now)
4848+ // We use the lower half of the address space (0x0000_0000 - 0x7FFF_FFFF_FFFF)
4949+ // Reserved kernel space starts at 0xFFFF_8000_0000_0000
5050+5151+ Self {
5252+ code_base: Self::randomize_code_base(rng),
5353+ stack_base: Self::randomize_stack_base(rng),
5454+ heap_base: Self::randomize_heap_base(rng),
5555+ capability_table: Self::randomize_capability_table(rng),
5656+ entropy_bits: 28 + 28 + 24 + 20, // Total entropy across all regions
5757+ }
5858+ }
5959+6060+ /// Randomize code base (executable region)
6161+ /// Range: 0x00400000 - 0x01400000 (16MB range, 24 bits entropy)
6262+ /// Aligned to 4KB pages
6363+ fn randomize_code_base(rng: &mut ChaCha8Rng) -> usize {
6464+ const CODE_START: usize = 0x00400000; // 4MB (traditional ELF base)
6565+ const CODE_RANGE: usize = 0x01000000; // 16MB range
6666+ const PAGE_SIZE: usize = 0x1000; // 4KB alignment
6767+6868+ let offset = rng.range(0, CODE_RANGE / PAGE_SIZE) * PAGE_SIZE;
6969+ CODE_START + offset
7070+ }
7171+7272+ /// Randomize stack base (grows downward)
7373+ /// Range: 0x70000000 - 0x80000000 (256MB range, 28 bits entropy)
7474+ /// Stack grows DOWN from this address
7575+ fn randomize_stack_base(rng: &mut ChaCha8Rng) -> usize {
7676+ const STACK_START: usize = 0x70000000; // 1.75GB
7777+ const STACK_RANGE: usize = 0x10000000; // 256MB range
7878+ const PAGE_SIZE: usize = 0x1000; // 4KB alignment
7979+8080+ let offset = rng.range(0, STACK_RANGE / PAGE_SIZE) * PAGE_SIZE;
8181+ STACK_START + offset
8282+ }
8383+8484+ /// Randomize heap base (grows upward)
8585+ /// Range: 0x10000000 - 0x20000000 (256MB range, 28 bits entropy)
8686+ fn randomize_heap_base(rng: &mut ChaCha8Rng) -> usize {
8787+ const HEAP_START: usize = 0x10000000; // 256MB
8888+ const HEAP_RANGE: usize = 0x10000000; // 256MB range
8989+ const PAGE_SIZE: usize = 0x1000; // 4KB alignment
9090+9191+ let offset = rng.range(0, HEAP_RANGE / PAGE_SIZE) * PAGE_SIZE;
9292+ HEAP_START + offset
9393+ }
9494+9595+ /// Randomize capability table location
9696+ /// Range: 0x60000000 - 0x60100000 (1MB range, 20 bits entropy)
9797+ fn randomize_capability_table(rng: &mut ChaCha8Rng) -> usize {
9898+ const CAP_START: usize = 0x60000000; // 1.5GB
9999+ const CAP_RANGE: usize = 0x00100000; // 1MB range
100100+ const PAGE_SIZE: usize = 0x1000; // 4KB alignment
101101+102102+ let offset = rng.range(0, CAP_RANGE / PAGE_SIZE) * PAGE_SIZE;
103103+ CAP_START + offset
104104+ }
105105+106106+ /// Randomize a stack offset (for individual thread stacks)
107107+ /// This adds additional per-thread randomization on top of the base
108108+ pub fn randomize_stack_offset() -> usize {
109109+ const MAX_OFFSET: usize = 0x10000; // 64KB max offset
110110+ const ALIGN: usize = 16; // 16-byte alignment for x86_64
111111+112112+ let offset = HardwareRng::range(0, MAX_OFFSET / ALIGN) * ALIGN;
113113+ offset
114114+ }
115115+116116+ /// Get entropy estimate in bits
117117+ pub fn entropy_bits(&self) -> u32 {
118118+ self.entropy_bits
119119+ }
120120+}
impl Default for RandomizedLayout {
    // Default = a freshly randomized layout seeded from hardware entropy.
    fn default() -> Self {
        Self::new()
    }
}
/// Global ASLR state (for future process management)
pub struct AslrManager {
    /// Master RNG for generating process layouts
    rng: ChaCha8Rng,

    /// Whether ASLR is enabled (can be disabled for debugging)
    enabled: bool,
}
136136+137137+impl AslrManager {
138138+ /// Create a new ASLR manager
139139+ pub fn new() -> Self {
140140+ Self {
141141+ rng: ChaCha8Rng::from_hardware(),
142142+ enabled: true,
143143+ }
144144+ }
145145+146146+ /// Generate a new randomized layout for a process
147147+ pub fn generate_layout(&mut self) -> RandomizedLayout {
148148+ if self.enabled {
149149+ RandomizedLayout::from_rng(&mut self.rng)
150150+ } else {
151151+ // Deterministic layout for debugging
152152+ Self::deterministic_layout()
153153+ }
154154+ }
155155+156156+ /// Get a deterministic (non-random) layout for debugging
157157+ pub fn deterministic_layout() -> RandomizedLayout {
158158+ RandomizedLayout {
159159+ code_base: 0x00400000,
160160+ stack_base: 0x70000000,
161161+ heap_base: 0x10000000,
162162+ capability_table: 0x60000000,
163163+ entropy_bits: 0, // No randomization
164164+ }
165165+ }
166166+167167+ /// Enable or disable ASLR
168168+ pub fn set_enabled(&mut self, enabled: bool) {
169169+ self.enabled = enabled;
170170+ }
171171+172172+ /// Check if ASLR is enabled
173173+ pub fn is_enabled(&self) -> bool {
174174+ self.enabled
175175+ }
176176+}
impl Default for AslrManager {
    // Default = enabled manager with a hardware-seeded RNG.
    fn default() -> Self {
        Self::new()
    }
}
183183+184184+/// Helper to randomize a pointer offset (for fine-grained ASLR)
185185+/// Use this for small randomizations like stack frame offsets
186186+///
187187+/// This function uses fast RDTSC-based entropy to avoid blocking during early boot
188188+pub fn randomize_offset(max_bytes: usize, alignment: usize) -> usize {
189189+ if alignment == 0 || max_bytes == 0 {
190190+ return 0;
191191+ }
192192+193193+ let max_units = max_bytes / alignment;
194194+ // Use fast_u64() to avoid RDRAND blocking during early boot
195195+ let random_value = HardwareRng::fast_u64() as usize;
196196+ (random_value % max_units) * alignment
197197+}
198198+199199+#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_randomized_layouts_differ() {
        let layout1 = RandomizedLayout::new();
        let layout2 = RandomizedLayout::new();

        // Layouts should be different (probability of collision is negligible)
        // NOTE(review): code_base has only 4096 possible values, so two fresh
        // layouts collide with probability ~1/4096 — this can flake over many
        // CI runs. Consider comparing the tuple of all four bases — confirm.
        assert_ne!(layout1.code_base, layout2.code_base);
        assert_ne!(layout1.stack_base, layout2.stack_base);
    }

    #[test]
    fn test_code_base_in_valid_range() {
        let mut rng = ChaCha8Rng::from_seed(42);

        for _ in 0..100 {
            let layout = RandomizedLayout::from_rng(&mut rng);

            // Code should be in valid range
            assert!(layout.code_base >= 0x00400000);
            assert!(layout.code_base < 0x01400000);

            // Should be page-aligned
            assert_eq!(layout.code_base % 0x1000, 0);
        }
    }

    #[test]
    fn test_stack_base_in_valid_range() {
        let mut rng = ChaCha8Rng::from_seed(42);

        for _ in 0..100 {
            let layout = RandomizedLayout::from_rng(&mut rng);

            // Stack should be in valid range
            assert!(layout.stack_base >= 0x70000000);
            assert!(layout.stack_base < 0x80000000);

            // Should be page-aligned
            assert_eq!(layout.stack_base % 0x1000, 0);
        }
    }

    #[test]
    fn test_deterministic_layout() {
        let layout = AslrManager::deterministic_layout();

        assert_eq!(layout.code_base, 0x00400000);
        assert_eq!(layout.stack_base, 0x70000000);
        assert_eq!(layout.entropy_bits, 0);
    }

    #[test]
    fn test_aslr_manager_toggle() {
        let mut manager = AslrManager::new();

        assert!(manager.is_enabled());

        manager.set_enabled(false);
        assert!(!manager.is_enabled());

        let layout = manager.generate_layout();
        assert_eq!(layout.entropy_bits, 0); // Should be deterministic
    }
}
+62-3
heartwood/src/mana_pool/capability.rs
···4646}
47474848impl CapabilityRights {
4949- /// Full rights (read, write, execute, transfer)
4949+ /// Validate W^X (Write XOR Execute) property
5050+ /// Returns true if rights are valid (never both WRITE and EXECUTE)
5151+ pub fn validate_wx(&self) -> bool {
5252+ // CRITICAL SECURITY: Cannot have both WRITE and EXECUTE
5353+ let has_write = self.contains(Self::WRITE);
5454+ let has_execute = self.contains(Self::EXECUTE);
5555+5656+ // Allowed combinations:
5757+ // - READ only: OK
5858+ // - READ + WRITE: OK (writable data)
5959+ // - READ + EXECUTE: OK (executable code)
6060+ // - WRITE + EXECUTE: FORBIDDEN! (attack vector)
6161+6262+ !(has_write && has_execute)
6363+ }
6464+6565+ /// Full rights (read, write, transfer) - NO EXECUTE for security!
6666+ /// SECURITY: We don't allow full() to include EXECUTE to prevent W+X
5067 pub fn full() -> Self {
5151- Self::READ | Self::WRITE | Self::EXECUTE | Self::TRANSFER
6868+ Self::READ | Self::WRITE | Self::TRANSFER
5269 }
53705471 /// Read-only rights
···5673 Self::READ
5774 }
    /// Read-write rights (for data pages)
    ///
    /// Safe under W^X: grants WRITE but never EXECUTE.
    pub fn read_write() -> Self {
        Self::READ | Self::WRITE
    }
    /// Read-execute rights (for code pages)
    ///
    /// Safe under W^X: grants EXECUTE but never WRITE.
    pub fn read_execute() -> Self {
        Self::READ | Self::EXECUTE
    }
    /// Executable code with transfer capability
    ///
    /// Safe under W^X: EXECUTE + TRANSFER, no WRITE.
    pub fn code_with_transfer() -> Self {
        Self::READ | Self::EXECUTE | Self::TRANSFER
    }
9090+}
9191+9292+#[cfg(test)]
mod tests {
    use super::*;

    // Verifies the W^X policy check rejects every combination that carries
    // both WRITE and EXECUTE and accepts all others.
    #[test]
    fn test_wx_validation() {
        // Valid combinations
        assert!(CapabilityRights::READ.validate_wx());
        assert!(CapabilityRights::read_only().validate_wx());
        assert!(CapabilityRights::read_write().validate_wx());
        assert!(CapabilityRights::read_execute().validate_wx());

        // INVALID: Write + Execute
        let write_execute = CapabilityRights::WRITE | CapabilityRights::EXECUTE;
        assert!(!write_execute.validate_wx(), "W+X should be rejected!");

        // INVALID: Full rights including execute
        let all_rights = CapabilityRights::READ | CapabilityRights::WRITE |
                         CapabilityRights::EXECUTE | CapabilityRights::TRANSFER;
        assert!(!all_rights.validate_wx(), "Full rights with W+X should be rejected!");
    }

    // Every constructor exposed by CapabilityRights must produce a W^X-safe
    // value (including full(), which deliberately omits EXECUTE).
    #[test]
    fn test_safe_combinations() {
        // These should all be safe
        assert!(CapabilityRights::read_only().validate_wx());
        assert!(CapabilityRights::read_write().validate_wx());
        assert!(CapabilityRights::read_execute().validate_wx());
        assert!(CapabilityRights::code_with_transfer().validate_wx());
    }
}
+237
heartwood/src/mana_pool/entropy.rs
···11+//! # Entropy - Random Number Generation for Security
22+//!
33+//! Provides cryptographically-strong random numbers for ASLR and other security features.
44+//! Uses hardware RDRAND/RDSEED when available, falls back to RDTSC-seeded PRNG.
55+66+use core::arch::x86_64::{_rdrand64_step, _rdtsc};
/// Hardware entropy source using x86_64 RDRAND instruction
pub struct HardwareRng;

impl HardwareRng {
    /// Try to get a random u64 from RDRAND
    /// Returns None if RDRAND is not available or fails
    pub fn try_u64() -> Option<u64> {
        let mut value: u64 = 0;
        // RDRAND signals failure through the carry flag (the intrinsic
        // returns 0), so retry a bounded number of times before giving up.
        for _attempt in 0..10 {
            // SAFETY: `_rdrand64_step` only writes to the local `value`.
            let ok = unsafe { _rdrand64_step(&mut value) };
            if ok == 1 {
                return Some(value);
            }
        }
        None
    }

    /// Get random u64, using RDRAND or falling back to RDTSC
    pub fn u64() -> u64 {
        match Self::try_u64() {
            Some(v) => v,
            // Fallback: timestamp counter as entropy — not cryptographically
            // secure, but better than nothing.
            None => unsafe { _rdtsc() },
        }
    }

    /// Fast non-blocking random u64 using only RDTSC
    /// Use this during early boot when RDRAND might not be available
    pub fn fast_u64() -> u64 {
        unsafe { _rdtsc() }
    }

    /// Get random u32
    pub fn u32() -> u32 {
        Self::u64() as u32
    }

    /// Get random usize
    pub fn usize() -> usize {
        Self::u64() as usize
    }

    /// Get random value in range [min, max)
    /// (degenerate ranges where max <= min return min unchanged)
    pub fn range(min: usize, max: usize) -> usize {
        if max <= min {
            return min;
        }
        min + Self::usize() % (max - min)
    }
}
6161+6262+/// Simple ChaCha8-based PRNG for when we need reproducible randomness
6363+/// (e.g., for testing or when hardware RNG is not available)
6464+pub struct ChaCha8Rng {
6565+ state: [u32; 16],
6666+ buffer: [u8; 64],
6767+ buffer_pos: usize,
6868+}
6969+7070+impl ChaCha8Rng {
7171+ /// Create a new ChaCha8 RNG seeded from hardware entropy
7272+ pub fn from_hardware() -> Self {
7373+ let seed = HardwareRng::u64();
7474+ Self::from_seed(seed)
7575+ }
7676+7777+ /// Create a new ChaCha8 RNG from a 64-bit seed
7878+ pub fn from_seed(seed: u64) -> Self {
7979+ let mut state = [0u32; 16];
8080+8181+ // ChaCha constant "expand 32-byte k"
8282+ state[0] = 0x61707865;
8383+ state[1] = 0x3320646e;
8484+ state[2] = 0x79622d32;
8585+ state[3] = 0x6b206574;
8686+8787+ // Seed material
8888+ state[4] = seed as u32;
8989+ state[5] = (seed >> 32) as u32;
9090+ state[6] = seed as u32;
9191+ state[7] = (seed >> 32) as u32;
9292+9393+ // Mix in timestamp for extra entropy
9494+ let timestamp = unsafe { _rdtsc() };
9595+ state[8] = timestamp as u32;
9696+ state[9] = (timestamp >> 32) as u32;
9797+9898+ Self {
9999+ state,
100100+ buffer: [0; 64],
101101+ buffer_pos: 64, // Force generation on first call
102102+ }
103103+ }
104104+105105+ /// Generate next block
106106+ fn generate_block(&mut self) {
107107+ let mut working = self.state;
108108+109109+ // 8 rounds (ChaCha8)
110110+ for _ in 0..4 {
111111+ // Column rounds
112112+ Self::quarter_round(&mut working, 0, 4, 8, 12);
113113+ Self::quarter_round(&mut working, 1, 5, 9, 13);
114114+ Self::quarter_round(&mut working, 2, 6, 10, 14);
115115+ Self::quarter_round(&mut working, 3, 7, 11, 15);
116116+117117+ // Diagonal rounds
118118+ Self::quarter_round(&mut working, 0, 5, 10, 15);
119119+ Self::quarter_round(&mut working, 1, 6, 11, 12);
120120+ Self::quarter_round(&mut working, 2, 7, 8, 13);
121121+ Self::quarter_round(&mut working, 3, 4, 9, 14);
122122+ }
123123+124124+ // Add original state
125125+ for i in 0..16 {
126126+ working[i] = working[i].wrapping_add(self.state[i]);
127127+ }
128128+129129+ // Convert to bytes
130130+ for (i, &word) in working.iter().enumerate() {
131131+ let bytes = word.to_le_bytes();
132132+ self.buffer[i * 4..(i + 1) * 4].copy_from_slice(&bytes);
133133+ }
134134+135135+ // Increment counter
136136+ self.state[12] = self.state[12].wrapping_add(1);
137137+ if self.state[12] == 0 {
138138+ self.state[13] = self.state[13].wrapping_add(1);
139139+ }
140140+141141+ self.buffer_pos = 0;
142142+ }
143143+144144+ #[inline(always)]
145145+ fn quarter_round(state: &mut [u32; 16], a: usize, b: usize, c: usize, d: usize) {
146146+ state[a] = state[a].wrapping_add(state[b]);
147147+ state[d] ^= state[a];
148148+ state[d] = state[d].rotate_left(16);
149149+150150+ state[c] = state[c].wrapping_add(state[d]);
151151+ state[b] ^= state[c];
152152+ state[b] = state[b].rotate_left(12);
153153+154154+ state[a] = state[a].wrapping_add(state[b]);
155155+ state[d] ^= state[a];
156156+ state[d] = state[d].rotate_left(8);
157157+158158+ state[c] = state[c].wrapping_add(state[d]);
159159+ state[b] ^= state[c];
160160+ state[b] = state[b].rotate_left(7);
161161+ }
162162+163163+ /// Get next random u64
164164+ pub fn next_u64(&mut self) -> u64 {
165165+ if self.buffer_pos + 8 > 64 {
166166+ self.generate_block();
167167+ }
168168+169169+ let mut bytes = [0u8; 8];
170170+ bytes.copy_from_slice(&self.buffer[self.buffer_pos..self.buffer_pos + 8]);
171171+ self.buffer_pos += 8;
172172+173173+ u64::from_le_bytes(bytes)
174174+ }
175175+176176+ /// Get next random u32
177177+ pub fn next_u32(&mut self) -> u32 {
178178+ self.next_u64() as u32
179179+ }
180180+181181+ /// Get next random usize
182182+ pub fn next_usize(&mut self) -> usize {
183183+ self.next_u64() as usize
184184+ }
185185+186186+ /// Get random value in range [min, max)
187187+ pub fn range(&mut self, min: usize, max: usize) -> usize {
188188+ if min >= max {
189189+ return min;
190190+ }
191191+ let range = max - min;
192192+ min + (self.next_usize() % range)
193193+ }
194194+}
195195+196196+#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_hardware_rng() {
        // NOTE(review): when RDRAND is unavailable both calls fall back to
        // RDTSC; two back-to-back reads of the timestamp counter still differ,
        // so the inequality holds either way.
        let val1 = HardwareRng::u64();
        let val2 = HardwareRng::u64();
        // Should be different (probability of collision is negligible)
        assert_ne!(val1, val2);
    }

    #[test]
    fn test_chacha8_deterministic() {
        // NOTE(review): this requires `from_seed` to be a pure function of the
        // seed. As currently written, `from_seed` mixes RDTSC into the state,
        // so the two RNGs below are seeded differently and this test can
        // fail — confirm `from_seed` does not consume volatile entropy.
        let mut rng1 = ChaCha8Rng::from_seed(12345);
        let mut rng2 = ChaCha8Rng::from_seed(12345);

        // Same seed should produce same sequence
        for _ in 0..100 {
            assert_eq!(rng1.next_u64(), rng2.next_u64());
        }
    }

    #[test]
    fn test_chacha8_different_seeds() {
        let mut rng1 = ChaCha8Rng::from_seed(12345);
        let mut rng2 = ChaCha8Rng::from_seed(54321);

        // Different seeds should produce different values
        assert_ne!(rng1.next_u64(), rng2.next_u64());
    }

    #[test]
    fn test_range() {
        let mut rng = ChaCha8Rng::from_seed(42);

        for _ in 0..100 {
            let val = rng.range(100, 200);
            assert!(val >= 100 && val < 200);
        }
    }
}
+5
heartwood/src/mana_pool/mod.rs
···2222pub mod allocator;
2323pub mod interrupt_lock;
2424pub mod buddy;
2525+pub mod entropy; // Random number generation for ASLR
2626+pub mod aslr; // Address Space Layout Randomization
25272628pub use object_manager::{ObjectManager, ObjectHandle, ObjectType, ObjectInfo};
2729pub use capability::{Capability, CapabilityRights};
···267269 CannotTransfer,
268270 /// Attempting to perform an operation without sufficient rights
269271 InsufficientRights,
272272+ /// SECURITY: Attempting to create a capability with both WRITE and EXECUTE rights
273273+ /// This violates the W^X (Write XOR Execute) security policy
274274+ SecurityViolation,
270275}
+7
heartwood/src/mana_pool/object_manager.rs
···149149 return Err(ManaError::InvalidCapability);
150150 }
151151152152+ // SECURITY: Enforce W^X (Write XOR Execute)
153153+ if !new_rights.validate_wx() {
154154+ // Attempting to create a capability with both WRITE and EXECUTE!
155155+ // This is a critical security violation.
156156+ return Err(ManaError::SecurityViolation);
157157+ }
158158+152159 // Can only derive rights that the original capability has
153160 if !capability.rights.contains(new_rights) {
154161 return Err(ManaError::InsufficientRights);