//! veridian_kernel/verification/alloc_proofs.rs
//!
//! Model-based verification of the physical frame allocator: reference
//! models, invariant checkers, Kani proof harnesses, and unit tests.

#![allow(unexpected_cfgs)]
#![allow(dead_code)]

#[cfg(feature = "alloc")]
use alloc::collections::BTreeSet;
#[cfg(feature = "alloc")]
use alloc::vec::Vec;

/// Size of one physical frame in the model, in bytes (4 KiB pages).
const MODEL_FRAME_SIZE: u64 = 4096;

/// Exclusive upper bound of the DMA zone: the first 16 MiB of memory.
const DMA_ZONE_LIMIT: u64 = 16 * 1024 * 1024;

/// Largest buddy order the model accepts (2^10 = 1024 frames per block).
const MAX_BUDDY_ORDER: u32 = 10;

/// Physical memory zones, in increasing address order (see `zone_range`
/// for the exact half-open address range of each zone).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum MemoryZone {
    /// Addresses below `DMA_ZONE_LIMIT` (16 MiB).
    Dma = 0,
    /// Addresses from 16 MiB up to 4 GiB.
    Normal = 1,
    /// Addresses at or above 4 GiB.
    High = 2,
}

/// State of a single physical frame in the model.
/// (Not referenced by the code visible in this file; kept for API use.)
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FrameState {
    /// Frame is available for allocation.
    Free,
    /// Frame is currently handed out.
    Allocated,
}

/// Reference ("ghost") model of a frame allocator: tracks the exact sets
/// of allocated and free frame addresses so invariants such as frame
/// conservation and no-double-allocation can be checked against it.
#[derive(Debug, Clone, Default)]
pub struct AllocatorModel {
    #[cfg(feature = "alloc")]
    // Base addresses of frames currently handed out.
    allocated: BTreeSet<u64>,
    #[cfg(feature = "alloc")]
    // Base addresses of frames available for allocation.
    free: BTreeSet<u64>,
    // Total frames managed; allocated.len() + free.len() must equal this.
    total_frames: u64,
    // Cumulative count of successful alloc_frame* calls.
    alloc_count: u64,
    // Cumulative count of successful free_frame calls (an operation
    // counter — distinct from the free_count() accessor below).
    free_count: u64,
}

#[cfg(feature = "alloc")]
impl AllocatorModel {
    /// Builds a model managing `count` frames starting at `base`,
    /// all of them initially free.
    pub fn new(base: u64, count: u64) -> Self {
        let free: BTreeSet<u64> = (0..count)
            .map(|i| base + i * MODEL_FRAME_SIZE)
            .collect();
        Self {
            allocated: BTreeSet::new(),
            free,
            total_frames: count,
            alloc_count: 0,
            free_count: 0,
        }
    }

    /// Allocates the lowest-addressed free frame, or reports
    /// `OutOfMemory` when none remains.
    pub fn alloc_frame(&mut self) -> Result<u64, AllocModelError> {
        match self.free.iter().next().copied() {
            Some(frame) => {
                self.free.remove(&frame);
                self.allocated.insert(frame);
                self.alloc_count += 1;
                Ok(frame)
            }
            None => Err(AllocModelError::OutOfMemory),
        }
    }

    /// Allocates the lowest-addressed free frame whose address lies
    /// inside `zone`'s half-open range.
    pub fn alloc_frame_zone(&mut self, zone: MemoryZone) -> Result<u64, AllocModelError> {
        let (lo, hi) = zone_range(zone);

        // BTreeSet iterates in ascending order, so the first match is
        // the lowest eligible address.
        let mut picked = None;
        for &candidate in self.free.iter() {
            if candidate >= lo && candidate < hi {
                picked = Some(candidate);
                break;
            }
        }
        let frame = picked.ok_or(AllocModelError::OutOfMemory)?;

        self.free.remove(&frame);
        self.allocated.insert(frame);
        self.alloc_count += 1;
        Ok(frame)
    }

    /// Returns an allocated frame to the free set. Freeing a frame that
    /// is already free is a `DoubleFree`; freeing an address the model
    /// has never seen is an `InvalidFrame`.
    pub fn free_frame(&mut self, frame: u64) -> Result<(), AllocModelError> {
        // `remove` reports whether the frame was actually allocated.
        if self.allocated.remove(&frame) {
            self.free.insert(frame);
            self.free_count += 1;
            return Ok(());
        }
        if self.free.contains(&frame) {
            Err(AllocModelError::DoubleFree)
        } else {
            Err(AllocModelError::InvalidFrame)
        }
    }

    /// True if `frame` is currently allocated.
    pub fn is_allocated(&self, frame: u64) -> bool {
        self.allocated.contains(&frame)
    }

    /// True if `frame` is currently free.
    pub fn is_free(&self, frame: u64) -> bool {
        self.free.contains(&frame)
    }

    /// Number of frames currently allocated.
    pub fn allocated_count(&self) -> usize {
        self.allocated.len()
    }

    /// Number of frames currently free (note: distinct from the
    /// `free_count` field, which counts free *operations*).
    pub fn free_count(&self) -> usize {
        self.free.len()
    }
}

146fn zone_range(zone: MemoryZone) -> (u64, u64) {
148 match zone {
149 MemoryZone::Dma => (0, DMA_ZONE_LIMIT),
150 MemoryZone::Normal => (DMA_ZONE_LIMIT, 4 * 1024 * 1024 * 1024),
151 MemoryZone::High => (4 * 1024 * 1024 * 1024, u64::MAX),
152 }
153}
154
/// One block in a buddy-allocator model: `2^order` contiguous frames
/// starting at byte address `base`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct BuddyBlock {
    /// Byte address of the first frame in the block.
    pub base: u64,
    /// log2 of the block size in frames.
    pub order: u32,
    /// Whether the block is currently free.
    pub free: bool,
}

166impl BuddyBlock {
167 pub fn frame_count(&self) -> u64 {
169 1u64 << self.order
170 }
171
172 pub fn byte_size(&self) -> u64 {
174 self.frame_count() * MODEL_FRAME_SIZE
175 }
176
177 pub fn buddy_base(&self) -> u64 {
179 self.base ^ (self.frame_count() * MODEL_FRAME_SIZE)
180 }
181
182 pub fn split(&self) -> Option<(BuddyBlock, BuddyBlock)> {
184 if self.order == 0 {
185 return None;
186 }
187 let new_order = self.order - 1;
188 let half_size = (1u64 << new_order) * MODEL_FRAME_SIZE;
189 Some((
190 BuddyBlock {
191 base: self.base,
192 order: new_order,
193 free: true,
194 },
195 BuddyBlock {
196 base: self.base + half_size,
197 order: new_order,
198 free: true,
199 },
200 ))
201 }
202
203 pub fn coalesce(&self, other: &BuddyBlock) -> Option<BuddyBlock> {
205 if self.order != other.order {
206 return None;
207 }
208 if !self.free || !other.free {
209 return None;
210 }
211
212 let expected_buddy = self.buddy_base();
213 if other.base != expected_buddy {
214 return None;
215 }
216
217 let new_base = core::cmp::min(self.base, other.base);
218 Some(BuddyBlock {
219 base: new_base,
220 order: self.order + 1,
221 free: true,
222 })
223 }
224}
225
/// Bitmap-based allocator model: one bool per frame, `true` = allocated.
#[derive(Debug, Clone)]
pub struct BitmapModel {
    #[cfg(feature = "alloc")]
    // Allocation bit per frame, indexed by (addr - base) / MODEL_FRAME_SIZE.
    bitmap: Vec<bool>,
    // Byte address of frame 0.
    base: u64,
}

#[cfg(feature = "alloc")]
impl BitmapModel {
    /// Creates a model of `frame_count` frames starting at `base`,
    /// all initially free.
    pub fn new(base: u64, frame_count: usize) -> Self {
        Self {
            bitmap: alloc::vec![false; frame_count],
            base,
        }
    }

    /// Maps a frame address to its bitmap index.
    ///
    /// Returns `None` when `addr` is below `base`, not frame-aligned
    /// relative to `base`, or past the end of the bitmap. The alignment
    /// check matters: without it an unaligned address silently aliases
    /// the frame that contains it.
    fn frame_index(&self, addr: u64) -> Option<usize> {
        let offset = addr.checked_sub(self.base)?;
        if offset % MODEL_FRAME_SIZE != 0 {
            return None;
        }
        let idx = (offset / MODEL_FRAME_SIZE) as usize;
        (idx < self.bitmap.len()).then_some(idx)
    }

    /// Allocates the lowest-indexed free frame and returns its address,
    /// or `None` when every frame is taken.
    pub fn alloc(&mut self) -> Option<u64> {
        let idx = self.bitmap.iter().position(|&taken| !taken)?;
        self.bitmap[idx] = true;
        Some(self.base + (idx as u64) * MODEL_FRAME_SIZE)
    }

    /// Frees the frame at `addr`.
    ///
    /// # Errors
    /// - `InvalidFrame` if `addr` is below `base` or past the last frame.
    /// - `AlignmentViolation` if `addr` is in range but not frame-aligned
    ///   (previously such an address silently freed the containing frame,
    ///   defeating the model's double-free detection).
    /// - `DoubleFree` if the frame is already free.
    pub fn free(&mut self, addr: u64) -> Result<(), AllocModelError> {
        let offset = addr
            .checked_sub(self.base)
            .ok_or(AllocModelError::InvalidFrame)?;
        if offset % MODEL_FRAME_SIZE != 0 {
            return Err(AllocModelError::AlignmentViolation);
        }
        let idx = (offset / MODEL_FRAME_SIZE) as usize;
        if idx >= self.bitmap.len() {
            return Err(AllocModelError::InvalidFrame);
        }
        if !self.bitmap[idx] {
            return Err(AllocModelError::DoubleFree);
        }
        self.bitmap[idx] = false;
        Ok(())
    }

    /// True when `addr` names a managed, frame-aligned address whose
    /// frame is currently allocated.
    pub fn is_allocated(&self, addr: u64) -> bool {
        self.frame_index(addr)
            .map(|idx| self.bitmap[idx])
            .unwrap_or(false)
    }
}

/// Errors reported by the allocator models and invariant checkers.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocModelError {
    /// No frame satisfying the request is free.
    OutOfMemory,
    /// A frame was freed while already free.
    DoubleFree,
    /// The address does not name a frame managed by the model.
    InvalidFrame,
    /// allocated + free no longer equals the total frame count.
    ConservationViolation,
    /// A buddy block exceeds the model's maximum order.
    BuddyInconsistency,
    /// A frame lies outside the requested memory zone.
    ZoneViolation,
    /// An address is not aligned to its required block size.
    AlignmentViolation,
    /// A frame appears in both the allocated and the free set.
    OverlapDetected,
}

/// Namespace for stateless invariant checks over the allocator models.
pub struct AllocInvariantChecker;

#[cfg(feature = "alloc")]
impl AllocInvariantChecker {
    /// Checks that no allocated frame also appears in the free set.
    pub fn verify_no_double_alloc(model: &AllocatorModel) -> Result<(), AllocModelError> {
        let overlapping = model
            .allocated
            .iter()
            .any(|frame| model.free.contains(frame));
        if overlapping {
            Err(AllocModelError::OverlapDetected)
        } else {
            Ok(())
        }
    }

    /// Checks that `frame` is not simultaneously free and allocated.
    pub fn verify_no_use_after_free(
        model: &AllocatorModel,
        frame: u64,
    ) -> Result<(), AllocModelError> {
        let in_both = model.free.contains(&frame) && model.allocated.contains(&frame);
        if in_both {
            return Err(AllocModelError::OverlapDetected);
        }
        Ok(())
    }

    /// Checks a buddy block's order bound and natural alignment.
    pub fn verify_buddy_consistency(block: &BuddyBlock) -> Result<(), AllocModelError> {
        if block.order > MAX_BUDDY_ORDER {
            return Err(AllocModelError::BuddyInconsistency);
        }
        // A buddy block must be aligned to its own size; byte_size() is
        // always >= MODEL_FRAME_SIZE, so the modulo is well-defined.
        if block.base % block.byte_size() != 0 {
            return Err(AllocModelError::AlignmentViolation);
        }
        Ok(())
    }

    /// Checks that allocated + free frames account for every frame.
    pub fn verify_frame_conservation(model: &AllocatorModel) -> Result<(), AllocModelError> {
        let accounted = (model.allocated.len() as u64) + (model.free.len() as u64);
        if accounted == model.total_frames {
            Ok(())
        } else {
            Err(AllocModelError::ConservationViolation)
        }
    }

    /// Checks that `frame` falls inside `zone`'s half-open range.
    pub fn verify_zone_correctness(frame: u64, zone: MemoryZone) -> Result<(), AllocModelError> {
        let (min, max) = zone_range(zone);
        if (min..max).contains(&frame) {
            Ok(())
        } else {
            Err(AllocModelError::ZoneViolation)
        }
    }
}

/// Kani model-checking harnesses for the allocator models above.
#[cfg(kani)]
mod kani_proofs {
    use super::*;

    /// Two live allocations must yield distinct frames.
    #[kani::proof]
    fn proof_no_double_allocation() {
        let mut model = AllocatorModel::new(0x1000, 4);

        let f1 = model.alloc_frame().unwrap();
        let f2 = model.alloc_frame().unwrap();

        assert_ne!(f1, f2, "Two allocations must return different frames");
    }

    /// Freeing a frame makes it allocatable again.
    #[kani::proof]
    fn proof_dealloc_makes_available() {
        let mut model = AllocatorModel::new(0x1000, 1);

        let frame = model.alloc_frame().unwrap();
        // Single-frame model: a second allocation must fail while the
        // only frame is held.
        assert!(model.alloc_frame().is_err());
        model.free_frame(frame).unwrap();
        let frame2 = model.alloc_frame().unwrap();

        assert_eq!(frame, frame2, "Freed frame should be reallocatable");
    }

    /// Splitting any aligned block yields two free halves of order - 1,
    /// with the upper half exactly one half-size above the base.
    #[kani::proof]
    fn proof_buddy_split_correct() {
        let order: u32 = kani::any();
        kani::assume(order > 0 && order <= 5);

        let base: u64 = kani::any();
        let block_size = (1u64 << order) * MODEL_FRAME_SIZE;
        // Base must be naturally aligned; the upper bound keeps the
        // address arithmetic comfortably inside u64.
        kani::assume(base % block_size == 0);
        kani::assume(base < 0x1_0000_0000);
        let block = BuddyBlock {
            base,
            order,
            free: true,
        };
        let (left, right) = block.split().unwrap();

        assert_eq!(left.order, order - 1);
        assert_eq!(right.order, order - 1);
        assert_eq!(left.base, base);
        assert_eq!(right.base, base + (1u64 << (order - 1)) * MODEL_FRAME_SIZE);
        assert!(left.free);
        assert!(right.free);
    }

    /// split followed by coalesce reconstructs the original block.
    #[kani::proof]
    fn proof_buddy_coalesce_correct() {
        let order: u32 = kani::any();
        kani::assume(order > 0 && order <= 5);

        let block_size = (1u64 << order) * MODEL_FRAME_SIZE;
        let base: u64 = kani::any();
        kani::assume(base % block_size == 0);
        kani::assume(base < 0x1_0000_0000);

        let original = BuddyBlock {
            base,
            order,
            free: true,
        };
        let (left, right) = original.split().unwrap();
        let coalesced = left.coalesce(&right).unwrap();

        assert_eq!(coalesced.base, base);
        assert_eq!(coalesced.order, order);
    }

    /// The bitmap-vs-buddy strategy threshold (512 frames) selects
    /// exactly one strategy for every positive frame count.
    #[kani::proof]
    fn proof_bitmap_buddy_threshold() {
        let frame_count: u32 = kani::any();
        kani::assume(frame_count > 0 && frame_count <= 2048);

        let use_bitmap = frame_count < 512;
        let use_buddy = frame_count >= 512;

        // Boolean XOR: exactly one of the two must hold.
        assert!(use_bitmap ^ use_buddy);
    }

    /// allocated + free stays equal to the total frame count after
    /// every allocation and free.
    #[kani::proof]
    fn proof_frame_conservation() {
        let mut model = AllocatorModel::new(0x1000, 4);

        let initial_total = model.total_frames;

        let f1 = model.alloc_frame().unwrap();
        assert_eq!(
            model.allocated_count() as u64 + model.free_count() as u64,
            initial_total
        );

        let f2 = model.alloc_frame().unwrap();
        assert_eq!(
            model.allocated_count() as u64 + model.free_count() as u64,
            initial_total
        );

        model.free_frame(f1).unwrap();
        assert_eq!(
            model.allocated_count() as u64 + model.free_count() as u64,
            initial_total
        );

        model.free_frame(f2).unwrap();
        assert_eq!(
            model.allocated_count() as u64 + model.free_count() as u64,
            initial_total
        );
    }

    /// Every frame below DMA_ZONE_LIMIT passes the DMA zone check.
    #[kani::proof]
    fn proof_zone_dma_range() {
        let frame: u64 = kani::any();
        kani::assume(frame < DMA_ZONE_LIMIT);

        let result = AllocInvariantChecker::verify_zone_correctness(frame, MemoryZone::Dma);
        assert!(result.is_ok());
    }

    /// Allocated frames are page-aligned (model built at aligned base).
    #[kani::proof]
    fn proof_alignment_preserved() {
        let mut model = AllocatorModel::new(0x1000, 4);
        let frame = model.alloc_frame().unwrap();

        assert_eq!(frame % MODEL_FRAME_SIZE, 0, "Frame must be page-aligned");
    }

    /// Two allocated frames never overlap as byte ranges.
    #[kani::proof]
    fn proof_no_overlap() {
        let mut model = AllocatorModel::new(0x1000, 4);
        let f1 = model.alloc_frame().unwrap();
        let f2 = model.alloc_frame().unwrap();

        assert_ne!(f1, f2);
        // Frame ranges are disjoint: one ends before the other starts.
        assert!(f1 + MODEL_FRAME_SIZE <= f2 || f2 + MODEL_FRAME_SIZE <= f1);
    }

    /// A second free of the same frame reports DoubleFree.
    #[kani::proof]
    fn proof_free_idempotent() {
        let mut model = AllocatorModel::new(0x1000, 4);
        let frame = model.alloc_frame().unwrap();
        model.free_frame(frame).unwrap();

        let result = model.free_frame(frame);
        assert_eq!(result, Err(AllocModelError::DoubleFree));
    }
}

/// Concrete unit tests mirroring the Kani proofs above.
#[cfg(test)]
mod tests {
    use super::*;

    /// Basic alloc/free round trip keeps the model's counts consistent.
    #[cfg(feature = "alloc")]
    #[test]
    fn test_alloc_and_free() {
        let mut model = AllocatorModel::new(0x1000, 4);
        assert_eq!(model.free_count(), 4);
        assert_eq!(model.allocated_count(), 0);

        let f = model.alloc_frame().unwrap();
        assert_eq!(model.free_count(), 3);
        assert_eq!(model.allocated_count(), 1);
        assert!(model.is_allocated(f));

        model.free_frame(f).unwrap();
        assert_eq!(model.free_count(), 4);
        assert_eq!(model.allocated_count(), 0);
        assert!(model.is_free(f));
    }

    /// Freeing the same frame twice is detected.
    #[cfg(feature = "alloc")]
    #[test]
    fn test_double_free() {
        let mut model = AllocatorModel::new(0x1000, 4);
        let f = model.alloc_frame().unwrap();
        model.free_frame(f).unwrap();
        assert_eq!(model.free_frame(f), Err(AllocModelError::DoubleFree));
    }

    /// Exhausting the model yields OutOfMemory.
    #[cfg(feature = "alloc")]
    #[test]
    fn test_out_of_memory() {
        let mut model = AllocatorModel::new(0x1000, 1);
        model.alloc_frame().unwrap();
        assert_eq!(model.alloc_frame(), Err(AllocModelError::OutOfMemory));
    }

    /// Frame conservation holds after partial allocation.
    #[cfg(feature = "alloc")]
    #[test]
    fn test_conservation() {
        let mut model = AllocatorModel::new(0x1000, 8);
        for _ in 0..4 {
            model.alloc_frame().unwrap();
        }
        assert!(AllocInvariantChecker::verify_frame_conservation(&model).is_ok());
    }

    /// Allocated/free sets stay disjoint even when fully allocated.
    #[cfg(feature = "alloc")]
    #[test]
    fn test_no_double_alloc() {
        let mut model = AllocatorModel::new(0x1000, 8);
        for _ in 0..8 {
            model.alloc_frame().unwrap();
        }
        assert!(AllocInvariantChecker::verify_no_double_alloc(&model).is_ok());
    }

    /// Splitting an order-3 block yields two order-2 halves.
    #[test]
    fn test_buddy_split() {
        let block = BuddyBlock {
            base: 0x0,
            order: 3,
            free: true,
        };
        let (left, right) = block.split().unwrap();
        assert_eq!(left.order, 2);
        assert_eq!(right.order, 2);
        assert_eq!(left.base, 0x0);
        // Upper half starts 2^2 frames above the base.
        assert_eq!(right.base, 4 * MODEL_FRAME_SIZE);
    }

    /// Two adjacent free order-2 buddies merge into one order-3 block.
    #[test]
    fn test_buddy_coalesce() {
        let left = BuddyBlock {
            base: 0x0,
            order: 2,
            free: true,
        };
        let right = BuddyBlock {
            base: 4 * MODEL_FRAME_SIZE,
            order: 2,
            free: true,
        };
        let merged = left.coalesce(&right).unwrap();
        assert_eq!(merged.order, 3);
        assert_eq!(merged.base, 0);
    }

    /// An order-0 (single-frame) block cannot be split.
    #[test]
    fn test_buddy_no_split_order_zero() {
        let block = BuddyBlock {
            base: 0x0,
            order: 0,
            free: true,
        };
        assert!(block.split().is_none());
    }

    /// Zone-restricted allocation returns a frame inside the DMA zone.
    #[cfg(feature = "alloc")]
    #[test]
    fn test_zone_allocation() {
        let mut model = AllocatorModel::new(0x0, 4);
        let f = model.alloc_frame_zone(MemoryZone::Dma).unwrap();
        assert!(f < DMA_ZONE_LIMIT);
        assert!(AllocInvariantChecker::verify_zone_correctness(f, MemoryZone::Dma).is_ok());
    }

    /// Bitmap model alloc/free round trip.
    #[cfg(feature = "alloc")]
    #[test]
    fn test_bitmap_alloc_free() {
        let mut bm = BitmapModel::new(0x1000, 4);
        let a = bm.alloc().unwrap();
        assert!(bm.is_allocated(a));

        bm.free(a).unwrap();
        assert!(!bm.is_allocated(a));
    }
}