add clean_up and clean_up_with_filter #264

Merged (20 commits) on Sep 5, 2021
Changes from 4 commits
16 changes: 8 additions & 8 deletions src/instructions/interrupts.rs
@@ -149,14 +149,14 @@ pub fn int3() {

/// Generate a software interrupt by invoking the `int` instruction.
///
/// This currently needs to be a macro because the `int` argument needs to be an
/// immediate. This macro will be replaced by a generic function when support for
/// const generics is implemented in Rust.
/// ## Safety
///
/// Invoking an arbitrary interrupt is unsafe. It can cause your system to
/// crash if you invoke a double-fault (#8) or machine-check (#18) exception.
/// It can also cause memory/register corruption depending on the interrupt
/// implementation (if it expects values/pointers to be passed in registers).
#[cfg(feature = "inline_asm")]
#[cfg_attr(docsrs, doc(cfg(any(feature = "nightly", feature = "inline_asm"))))]
#[macro_export]
macro_rules! software_interrupt {
($x:expr) => {{
asm!("int {id}", id = const $x, options(nomem, nostack));
}};
pub unsafe fn software_interrupt<const NUM: u8>() {
asm!("int {num}", num = const NUM, options(nomem, nostack));
}
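
With const generics, the interrupt vector is now a type-level parameter rather than a macro argument. A minimal call-site sketch (hypothetical; it assumes the crate's `inline_asm` feature is enabled and that a handler for vector 42 is installed in the loaded IDT, as in the test further down):

```rust
use x86_64::instructions::interrupts;

fn raise_test_interrupt() {
    // SAFETY: vector 42 has a handler registered in the currently loaded IDT
    // and that handler does not expect values to be passed in registers.
    unsafe { interrupts::software_interrupt::<42>() };
}
```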
99 changes: 97 additions & 2 deletions src/structures/paging/mapper/mapped_page_table.rs
@@ -1,8 +1,8 @@
use crate::structures::paging::{
frame::PhysFrame,
frame_alloc::FrameAllocator,
frame_alloc::{FrameAllocator, FrameDeallocator},
mapper::*,
page::{AddressNotAligned, Page, Size1GiB, Size2MiB, Size4KiB},
page::{AddressNotAligned, Page, PageRangeInclusive, Size1GiB, Size2MiB, Size4KiB},
page_table::{FrameError, PageTable, PageTableEntry, PageTableFlags},
};

@@ -140,6 +140,101 @@ impl<'a, P: PageTableFrameMapping> MappedPageTable<'a, P> {

Ok(MapperFlush::new(page))
}

/// Remove all empty P1-P3 tables
#[inline]
pub fn clean_up<D>(&mut self, frame_deallocator: &mut D)
where
D: FrameDeallocator<Size4KiB>,
{
self.clean_up_with_filter(|_| true, frame_deallocator)
}

/// Recursively iterate through all page tables and conditionally remove empty P1-P3 tables
///
/// On each level, `filter` is called with the range of virtual memory addressable by the page table.
/// If `filter` returns `true`, the algorithm recurses into each used entry; if afterwards all entries are unused, the table is deallocated.
///
/// ```
/// # use core::ops::RangeInclusive;
/// # use x86_64::{VirtAddr, structures::paging::{
/// # FrameDeallocator, Size4KiB, MappedPageTable, mapper::PageTableFrameMapping, page::{Page, PageRangeInclusive},
/// # }};
/// # unsafe fn test<P: PageTableFrameMapping>(page_table: &mut MappedPageTable<P>, frame_deallocator: &mut impl FrameDeallocator<Size4KiB>) {
/// fn ranges_intersect(a: PageRangeInclusive, b: PageRangeInclusive) -> bool {
/// a.start <= b.end && b.start <= a.end
/// }
///
/// // clean up all page tables in the lower half of the address space
/// let lower_half = PageRangeInclusive {
/// start: Page::containing_address(VirtAddr::new(0)),
/// end: Page::containing_address(VirtAddr::new(0x0000_7fff_ffff_ffff)),
/// };
/// page_table.clean_up_with_filter(|range| ranges_intersect(range, lower_half), frame_deallocator);
/// # }
/// ```
pub fn clean_up_with_filter<F, D>(&mut self, mut filter: F, frame_deallocator: &mut D)
where
F: FnMut(PageRangeInclusive) -> bool,
D: FrameDeallocator<Size4KiB>,
{
fn clean_up<P: PageTableFrameMapping>(
page_table: &mut PageTable,
page_table_walker: &PageTableWalker<P>,
base_addr: VirtAddr,
level: u8,
filter: &mut impl FnMut(PageRangeInclusive) -> bool,
frame_deallocator: &mut impl FrameDeallocator<Size4KiB>,
) -> bool {
let start = Page::containing_address(base_addr);
let end = Page::containing_address(base_addr + ((1u64 << ((level * 9) + 12)) - 1));

let range = PageRangeInclusive { start, end };
if !filter(range) {
return false;
}

let mut is_empty = true;
for (i, entry) in page_table.iter_mut().enumerate() {
if level != 1 {
if let Ok(page_table) = page_table_walker.next_table_mut(entry) {
let offset = 1u64 << (((level - 1) * 9) + 12);
// Base address covered by the i-th child table.
let base_addr = base_addr + offset * (i as u64);
if clean_up(
page_table,
page_table_walker,
base_addr,
level - 1,
filter,
frame_deallocator,
) {
let frame = entry.frame().unwrap();
entry.set_unused();
unsafe {
// SAFETY: the frame is no longer used
frame_deallocator.deallocate_frame(frame);
}
}
}
}

if !entry.is_unused() {
is_empty = false;
}
}

is_empty
}

clean_up(
self.level_4_table,
&self.page_table_walker,
VirtAddr::new(0),
4,
&mut filter,
frame_deallocator,
);
}
}

impl<'a, P: PageTableFrameMapping> Mapper<Size1GiB> for MappedPageTable<'a, P> {
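Beyond the lower-half filter in the doc example above, a typical caller pairs this with `unmap`: once a region's mappings are torn down, the intermediate tables that became empty can be handed back to the frame allocator. The sketch below is illustrative only; the region bounds and the helper name are invented, and it assumes a `MappedPageTable` and a frame deallocator are already set up elsewhere:

```rust
use x86_64::{VirtAddr, structures::paging::{
    FrameDeallocator, MappedPageTable, Size4KiB,
    mapper::{Mapper, PageTableFrameMapping},
    page::{Page, PageRangeInclusive},
}};

/// Tear down a (hypothetical) user region and free the page-table frames
/// that became empty in the process.
unsafe fn drop_user_region<P: PageTableFrameMapping>(
    page_table: &mut MappedPageTable<P>,
    frame_deallocator: &mut impl FrameDeallocator<Size4KiB>,
) {
    let region: PageRangeInclusive = PageRangeInclusive {
        start: Page::containing_address(VirtAddr::new(0x1000)),
        end: Page::containing_address(VirtAddr::new(0x7fff_f000)),
    };

    for page in region {
        // Skip holes; a real kernel would distinguish the unmap error cases.
        if let Ok((frame, flush)) = page_table.unmap(page) {
            flush.flush();
            frame_deallocator.deallocate_frame(frame);
        }
    }

    // Only descend into tables whose range overlaps the region just emptied.
    page_table.clean_up_with_filter(
        |range| range.start <= region.end && region.start <= range.end,
        frame_deallocator,
    );
}
```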
44 changes: 43 additions & 1 deletion src/structures/paging/mapper/offset_page_table.rs
@@ -1,7 +1,8 @@
#![cfg(target_pointer_width = "64")]

use crate::structures::paging::{
frame::PhysFrame, mapper::*, page_table::PageTable, Page, PageTableFlags,
frame::PhysFrame, mapper::*, page::PageRangeInclusive, page_table::PageTable, FrameDeallocator,
Page, PageTableFlags,
};

/// A Mapper implementation that requires that the complete physical memory is mapped at some
@@ -41,6 +42,47 @@ impl<'a> OffsetPageTable<'a> {
pub fn level_4_table(&mut self) -> &mut PageTable {
self.inner.level_4_table()
}

/// Remove all empty P1-P3 tables
#[inline]
pub fn clean_up<D>(&mut self, frame_deallocator: &mut D)
where
D: FrameDeallocator<Size4KiB>,
{
self.inner.clean_up(frame_deallocator)
}

/// Recursively iterate through all page tables and conditionally remove empty P1-P3 tables
///
/// On each level, `filter` is called with the range of virtual memory addressable by the page table.
/// If `filter` returns `true`, the algorithm recurses into each used entry; if afterwards all entries are unused, the table is deallocated.
///
/// ```
/// # use core::ops::RangeInclusive;
/// # use x86_64::{VirtAddr, structures::paging::{
/// # FrameDeallocator, Size4KiB, MappedPageTable, mapper::OffsetPageTable, page::{Page, PageRangeInclusive},
/// # }};
/// # unsafe fn test(page_table: &mut OffsetPageTable, frame_deallocator: &mut impl FrameDeallocator<Size4KiB>) {
/// fn ranges_intersect(a: PageRangeInclusive, b: PageRangeInclusive) -> bool {
/// a.start <= b.end && b.start <= a.end
/// }
///
/// // clean up all page tables in the lower half of the address space
/// let lower_half = PageRangeInclusive {
/// start: Page::containing_address(VirtAddr::new(0)),
/// end: Page::containing_address(VirtAddr::new(0x0000_7fff_ffff_ffff)),
/// };
/// page_table.clean_up_with_filter(|range| ranges_intersect(range, lower_half), frame_deallocator);
/// # }
/// ```
#[inline]
pub fn clean_up_with_filter<F, D>(&mut self, filter: F, frame_deallocator: &mut D)
where
F: FnMut(PageRangeInclusive) -> bool,
D: FrameDeallocator<Size4KiB>,
{
self.inner.clean_up_with_filter(filter, frame_deallocator)
}
}

#[derive(Debug)]
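For the common `OffsetPageTable` setup, the unconditional `clean_up` is usually all that is needed. A minimal sketch (hypothetical helper; the mapper and deallocator would come from the kernel's init code):

```rust
use x86_64::structures::paging::{
    mapper::OffsetPageTable, FrameDeallocator, Size4KiB,
};

/// Return every page-table frame that no longer maps anything to the allocator.
fn reclaim_page_tables(
    mapper: &mut OffsetPageTable<'_>,
    deallocator: &mut impl FrameDeallocator<Size4KiB>,
) {
    mapper.clean_up(deallocator);
}
```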
2 changes: 1 addition & 1 deletion testing/Cargo.toml
@@ -5,7 +5,7 @@ authors = ["Philipp Oppermann <dev@phil-opp.com>"]
edition = "2018"

[[test]]
name = "breakpoint_exception"
name = "interrupt_handling"
harness = false

[[test]]
@@ -7,7 +7,10 @@ use core::sync::atomic::{AtomicUsize, Ordering};
use lazy_static::lazy_static;
use testing::{exit_qemu, serial_print, serial_println, QemuExitCode};

use x86_64::instructions::interrupts;

static BREAKPOINT_HANDLER_CALLED: AtomicUsize = AtomicUsize::new(0);
static INTERRUPT_HANDLER_CALLED: AtomicUsize = AtomicUsize::new(0);

#[no_mangle]
pub extern "C" fn _start() -> ! {
@@ -16,13 +19,10 @@ pub extern "C" fn _start() -> ! {
init_test_idt();

// invoke a breakpoint exception
x86_64::instructions::interrupts::int3();
interrupts::int3();

match BREAKPOINT_HANDLER_CALLED.load(Ordering::SeqCst) {
1 => {
serial_println!("[ok]");
exit_qemu(QemuExitCode::Success);
}
1 => {}
0 => {
serial_println!("[failed]");
serial_println!(" Breakpoint handler was not called.");
@@ -35,6 +35,29 @@ pub extern "C" fn _start() -> ! {
}
}

serial_print!("interrupt 42... ");
unsafe { interrupts::software_interrupt::<42>() };
serial_print!("interrupt 77... ");
unsafe { interrupts::software_interrupt::<77>() };
serial_print!("interrupt 42... ");
unsafe { interrupts::software_interrupt::<42>() };

match INTERRUPT_HANDLER_CALLED.load(Ordering::SeqCst) {
3 => {}
0 => {
serial_println!("[failed]");
serial_println!(" Interrupt handler was not called.");
exit_qemu(QemuExitCode::Failed);
}
other => {
serial_println!("[failed]");
serial_println!(" Interrupt handler was called {} times", other);
exit_qemu(QemuExitCode::Failed);
}
}

serial_println!("[ok]");
exit_qemu(QemuExitCode::Success);
loop {}
}

@@ -49,6 +72,8 @@ lazy_static! {
static ref TEST_IDT: InterruptDescriptorTable = {
let mut idt = InterruptDescriptorTable::new();
idt.breakpoint.set_handler_fn(breakpoint_handler);
idt[42].set_handler_fn(interrupt_handler);
idt[77].set_handler_fn(interrupt_handler);
idt
};
}
@@ -60,3 +85,7 @@ pub fn init_test_idt() {
extern "x86-interrupt" fn breakpoint_handler(_stack_frame: InterruptStackFrame) {
BREAKPOINT_HANDLER_CALLED.fetch_add(1, Ordering::SeqCst);
}

extern "x86-interrupt" fn interrupt_handler(_stack_frame: InterruptStackFrame) {
INTERRUPT_HANDLER_CALLED.fetch_add(1, Ordering::SeqCst);
}