summaryrefslogtreecommitdiff
path: root/crates/windows-kernel-rs/src/mdl.rs
blob: cb539b693b97aa35906a6ef1c59e8b4b6d9cbe53 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
use crate::{error::Error, memory::MemoryCaching};

/// Processor access mode for a locked-page mapping.
///
/// Discriminants mirror the kernel's `_MODE` enumeration so a value can be
/// cast (`access as _`) straight into `MmMapLockedPagesSpecifyCache`.
#[repr(i32)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AccessMode {
  /// Map the pages into system (kernel) address space.
  KernelMode = windows_kernel_sys::base::_MODE::KernelMode,
  /// Map the pages into the current process's user address space.
  UserMode   = windows_kernel_sys::base::_MODE::UserMode,
}

/// Owning wrapper around a raw Windows memory descriptor list (`MDL`).
///
/// The wrapped MDL is released with `IoFreeMdl` when this value is dropped.
pub struct MemoryDescriptorList {
  // Raw MDL pointer allocated by `IoAllocateMdl`; owned (freed in `Drop`).
  raw: *mut windows_kernel_sys::base::MDL,
}

// SAFETY: NOTE(review) — asserts the raw MDL pointer may be moved to and
// shared between threads. Nothing in this file demonstrates that the kernel
// APIs used here tolerate cross-thread access; confirm before relying on it.
unsafe impl Send for MemoryDescriptorList {}
unsafe impl Sync for MemoryDescriptorList {}

impl MemoryDescriptorList {
  /// Allocates a new MDL describing the buffer at `addr` spanning `size` bytes.
  ///
  /// # Safety
  ///
  /// `addr` must point to a buffer of at least `size` bytes that remains
  /// valid for as long as the returned MDL (and any mapping made from it)
  /// is in use.
  ///
  /// # Errors
  ///
  /// Returns `Error::INSUFFICIENT_RESOURCES` if `IoAllocateMdl` fails.
  pub unsafe fn new(addr: *mut core::ffi::c_void, size: usize) -> Result<Self, Error> {
    use windows_kernel_sys::ntoskrnl::IoAllocateMdl;

    // Standalone MDL: not a secondary buffer, no charge to quota, no IRP.
    let raw = IoAllocateMdl(
      addr,
      size as _,
      false as _,
      false as _,
      core::ptr::null_mut(),
    );

    if raw.is_null() {
      return Err(Error::INSUFFICIENT_RESOURCES);
    }

    Ok(Self {
      raw,
    })
  }

  /// Updates the MDL to describe the underlying physical pages of a
  /// non-paged-pool buffer (no locking needed for non-paged memory).
  pub fn build_for_non_paged_pool(&mut self) {
    use windows_kernel_sys::ntoskrnl::MmBuildMdlForNonPagedPool;

    unsafe {
      MmBuildMdlForNonPagedPool(self.raw);
    }
  }

  /// Maps the pages described by this MDL and returns the mapping.
  ///
  /// Consumes `self`: ownership of the MDL moves into the returned
  /// [`LockedMapping`], which unmaps and frees it on drop.
  ///
  /// # Errors
  ///
  /// Returns `Error::INSUFFICIENT_RESOURCES` if the kernel cannot map the
  /// pages (the call is made with `BugCheckOnFailure == FALSE`, so failure
  /// is reported as a NULL mapping rather than a bug check).
  pub fn map_locked_pages(
    self,
    access: AccessMode,
    caching: MemoryCaching,
    desired_addr: Option<*mut core::ffi::c_void>,
  ) -> Result<LockedMapping, Error> {
    use windows_kernel_sys::ntoskrnl::MmMapLockedPagesSpecifyCache;

    let ptr = unsafe {
      MmMapLockedPagesSpecifyCache(
        self.raw,
        access as _,
        caching as _,
        desired_addr.unwrap_or(core::ptr::null_mut()),
        false as _, // BugCheckOnFailure: FALSE -> return NULL on failure
        0,          // page priority (0 == lowest)
      )
    };

    // BUG FIX: the original wrapped a possibly-NULL mapping in `Ok`.
    // With BugCheckOnFailure == FALSE, NULL signals mapping failure.
    if ptr.is_null() {
      return Err(Error::INSUFFICIENT_RESOURCES);
    }

    let raw = self.raw;
    // BUG FIX: ownership of the MDL transfers to the LockedMapping. Suppress
    // this type's Drop, which would otherwise run `IoFreeMdl` here and leave
    // the LockedMapping holding a dangling MDL (double free on its drop).
    core::mem::forget(self);

    Ok(LockedMapping {
      raw,
      ptr,
    })
  }
}

impl Drop for MemoryDescriptorList {
  /// Releases the MDL that was allocated in [`MemoryDescriptorList::new`].
  fn drop(&mut self) {
    // SAFETY: `self.raw` was returned non-null by `IoAllocateMdl` and this
    // struct is the sole owner, so freeing it exactly once here is sound.
    unsafe { windows_kernel_sys::ntoskrnl::IoFreeMdl(self.raw) }
  }
}

/// A mapped view of the pages described by an MDL.
///
/// Holds both the MDL and the virtual address returned by
/// `MmMapLockedPagesSpecifyCache`; on drop the mapping is torn down and the
/// MDL is freed.
pub struct LockedMapping {
  // The MDL whose pages are mapped; owned (freed in `Drop`).
  raw: *mut windows_kernel_sys::base::MDL,
  // Base virtual address of the mapping produced by
  // `MmMapLockedPagesSpecifyCache`.
  ptr: *mut core::ffi::c_void,
}

// SAFETY: NOTE(review) — asserts the mapping and MDL pointers may be moved
// to and shared between threads. Nothing in this file demonstrates that;
// confirm against the kernel mapping APIs before relying on it.
unsafe impl Send for LockedMapping {}
unsafe impl Sync for LockedMapping {}

impl LockedMapping {
  /// Returns the base virtual address of the mapping.
  pub fn ptr(&self) -> *mut core::ffi::c_void { self.ptr }

  /// Unmaps the pages and hands the underlying MDL back to the caller.
  pub fn unlock(self) -> MemoryDescriptorList {
    use windows_kernel_sys::ntoskrnl::MmUnmapLockedPages;

    unsafe {
      MmUnmapLockedPages(self.ptr, self.raw);
    }

    let raw = self.raw;
    // BUG FIX: ownership of the MDL transfers to the returned value.
    // Suppress this type's Drop, which would otherwise unmap the pages a
    // second time and `IoFreeMdl` the MDL we are handing out (double free).
    core::mem::forget(self);

    MemoryDescriptorList {
      raw,
    }
  }
}

impl Drop for LockedMapping {
  /// Tears down the mapping, then frees the MDL behind it.
  fn drop(&mut self) {
    // SAFETY: `self.ptr` is the mapping produced from `self.raw`, and this
    // struct owns both; unmapping and then freeing exactly once is sound.
    unsafe {
      windows_kernel_sys::ntoskrnl::MmUnmapLockedPages(self.ptr, self.raw);
      windows_kernel_sys::ntoskrnl::IoFreeMdl(self.raw);
    }
  }
}