-void* vmm_v2p(void* va) {
- uintptr_t pd_offset = PD_INDEX(va);
- uintptr_t pt_offset = PT_INDEX(va);
- uintptr_t po = PG_OFFSET(va);
- ptd_t* self_pde = PTD_BASE_VADDR;
-
- ptd_t pde = self_pde[pd_offset];
- if (pde) {
- pt_t pte = ((pt_t*)PT_VADDR(pd_offset))[pt_offset];
- if (pte) {
- uintptr_t ppn = pte >> 12;
- return (void*)P_ADDR(ppn, po);
+/*
+ * Establish a virtual->physical page mapping for `va`.
+ *
+ * va   - page-aligned virtual address to map (low 12 bits must be clear).
+ * pa   - physical address to back the page with. NOTE(review): alignment of
+ *        `pa` is not asserted here — presumably __vmm_map_internal masks or
+ *        expects a page-aligned frame; confirm against its definition.
+ * attr - page-table attribute bits forwarded verbatim to the mapper.
+ *
+ * Silently refuses (returns without mapping) when the address falls in the
+ * top page-directory slot, which is reserved for the recursive self-mapping.
+ */
+void
+vmm_set_mapping(void* va, void* pa, pt_attr attr) {
+    // 4 KiB page granularity: va must be page-aligned.
+    assert(((uintptr_t)va & 0xFFFU) == 0);
+
+    // Split va into its directory (L1) and table (L2) indices.
+    uint32_t l1_index = L1_INDEX(va);
+    uint32_t l2_index = L2_INDEX(va);
+
+    // prevent map of recursive mapping region
+    // (directory entry 1023 is the recursive self-map; overwriting it would
+    //  destroy the kernel's window onto the page tables themselves)
+    if (l1_index == 1023) {
+        return;
+    }
+
+    // `false` here is an opaque flag to the internal mapper — its meaning is
+    // not visible in this hunk; see __vmm_map_internal. TODO(review): confirm.
+    __vmm_map_internal(l1_index, l2_index, (uintptr_t)pa, attr, false);
+}
+
+void
+vmm_unmap_page(void* va)
+{
+ assert(((uintptr_t)va & 0xFFFU) == 0);
+
+ uint32_t l1_index = L1_INDEX(va);
+ uint32_t l2_index = L2_INDEX(va);
+
+ // prevent unmap of recursive mapping region
+ if (l1_index == 1023) {
+ return;
+ }
+
+ x86_page_table* l1pt = (x86_page_table*)L1_BASE_VADDR;
+
+ x86_pte_t l1pte = l1pt->entry[l1_index];
+
+ if (l1pte) {
+ x86_page_table* l2pt = (x86_page_table*)L2_VADDR(l1_index);
+ x86_pte_t l2pte = l2pt->entry[l2_index];
+ if (IS_CACHED(l2pte)) {
+ pmm_free_page((void*)l2pte);