+// Map the page-aligned virtual address va to physical address pa for process
+// pid with the page attributes attr. Returns 1 on success, 0 if va falls
+// inside the recursive mapping region.
+int
+vmm_set_mapping(pid_t pid, void* va, void* pa, pt_attr attr) {
+    assert(((uintptr_t)va & 0xFFFU) == 0);
+
+    uint32_t l1_index = L1_INDEX(va);
+    uint32_t l2_index = L2_INDEX(va);
+
+    // refuse to map into the recursive page-table mapping region (slot 1023)
+    if (l1_index == 1023) {
+        return 0;
+    }
+
+    __vmm_map_internal(pid, l1_index, l2_index, (uintptr_t)pa, attr, false);
+    return 1;
+}
+
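+// Remove the page-table entry for the page-aligned virtual address va.
+// When free_ppage is non-zero, the backing physical page is also released
+// back to the physical memory manager on behalf of pid.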
+void
+__vmm_unmap_internal(pid_t pid, void* va, int free_ppage) {
+    assert(((uintptr_t)va & 0xFFFU) == 0);
+
+    uint32_t l1_index = L1_INDEX(va);
+    uint32_t l2_index = L2_INDEX(va);
+
+    // refuse to unmap the recursive page-table mapping region (slot 1023)
+    if (l1_index == 1023) {
+        return;
+    }
+
+    // the recursive mapping exposes the page directory at L1_BASE_VADDR
+    x86_page_table* l1pt = (x86_page_table*)L1_BASE_VADDR;
+
+    x86_pte_t l1pte = l1pt->entry[l1_index];
+
+    if (l1pte) {
+        // the corresponding page table is reachable through the recursive slot
+        x86_page_table* l2pt = (x86_page_table*)L2_VADDR(l1_index);
+        x86_pte_t l2pte = l2pt->entry[l2_index];
+        if (IS_CACHED(l2pte) && free_ppage) {
+            // strip the attribute bits to recover the physical page address
+            pmm_free_page(pid, (void*)(l2pte & ~0xFFFU));
+        }
+        // clear the entry first, then invalidate the TLB, so a refill between
+        // the two steps cannot re-cache the stale translation
+        l2pt->entry[l2_index] = PTE_NULL;
+        cpu_invplg(va);
+}
+
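+// Remove the mapping for va without releasing the backing physical page.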
+void
+vmm_unset_mapping(void* va) {
+    __vmm_unmap_internal(0, va, false);
+}
+
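+// Remove the mapping for va and release the backing physical page of pid.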
+void
+vmm_unmap_page(pid_t pid, void* va) {
+    __vmm_unmap_internal(pid, va, true);
+}
+
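+// Look up the current mapping for va.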
+v_mapping
+vmm_lookup(void* va)