diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index 02c1784417b44320b521653a22d456de58a923a2..eab08207dfd6d0f8e7a52061b3cb5975f67912ee 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -137,6 +137,8 @@ add_library(core STATIC
     hle/service/nvdrv/devices/nvmap.h
     hle/service/nvdrv/interface.cpp
     hle/service/nvdrv/interface.h
+    hle/service/nvdrv/memory_manager.cpp
+    hle/service/nvdrv/memory_manager.h
     hle/service/nvdrv/nvdrv.cpp
     hle/service/nvdrv/nvdrv.h
     hle/service/nvdrv/nvmemp.cpp
diff --git a/src/core/hle/service/nvdrv/memory_manager.cpp b/src/core/hle/service/nvdrv/memory_manager.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..55a8675d56a812dd2c30d67df4aa57b5ff186a21
--- /dev/null
+++ b/src/core/hle/service/nvdrv/memory_manager.cpp
@@ -0,0 +1,124 @@
+// Copyright 2018 yuzu emulator team
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include "common/assert.h"
+#include "core/hle/service/nvdrv/memory_manager.h"
+
+namespace Service {
+namespace Nvidia {
+
+PAddr MemoryManager::AllocateSpace(u64 size, u64 align) {
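+    // Reserve an unused, suitably aligned region and mark each of its pages as allocated.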
+    boost::optional<PAddr> paddr = FindFreeBlock(size, align);
+    ASSERT(paddr);
+
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        PageSlot(*paddr + offset) = static_cast<u64>(PageStatus::Allocated);
+    }
+
+    return *paddr;
+}
+
+PAddr MemoryManager::AllocateSpace(PAddr paddr, u64 size, u64 align) {
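+    // If any page in the requested range is already in use, fall back to
+    // allocating the block anywhere in the managed address space.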
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        if (IsPageMapped(paddr + offset)) {
+            return AllocateSpace(size, align);
+        }
+    }
+
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        PageSlot(paddr + offset) = static_cast<u64>(PageStatus::Allocated);
+    }
+
+    return paddr;
+}
+
+PAddr MemoryManager::MapBufferEx(VAddr vaddr, u64 size) {
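+    // Each mapped page slot stores the CPU virtual address backing it, so the
+    // mapping can later be reversed by PhysicalToVirtualAddress.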
+    vaddr &= ~Memory::PAGE_MASK;
+
+    boost::optional<PAddr> paddr = FindFreeBlock(size);
+    ASSERT(paddr);
+
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        PageSlot(*paddr + offset) = vaddr + offset;
+    }
+
+    return *paddr;
+}
+
+PAddr MemoryManager::MapBufferEx(VAddr vaddr, PAddr paddr, u64 size) {
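+    // The target range is expected to have been reserved by AllocateSpace; if it
+    // was not, fall back to mapping into a freshly found free block.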
+    vaddr &= ~Memory::PAGE_MASK;
+    paddr &= ~Memory::PAGE_MASK;
+
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        if (PageSlot(paddr + offset) != static_cast<u64>(PageStatus::Allocated)) {
+            return MapBufferEx(vaddr, size);
+        }
+    }
+
+    for (u64 offset = 0; offset < size; offset += Memory::PAGE_SIZE) {
+        PageSlot(paddr + offset) = vaddr + offset;
+    }
+
+    return paddr;
+}
+
+boost::optional<PAddr> MemoryManager::FindFreeBlock(u64 size, u64 align) {
+    PAddr paddr{};
+    u64 free_space{};
+    align = (align + Memory::PAGE_MASK) & ~Memory::PAGE_MASK;
+
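+    // Linearly scan for a run of `size` contiguous unmapped bytes, restarting the
+    // search at the next aligned address whenever a mapped page is encountered.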
+    while (paddr + free_space < MAX_ADDRESS) {
+        if (!IsPageMapped(paddr + free_space)) {
+            free_space += Memory::PAGE_SIZE;
+            if (free_space >= size) {
+                return paddr;
+            }
+        } else {
+            paddr += free_space + Memory::PAGE_SIZE;
+            free_space = 0;
+            const u64 remainder{paddr % align};
+            if (remainder) {
+                paddr = (paddr - remainder) + align;
+            }
+        }
+    }
+
+    return {};
+}
+
+VAddr MemoryManager::PhysicalToVirtualAddress(PAddr paddr) {
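+    // The slot is expected to hold the CPU virtual address stored by MapBufferEx.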
+    VAddr base_addr = PageSlot(paddr);
+    ASSERT(base_addr != static_cast<u64>(PageStatus::Unmapped));
+    return base_addr + (paddr & Memory::PAGE_MASK);
+}
+
+bool MemoryManager::IsPageMapped(PAddr paddr) {
+    return PageSlot(paddr) != static_cast<u64>(PageStatus::Unmapped);
+}
+
+VAddr& MemoryManager::PageSlot(PAddr paddr) {
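+    // Two-level page table: the outer table indexes lazily allocated blocks whose
+    // entries start out as PageStatus::Unmapped.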
+    auto& block = page_table[(paddr >> (Memory::PAGE_BITS + PAGE_TABLE_BITS)) & PAGE_TABLE_MASK];
+    if (!block) {
+        block = std::make_unique<PageBlock>();
+        for (unsigned index = 0; index < PAGE_BLOCK_SIZE; index++) {
+            (*block)[index] = static_cast<u64>(PageStatus::Unmapped);
+        }
+    }
+    return (*block)[(paddr >> Memory::PAGE_BITS) & PAGE_BLOCK_MASK];
+}
+
+} // namespace Nvidia
+} // namespace Service
diff --git a/src/core/hle/service/nvdrv/memory_manager.h b/src/core/hle/service/nvdrv/memory_manager.h
new file mode 100644
index 0000000000000000000000000000000000000000..4ba1a3952948572a8fc14e39c4a12a45c313f227
--- /dev/null
+++ b/src/core/hle/service/nvdrv/memory_manager.h
@@ -0,0 +1,58 @@
+// Copyright 2018 yuzu emulator team
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <array>
+#include <memory>
+#include <boost/optional.hpp>
+#include "common/common_types.h"
+#include "core/memory.h"
+
+namespace Service {
+namespace Nvidia {
+
+class MemoryManager final {
+public:
+    MemoryManager() = default;
+
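+    /// Reserves a free, aligned block of pages and returns its address.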
+    PAddr AllocateSpace(u64 size, u64 align);
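+    /// Reserves a block of pages at the given address, or anywhere if that range is in use.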
+    PAddr AllocateSpace(PAddr paddr, u64 size, u64 align);
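+    /// Maps a CPU virtual address range into a newly reserved block and returns its address.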
+    PAddr MapBufferEx(VAddr vaddr, u64 size);
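+    /// Maps a CPU virtual address range onto a previously reserved block.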
+    PAddr MapBufferEx(VAddr vaddr, PAddr paddr, u64 size);
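+    /// Returns the CPU virtual address that was mapped at the given address.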
+    VAddr PhysicalToVirtualAddress(PAddr paddr);
+
+private:
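+    /// Searches for a free, aligned run of pages spanning at least `size` bytes.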
+    boost::optional<PAddr> FindFreeBlock(u64 size, u64 align = 1);
+    bool IsPageMapped(PAddr paddr);
+    VAddr& PageSlot(PAddr paddr);
+
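+    /// Sentinel values stored in a page slot while no CPU address is mapped there.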
+    enum class PageStatus : u64 {
+        Unmapped = 0xFFFFFFFFFFFFFFFFULL,
+        Allocated = 0xFFFFFFFFFFFFFFFEULL,
+    };
+
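+    /// The managed space is 2^40 bytes (1 TiB); assuming 4 KiB pages, the two 14-bit
+    /// levels below plus the page offset cover all 40 address bits.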
+    static constexpr u64 MAX_ADDRESS{0x10000000000ULL};
+    static constexpr u64 PAGE_TABLE_BITS{14};
+    static constexpr u64 PAGE_TABLE_SIZE{1 << PAGE_TABLE_BITS};
+    static constexpr u64 PAGE_TABLE_MASK{PAGE_TABLE_SIZE - 1};
+    static constexpr u64 PAGE_BLOCK_BITS{14};
+    static constexpr u64 PAGE_BLOCK_SIZE{1 << PAGE_BLOCK_BITS};
+    static constexpr u64 PAGE_BLOCK_MASK{PAGE_BLOCK_SIZE - 1};
+
+    using PageBlock = std::array<VAddr, PAGE_BLOCK_SIZE>;
+    std::array<std::unique_ptr<PageBlock>, PAGE_TABLE_SIZE> page_table{};
+};
+
+} // namespace Nvidia
+} // namespace Service