//
//  VulkanMemoryPool.cpp
//  MNN
//
//  Created by MNN on 2019/01/31.
//  Copyright © 2018, Alibaba Group Holding Limited
//

#include "VulkanMemoryPool.hpp"
namespace MNN {
// RAII wrapper around a single VkDeviceMemory allocation: acquires the memory
// here, releases it in the destructor.
VulkanMemory::VulkanMemory(const VulkanDevice& dev, const VkMemoryAllocateInfo& info) : mDevice(dev) {
    // Record the request parameters first; they describe this allocation.
    mTypeIndex = info.memoryTypeIndex;
    mSize      = info.allocationSize;
    CALL_VK(mDevice.allocMemory(mMemory, info));
}
VulkanMemory::~VulkanMemory() {
    // Give the device memory acquired in the constructor back to the driver.
    mDevice.freeMemory(mMemory);
}
class VulkanAllocator : public BufferAllocator::Allocator {
|
|
|
|
public:
|
|
|
|
VulkanAllocator(const VulkanDevice& device, int index) : mDevice(device), mIndex(index) {
|
|
|
|
// Do nothing
|
|
|
|
}
|
|
|
|
virtual ~ VulkanAllocator() {
|
|
|
|
// Do nothing
|
|
|
|
}
|
2023-09-04 10:42:11 +08:00
|
|
|
virtual MemChunk onAlloc(size_t size, size_t align) override {
|
2020-12-15 14:12:35 +08:00
|
|
|
VkMemoryAllocateInfo info;
|
|
|
|
info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
|
|
|
|
info.pNext = nullptr;
|
|
|
|
info.allocationSize = size;
|
|
|
|
info.memoryTypeIndex = mIndex;
|
|
|
|
auto mem = new VulkanMemory(mDevice, info);
|
2023-09-04 10:42:11 +08:00
|
|
|
return MemChunk(mem, 0);
|
2020-12-15 14:12:35 +08:00
|
|
|
}
|
2023-09-04 10:42:11 +08:00
|
|
|
virtual void onRelease(MemChunk ptr) override {
|
2020-12-15 14:12:35 +08:00
|
|
|
auto p = (VulkanMemory*)ptr.first;
|
|
|
|
delete p;
|
|
|
|
}
|
|
|
|
private:
|
|
|
|
const VulkanDevice& mDevice;
|
|
|
|
int mIndex;
|
|
|
|
};
/**
 * Builds the pool: one sub-allocator per Vulkan memory type reported by the
 * device, so every request can be served from the exact type index required.
 *
 * @param dev        Vulkan device the pool allocates from; must outlive the pool.
 * @param permitFp16 Stored flag; whether fp16 storage is allowed (read elsewhere).
 *
 * Fix: loop index was `int` compared against `size_t` (signed/unsigned
 * mismatch); use size_t.
 */
VulkanMemoryPool::VulkanMemoryPool(const VulkanDevice& dev, bool permitFp16) : mDevice(dev) {
    mAllocators.resize(dev.memProty().memoryTypeCount);
    for (size_t i = 0; i < mAllocators.size(); ++i) {
        std::shared_ptr<BufferAllocator::Allocator> allocReal(new VulkanAllocator(dev, (int)i));
        // nonCoherentAtomSize: minimum alignment for flushing non-coherent
        // mapped memory ranges, used as the allocator's base alignment.
        mAllocators[i].reset(new EagerBufferAllocator(allocReal, dev.proty().limits.nonCoherentAtomSize));
    }
    mPermitFp16 = permitFp16;
}
/**
 * Builds a child pool that recursively delegates to a parent pool's
 * sub-allocators (one per memory type), sharing the parent's device.
 *
 * @param parent Pool to delegate to; must be non-null and outlive this pool.
 *
 * Fix: loop index was `int` compared against `size_t` (signed/unsigned
 * mismatch); use size_t.
 */
VulkanMemoryPool::VulkanMemoryPool(const VulkanMemoryPool* parent) : mDevice(parent->mDevice) {
    mPermitFp16 = parent->mPermitFp16;
    mAllocators.resize(mDevice.memProty().memoryTypeCount);
    for (size_t i = 0; i < mAllocators.size(); ++i) {
        // Wrap the parent's allocator for the same memory type index.
        std::shared_ptr<BufferAllocator::Allocator> allocReal = BufferAllocator::Allocator::createRecurse(parent->mAllocators[i].get());
        mAllocators[i].reset(new EagerBufferAllocator(allocReal, mDevice.proty().limits.nonCoherentAtomSize));
    }
}
VulkanMemoryPool::~VulkanMemoryPool() {
    // Drop every cached allocation before the pool goes away.
    clear();
}
/**
 * Creates a VkBuffer of the given size / usage / sharing mode on the device.
 *
 * Fix: `res` was left uninitialized; if createBuffer failed (CALL_VK
 * presumably logs/asserts — confirm), an indeterminate handle was returned.
 * Initialize to VK_NULL_HANDLE so failure yields a well-defined null handle.
 */
VkBuffer VulkanMemoryPool::allocBuffer(size_t size, VkBufferUsageFlags flags, VkSharingMode shared) {
    VkBuffer res = VK_NULL_HANDLE;
    CALL_VK(mDevice.createBuffer(res, size, flags, shared));
    return res;
}
// Destroys a buffer previously created by allocBuffer. `size`, `flags` and
// `shared` are deliberately unused: buffers are destroyed immediately rather
// than cached, but the signature mirrors allocBuffer for callers.
void VulkanMemoryPool::returnBuffer(VkBuffer buffer, size_t size, VkBufferUsageFlags flags, VkSharingMode shared) {
    mDevice.destroyBuffer(buffer);
}
/**
 * Allocates device memory satisfying `requirements`, restricted to memory
 * types whose property flags contain every bit of `extraMask`.
 *
 * @param requirements Size / alignment / allowed-type bits from Vulkan.
 * @param extraMask    Required VkMemoryPropertyFlags bits.
 * @param separate     Passed through to the sub-allocator (request a
 *                     non-shared block).
 * @return MemChunk whose offset respects requirements.alignment.
 *
 * Fix: `MNN_ASSERT(index >= 0)` was a tautology (`index` is uint32_t) and the
 * loop silently fell back to type 0 when no type matched; assert that a
 * compatible memory type was actually found instead.
 */
MemChunk VulkanMemoryPool::allocMemory(const VkMemoryRequirements& requirements, VkFlags extraMask,
                                       bool separate) {
    uint32_t index = 0;
    bool found     = false;
    auto typeBits  = requirements.memoryTypeBits;
    for (uint32_t i = 0; i < mDevice.memProty().memoryTypeCount; i++) {
        if ((typeBits & 1) == 1) {
            // Type is available, does it match user properties?
            if ((mDevice.memProty().memoryTypes[i].propertyFlags & extraMask) == extraMask) {
                index = i;
                found = true;
                break;
            }
        }
        typeBits >>= 1;
    }
    MNN_ASSERT(found);
    MNN_ASSERT(index < mAllocators.size());
    auto mem = mAllocators[index]->alloc(requirements.size, separate, requirements.alignment);
    MNN_ASSERT(mem.second % requirements.alignment == 0);
    return mem;
}
// Routes a chunk back to the sub-allocator that owns its memory type.
void VulkanMemoryPool::returnMemory(MemChunk memory) {
    auto chunk = (VulkanMemory*)memory.first;
    mAllocators[chunk->type()]->free(memory);
}
void VulkanMemoryPool::clear() {
|
2020-12-15 14:12:35 +08:00
|
|
|
for (auto& iter : mAllocators) {
|
|
|
|
iter->release(false);
|
2019-04-17 10:49:11 +08:00
|
|
|
}
|
2020-11-05 16:41:56 +08:00
|
|
|
}
float VulkanMemoryPool::computeSize() const {
|
|
|
|
float totalSize = 0;
|
2020-12-15 14:12:35 +08:00
|
|
|
for (auto& piter : mAllocators) {
|
|
|
|
totalSize += (float)piter->totalSize();
|
2019-04-17 10:49:11 +08:00
|
|
|
}
|
2020-12-15 14:12:35 +08:00
|
|
|
return totalSize / 1024.0f / 1024.0f;
|
2019-04-17 10:49:11 +08:00
|
|
|
}
} // namespace MNN