MNN/source/core/Backend.cpp

92 lines
2.7 KiB
C++
Raw Normal View History

2019-04-17 10:49:11 +08:00
//
// Backend.cpp
// MNN
//
// Created by MNN on 2018/07/06.
// Copyright © 2018, Alibaba Group Holding Limited
//
2020-11-05 16:41:56 +08:00
#include "core/Backend.hpp"
2019-04-17 10:49:11 +08:00
#include <stdio.h>
#include <mutex>
#include "MNN_generated.h"
2020-11-05 16:41:56 +08:00
#include "backend/cpu/CPUTensorConvert.hpp"
2019-12-27 22:16:57 +08:00
#include "core/Macro.h"
#include "core/TensorUtils.hpp"
2019-04-17 10:49:11 +08:00
namespace MNN {
void registerBackend();
2019-05-09 19:39:33 +08:00
2020-11-05 16:41:56 +08:00
static std::map<MNNForwardType, std::pair<const RuntimeCreator*, bool>>& GetExtraCreator() {
static std::once_flag gInitFlag;
2020-11-05 16:41:56 +08:00
static std::map<MNNForwardType, std::pair<const RuntimeCreator*, bool>>* gExtraCreator;
std::call_once(gInitFlag,
[&]() { gExtraCreator = new std::map<MNNForwardType, std::pair<const RuntimeCreator*, bool>>; });
2019-04-17 10:49:11 +08:00
return *gExtraCreator;
}
2020-11-05 16:41:56 +08:00
const RuntimeCreator* MNNGetExtraRuntimeCreator(MNNForwardType type) {
registerBackend();
2019-04-17 10:49:11 +08:00
auto& gExtraCreator = GetExtraCreator();
auto iter = gExtraCreator.find(type);
if (iter == gExtraCreator.end()) {
return nullptr;
}
if (!iter->second.second) {
return iter->second.first;
}
Backend::Info info;
info.type = type;
2020-11-05 16:41:56 +08:00
std::shared_ptr<Runtime> bn(iter->second.first->onCreate(info));
2019-04-17 10:49:11 +08:00
if (nullptr != bn.get()) {
return iter->second.first;
}
return nullptr;
}
2020-11-05 16:41:56 +08:00
bool MNNInsertExtraRuntimeCreator(MNNForwardType type, const RuntimeCreator* creator, bool needCheck) {
2019-04-17 10:49:11 +08:00
auto& gExtraCreator = GetExtraCreator();
if (gExtraCreator.find(type) != gExtraCreator.end()) {
MNN_ASSERT(false && "duplicate type");
return false;
}
gExtraCreator.insert(std::make_pair(type, std::make_pair(creator, needCheck)));
return true;
}
2020-11-05 16:41:56 +08:00
bool MNNCPUCopyBuffer(const Tensor* srcTensor, const Tensor* dstTensor) {
auto& srcBuffer = srcTensor->buffer();
auto& dstBuffer = dstTensor->buffer();
MNN_ASSERT(srcBuffer.dimensions == dstBuffer.dimensions);
MNN_ASSERT(srcBuffer.type == dstBuffer.type);
if (nullptr == srcBuffer.host || nullptr == dstBuffer.host) {
return false;
}
auto code = CPUTensorConverter::convert(srcTensor, dstTensor);
if (NO_ERROR != code) {
MNN_ERROR("Error in CPUBackend::onCopyBuffer\n");
}
return true;
}
// Acquires backend memory for `tensor` via the subclass hook onAcquire and
// stores it in the tensor's describe struct. Returns false on allocation
// failure.
bool Backend::onAcquireBuffer(const Tensor* tensor, StorageType storageType) {
    auto memory = this->onAcquire(tensor, storageType);
    if (nullptr == memory) {
        return false;
    }
    auto describe = TensorUtils::getDescribe(tensor);
    // If onAcquire handed back the very object already held, resetting would
    // destroy memory the tensor still refers to — skip the reset in that case.
    if (memory != describe->mem.get()) {
        describe->mem.reset(memory);
    }
    return true;
}
// Drops the memory recorded for `tensor` in its describe struct.
// `storageType` is ignored here; subclasses may override with richer logic.
bool Backend::onReleaseBuffer(const Tensor* tensor, StorageType storageType) {
    auto describe = TensorUtils::getDescribe(tensor);
    describe->mem.reset(nullptr);
    return true;
}
2019-04-17 10:49:11 +08:00
} // namespace MNN