//
//  backendTest.cpp
//  MNN
//
//  Created by MNN on 2019/01/22.
//  Copyright © 2018, Alibaba Group Holding Limited
//
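
// Compares the per-op outputs of a model run on a reference backend (CPU)
// against the same model run on another backend, reporting the first op whose
// results diverge beyond a given tolerance.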

#define MNN_OPEN_TIME_TRACE

#include <math.h>
#include <stdlib.h>
#include <algorithm>
#include <cstring>
#include <fstream>
#include <iostream>
#include <map>
#include <sstream>
#include <string>

#include <MNN/AutoTime.hpp>
#include <MNN/Interpreter.hpp>
#include <MNN/Tensor.hpp>
#include "core/TensorUtils.hpp"
#include "rapidjson/document.h"

template<typename T>
inline T stringConvert(const char* number) {
    std::istringstream os(number);
    T v;
    os >> v;
    return v;
}

using namespace MNN;
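
// Runs the model twice: once on expectType (the reference) and once on
// compareType (the backend under test). Per-op outputs of the reference run
// are saved as ground truth, then checked op by op against the compare run.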
static void compareForwardType(Interpreter* net, MNNForwardType expectType, MNNForwardType compareType, float tolerance,
                               const std::map<std::string, std::shared_ptr<Tensor>>& inputs, const std::string& stopOp, BackendConfig::PrecisionMode precision, int modeNum) {
    std::vector<std::shared_ptr<MNN::Tensor>> correctResult;
    int index = 0;
    MNN::ScheduleConfig expectConfig, compareConfig;
    BackendConfig backendConfig;
    backendConfig.precision     = precision;
    expectConfig.type           = expectType;
    compareConfig.type          = compareType;
    compareConfig.backendConfig = &backendConfig;
    compareConfig.mode          = modeNum;
    auto expectSession  = net->createSession(expectConfig);
    auto compareSession = net->createSession(compareConfig);

    bool allCorrect = true;
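
    // Shared "before" callback: stop the session once the optional stop op is reached.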
    MNN::TensorCallBackWithInfo beginCallBack = [&](const std::vector<MNN::Tensor*>& t, const OperatorInfo* op) {
        if (op->name() == stopOp) {
            return false;
        }
        return true;
    };
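
    // Reference-session "after" callback: copy each op's first output to the host
    // and store it as ground truth. Raster ops, empty tensors, and tensors that
    // own no host or device memory are skipped; the compare callback skips them
    // identically, so the saved results stay index-aligned.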
    MNN::TensorCallBackWithInfo saveExpect = [&](const std::vector<MNN::Tensor*>& t, const OperatorInfo* op) {
        if (op->name() == stopOp) {
            return false;
        }
        if (op->type() == "Raster") {
            return true;
        }
        auto tensor = t[0];
        if (tensor->elementSize() <= 0) {
            return true;
        }
        if (tensor->buffer().device == 0 && tensor->buffer().host == nullptr) {
            return true;
        }
        std::shared_ptr<MNN::Tensor> copyTensor(MNN::Tensor::createHostTensorFromDevice(tensor, true));
        correctResult.emplace_back(copyTensor);
        return true;
    };
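
    // Compare-session "after" callback: copy the op's first output to the host and
    // check it against the saved ground truth at the same position.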
    MNN::TensorCallBackWithInfo compareExpect = [&](const std::vector<MNN::Tensor*>& t, const OperatorInfo* op) {
        if (op->name() == stopOp) {
            return false;
        }
        if (op->type() == "Raster") {
            return true;
        }
        auto tensor = t[0];
        if (tensor->elementSize() <= 0) {
            return true;
        }
        if (tensor->buffer().device == 0 && tensor->buffer().host == nullptr) {
            return true;
        }
        std::shared_ptr<MNN::Tensor> copyTensor(MNN::Tensor::createHostTensorFromDevice(tensor, true));
        auto expectTensor = correctResult[index++];
        auto correct      = TensorUtils::compareTensors(copyTensor.get(), expectTensor.get(), tolerance, true);
        if (!correct) {
            MNN_PRINT("Error for op %s\n", op->name().c_str());
            allCorrect = false;
        }
        return correct;
    };
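
    // Feed identical host inputs to both sessions.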
    for (auto& iter : inputs) {
        Tensor* expectInput = net->getSessionInput(expectSession, iter.first.empty() ? NULL : iter.first.c_str());
        expectInput->copyFromHostTensor(iter.second.get());
        Tensor* compareInput = net->getSessionInput(compareSession, iter.first.empty() ? NULL : iter.first.c_str());
        compareInput->copyFromHostTensor(iter.second.get());
    }
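
    // First pass: record ground truth from the reference session, then run the
    // compare session and check every op against it.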
    correctResult.clear();
    net->runSessionWithCallBackInfo(expectSession, beginCallBack, saveExpect);
    index = 0;
    net->runSessionWithCallBackInfo(compareSession, beginCallBack, compareExpect);

    if (allCorrect) {
        MNN_PRINT("Correct! Run second pass\n");
    } else {
        return;
    }
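
    // Second pass: refresh the inputs and rerun the compare session to verify
    // that the backend's results are reproducible across runs.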
    index = 0;
    for (auto& iter : inputs) {
        Tensor* compareInput = net->getSessionInput(compareSession, iter.first.empty() ? NULL : iter.first.c_str());
        compareInput->copyFromHostTensor(iter.second.get());
    }
    net->runSessionWithCallBackInfo(compareSession, beginCallBack, compareExpect);
    if (allCorrect) {
        MNN_PRINT("Correct!\n");
    }
}
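
// Usage (argument order follows the parsing below; the binary name depends on
// the build, e.g. backendTest.out):
//   <binary> model.mnn [forwardType] [tolerance] [precision] [modeNum] [stopOp]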
int main(int argc, const char* argv[]) {
    // read args
    std::string cmd = argv[0];
    std::string pwd = "./";
    auto rslash     = cmd.rfind("/");
    if (rslash != std::string::npos) {
        pwd = cmd.substr(0, rslash + 1);
    }
    if (argc < 2) {
        MNN_PRINT("Usage: %s model.mnn [forwardType] [tolerance] [precision] [modeNum] [stopOp]\n", argv[0]);
        return 0;
    }

    const char* fileName = argv[1];

    auto type = MNN_FORWARD_CPU;
    if (argc > 2) {
        type = (MNNForwardType)stringConvert<int>(argv[2]);
    }
    MNN_PRINT("Test forward type: %d\n", type);

    float tolerance = 0.05f;
    if (argc > 3) {
        tolerance = stringConvert<float>(argv[3]);
    }
    MNN_PRINT("Tolerance Rate: %f\n", tolerance);

    // create net
    MNN_PRINT("Open Model %s\n", fileName);
    std::shared_ptr<MNN::Interpreter> net =
        std::shared_ptr<MNN::Interpreter>(MNN::Interpreter::createFromFile(fileName));
    if (net == nullptr) {
        MNN_ERROR("Open Model %s failed\n", fileName);
        return 0;
    }
    net->setSessionMode(Interpreter::Session_Debug);

    // create session
    ScheduleConfig config;
    config.type  = MNN_FORWARD_CPU;
    auto session = net->createSession(config);
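
    // If an input.json file sits next to the binary, it lists the input tensors
    // to feed; each named input is then read from <name>.txt in the same
    // directory. Otherwise the model's single default input is read from
    // input_0.txt.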
    std::map<std::string, std::shared_ptr<MNN::Tensor>> inputs;
    std::vector<std::string> inputNames;
    do {
        rapidjson::Document document;
        std::ostringstream jsonNameOs;
        jsonNameOs << pwd << "input.json";
        std::ifstream fileNames(jsonNameOs.str().c_str());
        if (fileNames.fail()) {
            break;
        }
        std::ostringstream output;
        output << fileNames.rdbuf();
        auto outputStr = output.str();
        document.Parse(outputStr.c_str());
        if (document.HasParseError()) {
            MNN_ERROR("Invalid json\n");
            break;
        }
        if (document.HasMember("inputs")) {
            auto inputsInfo = document["inputs"].GetArray();
            for (auto iter = inputsInfo.begin(); iter != inputsInfo.end(); iter++) {
                auto obj = iter->GetObject();
                std::string name = obj["name"].GetString();
                inputNames.emplace_back(name);
            }
        }
    } while (false);

    if (!inputNames.empty()) {
        MNN_PRINT("Found input.json, using inputs:");
        for (auto& n : inputNames) {
            MNN_PRINT(" %s, ", n.c_str());
        }
        MNN_PRINT("\n");
        for (const auto& name : inputNames) {
            auto inputTensor = net->getSessionInput(session, name.c_str());
            std::shared_ptr<MNN::Tensor> givenTensor(new Tensor(inputTensor, inputTensor->getDimensionType()));
            {
                std::ostringstream fileName;
                fileName << pwd << name << ".txt";
                std::ifstream input(fileName.str().c_str());
                MNN_ASSERT(!input.fail());

                int size_w = inputTensor->width();
                int size_h = inputTensor->height();
                int bpp    = inputTensor->channel();
                int batch  = inputTensor->batch();
                MNN_PRINT("Input: %d,%d,%d,%d\n", size_w, size_h, bpp, batch);
                auto inputData = givenTensor->host<float>();
                auto size      = givenTensor->size() / sizeof(float);
                for (int i = 0; i < size; ++i) {
                    input >> inputData[i];
                }
                inputs.insert(std::make_pair(name, givenTensor));
            }
        }
    } else {
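        // No input.json found: feed the model's default (unnamed) input from input_0.txt.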
        auto inputTensor = net->getSessionInput(session, NULL);
        std::shared_ptr<MNN::Tensor> givenTensor(new Tensor(inputTensor, inputTensor->getDimensionType()));
        {
            std::ostringstream fileName;
            fileName << pwd << "input_0.txt";
            std::ifstream input(fileName.str().c_str());
            MNN_ASSERT(!input.fail());

            int size_w = inputTensor->width();
            int size_h = inputTensor->height();
            int bpp    = inputTensor->channel();
            int batch  = inputTensor->batch();
            MNN_PRINT("Input: %d,%d,%d,%d\n", size_w, size_h, bpp, batch);
            auto inputData = givenTensor->host<float>();
            auto size      = givenTensor->size() / sizeof(float);
            for (int i = 0; i < size; ++i) {
                input >> inputData[i];
            }
            inputs.insert(std::make_pair("", givenTensor));
        }
    }

    BackendConfig::PrecisionMode precision = BackendConfig::Precision_Normal;
    if (argc > 4) {
        precision = (BackendConfig::PrecisionMode)atoi(argv[4]);
    }
    FUNC_PRINT(precision);

    int modeNum = 1;
    if (argc > 5) {
        modeNum = atoi(argv[5]); // set GPU mode
    }
    FUNC_PRINT(modeNum);

    std::string stopOp = "";
    if (argc > 6) {
        stopOp = argv[6];
    }
    FUNC_PRINT_ALL(stopOp.c_str(), s);

    compareForwardType(net.get(), MNN_FORWARD_CPU, type, tolerance, inputs, stopOp, precision, modeNum);

    return 0;
}