//
//  testModelWithDescrisbe.cpp
//  MNN
//
//  Created by MNN on 2019/01/22.
//  Copyright © 2018, Alibaba Group Holding Limited
//

#define MNN_OPEN_TIME_TRACE

#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <cstring>
#include <fstream>
#include <iostream>
#include <map>
#include <memory>
#include <sstream>
#include <MNN/AutoTime.hpp>
#include "core/Backend.hpp"
#include "ConfigFile.hpp"
#include <MNN/Interpreter.hpp>
#include <MNN/MNNDefine.h>
#include "core/Macro.h"
#include <MNN/Tensor.hpp>
#include "core/TensorUtils.hpp"
#include <MNN/expr/Module.hpp>
#include <MNN/expr/MathOp.hpp>
#include <MNN/expr/NeuralNetWorkOp.hpp>

#define NONE "\e[0m"
#define RED "\e[0;31m"
#define GREEN "\e[0;32m"
#define L_GREEN "\e[1;32m"
#define BLUE "\e[0;34m"
#define L_BLUE "\e[1;34m"
#define BOLD "\e[1m"

using namespace MNN::Express;

template<typename T>
inline T stringConvert(const char* number) {
    std::istringstream os(number);
    T v;
    os >> v;
    return v;
}
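
// Read `size` whitespace-separated values from the text file `name` into `ptr`,
// interpreting them according to `dtype` (float32, int32 or uint8).
// Returns 0 on success, -1 if the file cannot be opened or the type is unsupported.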
int loadData(const std::string name, void* ptr, int size, halide_type_t dtype) {
    std::ifstream stream(name.c_str());
    if (stream.fail()) {
        return -1;
    }
    switch (dtype.code) {
        case halide_type_float: {
            auto data = static_cast<float*>(ptr);
            for (int i = 0; i < size; ++i) {
                double temp = 0.0f;
                stream >> temp;
                data[i] = temp;
            }
        } break;
        case halide_type_int: {
            MNN_ASSERT(dtype.bits == 32);
            auto data = static_cast<int32_t*>(ptr);
            for (int i = 0; i < size; ++i) {
                int temp = 0;
                stream >> temp;
                data[i] = temp;
            }
        } break;
        case halide_type_uint: {
            MNN_ASSERT(dtype.bits == 8);
            auto data = static_cast<uint8_t*>(ptr);
            for (int i = 0; i < size; ++i) {
                int temp = 0;
                stream >> temp;
                data[i] = temp;
            }
        } break;
        default: {
            stream.close();
            return -1;
        }
    }

    stream.close();
    return 0;
}
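
// Allocate a host tensor with the same shape and type as `shape` and fill it from
// the text file `name`. Returns nullptr if the file cannot be loaded; the caller
// owns the returned tensor.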
MNN::Tensor* createTensor(const MNN::Tensor* shape, const std::string name) {
    auto result = new MNN::Tensor(shape, shape->getDimensionType());
    result->buffer().type = shape->buffer().type;
    if (!loadData(name, result->host<void>(), result->elementSize(), result->getType())) {
        return result;
    }
    delete result;
    return NULL;
}
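
// Build an NHWC constant VARP with the given shape and element type, filled from
// the text file `name`.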
VARP createVar(const std::string name, INTS shape, halide_type_t dtype) {
    int size = 1;
    for (int dim : shape) {
        size *= dim;
    }
    std::unique_ptr<char[]> data(new char[size * dtype.bytes()]);
    loadData(name, data.get(), size, dtype);
    return _Const(data.get(), shape, NHWC, dtype);
}
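
// Compare a computed VARP against the reference values stored in `name`.
// The check fails when the largest absolute difference exceeds 1% of the largest
// absolute reference value, or when that reference maximum is NaN.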
template <typename T>
bool compareVar(VARP var, std::string name) {
    auto targetValue = createVar(name, var->getInfo()->dim, var->getInfo()->type);
    auto absMax = _ReduceMax(_Abs(targetValue), {});
    absMax = _Maximum(absMax, _Scalar<T>(0));
    auto diff = _Abs(targetValue - var);
    auto diffAbsMax = _ReduceMax(diff);
    auto absMaxV = absMax->readMap<T>()[0];
    auto diffAbsMaxV = diffAbsMax->readMap<T>()[0];
    // std::isnan in VS2017 does not accept integer arguments, so cast to a floating-point type first
#ifdef _MSC_VER
#define ALI_ISNAN(x) std::isnan(static_cast<long double>(x))
#else
#define ALI_ISNAN(x) std::isnan(x)
#endif
    if (absMaxV * 0.01f < diffAbsMaxV || ALI_ISNAN(absMaxV)) {
        std::cout << "TESTERROR " << name << " value error : absMaxV:" << absMaxV << " - DiffMax:" << diffAbsMaxV << std::endl;
        return false;
    }
    return true;
}
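
// Print "Correct!" (in green on terminals that support ANSI colors) when the test passed.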
void log_result(bool correct) {
    if (correct) {
#if defined(_MSC_VER)
        std::cout << "Correct!" << std::endl;
#else
        std::cout << GREEN << BOLD << "Correct!" << NONE << std::endl;
#endif
    }
}
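
// Expected invocation, inferred from the argument parsing in main() below:
//   argv[1]  model file, e.g. <dir>/temp.bin; <dir>/ also holds the input/output text files
//   argv[2]  describe/config file read by ConfigFile
//   argv[3]  (optional) forward type as an MNNForwardType integer, default MNN_FORWARD_CPU
//   argv[4]  (optional) tolerance used by TensorUtils::compareTensors, default 0.1
//   argv[5]  (optional) BackendConfig precision mode as an integer, default Precision_High
//
// The config file supplies the keys read below. Illustrative sketch only; the exact
// value syntax is whatever ConfigFile / splitNames / splitDims expect:
//   input_size   = 1
//   input_names  = input
//   input_dims   = 1x3x224x224
//   output_size  = 1
//   output_names = output
//   data_type    = float   (optional: float / int32_t / uint8_t)
//   control_flow = 0       (optional: run through the Module API when true)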
int main(int argc, const char* argv[]) {
    // modelName is xxx/xxx/temp.bin ===> xxx/xxx is the root path
    const char* modelName = argv[1];
    std::string modelDir = modelName;
    modelDir = modelDir.substr(0, modelDir.find("temp.bin"));
    std::cout << "model dir: " << modelDir << std::endl;

    // read args
    auto type = MNN_FORWARD_CPU;
    if (argc > 3) {
        type = (MNNForwardType)stringConvert<int>(argv[3]);
    }
    auto tolerance = 0.1f;
    if (argc > 4) {
        tolerance = stringConvert<float>(argv[4]);
    }
    auto precision = MNN::BackendConfig::Precision_High;
    if (argc > 5) {
        precision = (MNN::BackendConfig::PrecisionMode)(stringConvert<int>(argv[5]));
    }

    // input config
    ConfigFile config(argv[2]);
    auto numOfInputs = config.Read<int>("input_size");
    auto numOfOuputs = config.Read<int>("output_size");
    auto inputNames = splitNames(numOfInputs, config.Read<std::string>("input_names"));
    auto inputDims = splitDims(numOfInputs, config.Read<std::string>("input_dims"));
    auto expectNames = splitNames(numOfOuputs, config.Read<std::string>("output_names"));
    bool controlFlow = config.KeyExists("control_flow") && config.Read<bool>("control_flow");
    auto dataType = halide_type_of<float>();
    if (config.KeyExists("data_type")) {
        auto dtype = config.Read<std::string>("data_type");
        if (dtype == "float") {
            dataType = halide_type_of<float>();
        } else if (dtype == "int32_t") {
            dataType = halide_type_of<int32_t>();
        } else if (dtype == "uint8_t") {
            dataType = halide_type_of<uint8_t>();
        }
    }

    // create net & session
#if defined(_MSC_VER)
    MNN_PRINT("Testing Model ====> %s\n", modelName);
#else
    MNN_PRINT(GREEN "Testing Model ====> %s\n" NONE, modelName);
#endif
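
    // Control-flow models are run through the Express Module API; everything else
    // goes through the Interpreter/Session path in the else-branch below.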
    if (controlFlow) {
        std::shared_ptr<Module> model(Module::load(inputNames, expectNames, modelName));
        std::vector<VARP> inputs;
        for (int i = 0; i < numOfInputs; i++) {
            auto inputName = modelDir + inputNames[i] + ".txt";
            inputs.push_back(createVar(inputName, inputDims[i], dataType));
        }
        auto outputs = model->onForward(inputs);
        bool correct = true;
        for (int i = 0; i < numOfOuputs; i++) {
            auto dtype = outputs[i]->getInfo()->type;
            auto outputName = modelDir + expectNames[i] + ".txt";
            if (dtype == halide_type_of<int32_t>()) {
                correct = compareVar<int32_t>(outputs[i], outputName);
            } else if (dtype == halide_type_of<uint8_t>()) {
                correct = compareVar<uint8_t>(outputs[i], outputName);
            } else {
                correct = compareVar<float>(outputs[i], outputName);
            }
            if (!correct) {
                break;
            }
        }
        log_result(correct);
    } else {
        auto net = std::shared_ptr<MNN::Interpreter>(MNN::Interpreter::createFromFile(modelName));
        MNN::ScheduleConfig schedule;
        schedule.type = type;
        MNN::BackendConfig backendConfig;
        backendConfig.precision = precision;
        schedule.backendConfig = &backendConfig;
        auto session = net->createSession(schedule);

        // resize
        for (int i = 0; i < numOfInputs; ++i) {
            auto inputTensor = net->getSessionInput(session, inputNames[i].c_str());
            net->resizeTensor(inputTensor, inputDims[i]);
        }
        net->resizeSession(session);
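
        // checkFunction feeds every input tensor from its <name>.txt file, runs one
        // inference pass, and compares each output against its reference file. It is
        // called twice below to verify that the results stay stable across runs.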
        auto checkFunction = [&]() {
            // set input-tensor data
            for (int i = 0; i < numOfInputs; ++i) {
                auto inputTensor = net->getSessionInput(session, inputNames[i].c_str());
                auto inputName = modelDir + inputNames[i] + ".txt";
                std::cout << "The " << i << " input: " << inputName << std::endl;

                auto givenTensor = createTensor(inputTensor, inputName);
                if (!givenTensor) {
#if defined(_MSC_VER)
                    std::cout << "Failed to open " << inputName << std::endl;
#else
                    std::cout << RED << "Failed to open " << inputName << NONE << std::endl;
#endif
                    break;
                }
                inputTensor->copyFromHostTensor(givenTensor);
                delete givenTensor;
            }

            // inference
            net->runSession(session);

            // get output tensors and compare data
            bool correct = true;
            for (int i = 0; i < numOfOuputs; ++i) {
                auto outputTensor = net->getSessionOutput(session, expectNames[i].c_str());
                MNN::Tensor* expectTensor = nullptr;
                std::string expectName;
                // first, look for <output name>.txt
                {
                    std::ostringstream iStrOs;
                    iStrOs << expectNames[i];
                    expectName = modelDir + iStrOs.str() + ".txt";
                    expectTensor = createTensor(outputTensor, expectName);
                }
                if (!expectTensor) {
                    // fall back to the numbered output file <i>.txt
                    std::ostringstream iStrOs;
                    iStrOs << i;
                    expectName = modelDir + iStrOs.str() + ".txt";
                    expectTensor = createTensor(outputTensor, expectName);
                }
                if (!expectTensor) {
#if defined(_MSC_VER)
                    std::cout << "Failed to open " << expectName << std::endl;
#else
                    std::cout << RED << "Failed to open " << expectName << NONE << std::endl;
#endif
                    break;
                }
                if (!MNN::TensorUtils::compareTensors(outputTensor, expectTensor, tolerance, true)) {
                    correct = false;
                    break;
                }
                delete expectTensor;
            }
            return correct;
        };
        auto correct = checkFunction();
        if (!correct) {
            return 0;
        } else {
            std::cout << "First Time Pass" << std::endl;
        }
        // second run
        correct = checkFunction();
        log_result(correct);
    }
    return 0;
}