2019-04-17 10:49:11 +08:00
|
|
|
//
|
|
|
|
|
// testModel.cpp
|
|
|
|
|
// MNN
|
|
|
|
|
//
|
|
|
|
|
// Created by MNN on 2019/01/22.
|
|
|
|
|
// Copyright © 2018, Alibaba Group Holding Limited
|
|
|
|
|
//
|
|
|
|
|
|
|
|
|
|
#define MNN_OPEN_TIME_TRACE
|
|
|
|
|
|
2020-03-22 20:16:29 +08:00
|
|
|
#include <MNN/MNNDefine.h>
|
2019-04-17 10:49:11 +08:00
|
|
|
#include <math.h>
|
|
|
|
|
#include <stdio.h>
|
|
|
|
|
#include <stdlib.h>
|
|
|
|
|
#include <string.h>
|
2020-03-22 20:16:29 +08:00
|
|
|
#include <MNN/AutoTime.hpp>
|
|
|
|
|
#include <MNN/Interpreter.hpp>
|
|
|
|
|
#include <MNN/Tensor.hpp>
|
2019-04-17 10:49:11 +08:00
|
|
|
#include <fstream>
|
|
|
|
|
#include <map>
|
|
|
|
|
#include <sstream>
|
2019-12-27 22:16:57 +08:00
|
|
|
#include "core/Backend.hpp"
|
|
|
|
|
#include "core/Macro.h"
|
|
|
|
|
#include "core/TensorUtils.hpp"
|
2019-04-17 10:49:11 +08:00
|
|
|
|
|
|
|
|
#define NONE "\e[0m"
|
|
|
|
|
#define RED "\e[0;31m"
|
|
|
|
|
#define GREEN "\e[0;32m"
|
|
|
|
|
#define L_GREEN "\e[1;32m"
|
|
|
|
|
#define BLUE "\e[0;34m"
|
|
|
|
|
#define L_BLUE "\e[1;34m"
|
|
|
|
|
#define BOLD "\e[1m"
|
|
|
|
|
|
2020-02-26 21:24:38 +08:00
|
|
|
/// Parse a value of type T from a C string via stream extraction.
/// Yields a zero-initialized T when the text does not parse as a T.
template<typename T>
inline T stringConvert(const char* number) {
    T parsed{};
    std::istringstream reader(number);
    reader >> parsed;
    return parsed;
}
|
|
|
|
|
|
2019-04-17 10:49:11 +08:00
|
|
|
// Build a host tensor matching `shape`'s dimensions and dimension type, filled
// with whitespace-separated numeric values read from the text file at `path`.
// Returns a heap-allocated tensor that the caller owns (callers either delete
// it or wrap it in a shared_ptr), or nullptr when the file cannot be opened.
MNN::Tensor* createTensor(const MNN::Tensor* shape, const char* path) {
    std::ifstream stream(path);
    if (stream.fail()) {
        return nullptr;
    }

    auto result = new MNN::Tensor(shape, shape->getDimensionType());
    auto data   = result->host<float>();
    for (int i = 0; i < result->elementSize(); ++i) {
        // Read into a double, then narrow explicitly. A failed extraction
        // (file shorter than the tensor) value-initializes temp to 0.0.
        double temp = 0.0;
        stream >> temp;
        data[i] = static_cast<float>(temp);
    }
    // No explicit close needed: the ifstream destructor releases the file.
    return result;
}
|
|
|
|
|
|
|
|
|
|
int main(int argc, const char* argv[]) {
|
|
|
|
|
// check given & expect
|
|
|
|
|
const char* modelPath = argv[1];
|
|
|
|
|
const char* givenName = argv[2];
|
|
|
|
|
const char* expectName = argv[3];
|
|
|
|
|
MNN_PRINT("Testing model %s, input: %s, output: %s\n", modelPath, givenName, expectName);
|
|
|
|
|
|
|
|
|
|
// create net
|
|
|
|
|
auto type = MNN_FORWARD_CPU;
|
|
|
|
|
if (argc > 4) {
|
2020-02-26 21:24:38 +08:00
|
|
|
type = (MNNForwardType)stringConvert<int>(argv[4]);
|
2019-04-17 10:49:11 +08:00
|
|
|
}
|
|
|
|
|
auto tolerance = 0.1f;
|
|
|
|
|
if (argc > 5) {
|
2020-02-26 21:24:38 +08:00
|
|
|
tolerance = stringConvert<float>(argv[5]);
|
2019-04-17 10:49:11 +08:00
|
|
|
}
|
|
|
|
|
std::shared_ptr<MNN::Interpreter> net =
|
|
|
|
|
std::shared_ptr<MNN::Interpreter>(MNN::Interpreter::createFromFile(modelPath));
|
|
|
|
|
|
|
|
|
|
// create session
|
|
|
|
|
MNN::ScheduleConfig config;
|
2020-03-22 20:16:29 +08:00
|
|
|
config.type = type;
|
2019-12-27 22:16:57 +08:00
|
|
|
MNN::BackendConfig backendConfig;
|
2020-03-18 15:18:53 +08:00
|
|
|
if (type != MNN_FORWARD_CPU) {
|
|
|
|
|
// Use Precision_High for other backend
|
|
|
|
|
// Test CPU ARM v8.2 and other approciate method
|
|
|
|
|
backendConfig.precision = MNN::BackendConfig::Precision_High;
|
|
|
|
|
}
|
2019-12-27 22:16:57 +08:00
|
|
|
config.backendConfig = &backendConfig;
|
2020-03-22 20:16:29 +08:00
|
|
|
auto session = net->createSession(config);
|
2019-04-17 10:49:11 +08:00
|
|
|
|
|
|
|
|
auto allInput = net->getSessionInputAll(session);
|
|
|
|
|
for (auto& iter : allInput) {
|
|
|
|
|
auto size = iter.second->size();
|
2020-03-22 20:16:29 +08:00
|
|
|
|
|
|
|
|
auto bnType = MNN_FORWARD_CPU;
|
|
|
|
|
auto tensorBn = MNN::TensorUtils::getDescribe(iter.second)->backend;
|
|
|
|
|
if (tensorBn) {
|
|
|
|
|
bnType = tensorBn->type();
|
|
|
|
|
}
|
|
|
|
|
// memory is fp16, but size == element * sizeof(float)
|
|
|
|
|
if (bnType == MNN_FORWARD_CPU_EXTENSION) {
|
|
|
|
|
size /= 2;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
auto ptr = iter.second->host<void>();
|
2019-04-17 10:49:11 +08:00
|
|
|
std::shared_ptr<MNN::Tensor> tempTensor;
|
|
|
|
|
if (nullptr == ptr) {
|
|
|
|
|
tempTensor = std::shared_ptr<MNN::Tensor>(MNN::Tensor::createHostTensorFromDevice(iter.second, false),
|
|
|
|
|
[&iter](void* t) {
|
|
|
|
|
auto hostTensor = (MNN::Tensor*)t;
|
|
|
|
|
iter.second->copyFromHostTensor(hostTensor);
|
|
|
|
|
delete hostTensor;
|
|
|
|
|
});
|
2020-03-22 20:16:29 +08:00
|
|
|
ptr = tempTensor->host<float>();
|
2019-04-17 10:49:11 +08:00
|
|
|
}
|
|
|
|
|
::memset(ptr, 0, size);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// write input tensor
|
|
|
|
|
auto inputTensor = net->getSessionInput(session, NULL);
|
|
|
|
|
auto givenTensor = createTensor(inputTensor, givenName);
|
|
|
|
|
if (!givenTensor) {
|
2019-06-10 21:08:55 +08:00
|
|
|
#if defined(_MSC_VER)
|
|
|
|
|
printf("Failed to open input file %s.\n", givenName);
|
|
|
|
|
#else
|
2019-04-17 10:49:11 +08:00
|
|
|
printf(RED "Failed to open input file %s.\n" NONE, givenName);
|
2019-06-10 21:08:55 +08:00
|
|
|
#endif
|
2019-04-17 10:49:11 +08:00
|
|
|
return -1;
|
|
|
|
|
}
|
- dynamic computation graph (beta)
- add supports (/express)
- add tests
- add benchmarks with it (/benchmark/exprModels)
- Python
- MNN engine and tools were submitted to pip
- available on Windows/macOS/Linux
- Engine/Converter
- add supports for each op benchmarking
- refactor optimizer by separating steps
- CPU
- add supports for Conv3D, Pool3D, ELU, ReverseSequence
- fix ArgMax, Permute, Scale, BinaryOp, Slice, SliceTf
- OpenCL
- add half transform in CPU
- add broadcast supports for binary
- optimize Conv2D, Reshape, Eltwise, Gemm, etc.
- OpenGL
- add sub, real div supports for binary
- add supports for unary
- optimize Conv2D, Reshape
- Vulkan
- add max supports for eltwise
- Metal
- fix metallib missing problem
- Train/Quantization
- use express to refactor training codes
2019-09-26 21:02:07 +08:00
|
|
|
inputTensor->copyFromHostTensor(givenTensor);
|
2019-04-17 10:49:11 +08:00
|
|
|
delete givenTensor;
|
|
|
|
|
|
|
|
|
|
// infer
|
|
|
|
|
net->runSession(session);
|
|
|
|
|
|
|
|
|
|
// read expect tensor
|
|
|
|
|
auto outputTensor = net->getSessionOutput(session, NULL);
|
|
|
|
|
std::shared_ptr<MNN::Tensor> expectTensor(createTensor(outputTensor, expectName));
|
|
|
|
|
if (!expectTensor.get()) {
|
2019-06-10 21:08:55 +08:00
|
|
|
#if defined(_MSC_VER)
|
|
|
|
|
printf("Failed to open expect file %s.\n", expectName);
|
|
|
|
|
#else
|
2019-04-17 10:49:11 +08:00
|
|
|
printf(RED "Failed to open expect file %s.\n" NONE, expectName);
|
2019-06-10 21:08:55 +08:00
|
|
|
#endif
|
2019-04-17 10:49:11 +08:00
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// compare output with expect
|
|
|
|
|
bool correct = MNN::TensorUtils::compareTensors(outputTensor, expectTensor.get(), tolerance, true);
|
|
|
|
|
if (correct) {
|
2019-06-10 21:08:55 +08:00
|
|
|
#if defined(_MSC_VER)
|
|
|
|
|
printf("Test %s Correct!\n", modelPath);
|
|
|
|
|
#else
|
2019-04-17 10:49:11 +08:00
|
|
|
printf(GREEN BOLD "Test %s Correct!\n" NONE, modelPath);
|
2019-06-10 21:08:55 +08:00
|
|
|
#endif
|
2019-04-17 10:49:11 +08:00
|
|
|
} else {
|
2019-06-10 21:08:55 +08:00
|
|
|
#if defined(_MSC_VER)
|
|
|
|
|
printf("Test Failed %s!\n", modelPath);
|
|
|
|
|
#else
|
2019-04-17 10:49:11 +08:00
|
|
|
printf(RED "Test Failed %s!\n" NONE, modelPath);
|
2019-06-10 21:08:55 +08:00
|
|
|
#endif
|
2019-04-17 10:49:11 +08:00
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
}
|