//
// ShapeSqueeze.cpp
// MNN
//
// Created by MNN on 2019/01/10.
// Copyright © 2018, Alibaba Group Holding Limited
//
#include "shape/SizeComputer.hpp"
#include "core/Macro.h"
#include "core/TensorUtils.hpp"
namespace MNN {
// Shape inference for Unsqueeze: inserts a size-1 dimension at every requested
// axis. Axes come either from the op's SqueezeParam or from a second input
// tensor; negative axes are interpreted relative to the OUTPUT rank.
// Returns false on malformed axes (out of range, duplicated) or when the
// resulting rank would not fit a Tensor.
class UnSqueezeSizeComputer : public SizeComputer {
    virtual bool onComputeSize(const MNN::Op* op, const std::vector<Tensor*>& inputs,
                               const std::vector<Tensor*>& outputs) const override {
        MNN_ASSERT(1 == outputs.size());
        // Axes may be baked into the op parameter or supplied as input #1.
        const int* squeezeDim = nullptr;
        int squeezeDimSize    = 0;
        if (nullptr != op->main_as_SqueezeParam()->squeezeDims()) {
            squeezeDim     = op->main_as_SqueezeParam()->squeezeDims()->data();
            squeezeDimSize = op->main_as_SqueezeParam()->squeezeDims()->size();
        } else if (inputs.size() > 1) {
            squeezeDim     = inputs[1]->host<int>();
            squeezeDimSize = inputs[1]->elementSize();
        }
        auto& ob = outputs[0]->buffer();
        // Copy (not reference) the input buffer so reads below are unaffected
        // by writes to the output buffer.
        auto ib       = inputs[0]->buffer();
        ob.dimensions = ib.dimensions + squeezeDimSize;
        // Guard: the output rank must fit the fixed-size mask (and the tensor's
        // dim array); otherwise the loops below would write out of bounds.
        if (ob.dimensions > MNN_MAX_TENSOR_DIM) {
            return false;
        }
        // mask[i] == 1 marks an output axis that receives a new size-1 dim.
        uint8_t mask[MNN_MAX_TENSOR_DIM];
        ::memset(mask, 0, sizeof(mask));
        for (int i = 0; i < squeezeDimSize; i++) {
            int axis = squeezeDim[i];
            if (axis < 0) {
                axis += ob.dimensions;
            }
            // Reject out-of-range axes instead of writing past `mask`.
            if (axis < 0 || axis >= ob.dimensions) {
                return false;
            }
            // A duplicate axis would leave fewer masked slots than added
            // dims, desynchronizing the copy loop below — reject it.
            if (mask[axis] != 0) {
                return false;
            }
            mask[axis] = 1;
        }
        // Unmasked output axes take the input extents in order; masked ones
        // become the new size-1 dimensions.
        int oDim = 0;
        for (int i = 0; i < ob.dimensions; i++) {
            ob.dim[i].extent = 1;
            if (mask[i] == 0) {
                ob.dim[i].extent = ib.dim[oDim].extent;
                oDim++;
            }
        }
        ob.type = inputs[0]->buffer().type;
        TensorUtils::getDescribe(outputs[0])->dimensionFormat = TensorUtils::getDescribe(inputs[0])->dimensionFormat;
        return true;
    }
};
2019-04-17 10:49:11 +08:00
// Shape inference for Squeeze: removes the requested dimensions (expected to
// have extent 1). When no axes are supplied, every extent-1 dimension of the
// input is removed. Negative axes are interpreted relative to the INPUT rank.
// Returns false on out-of-range axes.
class SqueezeSizeComputer : public SizeComputer {
    virtual bool onComputeSize(const MNN::Op* op, const std::vector<Tensor*>& inputs,
                               const std::vector<Tensor*>& outputs) const override {
        MNN_ASSERT(1 == outputs.size());
        // Axes may be baked into the op parameter or supplied as input #1.
        const int* squeezeDim = nullptr;
        int squeezeDimSize    = 0;
        if (nullptr != op->main_as_SqueezeParam()->squeezeDims()) {
            squeezeDim     = op->main_as_SqueezeParam()->squeezeDims()->data();
            squeezeDimSize = op->main_as_SqueezeParam()->squeezeDims()->size();
        } else if (inputs.size() > 1) {
            squeezeDim     = inputs[1]->host<int>();
            squeezeDimSize = inputs[1]->elementSize();
        }
        // mask[i] == 1 marks an input axis to drop. `removed` counts unique
        // masked axes so duplicate axes in the list cannot overcount and
        // corrupt the output rank.
        uint8_t mask[MNN_MAX_TENSOR_DIM];
        ::memset(mask, 0, sizeof(mask));
        auto& ob    = outputs[0]->buffer();
        auto& ib    = inputs[0]->buffer();
        int removed = 0;
        for (int i = 0; i < squeezeDimSize; i++) {
            int axis = squeezeDim[i];
            if (axis < 0) {
                axis += ib.dimensions;
            }
            // Reject out-of-range axes instead of writing past `mask`.
            if (axis < 0 || axis >= ib.dimensions) {
                return false;
            }
            if (mask[axis] == 0) {
                mask[axis] = 1;
                ++removed;
            }
        }
        // No axes given: squeeze every dimension whose extent is 1.
        if (squeezeDimSize == 0) {
            for (int i = 0; i < ib.dimensions; ++i) {
                if (ib.dim[i].extent == 1) {
                    mask[i] = 1;
                    ++removed;
                }
            }
        }
        // in = Tensor(shape=())
        // out = Squeeze(in) should also returns a tensor with shape=(), but
        // the `removed` and `ib.dimensions` are all 0.
        MNN_ASSERT(removed <= ib.dimensions);
        ob.dimensions = ib.dimensions - removed;
        // Copy the surviving extents in order.
        int oDim = 0;
        for (int i = 0; i < ib.dimensions; i++) {
            if (mask[i] == 0) {
                ob.dim[oDim].extent = ib.dim[i].extent;
                oDim++;
            }
        }
        ob.type = inputs[0]->buffer().type;
        TensorUtils::getDescribe(outputs[0])->dimensionFormat = TensorUtils::getDescribe(inputs[0])->dimensionFormat;
        return true;
    }
};
// Register the shape computers with MNN's op-type dispatch table.
REGISTER_SHAPE(SqueezeSizeComputer, OpType_Squeeze);
REGISTER_SHAPE(UnSqueezeSizeComputer, OpType_Unsqueeze);
} // namespace MNN