// MNN/source/backend/metal/MetalCache_generated.h
// (file metadata: 581 lines, 22 KiB, C++)

// automatically generated by the FlatBuffers compiler, do not modify
#ifndef FLATBUFFERS_GENERATED_METALCACHE_METALCACHE_H_
#define FLATBUFFERS_GENERATED_METALCACHE_METALCACHE_H_
#include "flatbuffers/flatbuffers.h"
namespace MetalCache {
struct TensorInfo;
struct TensorInfoT;
struct OpInfo;
struct OpInfoT;
struct Autotuning;
struct AutotuningT;
struct Cache;
struct CacheT;
inline const flatbuffers::TypeTable *TensorInfoTypeTable();
inline const flatbuffers::TypeTable *OpInfoTypeTable();
inline const flatbuffers::TypeTable *AutotuningTypeTable();
inline const flatbuffers::TypeTable *CacheTypeTable();
// Object-API (native) mirror of TensorInfo: owns its data in STL containers.
struct TensorInfoT : public flatbuffers::NativeTable {
typedef TensorInfo TableType;
std::vector<int32_t> shape;  // tensor dimensions
TensorInfoT() {
}
};
// Wire-format accessor for a serialized TensorInfo table.
// Field slots: 4 = shape (optional vector of int32).
struct TensorInfo FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
typedef TensorInfoT NativeTableType;
static const flatbuffers::TypeTable *MiniReflectTypeTable() {
return TensorInfoTypeTable();
}
// Returns nullptr when the field is absent from the buffer.
const flatbuffers::Vector<int32_t> *shape() const {
return GetPointer<const flatbuffers::Vector<int32_t> *>(4);
}
// Bounds-checks this table and its vector field against the raw buffer.
bool Verify(flatbuffers::Verifier &verifier) const {
return VerifyTableStart(verifier) &&
VerifyOffset(verifier, 4) &&
verifier.VerifyVector(shape()) &&
verifier.EndTable();
}
// Object-API conversion; implementations appear later in this header.
TensorInfoT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
void UnPackTo(TensorInfoT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
static flatbuffers::Offset<TensorInfo> Pack(flatbuffers::FlatBufferBuilder &_fbb, const TensorInfoT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
// Incremental builder for a TensorInfo table inside a FlatBufferBuilder.
struct TensorInfoBuilder {
flatbuffers::FlatBufferBuilder &fbb_;
flatbuffers::uoffset_t start_;  // table start returned by StartTable()
void add_shape(flatbuffers::Offset<flatbuffers::Vector<int32_t>> shape) {
fbb_.AddOffset(4, shape);
}
explicit TensorInfoBuilder(flatbuffers::FlatBufferBuilder &_fbb)
: fbb_(_fbb) {
start_ = fbb_.StartTable();
}
// Declared but not defined: disables assignment (pre-C++11 "= delete" style).
TensorInfoBuilder &operator=(const TensorInfoBuilder &);
// Closes the table and returns its offset within the buffer.
flatbuffers::Offset<TensorInfo> Finish() {
const auto end = fbb_.EndTable(start_);
auto o = flatbuffers::Offset<TensorInfo>(end);
return o;
}
};
// Convenience one-shot constructor for a TensorInfo table.
inline flatbuffers::Offset<TensorInfo> CreateTensorInfo(
flatbuffers::FlatBufferBuilder &_fbb,
flatbuffers::Offset<flatbuffers::Vector<int32_t>> shape = 0) {
TensorInfoBuilder builder_(_fbb);
builder_.add_shape(shape);
return builder_.Finish();
}
// Object-API pack overload; defined later in this header.
flatbuffers::Offset<TensorInfo> CreateTensorInfo(flatbuffers::FlatBufferBuilder &_fbb, const TensorInfoT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
// Object-API mirror of OpInfo: an op's name/type plus owned input/output tensor infos.
struct OpInfoT : public flatbuffers::NativeTable {
typedef OpInfo TableType;
std::string name;
int32_t type;  // op type id; defaults to 0
std::vector<std::unique_ptr<TensorInfoT>> inputs;
std::vector<std::unique_ptr<TensorInfoT>> outputs;
OpInfoT()
: type(0) {
}
};
// Wire-format accessor for a serialized OpInfo table.
// Field slots: 4 = name, 6 = type (int32, default 0), 8 = inputs, 10 = outputs.
struct OpInfo FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
typedef OpInfoT NativeTableType;
static const flatbuffers::TypeTable *MiniReflectTypeTable() {
return OpInfoTypeTable();
}
// Returns nullptr when the field is absent from the buffer.
const flatbuffers::String *name() const {
return GetPointer<const flatbuffers::String *>(4);
}
int32_t type() const {
return GetField<int32_t>(6, 0);
}
const flatbuffers::Vector<flatbuffers::Offset<TensorInfo>> *inputs() const {
return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<TensorInfo>> *>(8);
}
const flatbuffers::Vector<flatbuffers::Offset<TensorInfo>> *outputs() const {
return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<TensorInfo>> *>(10);
}
// Bounds-checks every field, recursing into the nested TensorInfo tables.
bool Verify(flatbuffers::Verifier &verifier) const {
return VerifyTableStart(verifier) &&
VerifyOffset(verifier, 4) &&
verifier.VerifyString(name()) &&
VerifyField<int32_t>(verifier, 6) &&
VerifyOffset(verifier, 8) &&
verifier.VerifyVector(inputs()) &&
verifier.VerifyVectorOfTables(inputs()) &&
VerifyOffset(verifier, 10) &&
verifier.VerifyVector(outputs()) &&
verifier.VerifyVectorOfTables(outputs()) &&
verifier.EndTable();
}
// Object-API conversion; implementations appear later in this header.
OpInfoT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
void UnPackTo(OpInfoT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
static flatbuffers::Offset<OpInfo> Pack(flatbuffers::FlatBufferBuilder &_fbb, const OpInfoT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
// Incremental builder for an OpInfo table inside a FlatBufferBuilder.
struct OpInfoBuilder {
flatbuffers::FlatBufferBuilder &fbb_;
flatbuffers::uoffset_t start_;  // table start returned by StartTable()
void add_name(flatbuffers::Offset<flatbuffers::String> name) {
fbb_.AddOffset(4, name);
}
void add_type(int32_t type) {
fbb_.AddElement<int32_t>(6, type, 0);
}
void add_inputs(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<TensorInfo>>> inputs) {
fbb_.AddOffset(8, inputs);
}
void add_outputs(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<TensorInfo>>> outputs) {
fbb_.AddOffset(10, outputs);
}
explicit OpInfoBuilder(flatbuffers::FlatBufferBuilder &_fbb)
: fbb_(_fbb) {
start_ = fbb_.StartTable();
}
// Declared but not defined: disables assignment (pre-C++11 "= delete" style).
OpInfoBuilder &operator=(const OpInfoBuilder &);
// Closes the table and returns its offset within the buffer.
flatbuffers::Offset<OpInfo> Finish() {
const auto end = fbb_.EndTable(start_);
auto o = flatbuffers::Offset<OpInfo>(end);
return o;
}
};
// Convenience one-shot constructor for an OpInfo table.
// Fields are added highest slot first, matching the generator's emission order.
inline flatbuffers::Offset<OpInfo> CreateOpInfo(
flatbuffers::FlatBufferBuilder &_fbb,
flatbuffers::Offset<flatbuffers::String> name = 0,
int32_t type = 0,
flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<TensorInfo>>> inputs = 0,
flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<TensorInfo>>> outputs = 0) {
OpInfoBuilder builder_(_fbb);
builder_.add_outputs(outputs);
builder_.add_inputs(inputs);
builder_.add_type(type);
builder_.add_name(name);
return builder_.Finish();
}
// Object-API pack overload; defined later in this header.
flatbuffers::Offset<OpInfo> CreateOpInfo(flatbuffers::FlatBufferBuilder &_fbb, const OpInfoT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
// Object-API mirror of Autotuning: one tuned kernel-launch configuration keyed by string.
struct AutotuningT : public flatbuffers::NativeTable {
typedef Autotuning TableType;
std::string key;
std::vector<uint32_t> threadSize;
std::vector<uint32_t> groupNum;
std::vector<uint32_t> groupSize;
uint32_t timeCost;  // measured cost of this configuration; defaults to 0
AutotuningT()
: timeCost(0) {
}
};
// Wire-format accessor for a serialized Autotuning table.
// Field slots: 4 = key, 6 = threadSize, 8 = groupNum, 10 = groupSize,
// 12 = timeCost (uint32, default 0).
struct Autotuning FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
typedef AutotuningT NativeTableType;
static const flatbuffers::TypeTable *MiniReflectTypeTable() {
return AutotuningTypeTable();
}
// Pointer accessors return nullptr when the field is absent from the buffer.
const flatbuffers::String *key() const {
return GetPointer<const flatbuffers::String *>(4);
}
const flatbuffers::Vector<uint32_t> *threadSize() const {
return GetPointer<const flatbuffers::Vector<uint32_t> *>(6);
}
const flatbuffers::Vector<uint32_t> *groupNum() const {
return GetPointer<const flatbuffers::Vector<uint32_t> *>(8);
}
const flatbuffers::Vector<uint32_t> *groupSize() const {
return GetPointer<const flatbuffers::Vector<uint32_t> *>(10);
}
uint32_t timeCost() const {
return GetField<uint32_t>(12, 0);
}
// Bounds-checks every field against the raw buffer.
bool Verify(flatbuffers::Verifier &verifier) const {
return VerifyTableStart(verifier) &&
VerifyOffset(verifier, 4) &&
verifier.VerifyString(key()) &&
VerifyOffset(verifier, 6) &&
verifier.VerifyVector(threadSize()) &&
VerifyOffset(verifier, 8) &&
verifier.VerifyVector(groupNum()) &&
VerifyOffset(verifier, 10) &&
verifier.VerifyVector(groupSize()) &&
VerifyField<uint32_t>(verifier, 12) &&
verifier.EndTable();
}
// Object-API conversion; implementations appear later in this header.
AutotuningT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
void UnPackTo(AutotuningT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
static flatbuffers::Offset<Autotuning> Pack(flatbuffers::FlatBufferBuilder &_fbb, const AutotuningT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
// Incremental builder for an Autotuning table inside a FlatBufferBuilder.
struct AutotuningBuilder {
flatbuffers::FlatBufferBuilder &fbb_;
flatbuffers::uoffset_t start_;  // table start returned by StartTable()
void add_key(flatbuffers::Offset<flatbuffers::String> key) {
fbb_.AddOffset(4, key);
}
void add_threadSize(flatbuffers::Offset<flatbuffers::Vector<uint32_t>> threadSize) {
fbb_.AddOffset(6, threadSize);
}
void add_groupNum(flatbuffers::Offset<flatbuffers::Vector<uint32_t>> groupNum) {
fbb_.AddOffset(8, groupNum);
}
void add_groupSize(flatbuffers::Offset<flatbuffers::Vector<uint32_t>> groupSize) {
fbb_.AddOffset(10, groupSize);
}
void add_timeCost(uint32_t timeCost) {
fbb_.AddElement<uint32_t>(12, timeCost, 0);
}
explicit AutotuningBuilder(flatbuffers::FlatBufferBuilder &_fbb)
: fbb_(_fbb) {
start_ = fbb_.StartTable();
}
// Declared but not defined: disables assignment (pre-C++11 "= delete" style).
AutotuningBuilder &operator=(const AutotuningBuilder &);
// Closes the table and returns its offset within the buffer.
flatbuffers::Offset<Autotuning> Finish() {
const auto end = fbb_.EndTable(start_);
auto o = flatbuffers::Offset<Autotuning>(end);
return o;
}
};
// Convenience one-shot constructor for an Autotuning table.
// Fields are added highest slot first, matching the generator's emission order.
inline flatbuffers::Offset<Autotuning> CreateAutotuning(
flatbuffers::FlatBufferBuilder &_fbb,
flatbuffers::Offset<flatbuffers::String> key = 0,
flatbuffers::Offset<flatbuffers::Vector<uint32_t>> threadSize = 0,
flatbuffers::Offset<flatbuffers::Vector<uint32_t>> groupNum = 0,
flatbuffers::Offset<flatbuffers::Vector<uint32_t>> groupSize = 0,
uint32_t timeCost = 0) {
AutotuningBuilder builder_(_fbb);
builder_.add_timeCost(timeCost);
builder_.add_groupSize(groupSize);
builder_.add_groupNum(groupNum);
builder_.add_threadSize(threadSize);
builder_.add_key(key);
return builder_.Finish();
}
// Object-API pack overload; defined later in this header.
flatbuffers::Offset<Autotuning> CreateAutotuning(flatbuffers::FlatBufferBuilder &_fbb, const AutotuningT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
// Object-API mirror of the root Cache table: owns all tuning records and op infos.
struct CacheT : public flatbuffers::NativeTable {
typedef Cache TableType;
std::vector<std::unique_ptr<AutotuningT>> tunings;
std::vector<std::unique_ptr<OpInfoT>> tuned;
CacheT() {
}
};
// Wire-format accessor for the root Cache table.
// Field slots: 4 = tunings (vector of Autotuning), 6 = tuned (vector of OpInfo).
struct Cache FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
typedef CacheT NativeTableType;
static const flatbuffers::TypeTable *MiniReflectTypeTable() {
return CacheTypeTable();
}
// Pointer accessors return nullptr when the field is absent from the buffer.
const flatbuffers::Vector<flatbuffers::Offset<Autotuning>> *tunings() const {
return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<Autotuning>> *>(4);
}
const flatbuffers::Vector<flatbuffers::Offset<OpInfo>> *tuned() const {
return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<OpInfo>> *>(6);
}
// Bounds-checks both vectors, recursing into the nested tables.
bool Verify(flatbuffers::Verifier &verifier) const {
return VerifyTableStart(verifier) &&
VerifyOffset(verifier, 4) &&
verifier.VerifyVector(tunings()) &&
verifier.VerifyVectorOfTables(tunings()) &&
VerifyOffset(verifier, 6) &&
verifier.VerifyVector(tuned()) &&
verifier.VerifyVectorOfTables(tuned()) &&
verifier.EndTable();
}
// Object-API conversion; implementations appear later in this header.
CacheT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
void UnPackTo(CacheT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
static flatbuffers::Offset<Cache> Pack(flatbuffers::FlatBufferBuilder &_fbb, const CacheT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
// Incremental builder for the root Cache table inside a FlatBufferBuilder.
struct CacheBuilder {
flatbuffers::FlatBufferBuilder &fbb_;
flatbuffers::uoffset_t start_;  // table start returned by StartTable()
void add_tunings(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Autotuning>>> tunings) {
fbb_.AddOffset(4, tunings);
}
void add_tuned(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<OpInfo>>> tuned) {
fbb_.AddOffset(6, tuned);
}
explicit CacheBuilder(flatbuffers::FlatBufferBuilder &_fbb)
: fbb_(_fbb) {
start_ = fbb_.StartTable();
}
// Declared but not defined: disables assignment (pre-C++11 "= delete" style).
CacheBuilder &operator=(const CacheBuilder &);
// Closes the table and returns its offset within the buffer.
flatbuffers::Offset<Cache> Finish() {
const auto end = fbb_.EndTable(start_);
auto o = flatbuffers::Offset<Cache>(end);
return o;
}
};
// Convenience one-shot constructor for the root Cache table.
inline flatbuffers::Offset<Cache> CreateCache(
flatbuffers::FlatBufferBuilder &_fbb,
flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Autotuning>>> tunings = 0,
flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<OpInfo>>> tuned = 0) {
CacheBuilder builder_(_fbb);
builder_.add_tuned(tuned);
builder_.add_tunings(tunings);
return builder_.Finish();
}
// Object-API pack overload; defined later in this header.
flatbuffers::Offset<Cache> CreateCache(flatbuffers::FlatBufferBuilder &_fbb, const CacheT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
// Allocates and fills a native TensorInfoT from this table; caller owns the result.
inline TensorInfoT *TensorInfo::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
auto _o = new TensorInfoT();
UnPackTo(_o, _resolver);
return _o;
}
// Copies this table's fields into an existing TensorInfoT.
// Absent fields leave the destination member untouched.
inline void TensorInfo::UnPackTo(TensorInfoT *_o, const flatbuffers::resolver_function_t *_resolver) const {
(void)_o;
(void)_resolver;
{ auto _e = shape(); if (_e) { _o->shape.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->shape[_i] = _e->Get(_i); } } };
}
// Serializes a native TensorInfoT; thin wrapper over the object-API CreateTensorInfo.
inline flatbuffers::Offset<TensorInfo> TensorInfo::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TensorInfoT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
return CreateTensorInfo(_fbb, _o, _rehasher);
}
// Serializes a native TensorInfoT into the buffer; empty vectors are omitted (offset 0).
inline flatbuffers::Offset<TensorInfo> CreateTensorInfo(flatbuffers::FlatBufferBuilder &_fbb, const TensorInfoT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
(void)_rehasher;
(void)_o;
struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TensorInfoT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
auto _shape = _o->shape.size() ? _fbb.CreateVector(_o->shape) : 0;
return MetalCache::CreateTensorInfo(
_fbb,
_shape);
}
// Allocates and fills a native OpInfoT from this table; caller owns the result.
inline OpInfoT *OpInfo::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
auto _o = new OpInfoT();
UnPackTo(_o, _resolver);
return _o;
}
// Copies this table's fields into an existing OpInfoT, recursively
// unpacking each nested TensorInfo. Absent fields leave members untouched.
inline void OpInfo::UnPackTo(OpInfoT *_o, const flatbuffers::resolver_function_t *_resolver) const {
(void)_o;
(void)_resolver;
{ auto _e = name(); if (_e) _o->name = _e->str(); };
{ auto _e = type(); _o->type = _e; };
{ auto _e = inputs(); if (_e) { _o->inputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inputs[_i] = std::unique_ptr<TensorInfoT>(_e->Get(_i)->UnPack(_resolver)); } } };
{ auto _e = outputs(); if (_e) { _o->outputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->outputs[_i] = std::unique_ptr<TensorInfoT>(_e->Get(_i)->UnPack(_resolver)); } } };
}
// Serializes a native OpInfoT; thin wrapper over the object-API CreateOpInfo.
inline flatbuffers::Offset<OpInfo> OpInfo::Pack(flatbuffers::FlatBufferBuilder &_fbb, const OpInfoT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
return CreateOpInfo(_fbb, _o, _rehasher);
}
// Serializes a native OpInfoT into the buffer. Nested TensorInfoT elements are
// packed via the CreateVector callback; empty strings/vectors are omitted (offset 0).
inline flatbuffers::Offset<OpInfo> CreateOpInfo(flatbuffers::FlatBufferBuilder &_fbb, const OpInfoT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
(void)_rehasher;
(void)_o;
struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const OpInfoT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
auto _name = _o->name.empty() ? 0 : _fbb.CreateString(_o->name);
auto _type = _o->type;
auto _inputs = _o->inputs.size() ? _fbb.CreateVector<flatbuffers::Offset<TensorInfo>> (_o->inputs.size(), [](size_t i, _VectorArgs *__va) { return CreateTensorInfo(*__va->__fbb, __va->__o->inputs[i].get(), __va->__rehasher); }, &_va ) : 0;
auto _outputs = _o->outputs.size() ? _fbb.CreateVector<flatbuffers::Offset<TensorInfo>> (_o->outputs.size(), [](size_t i, _VectorArgs *__va) { return CreateTensorInfo(*__va->__fbb, __va->__o->outputs[i].get(), __va->__rehasher); }, &_va ) : 0;
return MetalCache::CreateOpInfo(
_fbb,
_name,
_type,
_inputs,
_outputs);
}
// Allocates and fills a native AutotuningT from this table; caller owns the result.
inline AutotuningT *Autotuning::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
auto _o = new AutotuningT();
UnPackTo(_o, _resolver);
return _o;
}
// Copies this table's fields into an existing AutotuningT.
// Absent fields leave the destination member untouched.
inline void Autotuning::UnPackTo(AutotuningT *_o, const flatbuffers::resolver_function_t *_resolver) const {
(void)_o;
(void)_resolver;
{ auto _e = key(); if (_e) _o->key = _e->str(); };
{ auto _e = threadSize(); if (_e) { _o->threadSize.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->threadSize[_i] = _e->Get(_i); } } };
{ auto _e = groupNum(); if (_e) { _o->groupNum.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->groupNum[_i] = _e->Get(_i); } } };
{ auto _e = groupSize(); if (_e) { _o->groupSize.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->groupSize[_i] = _e->Get(_i); } } };
{ auto _e = timeCost(); _o->timeCost = _e; };
}
// Serializes a native AutotuningT; thin wrapper over the object-API CreateAutotuning.
inline flatbuffers::Offset<Autotuning> Autotuning::Pack(flatbuffers::FlatBufferBuilder &_fbb, const AutotuningT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
return CreateAutotuning(_fbb, _o, _rehasher);
}
// Serializes a native AutotuningT into the buffer; empty strings/vectors are omitted (offset 0).
inline flatbuffers::Offset<Autotuning> CreateAutotuning(flatbuffers::FlatBufferBuilder &_fbb, const AutotuningT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
(void)_rehasher;
(void)_o;
struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const AutotuningT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
auto _key = _o->key.empty() ? 0 : _fbb.CreateString(_o->key);
auto _threadSize = _o->threadSize.size() ? _fbb.CreateVector(_o->threadSize) : 0;
auto _groupNum = _o->groupNum.size() ? _fbb.CreateVector(_o->groupNum) : 0;
auto _groupSize = _o->groupSize.size() ? _fbb.CreateVector(_o->groupSize) : 0;
auto _timeCost = _o->timeCost;
return MetalCache::CreateAutotuning(
_fbb,
_key,
_threadSize,
_groupNum,
_groupSize,
_timeCost);
}
// Allocates and fills a native CacheT from this table; caller owns the result.
inline CacheT *Cache::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
auto _o = new CacheT();
UnPackTo(_o, _resolver);
return _o;
}
// Copies this table's fields into an existing CacheT, recursively unpacking
// each nested Autotuning/OpInfo. Absent fields leave members untouched.
inline void Cache::UnPackTo(CacheT *_o, const flatbuffers::resolver_function_t *_resolver) const {
(void)_o;
(void)_resolver;
{ auto _e = tunings(); if (_e) { _o->tunings.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->tunings[_i] = std::unique_ptr<AutotuningT>(_e->Get(_i)->UnPack(_resolver)); } } };
{ auto _e = tuned(); if (_e) { _o->tuned.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->tuned[_i] = std::unique_ptr<OpInfoT>(_e->Get(_i)->UnPack(_resolver)); } } };
}
// Serializes a native CacheT; thin wrapper over the object-API CreateCache.
inline flatbuffers::Offset<Cache> Cache::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CacheT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
return CreateCache(_fbb, _o, _rehasher);
}
// Serializes a native CacheT into the buffer. Nested tables are packed via the
// CreateVector callbacks; empty vectors are omitted (offset 0).
inline flatbuffers::Offset<Cache> CreateCache(flatbuffers::FlatBufferBuilder &_fbb, const CacheT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
(void)_rehasher;
(void)_o;
struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CacheT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
auto _tunings = _o->tunings.size() ? _fbb.CreateVector<flatbuffers::Offset<Autotuning>> (_o->tunings.size(), [](size_t i, _VectorArgs *__va) { return CreateAutotuning(*__va->__fbb, __va->__o->tunings[i].get(), __va->__rehasher); }, &_va ) : 0;
auto _tuned = _o->tuned.size() ? _fbb.CreateVector<flatbuffers::Offset<OpInfo>> (_o->tuned.size(), [](size_t i, _VectorArgs *__va) { return CreateOpInfo(*__va->__fbb, __va->__o->tuned[i].get(), __va->__rehasher); }, &_va ) : 0;
return MetalCache::CreateCache(
_fbb,
_tunings,
_tuned);
}
// Mini-reflection metadata for TensorInfo: per-field type codes and names.
inline const flatbuffers::TypeTable *TensorInfoTypeTable() {
static const flatbuffers::TypeCode type_codes[] = {
{ flatbuffers::ET_INT, 1, -1 }
};
static const char * const names[] = {
"shape"
};
static const flatbuffers::TypeTable tt = {
flatbuffers::ST_TABLE, 1, type_codes, nullptr, nullptr, names
};
return &tt;
}
// Mini-reflection metadata for OpInfo: per-field type codes, referenced
// sub-table TypeTables, and field names.
inline const flatbuffers::TypeTable *OpInfoTypeTable() {
static const flatbuffers::TypeCode type_codes[] = {
{ flatbuffers::ET_STRING, 0, -1 },
{ flatbuffers::ET_INT, 0, -1 },
{ flatbuffers::ET_SEQUENCE, 1, 0 },
{ flatbuffers::ET_SEQUENCE, 1, 0 }
};
static const flatbuffers::TypeFunction type_refs[] = {
TensorInfoTypeTable
};
static const char * const names[] = {
"name",
"type",
"inputs",
"outputs"
};
static const flatbuffers::TypeTable tt = {
flatbuffers::ST_TABLE, 4, type_codes, type_refs, nullptr, names
};
return &tt;
}
// Mini-reflection metadata for Autotuning: per-field type codes and names.
inline const flatbuffers::TypeTable *AutotuningTypeTable() {
static const flatbuffers::TypeCode type_codes[] = {
{ flatbuffers::ET_STRING, 0, -1 },
{ flatbuffers::ET_UINT, 1, -1 },
{ flatbuffers::ET_UINT, 1, -1 },
{ flatbuffers::ET_UINT, 1, -1 },
{ flatbuffers::ET_UINT, 0, -1 }
};
static const char * const names[] = {
"key",
"threadSize",
"groupNum",
"groupSize",
"timeCost"
};
static const flatbuffers::TypeTable tt = {
flatbuffers::ST_TABLE, 5, type_codes, nullptr, nullptr, names
};
return &tt;
}
// Mini-reflection metadata for the root Cache table: per-field type codes,
// referenced sub-table TypeTables, and field names.
inline const flatbuffers::TypeTable *CacheTypeTable() {
static const flatbuffers::TypeCode type_codes[] = {
{ flatbuffers::ET_SEQUENCE, 1, 0 },
{ flatbuffers::ET_SEQUENCE, 1, 1 }
};
static const flatbuffers::TypeFunction type_refs[] = {
AutotuningTypeTable,
OpInfoTypeTable
};
static const char * const names[] = {
"tunings",
"tuned"
};
static const flatbuffers::TypeTable tt = {
flatbuffers::ST_TABLE, 2, type_codes, type_refs, nullptr, names
};
return &tt;
}
// Interprets buf as a FlatBuffer whose root is a Cache table (no copy, no validation).
inline const MetalCache::Cache *GetCache(const void *buf) {
return flatbuffers::GetRoot<MetalCache::Cache>(buf);
}
// Same as GetCache, but for a buffer that begins with a 32-bit size prefix.
inline const MetalCache::Cache *GetSizePrefixedCache(const void *buf) {
return flatbuffers::GetSizePrefixedRoot<MetalCache::Cache>(buf);
}
// Verifies the verifier's buffer as a Cache-rooted FlatBuffer (no file identifier).
inline bool VerifyCacheBuffer(
flatbuffers::Verifier &verifier) {
return verifier.VerifyBuffer<MetalCache::Cache>(nullptr);
}
// Same as VerifyCacheBuffer, but for a size-prefixed buffer.
inline bool VerifySizePrefixedCacheBuffer(
flatbuffers::Verifier &verifier) {
return verifier.VerifySizePrefixedBuffer<MetalCache::Cache>(nullptr);
}
// Finalizes the buffer with the given Cache table as root.
inline void FinishCacheBuffer(
flatbuffers::FlatBufferBuilder &fbb,
flatbuffers::Offset<MetalCache::Cache> root) {
fbb.Finish(root);
}
// Same as FinishCacheBuffer, but writes a 32-bit size prefix before the root.
inline void FinishSizePrefixedCacheBuffer(
flatbuffers::FlatBufferBuilder &fbb,
flatbuffers::Offset<MetalCache::Cache> root) {
fbb.FinishSizePrefixed(root);
}
// One-call deserialization: buffer root -> owned native CacheT.
// Note: does not verify the buffer; run VerifyCacheBuffer first on untrusted input.
inline std::unique_ptr<CacheT> UnPackCache(
const void *buf,
const flatbuffers::resolver_function_t *res = nullptr) {
return std::unique_ptr<CacheT>(GetCache(buf)->UnPack(res));
}
} // namespace MetalCache
#endif // FLATBUFFERS_GENERATED_METALCACHE_METALCACHE_H_