Compare commits

...

17 Commits

Author SHA1 Message Date
  i-robot 73b3f3a063 !452 code check clean 3 years ago
  李正龙 5980eeaec2 !449 misra 3 years ago
  isaacxr 58f4962efd opensource clean 3 years ago
  i-robot 7615edda06 !448 code check clean 3 years ago
  isaacxr d58d581e44 codecheck clean 3 years ago
  i-robot 8caca8f7b6 !446 static check clean 3 years ago
  i-robot bab166faf6 !447 Unique "using Status = uint32_t" 3 years ago
  zhangxiaokun 011ba36582 Unique "using Status = uint32_t" 3 years ago
  isaacxr 8934eb4b41 opensource clean 3 years ago
  isaacxr 069dac4ea8 codecheck clean 3 years ago
  i-robot 620d774ce8 !443 Security issue modification 3 years ago
  wangzhengjun f66771df04 code review 3 years ago
  i-robot e76c8a1e40 !435 update owners 3 years ago
  王涛 d215e9fa37 update OWNERS. 3 years ago
  i-robot d0a41996fc !425 ShuffleNetV1 network parser bugfix 3 years ago
  huanruizhi 781fae2808 parser bugfix 3 years ago
  王涛 f4e55296a4 update .gitmodules. 3 years ago
47 changed files with 199 additions and 173 deletions
  1. .gitmodules  +1 -1
  2. OWNERS  +6 -3
  3. metadef  +1 -1
  4. parser/caffe/caffe_custom_parser_adapter.cc  +1 -1
  5. parser/caffe/caffe_op_parser.cc  +3 -3
  6. parser/caffe/caffe_parser.cc  +46 -33
  7. parser/caffe/caffe_parser.h  +1 -0
  8. parser/common/acl_graph_parser_util.cc  +14 -10
  9. parser/common/acl_graph_parser_util.h  +2 -2
  10. parser/common/model_saver.cc  +21 -10
  11. parser/common/op_def/arg_op.cc  +0 -5
  12. parser/common/op_def/arg_op.h  +0 -2
  13. parser/common/op_def/constant_op.cc  +0 -5
  14. parser/common/op_def/constant_op.h  +0 -1
  15. parser/common/op_def/frameworkop_op.cc  +0 -6
  16. parser/common/op_def/frameworkop_op.h  +0 -2
  17. parser/common/op_def/no_op_op.cc  +0 -5
  18. parser/common/op_def/no_op_op.h  +0 -2
  19. parser/common/op_def/operator.h  +1 -1
  20. parser/common/op_def/ref_switch_op.cc  +0 -5
  21. parser/common/op_def/ref_switch_op.h  +0 -2
  22. parser/common/op_def/shape_n_op.cc  +0 -5
  23. parser/common/op_def/shape_n_op.h  +0 -2
  24. parser/common/op_def/var_is_initialized_op_op.cc  +0 -5
  25. parser/common/op_def/var_is_initialized_op_op.h  +0 -1
  26. parser/common/op_def/variable_op.cc  +0 -5
  27. parser/common/op_def/variable_op.h  +0 -2
  28. parser/common/parser_fp16_t.cc  +2 -4
  29. parser/common/parser_utils.cc  +1 -2
  30. parser/common/tbe_plugin_loader.cc  +1 -4
  31. parser/onnx/onnx_custom_parser_adapter.cc  +3 -1
  32. parser/onnx/onnx_data_parser.cc  +3 -2
  33. parser/onnx/onnx_parser.cc  +2 -2
  34. parser/onnx/subgraph_adapter/subgraph_adapter.h  +3 -0
  35. parser/onnx/subgraph_adapter/subgraph_adapter_factory.h  +1 -0
  36. parser/tensorflow/tensorflow_auto_mapping_parser_adapter.cc  +2 -1
  37. parser/tensorflow/tensorflow_enter_parser.cc  +1 -1
  38. parser/tensorflow/tensorflow_merge_parser.cc  +2 -1
  39. parser/tensorflow/tensorflow_parser.cc  +24 -24
  40. parser/tensorflow/tensorflow_reshape_parser.cc  +2 -4
  41. parser/tensorflow/tensorflow_shape_n_parser.cc  +4 -0
  42. parser/tensorflow/tensorflow_squeeze_parser.cc  +2 -4
  43. parser/tensorflow/tensorflow_util.cc  +1 -2
  44. tests/depends/error_manager/src/error_manager_stub.cc  +1 -1
  45. tests/st/testcase/origin_models/test_snapshot.pb  BIN
  46. tests/st/testcase/test_tensorflow_parser.cc  +18 -0
  47. tests/ut/parser/testcase/tensorflow_parser_testcase/tensorflow_parser_unittest.cc  +29 -0

.gitmodules  +1 -1

@@ -1,4 +1,4 @@
[submodule "metadef"]
path = metadef
url = https://gitee.com/ascend/metadef.git
branch = master
branch = r1.7.0

OWNERS  +6 -3

@@ -1,8 +1,11 @@
approvers:
- ji_chen
- wqtshg
- ljl0711
- liu-jisheng
- liyihan123
- startzgf168
- qkunz
reviewers:
- xchu42
- sheng-nan
- wqtshg
- ljl0711
- liu-jisheng

metadef  +1 -1

@@ -1 +1 @@
Subproject commit 326ecbb2b4837699aa674cc30e9b9956e4fd364d
Subproject commit 0b6395643fd358080d8d2a80868d09154d47b7e9

parser/caffe/caffe_custom_parser_adapter.cc  +1 -1

@@ -83,7 +83,7 @@ Status CaffeCustomParserAdapter::ParseWeights(const Message *op_src, ge::NodePtr
GE_CHECK_NOTNULL(op);
const LayerParameter *layer = reinterpret_cast<const LayerParameter *>(op_src);

GE_CHK_BOOL_RET_STATUS(nullptr != layer, FAILED, "[Convert][Type]Dynamic cast op_src to LayerParameter failed");
GE_CHK_BOOL_RET_STATUS(layer != nullptr, FAILED, "[Convert][Type]Dynamic cast op_src to LayerParameter failed");
GELOGI("layer: %s blobs_size: %d bottom_size: %d", layer->name().c_str(), layer->blobs_size(), layer->bottom_size());
if (layer->blobs_size() == 0) {
return SUCCESS;


parser/caffe/caffe_op_parser.cc  +3 -3

@@ -54,16 +54,16 @@ Status CaffeOpParser::ConvertWeight(const BlobProto &proto, const string &lay_na
ConvertShape(proto, shape_vec);
ge::GeShape shape(shape_vec);
// Calculate the number of data in weight
int count = 1;
int32_t count = 1;
for (size_t i = 0; i < shape.GetDimNum(); ++i) {
int dim = shape.GetDim(i);
int32_t dim = static_cast<int32_t>(shape.GetDim(i));
if (dim <= 0) {
REPORT_INNER_ERROR("E19999", "Convert weight fail, dim:%d of layer:%s <=0, check invalid", dim, lay_name.c_str());
GELOGE(FAILED, "[Check][Size]Convert weight fail, dim:%d of layer:%s <=0, check invalid", dim, lay_name.c_str());
return FAILED;
}

if (dim >= INT64_MAX / count) {
if (dim >= INT32_MAX / count) {
REPORT_INNER_ERROR("E19999", "Convert weight fail, shape:%s of layer:%s will overflow after multi",
shape.ToString().c_str(), lay_name.c_str());
GELOGE(FAILED, "[Check][Size]Convert weight fail, Blob size exceeds INT64_MAX, dim:%d, count:%d, layer name:%s",

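The caffe_op_parser.cc hunk above narrows the weight element count to int32_t and guards the multiplication with INT32_MAX / count. A minimal, self-contained sketch of that guard follows; ShapeCountFitsInt32 and dims are illustrative names, not from the repository.

#include <cstdint>
#include <vector>

// Illustrative guard mirroring the check added above: reject any dimension
// that would push the running element count past INT32_MAX.
bool ShapeCountFitsInt32(const std::vector<int64_t> &dims, int32_t &count) {
  count = 1;
  for (const int64_t raw_dim : dims) {
    const int32_t dim = static_cast<int32_t>(raw_dim);
    if (dim <= 0) {
      return false;                      // non-positive dimensions are invalid
    }
    if (dim >= INT32_MAX / count) {
      return false;                      // count * dim would exceed INT32_MAX
    }
    count *= dim;
  }
  return true;
}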

parser/caffe/caffe_parser.cc  +46 -33

@@ -78,6 +78,10 @@ using std::ifstream;
} \
} while (0)

namespace {
const size_t kMaxErrStrLen = 128U;
} // namespace

namespace ge {
graphStatus aclgrphParseCaffe(const char *model_file, const char *weights_file, ge::Graph &graph) {
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
@@ -247,7 +251,9 @@ Status CheckPathValid(const char *model_path, const string &custom_proto, string
string &custom_proto_name) {
string path_model = ge::parser::RealPath(model_path);
if (path_model.empty()) {
ErrorManager::GetInstance().ATCReportErrMessage("E19000", {"path", "errmsg"}, {model_path, strerror(errno)});
char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
ErrorManager::GetInstance().ATCReportErrMessage("E19000", {"path", "errmsg"}, {model_path, err_msg});
GELOGE(FAILED, "[Check][Param]ModelPath %s is Invalid path of model", model_path);
return FAILED;
}
@@ -447,24 +453,30 @@ Status CaffeModelParser::CustomProtoParse(const char *model_path, const string &
Status CaffeModelParser::ReadModelWithoutWarning(const char *model_path, google::protobuf::Message *message) {
int32_t copy_fd = mmDup(STDERR_FILENO);
if (copy_fd < 0) {
REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", strerror(errno));
GELOGE(FAILED, "[Invoke][Dup] failed:%d, reason:%s", copy_fd, strerror(errno));
char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", err_msg);
GELOGE(FAILED, "[Invoke][Dup] failed:%d, reason:%s", copy_fd, err_msg);
return FAILED;
}

int32_t fd = mmOpen(kDevNull, M_RDWR);
if (fd < 0) {
(void)mmClose(copy_fd);
ErrorManager::GetInstance().ATCReportErrMessage("E19001", {"file", "errmsg"}, {kDevNull, strerror(errno)});
GELOGE(FAILED, "[Open][File] %s failed. reason:%s", kDevNull, strerror(errno));
char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
ErrorManager::GetInstance().ATCReportErrMessage("E19001", {"file", "errmsg"}, {kDevNull, err_msg});
GELOGE(FAILED, "[Open][File] %s failed. reason:%s", kDevNull, err_msg);
return FAILED;
}

if (mmDup2(fd, STDERR_FILENO) < 0) {
(void)mmClose(fd);
(void)mmClose(copy_fd);
REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", strerror(errno));
GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed. reason:%s", strerror(errno));
char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", err_msg);
GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed. reason:%s", err_msg);
return FAILED;
}

@@ -478,8 +490,10 @@ Status CaffeModelParser::ReadModelWithoutWarning(const char *model_path, google:
if (mmDup2(copy_fd, STDERR_FILENO) < 0) {
(void)mmClose(fd);
(void)mmClose(copy_fd);
REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", strerror(errno));
GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed. reason:%s", strerror(errno));
char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", err_msg);
GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed. reason:%s", err_msg);
return FAILED;
}
(void)mmClose(fd);
@@ -861,8 +875,7 @@ Status CaffeModelParser::AddNode(const domi::caffe::LayerParameter &layer, ge::C
// AddConstInput is a function defined in caffe_op_parser, override in caffe_reshape_parser.
std::shared_ptr<CaffeOpParser> caffe_op_parser = std::static_pointer_cast<CaffeOpParser>(op_parser);
GE_CHECK_NOTNULL(caffe_op_parser);
Status status;
status = caffe_op_parser->AddConstInput(node);
Status status = caffe_op_parser->AddConstInput(node);
if (status != SUCCESS) {
REPORT_CALL_ERROR("E19999", "AddConstInput failed for node:%s", node->GetOpDesc()->GetName().c_str());
GELOGE(FAILED, "[Add][ConstInput] to node %s fail.", node->GetOpDesc()->GetName().c_str());
@@ -1101,14 +1114,14 @@ Status CaffeModelParser::AddUserOutNodesTop() {
}

Status CaffeModelParser::AddOutputTop(const domi::caffe::NetParameter &proto_message) {
for (int32_t i = 0; i < proto_message.layer_size(); i++) {
const domi::caffe::LayerParameter &layer = proto_message.layer(i);
for (int32_t layer_index = 0; layer_index < proto_message.layer_size(); ++layer_index) {
const domi::caffe::LayerParameter &layer = proto_message.layer(layer_index);

if (!CheckValidLayer(layer)) {
continue;
}

for (int i = 0; i < layer.top_size(); i++) {
for (int32_t i = 0; i < layer.top_size(); i++) {
string top = layer.top(i);
string top_origin = top;
// Handling 'inplace' scenarios
@@ -1134,7 +1147,7 @@ Status CaffeModelParser::AddOutputTop(const domi::caffe::NetParameter &proto_mes
GELOGI("output in top_blob: %s", layer.name().c_str());
if (top_node_iter != node_map.end()) {
ge::GetParserContext().out_tensor_names.push_back(top_origin);
ge::GetParserContext().default_out_nodes.push_back(std::make_pair(layer.name(), (int32_t)i));
ge::GetParserContext().default_out_nodes.push_back(std::make_pair(layer.name(), i));
GELOGI("The top of out node [%s] is [%s]", layer.name().c_str(), top_origin.c_str());
}
}
@@ -1261,8 +1274,8 @@ Status CaffeModelParser::ParseFromMemory(const char *data, uint32_t size, ge::Co
std::map<std::string, std::vector<std::string>> layer_params_map;
// same param name set <paramnames,layernames>
// std::map<std::vector<std::string>, std::vector<std::string>> params_share_map;
for (int32_t i = 0; i < layer_count; i++) {
domi::caffe::LayerParameter &layer = const_cast<domi::caffe::LayerParameter &>(proto_message.layer(i));
for (int32_t layer_index = 0; layer_index < layer_count; ++layer_index) {
domi::caffe::LayerParameter &layer = const_cast<domi::caffe::LayerParameter &>(proto_message.layer(layer_index));

GE_CHK_BOOL_EXEC_INFO(CheckValidLayer(layer), continue,
"[Check][Layer]layer phase is train, skip this layer, name:%s, type:%s.",
@@ -1284,7 +1297,7 @@ Status CaffeModelParser::ParseFromMemory(const char *data, uint32_t size, ge::Co
// Times accumulation of duplicate operators
layer_name_map[layer.name()]++;
// Set the name in proto and layer
domi::caffe::LayerParameter *duplicate_name_layer = proto_message.mutable_layer(i);
domi::caffe::LayerParameter *duplicate_name_layer = proto_message.mutable_layer(layer_index);
duplicate_name_layer->set_name(new_name); layer.set_name(new_name);)

// Insert the new operator name, the number of times of duplicate name is recorded as 1
@@ -1300,7 +1313,7 @@ Status CaffeModelParser::ParseFromMemory(const char *data, uint32_t size, ge::Co

// parse ParamSpec
std::vector<string> v_param_names;
for (int i = 0; i < layer.param_size(); i++) {
for (int32_t i = 0; i < layer.param_size(); i++) {
const domi::caffe::ParamSpec &param = layer.param(i);
GE_IF_BOOL_EXEC((param.has_name()), v_param_names.emplace_back(param.name()));
}
@@ -1483,8 +1496,8 @@ Status CaffeModelParser::Parse(const char *model_path, ge::ComputeGraphPtr &grap
// <layername,paramnames>
std::map<std::string, std::vector<std::string>> layer_params_map;
// same param name set <paramnames,layernames>
for (int32_t i = 0; i < layer_count; i++) {
domi::caffe::LayerParameter &layer = const_cast<domi::caffe::LayerParameter &>(proto_message.layer(i));
for (int32_t layer_index = 0; layer_index < layer_count; ++layer_index) {
domi::caffe::LayerParameter &layer = const_cast<domi::caffe::LayerParameter &>(proto_message.layer(layer_index));
SaveOrigionLayerTops(layer);
GE_CHK_BOOL_EXEC_INFO(CheckValidLayer(layer), continue,
"[Check][Layer]layer phase is train, skip this layer, name:%s, type:%s.",
@@ -1503,7 +1516,7 @@ Status CaffeModelParser::Parse(const char *model_path, ge::ComputeGraphPtr &grap
// Times accumulation of duplicate operators
layer_name_map[layer.name()]++;
// Set the name in proto and layer
domi::caffe::LayerParameter *duplicate_name_layer = proto_message.mutable_layer(i);
domi::caffe::LayerParameter *duplicate_name_layer = proto_message.mutable_layer(layer_index);
duplicate_name_layer->set_name(new_name); layer.set_name(new_name);)

// Insert the new operator name, the number of times of duplicate name is recorded as 1
@@ -1519,7 +1532,7 @@ Status CaffeModelParser::Parse(const char *model_path, ge::ComputeGraphPtr &grap

// parse ParamSpec
std::vector<string> v_param_names;
for (int i = 0; i < layer.param_size(); i++) {
for (int32_t i = 0; i < layer.param_size(); i++) {
const domi::caffe::ParamSpec &param = layer.param(i);
GE_IF_BOOL_EXEC((param.has_name()), v_param_names.emplace_back(param.name()));
}
@@ -2099,17 +2112,17 @@ Status CaffeWeightsParser::ConvertLayerParameter(const google::protobuf::Message
ge::ComputeGraphPtr &graph) {
vector<string> need_share_layers;
const domi::caffe::LayerParameter *layer = reinterpret_cast<const domi::caffe::LayerParameter *>(layer_message);
const string &layer_name = layer->name();
const string &shared_layer_name = layer->name();
const string &layer_type = layer->type();
for (auto p_iter = params_share_map.begin(); p_iter != params_share_map.end(); ++p_iter) {
if (find(p_iter->second.begin(), p_iter->second.end(), layer_name) != p_iter->second.end()) {
GELOGI("layer:%s need share weights !", layer_name.c_str());
if (find(p_iter->second.begin(), p_iter->second.end(), shared_layer_name) != p_iter->second.end()) {
GELOGI("layer:%s need share weights !", shared_layer_name.c_str());
need_share_layers = p_iter->second;
}
}

if (need_share_layers.size() == 0) {
need_share_layers.push_back(layer_name);
need_share_layers.push_back(shared_layer_name);
}

for (auto share_iter = need_share_layers.begin(); share_iter != need_share_layers.end(); ++share_iter) {
@@ -2216,27 +2229,27 @@ Status CaffeWeightsParser::ConvertNetParameter(const NetParameter &param, ge::Co

for (int i = 0; i < num_layer; ++i) {
const LayerParameter &layer = param.layer(i);
const string &layer_name = layer.name();
const string &param_layer_name = layer.name();

// Skip some layer types
if (skiped_layer_type_.find(layer.type()) != skiped_layer_type_.end()) {
GELOGI("Skip layer %s", layer_name.c_str());
GELOGI("Skip layer %s", param_layer_name.c_str());
continue;
}

GELOGI("Parse layer %s", layer_name.c_str());
GELOGI("Parse layer %s", param_layer_name.c_str());

vector<string> need_share_layers;

for (auto p_iter = params_share_map.begin(); p_iter != params_share_map.end(); ++p_iter) {
if (find(p_iter->second.begin(), p_iter->second.end(), layer_name) != p_iter->second.end()) {
GELOGI("Layer: %s need share weights !", layer_name.c_str());
if (find(p_iter->second.begin(), p_iter->second.end(), param_layer_name) != p_iter->second.end()) {
GELOGI("Layer: %s need share weights !", param_layer_name.c_str());
need_share_layers = p_iter->second;
}
}

if (need_share_layers.size() == 0) {
need_share_layers.push_back(layer_name);
need_share_layers.push_back(param_layer_name);
}

for (auto share_iter = need_share_layers.begin(); share_iter != need_share_layers.end(); ++share_iter) {


parser/caffe/caffe_parser.h  +1 -0

@@ -50,6 +50,7 @@ using std::set;
using std::string;
using std::unordered_map;
using std::vector;
using domi::Status;
static std::map<std::vector<std::string>, std::vector<std::string>> params_share_map;

class PARSER_FUNC_VISIBILITY CaffeModelParser : public domi::ModelParser {


parser/common/acl_graph_parser_util.cc  +14 -10

@@ -46,6 +46,7 @@ using google::protobuf::io::ZeroCopyInputStream;
using namespace ge::parser;

namespace {
const size_t kMaxErrStrLen = 128U;
const std::string kGraphDefaultName = "domi_default";
/// The maximum length of the file.
/// Based on the security coding specification and the current actual (protobuf) model size, it is determined as 2G-1
@@ -374,7 +375,7 @@ domi::Status AclGrphParseUtil::ParseAclEnableScope(const string &enable_scope_fu
}

void AclGrphParseUtil::AddAttrsForInputNodes(const vector<string> &adjust_fp16_format_vec,
const string &fp16_nodes_name, uint32_t index, OpDescPtr &op_desc) {
const string &fp16_nodes_name, size_t index, OpDescPtr &op_desc) {
if (AttrUtils::SetStr(op_desc, ATTR_ATC_USER_DEFINE_DATATYPE, TypeUtils::DataTypeToSerialString(DT_FLOAT16))) {
if ((index < adjust_fp16_format_vec.size()) && (adjust_fp16_format_vec[index] == "true")) {
GELOGI("This node [%s] should be set NC1HWC0", fp16_nodes_name.c_str());
@@ -405,7 +406,7 @@ domi::Status AclGrphParseUtil::ParseAclInputFp16Nodes(const ComputeGraphPtr &gra
}
GELOGI("The input_fp16_nodes is set %s", input_fp16_nodes.c_str());
vector<string> input_fp16_nodes_vec = StringUtils::Split(input_fp16_nodes, ';');
for (uint32_t i = 0; i < input_fp16_nodes_vec.size(); ++i) {
for (size_t i = 0; i < input_fp16_nodes_vec.size(); ++i) {
ge::NodePtr node = graph->FindNode(input_fp16_nodes_vec[i]);
if (node == nullptr) {
ErrorManager::GetInstance().ATCReportErrMessage("E10016", {"parameter", "opname"},
@@ -494,12 +495,12 @@ domi::Status AclGrphParseUtil::GetDefaultOutInfo(ge::ComputeGraphPtr &compute_gr
std::vector<std::pair<ge::NodePtr, int32_t>> &output_nodes_info) {
std::vector<std::pair<std::string, int32_t>> default_out_nodes = ge::GetParserContext().default_out_nodes;
if (!default_out_nodes.empty()) {
for (uint32_t i = 0; i < default_out_nodes.size(); ++i) {
for (size_t i = 0; i < default_out_nodes.size(); ++i) {
ge::NodePtr out_node = compute_graph->FindNode(default_out_nodes[i].first);
if (out_node == nullptr) {
ErrorManager::GetInstance().ATCReportErrMessage("E10016", {"parameter", "opname"},
{"out_nodes", default_out_nodes[i].first});
GELOGE(domi::FAILED, "[Check][Param] Can not find out_nodes(%d) (%s) in graph.",
GELOGE(domi::FAILED, "[Check][Param] Can not find out_nodes(%zu) (%s) in graph.",
i, default_out_nodes[i].first.c_str());
return domi::FAILED;
}
@@ -692,16 +693,17 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY long GetFileLength(const std::s
return -1, "[Check][Param] input_file path is null.");

std::string real_path = RealPath(input_file.c_str());

char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(real_path.empty(),
REPORT_INPUT_ERROR("E19000", std::vector<std::string>({"path", "errmsg"}),
std::vector<std::string>({real_path, strerror(errno)}));
std::vector<std::string>({real_path, err_msg}));
return -1, "[Get][Path] input_file path '%s' not valid", input_file.c_str());
unsigned long long file_length = 0;
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(mmGetFileSize(input_file.c_str(), &file_length) != EN_OK,
ErrorManager::GetInstance().ATCReportErrMessage("E19001", {"file", "errmsg"},
{input_file, strerror(errno)});
return -1, "[Open][File] [%s] failed. %s", input_file.c_str(), strerror(errno));
{input_file, err_msg});
return -1, "[Open][File] [%s] failed. %s", input_file.c_str(), err_msg);

GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((file_length == 0 || file_length > kMaxFileSizeLimit),
REPORT_INPUT_ERROR(
@@ -829,11 +831,13 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool ReadProtoFromText(const ch
"[Check][Param]incorrect parameter. nullptr == file || nullptr == message");

std::string real_path = RealPath(file);
char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(real_path.empty(),
ErrorManager::GetInstance().ATCReportErrMessage("E19000", {"path", "errmsg"},
{file, strerror(errno)});
{file, err_msg});
return false, "[Check][Param]Path[%s]'s realpath is empty, errmsg[%s]", file,
strerror(errno));
err_msg);

GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(GetFileLength(real_path) == -1, return false, "[Check][Param] file size not valid.");



parser/common/acl_graph_parser_util.h  +2 -2

@@ -57,7 +57,7 @@ class AclGrphParseUtil {
domi::Status ParseAclOutputFp16NodesFormat(const std::string &is_output_fp16);
domi::Status ParseAclEnableScope(const std::string &enable_scope_fusion_passes);
static void AddAttrsForInputNodes(const vector<string> &adjust_fp16_format_vec, const string &fp16_nodes_name,
uint32_t index, OpDescPtr &op_desc);
size_t index, OpDescPtr &op_desc);
domi::Status ParseAclInputFp16Nodes(const ComputeGraphPtr &graph, const string &input_fp16_nodes,
const string &is_input_adjust_hw_layout);
domi::Status GetDefaultOutInfo(ge::ComputeGraphPtr &compute_graph,
@@ -157,7 +157,7 @@ bool ValidateStr(const std::string &filePath, const std::string &mode);
std::string CurrentTimeInStr();

template <typename T, typename... Args>
static inline std::shared_ptr<T> MakeShared(Args &&... args) {
inline std::shared_ptr<T> MakeShared(Args &&... args) {
using T_nc = typename std::remove_const<T>::type;
std::shared_ptr<T> ret(new (std::nothrow) T_nc(std::forward<Args>(args)...));
return ret;

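The header change above drops the redundant static on the MakeShared template. For reference, a self-contained restatement of the helper as shown in the hunk, plus a usage check: because it allocates with std::nothrow, failure surfaces as a null shared_ptr rather than an exception. The main() harness is illustrative only.

#include <memory>
#include <new>
#include <string>
#include <type_traits>
#include <utility>

// Restatement of the helper shown in the hunk above; std::nothrow means an
// allocation failure yields a null shared_ptr instead of throwing.
template <typename T, typename... Args>
inline std::shared_ptr<T> MakeShared(Args &&... args) {
  using T_nc = typename std::remove_const<T>::type;
  std::shared_ptr<T> ret(new (std::nothrow) T_nc(std::forward<Args>(args)...));
  return ret;
}

int main() {
  const auto text = MakeShared<std::string>("demo");
  return (text == nullptr) ? 1 : 0;  // callers must null-check before dereferencing
}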

parser/common/model_saver.cc  +21 -10

@@ -25,6 +25,7 @@
#include "mmpa/mmpa_api.h"

namespace {
const size_t kMaxErrStrLen = 128U;
const int kFileOpSuccess = 0;
} // namespace

@@ -65,8 +66,10 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelSaver::SaveJsonToFi
mode_t mode = S_IRUSR | S_IWUSR;
int32_t fd = mmOpen2(real_path, O_RDWR | O_CREAT | O_TRUNC, mode);
if (fd == EN_ERROR || fd == EN_INVALID_PARAM) {
ErrorManager::GetInstance().ATCReportErrMessage("E19001", {"file", "errmsg"}, {file_path, strerror(errno)});
GELOGE(FAILED, "[Open][File] [%s] failed. %s", file_path, strerror(errno));
char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
ErrorManager::GetInstance().ATCReportErrMessage("E19001", {"file", "errmsg"}, {file_path, err_msg});
GELOGE(FAILED, "[Open][File] [%s] failed. %s", file_path, err_msg);
return FAILED;
}
const char *model_char = model_str.c_str();
@@ -74,16 +77,20 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelSaver::SaveJsonToFi
// Write data to file
mmSsize_t mmpa_ret = mmWrite(fd, const_cast<void *>((const void *)model_char), len);
if (mmpa_ret == EN_ERROR || mmpa_ret == EN_INVALID_PARAM) {
char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
ErrorManager::GetInstance().ATCReportErrMessage(
"E19004", {"file", "errmsg"}, {file_path, strerror(errno)});
"E19004", {"file", "errmsg"}, {file_path, err_msg});
// Need to both print the error info of mmWrite and mmClose, so return ret after mmClose
GELOGE(FAILED, "[WriteTo][File] %s failed. errno = %ld, %s", file_path, mmpa_ret, strerror(errno));
GELOGE(FAILED, "[WriteTo][File] %s failed. errno = %ld, %s", file_path, mmpa_ret, err_msg);
ret = FAILED;
}
// Close file
if (mmClose(fd) != EN_OK) {
REPORT_INNER_ERROR("E19999", "close file:%s failed. errmsg:%s", file_path, strerror(errno));
GELOGE(FAILED, "[Close][File] %s failed. errmsg:%s", file_path, strerror(errno));
char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
REPORT_INNER_ERROR("E19999", "close file:%s failed. errmsg:%s", file_path, err_msg);
GELOGE(FAILED, "[Close][File] %s failed. errmsg:%s", file_path, err_msg);
ret = FAILED;
}
return ret;
@@ -137,11 +144,13 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY int ModelSaver::CreateDirectory
int32_t ret = mmMkdir(tmp_dir_path, S_IRUSR | S_IWUSR | S_IXUSR); // 700
if (ret != 0) {
if (errno != EEXIST) {
char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
REPORT_CALL_ERROR("E19999",
"Can not create directory %s. Make sure the directory exists and writable. errmsg:%s",
directory_path.c_str(), strerror(errno));
directory_path.c_str(), err_msg);
GELOGW("Can not create directory %s. Make sure the directory exists and writable. errmsg:%s",
directory_path.c_str(), strerror(errno));
directory_path.c_str(), err_msg);
return ret;
}
}
@@ -151,11 +160,13 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY int ModelSaver::CreateDirectory
int32_t ret = mmMkdir(const_cast<char *>(directory_path.c_str()), S_IRUSR | S_IWUSR | S_IXUSR); // 700
if (ret != 0) {
if (errno != EEXIST) {
char_t err_buf[kMaxErrStrLen + 1U] = {};
const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
REPORT_CALL_ERROR("E19999",
"Can not create directory %s. Make sure the directory exists and writable. errmsg:%s",
directory_path.c_str(), strerror(errno));
directory_path.c_str(), err_msg);
GELOGW("Can not create directory %s. Make sure the directory exists and writable. errmsg:%s",
directory_path.c_str(), strerror(errno));
directory_path.c_str(), err_msg);
return ret;
}
}

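The caffe_parser.cc, acl_graph_parser_util.cc and model_saver.cc hunks all replace strerror(errno) with the same mmGetErrorFormatMessage sequence. Below is a hedged sketch of that sequence wrapped as a helper; GetLastErrorMessage is hypothetical and not part of the change set, and it assumes mmpa/mmpa_api.h supplies char_t, mmGetErrorCode() and mmGetErrorFormatMessage() returning a printable C string, as the %s usage above implies.

#include <cstddef>
#include <string>
#include "mmpa/mmpa_api.h"  // assumed to provide char_t, mmGetErrorCode(), mmGetErrorFormatMessage()

namespace {
constexpr size_t kMaxErrStrLen = 128U;

// Hypothetical helper naming the pattern repeated inline in the hunks above.
std::string GetLastErrorMessage() {
  char_t err_buf[kMaxErrStrLen + 1U] = {};
  const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
  return (err_msg != nullptr) ? std::string(err_msg) : std::string();
}
}  // namespace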

parser/common/op_def/arg_op.cc  +0 -5

@@ -23,11 +23,6 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ArgOpOperator::ArgOpOperator()

ArgOpOperator::~ArgOpOperator() {}

FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ArgOpOperator &ArgOpOperator::Name(const std::string &name) {
(void)ParserOperator::Name(name);
return *this;
}

FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ArgOpOperator &ArgOpOperator::Index(int64_t index) {
Attr("index", static_cast<int64_t>(index));



parser/common/op_def/arg_op.h  +0 -2

@@ -25,8 +25,6 @@ class ArgOpOperator : public ParserOperator {

~ArgOpOperator() override;

ArgOpOperator &Name(const std::string &name);

ArgOpOperator &Index(int64_t index);

int64_t GetIndex() const;


parser/common/op_def/constant_op.cc  +0 -5

@@ -25,11 +25,6 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ConstantOperator::ConstantOpera

FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ConstantOperator::~ConstantOperator() {}

FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ConstantOperator &ConstantOperator::Name(const std::string &name) {
ParserOperator::Name(name);
return *this;
}

FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ConstantOperator &ConstantOperator::VectorAttr(
std::string key, std::vector<int64_t> &value) {
Attr(key, value);


parser/common/op_def/constant_op.h  +0 -1

@@ -26,7 +26,6 @@ class ConstantOperator : public ParserOperator {
ConstantOperator();
~ConstantOperator() override;

ConstantOperator &Name(const std::string &name);
ConstantOperator &VectorAttr(std::string key, std::vector<int64_t> &value);

ConstantOperator &DType(ge::DataType t);


parser/common/op_def/frameworkop_op.cc  +0 -6

@@ -24,12 +24,6 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY FrameworkOpOperator::FrameworkO

FrameworkOpOperator::~FrameworkOpOperator() {}

FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY FrameworkOpOperator &FrameworkOpOperator::Name(
const std::string &name) {
ParserOperator::Name(name);
return *this;
}

FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY FrameworkOpOperator &FrameworkOpOperator::Index(int64_t index) {
Attr(RETVAL_ATTR_NAME_INDEX, static_cast<int64_t>(index));
return *this;


parser/common/op_def/frameworkop_op.h  +0 -2

@@ -26,8 +26,6 @@ class FrameworkOpOperator : public ParserOperator {

~FrameworkOpOperator() override;

FrameworkOpOperator &Name(const std::string &name);

FrameworkOpOperator &OriginalType(const std::string &type);

FrameworkOpOperator &NodeDefPkg(const std::string &nodedef_pkg);


parser/common/op_def/no_op_op.cc  +0 -5

@@ -22,9 +22,4 @@ namespace ge {
FMK_FUNC_HOST_VISIBILITY NoOpOperator::NoOpOperator() : ParserOperator("NoOp") {}

FMK_FUNC_HOST_VISIBILITY NoOpOperator::~NoOpOperator() {}

FMK_FUNC_HOST_VISIBILITY NoOpOperator &NoOpOperator::Name(const std::string &name) {
ParserOperator::Name(name);
return *this;
}
} // namespace ge

parser/common/op_def/no_op_op.h  +0 -2

@@ -25,8 +25,6 @@ class NoOpOperator : public ParserOperator {
public:
NoOpOperator();
~NoOpOperator() override;

NoOpOperator &Name(const std::string &name);
};
} // namespace ge



parser/common/op_def/operator.h  +1 -1

@@ -45,7 +45,7 @@ class FMK_FUNC_HOST_VISIBILITY ParserOperator {
ParserOperator &AttrVector(std::string key, std::vector<int32_t> &value);
ParserOperator &AttrVector(std::string key, std::vector<int64_t> &value);

ParserOperator &Name(const std::string &name);
virtual ParserOperator &Name(const std::string &name);

ParserOperator &Type(const std::string &type);


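With ParserOperator::Name() now virtual, the per-operator Name() forwarders removed across the op_def files become unnecessary. A small stand-alone sketch of the resulting shape; ParserOperatorSketch and ArgOpSketch are illustrative stand-ins for the real classes.

#include <string>

class ParserOperatorSketch {                 // stand-in for ParserOperator
 public:
  virtual ~ParserOperatorSketch() = default;
  virtual ParserOperatorSketch &Name(const std::string &name) {
    name_ = name;
    return *this;
  }

 private:
  std::string name_;
};

// Derived operators such as ArgOpOperator no longer re-declare Name(); they
// simply inherit the (now virtual) base implementation.
class ArgOpSketch : public ParserOperatorSketch {};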

parser/common/op_def/ref_switch_op.cc  +0 -5

@@ -22,11 +22,6 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY RefSwitchOperator::RefSwitchOpe

FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY RefSwitchOperator::~RefSwitchOperator() {}

FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY RefSwitchOperator &RefSwitchOperator::Name(const std::string &name) {
ParserOperator::Name(name);
return *this;
}

FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY RefSwitchOperator &RefSwitchOperator::T(ge::DataType t) {
Attr("T", (int64_t)t);
return *this;


parser/common/op_def/ref_switch_op.h  +0 -2

@@ -25,8 +25,6 @@ class RefSwitchOperator : public ParserOperator {
public:
RefSwitchOperator();
~RefSwitchOperator() override;

RefSwitchOperator &Name(const std::string &name);
RefSwitchOperator &T(ge::DataType t);
};
} // namespace ge


parser/common/op_def/shape_n_op.cc  +0 -5

@@ -24,11 +24,6 @@ FMK_FUNC_HOST_VISIBILITY ShapeNOperator::ShapeNOperator() : ParserOperator("Shap

FMK_FUNC_HOST_VISIBILITY ShapeNOperator::~ShapeNOperator() {}

FMK_FUNC_HOST_VISIBILITY ShapeNOperator &ShapeNOperator::Name(const std::string &name) {
ParserOperator::Name(name);
return *this;
}

FMK_FUNC_HOST_VISIBILITY ShapeNOperator &ShapeNOperator::N(int64_t n) {
Attr(SHAPEN_ATTR_N, n);
return *this;


parser/common/op_def/shape_n_op.h  +0 -2

@@ -26,8 +26,6 @@ class ShapeNOperator : public ParserOperator {
ShapeNOperator();
~ShapeNOperator() override;

ShapeNOperator &Name(const std::string &name);

ShapeNOperator &N(int64_t n);
int64_t GetN() const;
ShapeNOperator &InType(ge::DataType t);


parser/common/op_def/var_is_initialized_op_op.cc  +0 -5

@@ -24,11 +24,6 @@ VarIsInitializedOpOperator::VarIsInitializedOpOperator() : ParserOperator(ge::pa

VarIsInitializedOpOperator::~VarIsInitializedOpOperator() {}

VarIsInitializedOpOperator &VarIsInitializedOpOperator::Name(const std::string &name) {
ParserOperator::Name(name);
return *this;
}

VarIsInitializedOpOperator &VarIsInitializedOpOperator::VectorAttr(const std::string &key,
std::vector<int64_t> &value) {
Attr(key, value);


parser/common/op_def/var_is_initialized_op_op.h  +0 -1

@@ -26,7 +26,6 @@ class VarIsInitializedOpOperator : public ParserOperator {
VarIsInitializedOpOperator();
~VarIsInitializedOpOperator() override;

VarIsInitializedOpOperator &Name(const std::string &name);
VarIsInitializedOpOperator &VectorAttr(const std::string &key, std::vector<int64_t> &value);
};
} // namespace ge


parser/common/op_def/variable_op.cc  +0 -5

@@ -23,11 +23,6 @@ VariableOperator::VariableOperator() : ParserOperator(ge::parser::VARIABLE) {}

VariableOperator::~VariableOperator() {}

VariableOperator &VariableOperator::Name(const std::string &name) {
ParserOperator::Name(name);
return *this;
}

VariableOperator &VariableOperator::Container(const std::string &container) {
Attr(VAR_ATTR_CONTAINER, container);
return *this;


parser/common/op_def/variable_op.h  +0 -2

@@ -27,8 +27,6 @@ class VariableOperator : public ParserOperator {
VariableOperator();
~VariableOperator() override;

VariableOperator &Name(const std::string &name);

VariableOperator &Container(const std::string &container);

VariableOperator &SharedName(const std::string &sharedname);


parser/common/parser_fp16_t.cc  +2 -4

@@ -675,8 +675,7 @@ static uint16_t Fp16Div(uint16_t v_1, uint16_t v_2) {
uint64_t m_tmp;
if (e_a > e_b) {
m_tmp = m_a;
uint16_t tmp;
tmp = e_a - e_b;
uint16_t tmp = e_a - e_b;
for (int i = 0; i < tmp; i++) {
m_tmp = m_tmp << 1;
}
@@ -690,8 +689,7 @@ static uint16_t Fp16Div(uint16_t v_1, uint16_t v_2) {
m_b = m_tmp;
}
m_div = static_cast<float>(m_a * 1.0f / m_b);
fp16_t fp_div;
fp_div = m_div;
fp16_t fp_div = m_div;
ret = fp_div.val;
if (s_a != s_b) {
ret |= kFp16SignMask;


parser/common/parser_utils.cc  +1 -2

@@ -212,8 +212,7 @@ Status ParserUtils::HandleInputContext(const NodePtr &node,
// add control edge
if (node->GetInControlAnchor() != nullptr) {
for (const auto &out_anchor : node->GetInControlAnchor()->GetPeerAnchors()) {
graphStatus ret = GraphUtils::AddEdge(out_anchor, peer_in_anchor->GetOwnerNode()->GetInControlAnchor());
if (ret != GRAPH_SUCCESS) {
if (GraphUtils::AddEdge(out_anchor, peer_in_anchor->GetOwnerNode()->GetInControlAnchor()) != GRAPH_SUCCESS) {
REPORT_CALL_ERROR("E19999", "add control edge from %s to %s failed.",
out_anchor->GetOwnerNode()->GetName().c_str(),
peer_in_anchor->GetOwnerNode()->GetName().c_str());


parser/common/tbe_plugin_loader.cc  +1 -4

@@ -30,6 +30,7 @@
#include <memory>
#include <string>

#include "external/ge/ge_api_types.h"
#include "common/util/error_manager/error_manager.h"
#include "framework/common/debug/ge_log.h"
#include "framework/common/string_util.h"
@@ -40,10 +41,6 @@
namespace ge {
std::map<string, string> TBEPluginLoader::options_ = {};

namespace {
const std::string FRAMEWORK_TYPE = "ge.frameworkType";
}

// Get Singleton Instance
FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY TBEPluginLoader &TBEPluginLoader::Instance() {
static TBEPluginLoader instance_ptr_;


parser/onnx/onnx_custom_parser_adapter.cc  +3 -1

@@ -20,6 +20,8 @@
#include "framework/common/debug/ge_log.h"
#include "parser/common/op_parser_factory.h"
#include "register/op_registry.h"
#include "parser/common/parser_utils.h"
#include "graph/def_types.h"

using domi::ONNX;
using domi::ParseParamByOpFunc;
@@ -28,7 +30,7 @@ using domi::ParseParamFunc;
namespace ge {
Status OnnxCustomParserAdapter::ParseParams(const Message *op_src, ge::Operator &op_dest) {
GE_CHECK_NOTNULL(op_src);
const ge::onnx::NodeProto *node_src = reinterpret_cast<const ge::onnx::NodeProto *>(op_src);
const ge::onnx::NodeProto *node_src = PtrToPtr<const Message, const ge::onnx::NodeProto>(op_src);
GE_CHECK_NOTNULL(node_src);
GELOGI("Onnx op node name = %s, op type= %s, parse params.", node_src->name().c_str(), node_src->op_type().c_str());


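The ONNX and TensorFlow parser hunks replace reinterpret_cast on Message pointers with PtrToPtr from graph/def_types.h. A minimal stand-in with the shape implied by the call sites above; the real template is provided by metadef, and this sketch only illustrates the call pattern.

// Stand-in inferred from call sites such as
//   PtrToPtr<const Message, const ge::onnx::NodeProto>(op_src)
template <typename TI, typename TO>
TO *PtrToPtr(TI *const ptr) {
  return reinterpret_cast<TO *>(ptr);
}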

parser/onnx/onnx_data_parser.cc  +3 -2

@@ -18,6 +18,7 @@
#include <unordered_map>
#include "common/util.h"
#include "graph/debug/ge_attr_define.h"
#include "graph/def_types.h"
#include "parser/common/op_parser_factory.h"
#include "framework/omg/parser/parser_inner_ctx.h"
#include "parser/onnx/onnx_util.h"
@@ -28,7 +29,7 @@ using namespace ge::parser;
namespace ge {
Status OnnxDataParser::ParseParams(const Message *op_src, ge::Operator &op_def) {
GE_CHECK_NOTNULL(op_src);
const ge::onnx::NodeProto *node_src = reinterpret_cast<const ge::onnx::NodeProto *>(op_src);
const ge::onnx::NodeProto *node_src = PtrToPtr<const Message, const ge::onnx::NodeProto>(op_src);
GE_CHECK_NOTNULL(node_src);
GELOGD("Onnx op node name = %s, op type= %s, parse params", node_src->name().c_str(), node_src->op_type().c_str());
if (ParseInputFromModel(op_src, op_def) != SUCCESS) {
@@ -72,7 +73,7 @@ int64_t OnnxDataParser::ParseInputTensor(const ge::onnx::AttributeProto &attribu

Status OnnxDataParser::ParseInputFromModel(const Message *op_src, ge::Operator &op_def) {
GE_CHECK_NOTNULL(op_src);
const ge::onnx::NodeProto *node = reinterpret_cast<const ge::onnx::NodeProto *>(op_src);
const ge::onnx::NodeProto *node = PtrToPtr<const Message, const ge::onnx::NodeProto>(op_src);
GE_CHECK_NOTNULL(node);

// Get attr t:'input_tensor' form NodeProto


parser/onnx/onnx_parser.cc  +2 -2

@@ -767,8 +767,8 @@ Status OnnxModelParser::AdaptAndFindAllOnnxGraph(ge::onnx::GraphProto &root_onnx
return FAILED;
}

for (const auto &onnx_graph : onnx_graphs) {
onnx_graph_tasks.push(onnx_graph);
for (const auto &sub_onnx_graph : onnx_graphs) {
onnx_graph_tasks.push(sub_onnx_graph);
}
for (const auto &itr : name_to_onnx_subgraph) {
name_to_onnx_graph.emplace(itr.first, itr.second);


parser/onnx/subgraph_adapter/subgraph_adapter.h  +3 -0

@@ -50,6 +50,9 @@ class PARSER_FUNC_VISIBILITY SubgraphAdapter {
virtual domi::Status AdaptAndFindAllSubgraphs(ge::onnx::NodeProto *parent_op,
std::vector<ge::onnx::GraphProto *> &onnx_graphs,
std::map<std::string, ge::onnx::GraphProto *> &name_to_onnx_graph) {
(void)parent_op;
(void)onnx_graphs;
(void)name_to_onnx_graph;
return domi::SUCCESS;
}
};


parser/onnx/subgraph_adapter/subgraph_adapter_factory.h  +1 -0

@@ -32,6 +32,7 @@
#endif

#include <map>
#include <memory>
#include <functional>
#include "subgraph_adapter.h"



parser/tensorflow/tensorflow_auto_mapping_parser_adapter.cc  +2 -1

@@ -19,6 +19,7 @@
#include "framework/omg/parser/parser_types.h"
#include "common/util.h"
#include "framework/common/debug/ge_log.h"
#include "graph/def_types.h"
#include "parser/common/op_parser_factory.h"
#include "register/op_registry.h"
#include "register/register.h"
@@ -42,7 +43,7 @@ Status TensorFlowAutoMappingParserAdapter::ParseParams(const Message *op_src, ge
GELOGE(PARAM_INVALID, "Op src is null");
return PARAM_INVALID;
}
const NodeDef *node = reinterpret_cast<const NodeDef *>(op_src);
const NodeDef *node = PtrToPtr<const Message, const NodeDef>(op_src);
GELOGD("TF op node name = %s, op type= %s, parse params", node->name().c_str(), node->op().c_str());
if (op_dest == nullptr) {
REPORT_INNER_ERROR("E19999", "Param op_dest is nullptr, check invalid");


parser/tensorflow/tensorflow_enter_parser.cc  +1 -1

@@ -31,7 +31,7 @@ Status TensorFlowEnterParser::ParseParams(const Message *op_src, ge::OpDescPtr &
GE_CHECK_NOTNULL(op_desc);
const std::string name = op_desc->GetName();

const NodeDef *node = reinterpret_cast<const NodeDef *>(op_src);
const NodeDef *node = PtrToPtr<const Message, const NodeDef>(op_src);
domi::tensorflow::AttrValue str_attr;
if (!TensorFlowUtil::FindAttrValue(node, ENTER_ATTR_FRAME_NAME, str_attr)) {
REPORT_CALL_ERROR("E19999", "In NodeDef:%s attr:%s not exist, check invalid",


parser/tensorflow/tensorflow_merge_parser.cc  +2 -1

@@ -21,6 +21,7 @@
#include "graph/debug/ge_attr_define.h"
#include "parser/common/op_parser_factory.h"
#include "framework/omg/parser/parser_types.h"
#include "graph/def_types.h"

using domi::TENSORFLOW;
using ge::parser::MERGE;
@@ -30,7 +31,7 @@ Status TensorFlowMergeParser::ParseParams(const Message *op_src, ge::OpDescPtr &
GE_CHECK_NOTNULL(op_src);
GE_CHECK_NOTNULL(op_desc);

const NodeDef *node = reinterpret_cast<const NodeDef *>(op_src);
const NodeDef *node = PtrToPtr<const Message, const NodeDef>(op_src);
domi::tensorflow::AttrValue attr_num;
if (!(TensorFlowUtil::FindAttrValue(node, ATTR_NAME_N, attr_num))) {
GELOGW("In NodeDef %s dynamic attr [%s] is not exist.", op_desc->GetName().c_str(), ATTR_NAME_N.c_str());


parser/tensorflow/tensorflow_parser.cc  +24 -24

@@ -1515,7 +1515,7 @@ Status TensorFlowModelParser::ParseAllGraph(const google::protobuf::Message *pro
if (tensorflow_op_map.find(node_op) == tensorflow_op_map.end()) {
GELOGW("%s not found in tensorflow_op_map.", node_op.c_str());
}
Status ret = AddNode(node_def, graph, scope_graph);
ret = AddNode(node_def, graph, scope_graph);
if (ret != SUCCESS) {
GELOGE(ret, "Add op[%s] failed", node_def->name().c_str());
DeleteFuisonNodeDef();
@@ -1675,7 +1675,6 @@ Status TensorFlowModelParser::CheckInputNodeName(const string &input_node_name,
}
}

int32_t tmp_index = 0;
auto find = tmp_input_node_name.find(":");
if (find == string::npos) {
*node_name = tmp_input_node_name;
@@ -1683,7 +1682,7 @@ Status TensorFlowModelParser::CheckInputNodeName(const string &input_node_name,
if (index == nullptr) {
return SUCCESS;
}
*index = tmp_index;
*index = 0;

return SUCCESS;
}
@@ -1818,17 +1817,20 @@ Status TensorFlowModelParser::GetInPutIndex(shared_ptr<ge::ScopeGraph> &scope_gr
auto &impl = scope_graph->impl_;
return impl->GetInputOrOutputIndex(info, old_index, true, new_index);
}
return SUCCESS;
GELOGE(INTERNAL_ERROR, "Fusion op should come from scope fusion pass, node name:%s, fusion node name:%s",
info.node_name.c_str(), info.fusion_node_name.c_str());
return INTERNAL_ERROR;
}
Status TensorFlowModelParser::GetOutPutIndex(shared_ptr<ge::ScopeGraph> &scope_graph, const ge::ScopeFusionOpInfo &info,
const int32_t old_index, int32_t &new_index) {
GE_CHECK_NOTNULL(scope_graph);
Status ret;
if (info.scope_pass) {
auto &impl = scope_graph->impl_;
ret = impl->GetInputOrOutputIndex(info, old_index, false, new_index);
return impl->GetInputOrOutputIndex(info, old_index, false, new_index);
}
return ret;
GELOGE(INTERNAL_ERROR, "Fusion op should come from scope fusion pass, node name:%s, fusion node name:%s",
info.node_name.c_str(), info.fusion_node_name.c_str());
return INTERNAL_ERROR;
}

bool TensorFlowModelParser::ConstOpNeedUpdate(const string &op_name) {
@@ -2007,15 +2009,13 @@ Status TensorFlowModelParser::EraseNormalOpOutputIfChild(shared_ptr<ge::ScopeGra
for (auto iter = normal_op_node_context.output_map.begin(); iter != normal_op_node_context.output_map.end();) {
string output_node_name = iter->first;
ge::ScopeFusionOpInfo to_info;
int32_t from_index = 0;
int32_t to_index = 0;

if (IsFusionOpChild(output_node_name, &to_info) &&
nodedef_map_[output_node_name]->op() != TENSORFLOWF_NODE_OP_CONST) {
// Fuse operator, update index
std::vector<std::pair<int32_t, int32_t>> &pairs = iter->second;
int32_t to_index = 0;
for (auto &pair : pairs) {
from_index = pair.first;
int32_t from_index = pair.first;
GE_RETURN_WITH_LOG_IF_ERROR(GetInPutIndex(scope_graph, to_info, pair.second, to_index),
"GetInPutIndex failed ,output_node_name %s.", output_node_name.c_str());
tmp_output_map[to_info.fusion_node_name].push_back({from_index, to_index});
@@ -2044,15 +2044,13 @@ Status TensorFlowModelParser::UpdateNormalOpContext(shared_ptr<ge::ScopeGraph> &
for (auto iter = normal_op_node_context.input_map.begin(); iter != normal_op_node_context.input_map.end();) {
string input_node_name = iter->first;
ge::ScopeFusionOpInfo from_info;
int32_t from_index = 0;
int32_t to_index = 0;

if (IsFusionOpChild(input_node_name, &from_info) &&
nodedef_map_[input_node_name]->op() != TENSORFLOWF_NODE_OP_CONST) {
// Fuse operator, update index
std::vector<std::pair<int32_t, int32_t>> &pairs = iter->second;
int32_t from_index = 0;
for (auto &pair : pairs) {
to_index = pair.second;
int32_t to_index = pair.second;
GE_RETURN_WITH_LOG_IF_ERROR(GetOutPutIndex(scope_graph, from_info, pair.first, from_index),
"GetOutPutIndex failed ,input_node_name %s.", input_node_name.c_str());
tmp_input_map[from_info.fusion_node_name].push_back({from_index, to_index});
@@ -2274,7 +2272,7 @@ Status TensorFlowModelParser::ParseProto(const google::protobuf::Message *proto,
}

// Do not exit immediately when there is an error, wait until all errors are collected before exiting
Status ret = AddFmkNodeDefToMap(node_def, op_node_name_list);
ret = AddFmkNodeDefToMap(node_def, op_node_name_list);
GE_CHK_STATUS_EXEC(ret, return PARAM_INVALID, "add node_def to map failed");
}
PARSER_TIMESTAMP_END(AddFmkNodeDefToMap, "TensorFlowModelParser::AddFmkNodeDefToMap");
@@ -2563,6 +2561,7 @@ Status TensorFlowModelParser::OptimizeSnapShot(domi::tensorflow::NodeDef *curr_m
domi::tensorflow::NodeDef *output_node_def = nodedef_map[output_node_name];
GE_CHECK_NOTNULL(output_node_def);
auto inputs = output_node_def->mutable_input();
std::vector<std::string> added_inputs;
for (auto &input : *inputs) {
string node_name;
bool is_control = false;
@@ -2596,12 +2595,15 @@ Status TensorFlowModelParser::OptimizeSnapShot(domi::tensorflow::NodeDef *curr_m
}
}
if (!is_exist_input) {
output_node_def->add_input("^" + item);
GELOGD("Optimize Snapshot node, dest:%s, set control input:%s.", output_node_name.c_str(), item.c_str());
added_inputs.push_back("^" + item);
}
}
}
}
for (std::string added_input : added_inputs) {
GELOGD("Optimize Snapshot node, dest:%s, set control input:%s.", output_node_name.c_str(), added_input.c_str());
output_node_def->add_input(added_input);
}
}
// Clear the input of snapshot and become an isolated node
curr_mode_def->clear_input();
@@ -3128,8 +3130,7 @@ Status TensorFlowModelParser::TrimGraphByInput(const domi::tensorflow::GraphDef
output_graph_def->Clear();
for (const NodeDef &node : filtered_graph_def.node()) {
if (input_nodes.count(node.name())) {
NodeDef placeholder_node;
placeholder_node = node;
NodeDef placeholder_node = node;
placeholder_node.clear_input();
GE_IF_BOOL_EXEC(node.op() != "Placeholder", placeholder_node.set_op("Placeholder"));
domi::tensorflow::AttrValue attr_value;
@@ -3202,8 +3203,7 @@ Status TensorFlowModelParser::TrimGraphByOutput(const domi::tensorflow::GraphDef
output_graph_def->Clear();
for (const NodeDef &node : filtered_graph_def.node()) {
if (input_nodes.count(node.name())) {
NodeDef placeholder_node;
placeholder_node = node;
NodeDef placeholder_node = node;
placeholder_node.clear_input();
GE_IF_BOOL_EXEC(node.op() != "Placeholder", placeholder_node.set_op("Placeholder"));
domi::tensorflow::AttrValue attr_value;
@@ -3725,8 +3725,8 @@ void TensorFlowModelParser::UpdateInnerInputMap(const string &fusion_op_name, Op
std::map<std::string, std::vector<std::pair<int32_t, int32_t>>> tmp_input_map;
for (auto iter = op_node_context.input_map.begin(); iter != op_node_context.input_map.end();) {
string src_name = iter->first;
std::vector<std::pair<int32_t, int32_t>> &input_idx = iter->second;
if (src_name == ge::kInputFromFusionScope) {
std::vector<std::pair<int32_t, int32_t>> &input_idx = iter->second;
for (const auto &in_pair : input_idx) {
if (in_pair.second != kControlSlot) {
auto data = remap_data_input[fusion_op_name + std::to_string(in_pair.first)];
@@ -3772,8 +3772,8 @@ void TensorFlowModelParser::UpdateInnerOutputMap(const string &fusion_op_name, O
std::map<std::string, std::vector<std::pair<int32_t, int32_t>>> tmp_output_map;
for (auto iter = op_node_context.output_map.begin(); iter != op_node_context.output_map.end();) {
string dst_name = iter->first;
std::vector<std::pair<int32_t, int32_t>> &output_idx = iter->second;
if (dst_name == ge::kOutputToFusionScope) {
std::vector<std::pair<int32_t, int32_t>> &output_idx = iter->second;
for (const auto &out_pair : output_idx) {
if (out_pair.second != kControlSlot) {
auto data_outputs = remap_data_output[fusion_op_name + std::to_string(out_pair.second)];

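Among the tensorflow_parser.cc changes, OptimizeSnapShot now collects new control inputs into added_inputs and appends them only after the scan over the node's existing inputs. A self-contained sketch of that ordering, with std::vector standing in for the protobuf repeated field; AddControlInputIfMissing is an illustrative name, not from the repository.

#include <string>
#include <vector>

// Illustrative helper: the scan over existing inputs finishes before anything
// is appended, so the container is never grown while it is being iterated.
void AddControlInputIfMissing(std::vector<std::string> &inputs, const std::string &item) {
  const std::string control_input = "^" + item;
  bool is_exist_input = false;
  for (const std::string &input : inputs) {  // read-only pass over current inputs
    if (input == control_input) {
      is_exist_input = true;
      break;
    }
  }
  if (!is_exist_input) {
    inputs.push_back(control_input);         // append only after the scan completes
  }
}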

parser/tensorflow/tensorflow_reshape_parser.cc  +2 -4

@@ -33,9 +33,6 @@ Status TensorFlowReshapeParser::ParseDesc(const domi::tensorflow::AttrValue &att
GE_CHK_BOOL_RET_STATUS(TensorFlowUtil::ParseFromAttrValueList(ge_desc, a_list, 0, tf_datatype), PARAM_INVALID,
"parse ge_desc failed.");
uint32_t size_type = 1;
int64_t real_size = 1;
int64_t tmp_dim = 0;

auto data_type = ge_desc.GetDataType();
bool type_ret = ge::TypeUtils::GetDataTypeLength(data_type, size_type);
GE_IF_BOOL_EXEC(!type_ret,
@@ -45,8 +42,9 @@ Status TensorFlowReshapeParser::ParseDesc(const domi::tensorflow::AttrValue &att
ge::TypeUtils::DataTypeToSerialString(data_type).c_str());
return PARAM_INVALID);
// calculate size
int64_t real_size = 1;
for (uint32_t j = 0; j < ge_desc.GetShape().GetDimNum(); ++j) {
tmp_dim = ge_desc.GetShape().GetDim(j);
int64_t tmp_dim = ge_desc.GetShape().GetDim(j);
GE_IF_BOOL_EXEC(tmp_dim < 0, real_size = tmp_dim * (-1) * real_size; continue;);
real_size *= tmp_dim;
}


parser/tensorflow/tensorflow_shape_n_parser.cc  +4 -0

@@ -155,10 +155,14 @@ Status TensorFlowShapeNParser::ParseParams(const Message *op_src, ge::OpDescPtr

// AUTO GEN PLEASE DO NOT MODIFY IT
Status TensorFlowShapeNParser::PreParseParams(const domi::tensorflow::NodeDef *node, ShapeNOperator *op) {
(void)node;
(void)op;
return SUCCESS;
}

Status TensorFlowShapeNParser::PostParseParams(const domi::tensorflow::NodeDef *node, ShapeNOperator *op) {
(void)node;
(void)op;
return SUCCESS;
}



parser/tensorflow/tensorflow_squeeze_parser.cc  +2 -4

@@ -37,9 +37,6 @@ Status TensorFlowSqueezeParser::ParseDesc(const domi::tensorflow::AttrValue &att
GE_CHK_BOOL_RET_STATUS(TensorFlowUtil::ParseFromAttrValueList(ge_desc, a_list, 0, tf_datatype), domi::PARAM_INVALID,
"parse ge_desc failed.");
uint32_t size_type;
int64_t real_size = 1;
int64_t tmp_dim = 0;

auto data_type = ge_desc.GetDataType();
bool type_ret = ge::TypeUtils::GetDataTypeLength(data_type, size_type);
GE_IF_BOOL_EXEC(!type_ret,
@@ -49,8 +46,9 @@ Status TensorFlowSqueezeParser::ParseDesc(const domi::tensorflow::AttrValue &att
ge::TypeUtils::DataTypeToSerialString(data_type).c_str());
return domi::PARAM_INVALID);
// calculate size
int64_t real_size = 1;
for (uint32_t j = 0; j < ge_desc.GetShape().GetDimNum(); ++j) {
tmp_dim = ge_desc.GetShape().GetDim(j);
int64_t tmp_dim = ge_desc.GetShape().GetDim(j);
GE_IF_BOOL_EXEC(tmp_dim < 0, real_size = tmp_dim * (-1) * real_size; continue;);
PARSER_INT64_MULCHECK(real_size, tmp_dim);
real_size *= tmp_dim;


parser/tensorflow/tensorflow_util.cc  +1 -2

@@ -267,13 +267,12 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY domi::Status TensorFlowUtil::Tr
GE_CHK_BOOL_RET_STATUS(ParseFromAttrValueList(ge_desc, a_list, i, tf_datatype), PARAM_INVALID,
"parse ge_desc failed.");
uint32_t size_type = 1;
int64_t tmp_dim = 0;
auto data_type = ge_desc.GetDataType();
GE_CHK_BOOL_RET_STATUS(ge::TypeUtils::GetDataTypeLength(data_type, size_type), PARAM_INVALID,
"dataType no define size , parse ge_desc failed.");
// get size
for (uint32_t j = 0; j < ge_desc.GetShape().GetDimNum(); ++j) {
tmp_dim = ge_desc.GetShape().GetDim(j);
int64_t tmp_dim = ge_desc.GetShape().GetDim(j);

// The shape infered by fusedbatchnormgrad and mean calling tensorflow is not accurate.
// Here, special treatment is given to the two operators.


tests/depends/error_manager/src/error_manager_stub.cc  +1 -1

@@ -98,7 +98,7 @@ void ErrorManager::SetStage(const std::string &first_stage, const std::string &s
}

struct error_message::Context &ErrorManager::GetErrorManagerContext() {
struct error_message::Context error_context;
static struct error_message::Context error_context;
return error_context;
}

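The stub change above makes the returned error_message::Context a function-local static, so the reference handed back stays valid after the call instead of dangling. A tiny sketch of that pattern; ContextSketch and GetContextSketch are illustrative names.

// Illustrative names; the real code returns error_message::Context.
struct ContextSketch {
  int trace_id = 0;
};

ContextSketch &GetContextSketch() {
  static ContextSketch ctx;  // constructed once, outlives every call
  return ctx;
}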


tests/st/testcase/origin_models/test_snapshot.pb  BIN


tests/st/testcase/test_tensorflow_parser.cc  +18 -0

@@ -2387,5 +2387,23 @@ TEST_F(STestTensorflowParser, tensorflow_GraphDefOptimizeIdentity_test)
Status ret = tensorflow_parser.GraphDefOptimizeIdentity(&graph_def, nodedef_map, nodedef_to_optimize);
EXPECT_EQ(ret, ge::PARAM_INVALID);
}
TEST_F(STestTensorflowParser, tensorflow_optimizer_snapshot_no_retval_test) {
std::string caseDir = __FILE__;
std::size_t idx = caseDir.find_last_of("/");
caseDir = caseDir.substr(0, idx);
const std::string root_proto = caseDir + "/origin_models/test_snapshot.pb";
domi::tensorflow::GraphDef graphDef;

bool protoRet =
parser::ReadProtoFromBinaryFile(root_proto.c_str(), &graphDef);
ASSERT_EQ(protoRet, true);

TensorFlowModelParser tensorflow_parser;
ge::ComputeGraphPtr root_graph =
ge::parser::MakeShared<ge::ComputeGraph>("tmp_graph");
Status ret = tensorflow_parser.ParseProto(
reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
EXPECT_EQ(FAILED, ret);
}

} // namespace ge

tests/ut/parser/testcase/tensorflow_parser_testcase/tensorflow_parser_unittest.cc  +29 -0

@@ -188,4 +188,33 @@ TEST_F(UtestTensorflowParser, tensorflow_parser_with_external_graph) {
ret = TensorFlowModelParser::AddExternalGraph(root_graph);
EXPECT_EQ(ret, INTERNAL_ERROR);
}

TEST_F(UtestTensorflowParser, optimize_snapshot) {
domi::tensorflow::GraphDef graph_def;

auto mul_node = graph_def.add_node();
mul_node->set_name("optimizer/Mul");
mul_node->set_op("Mul");
mul_node->add_input("Snapshot:0");

auto snapshot_node = graph_def.add_node();
snapshot_node->set_name("Snapshot");
snapshot_node->set_op("Snapshot");
snapshot_node->add_input("loss_scale/read:0");
snapshot_node->add_input("^ShuffleNet/AssignMovingAvg");

auto identity_node = graph_def.add_node();
identity_node->set_name("loss_scale/read");
identity_node->set_op("Identity");
identity_node->add_input("loss_scale/ref:0");

auto assign_node = graph_def.add_node();
assign_node->set_name("ShuffleNet/AssignMovingAvg");
assign_node->set_op("AssignSub");
assign_node->add_input("ShuffleNet/moving_mean:0");

Status ret = TensorFlowModelParser().GraphDefOptimize(&graph_def);
EXPECT_EQ(ret, ge::SUCCESS);
}

} // namespace ge
