diff --git a/parser/caffe/caffe_parser.cc b/parser/caffe/caffe_parser.cc
index c2842be..aa893af 100644
--- a/parser/caffe/caffe_parser.cc
+++ b/parser/caffe/caffe_parser.cc
@@ -1097,8 +1097,8 @@ Status CaffeModelParser::AddUserOutNodesTop() {
 }
 
 Status CaffeModelParser::AddOutputTop(const domi::caffe::NetParameter &proto_message) {
-  for (int32_t i = 0; i < proto_message.layer_size(); i++) {
-    const domi::caffe::LayerParameter &layer = proto_message.layer(i);
+  for (int32_t j = 0; j < proto_message.layer_size(); j++) {
+    const domi::caffe::LayerParameter &layer = proto_message.layer(j);
 
     if (!CheckValidLayer(layer)) {
       continue;
@@ -1296,8 +1296,8 @@ Status CaffeModelParser::ParseFromMemory(const char *data, uint32_t size, ge::Co
 
       // parse ParamSpec
      std::vector<string> v_param_names;
-      for (int i = 0; i < layer.param_size(); i++) {
-        const domi::caffe::ParamSpec &param = layer.param(i);
+      for (int j = 0; j < layer.param_size(); j++) {
+        const domi::caffe::ParamSpec &param = layer.param(j);
        GE_IF_BOOL_EXEC((param.has_name()), v_param_names.emplace_back(param.name()));
      }
 
@@ -1515,8 +1515,8 @@ Status CaffeModelParser::Parse(const char *model_path, ge::ComputeGraphPtr &grap
 
      // parse ParamSpec
      std::vector<string> v_param_names;
-      for (int i = 0; i < layer.param_size(); i++) {
-        const domi::caffe::ParamSpec &param = layer.param(i);
+      for (int j = 0; j < layer.param_size(); j++) {
+        const domi::caffe::ParamSpec &param = layer.param(j);
        GE_IF_BOOL_EXEC((param.has_name()), v_param_names.emplace_back(param.name()));
      }
 
@@ -2095,17 +2095,17 @@ Status CaffeWeightsParser::ConvertLayerParameter(const google::protobuf::Message
                                                  ge::ComputeGraphPtr &graph) {
   vector<string> need_share_layers;
   const domi::caffe::LayerParameter *layer = reinterpret_cast<const domi::caffe::LayerParameter *>(layer_message);
-  const string &layer_name = layer->name();
+  const string &share_layer_name = layer->name();
   const string &layer_type = layer->type();
   for (auto p_iter = params_share_map.begin(); p_iter != params_share_map.end(); ++p_iter) {
-    if (find(p_iter->second.begin(), p_iter->second.end(), layer_name) != p_iter->second.end()) {
-      GELOGI("layer:%s need share weights !", layer_name.c_str());
+    if (find(p_iter->second.begin(), p_iter->second.end(), share_layer_name) != p_iter->second.end()) {
+      GELOGI("layer:%s need share weights !", share_layer_name.c_str());
       need_share_layers = p_iter->second;
     }
   }
 
   if (need_share_layers.size() == 0) {
-    need_share_layers.push_back(layer_name);
+    need_share_layers.push_back(share_layer_name);
   }
 
   for (auto share_iter = need_share_layers.begin(); share_iter != need_share_layers.end(); ++share_iter) {
@@ -2211,27 +2211,27 @@ Status CaffeWeightsParser::ConvertNetParameter(const NetParameter &param, ge::Co
   for (int i = 0; i < num_layer; ++i) {
     const LayerParameter &layer = param.layer(i);
 
-    const string &layer_name = layer.name();
+    const string &share_layer_name = layer.name();
 
     // Skip some layer types
     if (skiped_layer_type_.find(layer.type()) != skiped_layer_type_.end()) {
-      GELOGI("Skip layer %s", layer_name.c_str());
+      GELOGI("Skip layer %s", share_layer_name.c_str());
       continue;
     }
 
-    GELOGI("Parse layer %s", layer_name.c_str());
+    GELOGI("Parse layer %s", share_layer_name.c_str());
 
     vector<string> need_share_layers;
 
     for (auto p_iter = params_share_map.begin(); p_iter != params_share_map.end(); ++p_iter) {
-      if (find(p_iter->second.begin(), p_iter->second.end(), layer_name) != p_iter->second.end()) {
-        GELOGI("Layer: %s need share weights !", layer_name.c_str());
+      if (find(p_iter->second.begin(), p_iter->second.end(), share_layer_name) != p_iter->second.end()) {
+        GELOGI("Layer: %s need share weights !", share_layer_name.c_str());
         need_share_layers = p_iter->second;
       }
     }
 
     if (need_share_layers.size() == 0) {
-      need_share_layers.push_back(layer_name);
+      need_share_layers.push_back(share_layer_name);
     }
 
     for (auto share_iter = need_share_layers.begin(); share_iter != need_share_layers.end(); ++share_iter) {
diff --git a/parser/common/parser_utils.cc b/parser/common/parser_utils.cc
index 74febc9..5e50ef3 100644
--- a/parser/common/parser_utils.cc
+++ b/parser/common/parser_utils.cc
@@ -212,7 +212,7 @@ Status ParserUtils::HandleInputContext(const NodePtr &node,
       // add control edge
       if (node->GetInControlAnchor() != nullptr) {
         for (const auto &out_anchor : node->GetInControlAnchor()->GetPeerAnchors()) {
-          graphStatus ret = GraphUtils::AddEdge(out_anchor, peer_in_anchor->GetOwnerNode()->GetInControlAnchor());
+          ret = GraphUtils::AddEdge(out_anchor, peer_in_anchor->GetOwnerNode()->GetInControlAnchor());
           if (ret != GRAPH_SUCCESS) {
             REPORT_CALL_ERROR("E19999", "add control edge from %s to %s failed.",
                               out_anchor->GetOwnerNode()->GetName().c_str(),
diff --git a/parser/onnx/onnx_parser.cc b/parser/onnx/onnx_parser.cc
index e27c7f6..7f7ef44 100644
--- a/parser/onnx/onnx_parser.cc
+++ b/parser/onnx/onnx_parser.cc
@@ -767,8 +767,8 @@ Status OnnxModelParser::AdaptAndFindAllOnnxGraph(ge::onnx::GraphProto &root_onnx
     return FAILED;
   }
 
-  for (const auto &onnx_graph : onnx_graphs) {
-    onnx_graph_tasks.push(onnx_graph);
+  for (const auto &onnx_graph_task : onnx_graphs) {
+    onnx_graph_tasks.push(onnx_graph_task);
   }
   for (const auto &itr : name_to_onnx_subgraph) {
     name_to_onnx_graph.emplace(itr.first, itr.second);
diff --git a/parser/tensorflow/graph_optimizer.h b/parser/tensorflow/graph_optimizer.h
index 9f73d69..bf8c85b 100644
--- a/parser/tensorflow/graph_optimizer.h
+++ b/parser/tensorflow/graph_optimizer.h
@@ -55,7 +55,7 @@ class ParserGraphOptimizer {
 
   const bool GetLocalFmkopFlag() const { return local_fmk_op_flag_; }
 
-  void SetFuncBinPath(std::string isFuncBinPath) { func_bin_path_ = isFuncBinPath; }
+  void SetFuncBinPath(const std::string &isFuncBinPath) { func_bin_path_ = isFuncBinPath; }
   const std::string GetFuncBinPath() const { return func_bin_path_; }
 
   domi::Status InsertHWCK2FZ(ge::OutDataAnchorPtr src_anchor, ge::InDataAnchorPtr dst_anchor,
diff --git a/parser/tensorflow/tensorflow_parser.cc b/parser/tensorflow/tensorflow_parser.cc
index 640ab0a..4fa70ed 100644
--- a/parser/tensorflow/tensorflow_parser.cc
+++ b/parser/tensorflow/tensorflow_parser.cc
@@ -1528,7 +1528,7 @@ Status TensorFlowModelParser::ParseAllGraph(const google::protobuf::Message *pro
     if (tensorflow_op_map.find(node_op) == tensorflow_op_map.end()) {
       GELOGW("%s not found in tensorflow_op_map.", node_op.c_str());
     }
-    Status ret = AddNode(node_def, graph, scope_graph);
+    ret = AddNode(node_def, graph, scope_graph);
     if (ret != SUCCESS) {
       GELOGE(ret, "Add op[%s] failed", node_def->name().c_str());
       DeleteFuisonNodeDef();
@@ -2325,7 +2325,7 @@ Status TensorFlowModelParser::ParseProto(const google::protobuf::Message *proto,
     }
 
     // Do not exit immediately when there is an error, wait until all errors are collected before exiting
-    Status ret = AddFmkNodeDefToMap(*graph_def, node_def, op_node_name_list);
+    ret = AddFmkNodeDefToMap(*graph_def, node_def, op_node_name_list);
     GE_CHK_STATUS_EXEC(ret, return PARAM_INVALID, "add node_def to map failed");
   }
   PARSER_TIMESTAMP_END(AddFmkNodeDefToMap, "TensorFlowModelParser::AddFmkNodeDefToMap");