@@ -82,4 +82,13 @@ bool TransOpUtil::CheckPrecisionLoss(const ge::NodePtr &src_node) {
 }
 return true;
 }
+std::string TransOpUtil::TransopMapToString() {
+  std::string buffer;
+  for (auto &key : Instance().transop_index_map_) {
+    buffer += key.first + " ";
+  }
+  return buffer;
+}
 } // namespace ge
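For reference, the new TransopMapToString() joins the keys of transop_index_map_ with single spaces, so the E19025 "reason" text built in a later hunk reads as a space-separated list of the registered trans op types. A minimal standalone sketch of that behavior, using hypothetical keys ("Cast", "Reshape", "TransData") rather than the real registration set:

#include <iostream>
#include <map>
#include <string>

int main() {
  // Stand-in for TransOpUtil::Instance().transop_index_map_ with made-up entries.
  std::map<std::string, int> transop_index_map = {{"Cast", 0}, {"Reshape", 0}, {"TransData", 0}};
  std::string buffer;
  for (auto &key : transop_index_map) {  // same concatenation as TransopMapToString()
    buffer += key.first + " ";
  }
  // Mirrors the reason text assembled in CreateTransNode below.
  std::cout << "it must be " << buffer << std::endl;  // -> "it must be Cast Reshape TransData "
  return 0;
}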
@@ -35,6 +35,8 @@ class GE_FUNC_HOST_VISIBILITY GE_FUNC_DEV_VISIBILITY TransOpUtil {
 static bool CheckPrecisionLoss(const NodePtr &src_node);
+static std::string TransopMapToString();
 private:
 TransOpUtil();
@@ -218,6 +218,9 @@ NodePtr CreateTransNode(const std::string &name, const std::string &node_type, c
 auto index = TransOpUtil::GetTransOpDataIndex(node_type);
 if (index < 0) {
+ErrorManager::GetInstance().ATCReportErrMessage(
+    "E19025", {"situation", "reason"},
+    {"The trans node type[" + node_type + "]", "it must be " + TransOpUtil::TransopMapToString()});
 GELOGE(INTERNAL_ERROR, "The trans node type %s does not exists", node_type.c_str());
 return nullptr;
 }
@@ -386,6 +389,8 @@ Status RecoverTransRoadForVar(const NodePtr &var, const VarTransRoad &road) {
 auto trans_name = var->GetName() + "_trans_" + std::to_string(index++);
 auto ret = RecoverOneTransNodeForVar(trans_name, *iter, last_node, last_node);
 if (ret != SUCCESS) {
+ErrorManager::GetInstance().ATCReportErrMessage(
+    "E15001", {"variable", "index", "type"}, {var->GetName(), std::to_string(index), iter->node_type});
 GELOGE(INTERNAL_ERROR, "Failed to recover trans node for variable %s, index %d, type %s", var->GetName().c_str(),
 index, iter->node_type.c_str());
 return INTERNAL_ERROR;
@@ -418,6 +423,8 @@ Status RecoverTransRoadForVarRef(const std::set<NodePtr> &nodes, const VarTransR
 auto trans_name = var->GetName() + "_trans_" + std::to_string(index++);
 auto ret = RecoverOneTransNodeForVarRef(trans_name, *iter, last_node, last_node);
 if (ret != SUCCESS) {
+ErrorManager::GetInstance().ATCReportErrMessage(
+    "E15001", {"variable", "index", "type"}, {var->GetName(), std::to_string(index), iter->node_type});
 GELOGE(INTERNAL_ERROR, "Failed to recover trans node for variable %s, index %d, type %s",
 var->GetName().c_str(), index, iter->node_type.c_str());
 return INTERNAL_ERROR;
@@ -570,6 +577,8 @@ Status CheckIfDynamicBatchScene(NodePtr &data_node, bool &is_dynamic_batch, Node
 std::string related_node_name;
 if (AttrUtils::GetStr(data_node->GetOpDesc(), kMbatchSwitchnName, related_node_name)) {
 if (related_node_name.empty()) {
+ErrorManager::GetInstance().ATCReportErrMessage(
+    "E15002", {"opname", "value", "reason"}, {data_node->GetName(), "flag", "but the value is empty"});
 GELOGE(INTERNAL_ERROR, "The data node %s has switchn node flag, but the value is empty",
 data_node->GetName().c_str());
 return INTERNAL_ERROR;
@@ -581,6 +590,9 @@ Status CheckIfDynamicBatchScene(NodePtr &data_node, bool &is_dynamic_batch, Node
 }
 }
 if (switchn_node == nullptr) {
+ErrorManager::GetInstance().ATCReportErrMessage(
+    "E15002", {"opname", "value", "reason"},
+    {data_node->GetName(), related_node_name, "but can not find it on the graph"});
 GELOGE(INTERNAL_ERROR, "The data node %s has switchn node %s, but can not find it on the graph",
 data_node->GetName().c_str(), related_node_name.c_str());
 return INTERNAL_ERROR;
@@ -681,6 +693,10 @@ Status ProcessInputNC1HWC0DynShape(NodePtr &node_ptr, bool &is_dynamic_batch, No
 ge::GeShape old_shape = input->GetShape();
 bool support = ((old_format == FORMAT_NC1HWC0) || (old_format == FORMAT_NCHW) || (old_format == FORMAT_NHWC));
 if (!support) {
+ErrorManager::GetInstance().ATCReportErrMessage(
+    "E19014", {"opname", "value", "reason"},
+    {op_desc->GetName(), "format[" + TypeUtils::FormatToSerialString(old_format) + "]",
+     "only support FORMAT_NC1HWC0,FORMAT_NCHW,FORMAT_NHWC"});
 GELOGE(INTERNAL_ERROR, "The format [%s] is unsupported", TypeUtils::FormatToSerialString(old_format).c_str());
 return FAILED;
 }
@@ -761,6 +777,9 @@ Status GetStorageFormatAndShape(OpDescPtr &op_desc, const GeTensorDescPtr &tenso
 op_desc->GetName().c_str(), TypeUtils::FormatToSerialString(storage_format).c_str(),
 formats::JoinToString(storage_shape).c_str());
 } else {
+ErrorManager::GetInstance().ATCReportErrMessage(
+    "E15003", {"opname", "format"},
+    {op_desc->GetName(), TypeUtils::FormatToSerialString(storage_format)});
 GELOGE(PARAM_INVALID, "Update node by storage format failed, storage_shape not set. "
 "node: [%s], storage_format [%s]",
 op_desc->GetName().c_str(), TypeUtils::FormatToSerialString(storage_format).c_str());
@@ -899,9 +918,14 @@ Status ProcessNetoutputNodeDynShape(NodePtr &node) {
 // check if is_output_adjust_hw_layout is set
 if (NeedUpdateFormatByOutputTypeParm(op_desc, index)) {
 if ((old_format != FORMAT_NCHW) && (old_format != FORMAT_NHWC) && (old_format != FORMAT_NC1HWC0)) {
+ErrorManager::GetInstance().ATCReportErrMessage(
+    "E19014", {"opname", "value", "reason"},
+    {op_desc->GetName(), "format[" + TypeUtils::FormatToSerialString(old_format) + "]",
+     "only support FORMAT_NC1HWC0,FORMAT_NCHW,FORMAT_NHWC"});
 GELOGE(INTERNAL_ERROR, "Format is not one of NCHW, NHWC, NC1HWC0.");
 return FAILED;
 }
 GeTensorDesc old_desc(old_shape, old_format, old_dtype);
 if (ProcessNetoutputNodeFp16Nc1hwc0DynShape(old_desc, net_output_input_desc, src_node) != SUCCESS) {
 GELOGE(INTERNAL_ERROR, "Process netoutput fp16 nc1hwc0.");
@@ -1034,6 +1058,9 @@ Status GraphPrepare::CheckRefInputNode(const NodePtr &node, const std::string &i
 }
 bool is_acceptable = (acceptable_types.find(input_type) != acceptable_types.end());
 if (!is_acceptable) {
+ErrorManager::GetInstance().ATCReportErrMessage(
+    "E15005", {"opname", "optype", "opname1", "optype1"},
+    {op_desc->GetName(), node->GetType(), input_op_desc->GetName(), input_op_desc->GetType()});
 GELOGE(PARAM_INVALID, "The ref input of ref node %s[%s] must be ref node or variable, but %s[%s]isn't.",
 node->GetName().c_str(), node->GetType().c_str(), input_op_desc->GetName().c_str(),
 input_op_desc->GetType().c_str());
@@ -1126,6 +1153,9 @@ Status GraphPrepare::UpdateInput(const std::vector<GeTensor> &user_input) {
 }
 if ((index < 0) || (static_cast<size_t>(index) >= user_input.size())) {
+std::string situation = "data op index[" + std::to_string(index) + "]";
+std::string reason = "it must be less than user_input size[" + std::to_string(user_input.size()) + "]";
+ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"}, {situation, reason});
 GELOGE(PARAM_INVALID, "user_input size = %zu, graph data op index = %ld.", user_input.size(), index);
 return FAILED;
 }
@@ -1138,6 +1168,9 @@ Status GraphPrepare::UpdateInput(const std::vector<GeTensor> &user_input) {
 if (need_check_internal_format) {
 bool is_internal = TypeUtils::IsInternalFormat(format) || TypeUtils::IsInternalFormat(origin_format);
 if (is_internal) {
+ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"},
+    {"Input format[" + TypeUtils::FormatToSerialString(format) + "] or origin_format[" +
+     TypeUtils::FormatToSerialString(origin_format) + "]", "it is not supported"});
 GELOGE(PARAM_INVALID, "Input format %s or origin_format %s is not support.",
 TypeUtils::FormatToSerialString(format).c_str(),
 TypeUtils::FormatToSerialString(origin_format).c_str());
@@ -1149,6 +1182,8 @@ Status GraphPrepare::UpdateInput(const std::vector<GeTensor> &user_input) {
 uint32_t length = 1;
 bool type_ret = TypeUtils::GetDataTypeLength(data_type, length);
 if (!type_ret) {
+ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"},
+    {"Input datatype[" + TypeUtils::DataTypeToSerialString(data_type) + "]", "it is not supported"});
 GELOGE(PARAM_INVALID, "Input datatype %s is not support.",
 TypeUtils::DataTypeToSerialString(data_type).c_str());
 return FAILED;
@@ -1163,6 +1198,10 @@ Status GraphPrepare::UpdateInput(const std::vector<GeTensor> &user_input) {
 return FAILED);
 bool size_check = (size != 0 && shape_size != size);
 if (size_check) {
+std::string situation = "input data size[" + std::to_string(size) +
+    "] and shape_size[" + std::to_string(shape_size) + "]";
+std::string reason = "because size != 0 and shape_size != size";
+ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"}, {situation, reason});
 GELOGE(PARAM_INVALID, "input data size =%ld, shape_size =%ld.", size, shape_size);
 return FAILED;
 }
@@ -1502,6 +1541,8 @@ Status GraphPrepare::VerifyConstOp(const NodePtr &node) {
 uint32_t length = 1;
 bool type_ret = TypeUtils::GetDataTypeLength(data_type, length);
 if (!type_ret) {
+ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"},
+    {"Input datatype[" + TypeUtils::DataTypeToSerialString(data_type) + "]", "it is not supported"});
 GELOGE(PARAM_INVALID, "Input datatype %s is not support.", TypeUtils::DataTypeToSerialString(data_type).c_str());
 return FAILED;
 }
@@ -1511,14 +1552,20 @@ Status GraphPrepare::VerifyConstOp(const NodePtr &node) {
 if (shape_size == 0) {
 if (ge_tensor_desc.GetShape().GetDims().size() == 0) {
 // shape = [], means it's a sclar tensor.
-GE_CHK_BOOL_EXEC(data_size / length == 1, return PARAM_INVALID, "Const is invalid scalar tensor.");
+GE_CHK_BOOL_EXEC(data_size / length == 1,
+    ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {"Const is invalid scalar tensor."});
+    return PARAM_INVALID, "Const is invalid scalar tensor.");
 } else {
 // shape = [x, y, 0,...], means it's a vector tensor that value is [].
-GE_CHK_BOOL_EXEC(data_size == 0, return PARAM_INVALID, "Const is invalid vector scalar.");
+GE_CHK_BOOL_EXEC(data_size == 0,
+    ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {"Const is invalid vector scalar."});
+    return PARAM_INVALID, "Const is invalid vector scalar.");
 }
 } else {
-GE_CHK_BOOL_EXEC(data_size == static_cast<size_t>(shape_size * length) && data_size != 0, return PARAM_INVALID,
-    "Const input data size is not equal with tensor desc shape");
+GE_CHK_BOOL_EXEC(data_size == static_cast<size_t>(shape_size * length) && data_size != 0,
+    ErrorManager::GetInstance().ATCReportErrMessage(
+        "E10043", {"reason"}, {"Const input data size is not equal with tensor desc shape"});
+    return PARAM_INVALID, "Const input data size is not equal with tensor desc shape");
 }
 return SUCCESS;
 }
@@ -1542,6 +1589,9 @@ Status GraphPrepare::CheckUserInput(const std::vector<GeTensor> &user_input) {
 return GE_GRAPH_INIT_FAILED;
 }
 if ((index < 0) || (static_cast<size_t>(index) >= user_input.size())) {
+std::string situation = "data op index[" + std::to_string(index) + "]";
+std::string reason = "it must be less than user_input size[" + std::to_string(user_input.size()) + "]";
+ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"}, {situation, reason});
 GELOGE(GE_GRAPH_INIT_FAILED, "user_input size:%zu, data op index:%ld.", user_input.size(), index);
 return GE_GRAPH_INIT_FAILED;
 }
@@ -1549,6 +1599,9 @@ Status GraphPrepare::CheckUserInput(const std::vector<GeTensor> &user_input) {
 for (size_t i = 0; i < desc.GetShape().GetDimNum(); ++i) {
 if (desc.GetShape().GetDim(i) < 0) {
+std::string situation = "data dim[" + std::to_string(i) + "][" + std::to_string(desc.GetShape().GetDim(i)) + "]";
+std::string reason = "it must be >= 0";
+ErrorManager::GetInstance().ATCReportErrMessage("E19025", {"situation", "reason"}, {situation, reason});
 GELOGE(GE_GRAPH_INIT_FAILED, "data dim %zu is not supported, need >= 0, real:%ld.", i,
 desc.GetShape().GetDim(i));
 return GE_GRAPH_INIT_FAILED;
@@ -53,16 +53,6 @@
 } \
 } while (0)
-#define AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(expr, _status, errormsg) \
-do { \
-bool b = (expr); \
-if (!b) { \
-GELOGE(_status, errormsg); \
-ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {errormsg}); \
-return _status; \
-} \
-} while (0)
 namespace {
 const int32_t DEFAULT_MATRIX_R0C0_YUV2RGB = 298;
 const int32_t DEFAULT_MATRIX_R0C1_YUV2RGB = 0;
@@ -316,9 +306,8 @@ NodePtr AippOp::FindDataByIndex(const ComputeGraphPtr &graph, int rank) {
 }
 return node;
 }
-GELOGE(PARAM_INVALID, "Can not find the data node by index %d", rank);
-string errormsg = "Can not find the data node by aipp parameter related_input_rank " + to_string(rank);
-ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {errormsg});
+string error_msg = "Can not find the data node by aipp parameter related_input_rank " + to_string(rank);
+GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
 return nullptr;
 }
 Status AippOp::GetAndCheckTarget(const ComputeGraphPtr &graph, int rank, NodePtr &target,
@@ -363,10 +352,10 @@ Status AippOp::GetAndCheckTarget(const ComputeGraphPtr &graph, int rank, NodePtr
 }
 if (!edge_indexes.empty() && (*edge_indexes.rbegin() >= data_node->GetOutDataNodes().size())) {
-GELOGE(PARAM_INVALID, "input_edge_idx %u should smaller than out edge size of target input %zu",
-    *edge_indexes.rbegin(), data_node->GetOutDataNodes().size());
-string errormsg = "The aipp parameter input_edge_idx should be smaller than the target input's outnodes.";
-ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {errormsg});
+string error_msg = "The aipp parameter input_edge_idx[" + std::to_string(*edge_indexes.rbegin()) +
+    "] should be smaller than the target input[" +
+    std::to_string(data_node->GetOutDataNodes().size()) + "]'s outnodes.";
+GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
 return PARAM_INVALID;
 }
 target = data_node;
@@ -439,8 +428,7 @@ Status AippOp::ConvertRelatedInputNameToRank() {
 if (!convert_flag) {
 string error_msg = "Top name " + related_input_name + "convert rank failed, Please"
 " ensure top name in aipp config is the top name of data node.";
-ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {error_msg});
-GELOGE(PARAM_INVALID, "Top name[%s] converts rank failed.", related_input_name.c_str());
+GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
 return PARAM_INVALID;
 }
@@ -537,87 +525,87 @@ Status AippOp::SetDefaultParams() {
 Status AippOp::ValidateParams() {
 GE_CHECK_NOTNULL(aipp_params_);
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->aipp_mode() != domi::AippOpParams::undefined, PARAM_INVALID,
-    "When insert AIPP op, aipp_mode must be configured as static or dynamic ");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->var_reci_chn_0_size() <= 1, PARAM_INVALID,
-    "The parameter var_reci_chn_0 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->var_reci_chn_1_size() <= 1, PARAM_INVALID,
-    "The parameter var_reci_chn_1 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->var_reci_chn_2_size() <= 1, PARAM_INVALID,
-    "The parameter var_reci_chn_2 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->var_reci_chn_3_size() <= 1, PARAM_INVALID,
-    "The parameter var_reci_chn_3 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r0c0_size() <= 1, PARAM_INVALID,
-    "The parameter matrix_r0c0 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r0c1_size() <= 1, PARAM_INVALID,
-    "The parameter matrix_r0c1 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r0c2_size() <= 1, PARAM_INVALID,
-    "The parameter matrix_r0c2 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r1c0_size() <= 1, PARAM_INVALID,
-    "The parameter matrix_r1c0 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r1c1_size() <= 1, PARAM_INVALID,
-    "The parameter matrix_r1c1 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r1c2_size() <= 1, PARAM_INVALID,
-    "The parameter matrix_r1c2 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r2c0_size() <= 1, PARAM_INVALID,
-    "The parameter matrix_r2c0 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r2c1_size() <= 1, PARAM_INVALID,
-    "The parameter matrix_r2c1 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->matrix_r2c2_size() <= 1, PARAM_INVALID,
-    "The parameter matrix_r2c2 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->output_bias_0_size() <= 1, PARAM_INVALID,
-    "The parameter output_bias_0 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->output_bias_1_size() <= 1, PARAM_INVALID,
-    "The parameter output_bias_1 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->output_bias_2_size() <= 1, PARAM_INVALID,
-    "The parameter output_bias_2 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->input_bias_0_size() <= 1, PARAM_INVALID,
-    "The parameter input_bias_0 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->input_bias_1_size() <= 1, PARAM_INVALID,
-    "The parameter input_bias_1 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->input_bias_2_size() <= 1, PARAM_INVALID,
-    "The parameter input_bias_2 can not be configed repeatedly");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->input_edge_idx_size() <= 1, PARAM_INVALID,
-    "The parameter input_edge_idx can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->aipp_mode() != domi::AippOpParams::undefined, PARAM_INVALID,
+    "When insert AIPP op, aipp_mode must be configured as static or dynamic ");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->var_reci_chn_0_size() <= 1, PARAM_INVALID,
+    "The parameter var_reci_chn_0 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->var_reci_chn_1_size() <= 1, PARAM_INVALID,
+    "The parameter var_reci_chn_1 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->var_reci_chn_2_size() <= 1, PARAM_INVALID,
+    "The parameter var_reci_chn_2 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->var_reci_chn_3_size() <= 1, PARAM_INVALID,
+    "The parameter var_reci_chn_3 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->matrix_r0c0_size() <= 1, PARAM_INVALID,
+    "The parameter matrix_r0c0 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->matrix_r0c1_size() <= 1, PARAM_INVALID,
+    "The parameter matrix_r0c1 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->matrix_r0c2_size() <= 1, PARAM_INVALID,
+    "The parameter matrix_r0c2 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->matrix_r1c0_size() <= 1, PARAM_INVALID,
+    "The parameter matrix_r1c0 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->matrix_r1c1_size() <= 1, PARAM_INVALID,
+    "The parameter matrix_r1c1 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->matrix_r1c2_size() <= 1, PARAM_INVALID,
+    "The parameter matrix_r1c2 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->matrix_r2c0_size() <= 1, PARAM_INVALID,
+    "The parameter matrix_r2c0 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->matrix_r2c1_size() <= 1, PARAM_INVALID,
+    "The parameter matrix_r2c1 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->matrix_r2c2_size() <= 1, PARAM_INVALID,
+    "The parameter matrix_r2c2 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->output_bias_0_size() <= 1, PARAM_INVALID,
+    "The parameter output_bias_0 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->output_bias_1_size() <= 1, PARAM_INVALID,
+    "The parameter output_bias_1 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->output_bias_2_size() <= 1, PARAM_INVALID,
+    "The parameter output_bias_2 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->input_bias_0_size() <= 1, PARAM_INVALID,
+    "The parameter input_bias_0 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->input_bias_1_size() <= 1, PARAM_INVALID,
+    "The parameter input_bias_1 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->input_bias_2_size() <= 1, PARAM_INVALID,
+    "The parameter input_bias_2 can not be configed repeatedly");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->input_edge_idx_size() <= 1, PARAM_INVALID,
+    "The parameter input_edge_idx can not be configed repeatedly");
 const domi::AippOpParams::AippMode aipp_mode = aipp_params_->aipp_mode();
 if (aipp_mode == domi::AippOpParams::dynamic) {
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(
+GE_CHK_LOG_AND_ERRORMSG(
 aipp_params_->max_src_image_size() > 0, PARAM_INVALID,
 "For dynamic AIPP params, max_src_image_size must be set which number should be greater than 0");
 } else {
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->input_format() != domi::AippOpParams::UNDEFINED, PARAM_INVALID,
-    "Input format of AIPP conf is undefined");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->src_image_size_w() >= 0, PARAM_INVALID,
-    "Src_image_size_w must not be configed smaller than 0");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->src_image_size_h() >= 0, PARAM_INVALID,
-    "Src_image_size_h must not be configed smaller than 0");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->load_start_pos_w() >= 0, PARAM_INVALID,
-    "Load_start_pos_w must not be configed smaller than 0");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->load_start_pos_h() >= 0, PARAM_INVALID,
-    "Load_start_pos_h must not be configed smaller than 0");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->crop_size_w() >= 0, PARAM_INVALID,
-    "Crop_size_w must not be configed smaller than 0");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->resize_output_w() >= 0, PARAM_INVALID,
-    "Resize_output_w must not be configed smaller than 0");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->resize_output_h() >= 0, PARAM_INVALID,
-    "Resize_output_h must not be configed smaller than 0");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->left_padding_size() >= 0, PARAM_INVALID,
-    "Left_padding_size must not be configed smaller than 0");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->right_padding_size() >= 0, PARAM_INVALID,
-    "Right_padding_size must not be configed smaller than 0");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->top_padding_size() >= 0, PARAM_INVALID,
-    "Top_padding_size must not be configed smaller than 0");
-AIPP_RETURN_STATUS_AND_REPROT_ERRORMSG(aipp_params_->bottom_padding_size() >= 0, PARAM_INVALID,
-    "Bottom_padding_size must not be configed smaller than 0");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->input_format() != domi::AippOpParams::UNDEFINED, PARAM_INVALID,
+    "Input format of AIPP conf is undefined");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->src_image_size_w() >= 0, PARAM_INVALID,
+    "Src_image_size_w must not be configed smaller than 0");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->src_image_size_h() >= 0, PARAM_INVALID,
+    "Src_image_size_h must not be configed smaller than 0");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->load_start_pos_w() >= 0, PARAM_INVALID,
+    "Load_start_pos_w must not be configed smaller than 0");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->load_start_pos_h() >= 0, PARAM_INVALID,
+    "Load_start_pos_h must not be configed smaller than 0");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->crop_size_w() >= 0, PARAM_INVALID,
+    "Crop_size_w must not be configed smaller than 0");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->resize_output_w() >= 0, PARAM_INVALID,
+    "Resize_output_w must not be configed smaller than 0");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->resize_output_h() >= 0, PARAM_INVALID,
+    "Resize_output_h must not be configed smaller than 0");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->left_padding_size() >= 0, PARAM_INVALID,
+    "Left_padding_size must not be configed smaller than 0");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->right_padding_size() >= 0, PARAM_INVALID,
+    "Right_padding_size must not be configed smaller than 0");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->top_padding_size() >= 0, PARAM_INVALID,
+    "Top_padding_size must not be configed smaller than 0");
+GE_CHK_LOG_AND_ERRORMSG(aipp_params_->bottom_padding_size() >= 0, PARAM_INVALID,
+    "Bottom_padding_size must not be configed smaller than 0");
 }
 return SUCCESS;
@@ -790,17 +778,20 @@ Status AippOp::CreateAippData(const NodePtr &aipp_node) {
 int64_t batch_count = -1;
 if (GetDataDimN(data_node, ori_data_format, batch_count) != ge::SUCCESS) {
-GELOGE(PARAM_INVALID, "Get data_node dims and transfer to nchw_dims failed!");
+string error_msg = "Get data_node dims and transfer to nchw_dims failed!";
+GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
 return PARAM_INVALID;
 }
 if (batch_count <= 0) {
-GELOGE(PARAM_INVALID, "Batch count %ld is invalid", batch_count);
+string error_msg = "Batch count[" + std::to_string(batch_count) + "] is invalid, it must be positive.";
+GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
 return PARAM_INVALID;
 }
 int64_t max_dynamic_aipp_size = CalcMaxSize(batch_count);
 if (max_dynamic_aipp_size < 0) {
-GELOGE(PARAM_INVALID, "The dynamic aipp size is not positive.");
+string error_msg = "The dynamic aipp size is not positive";
+GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
 return PARAM_INVALID;
 }
@@ -124,19 +124,13 @@ Status InsertNewOpUtil::CheckInputNamePositionNotRepeat() {
 if (another_item->related_input_name().empty()) {
 string error_msg = "Can not both set related_input_name and related_input_rank!"
 " Please ensure param is the same with the first aipp config(related_input_name).";
-ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {error_msg});
-GELOGE(PARAM_INVALID,
-    "Can not both set related_input_rank and related_input_name!"
-    " Please ensure param is the same with the first aipp config(related_input_name).");
+GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
 return PARAM_INVALID;
 }
 if (item->related_input_name() == another_item->related_input_name()) {
 string error_msg = "Can not insert aipp to the same postion! Please ensure related_input_name"
 " param is different in different aipp config.";
-ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {error_msg});
-GELOGE(PARAM_INVALID,
-    "Can not insert aipp op to the same postion! Please ensure related_input_rank param "
-    "is different in different aipp config.");
+GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
 return PARAM_INVALID;
 }
 }
@@ -156,19 +150,13 @@ Status InsertNewOpUtil::CheckInputRankPositionNoRepeat() {
 if (!another_item->related_input_name().empty()) {
 string error_msg = "Can not both set related_input_rank and related_input_name!"
 " Please ensure param is the same with the first aipp config(related_input_rank).";
-ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {error_msg});
-GELOGE(PARAM_INVALID,
-    "Can not both set related_input_rank and related_input_name!"
-    " Please ensure param is the same with the first aipp config(related_input_rank).");
+GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
 return PARAM_INVALID;
 }
 if (item->related_input_rank() == another_item->related_input_rank()) {
 string error_msg = "Can not insert aipp to the same postion! Please ensure related_input_rank"
 " param is different in different aipp config.";
-ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {error_msg});
-GELOGE(PARAM_INVALID,
-    "Can not insert aipp op to the same postion! Please ensure related_input_rank param "
-    "is different in different aipp config.");
+GE_ERRORLOG_AND_ERRORMSG(PARAM_INVALID, error_msg.c_str());
 return PARAM_INVALID;
 }
 }
@@ -224,9 +212,10 @@ Status InsertNewOpUtil::CheckGraph(const ComputeGraphPtr &graph) {
 }
 }
 }
-GE_CHK_BOOL_RET_STATUS((aippNodes.size() == 0) || (aippNodes.size() == next_nodes_cnt), PARAM_INVALID,
-    "Can not config part of outputs of Data node to support AIPP, config all "
-    "of the outputs of Data to support AIPP, or config none of them");
+GE_CHK_LOG_AND_ERRORMSG((aippNodes.size() == 0) || (aippNodes.size() == next_nodes_cnt),
+    PARAM_INVALID,
+    "Can not config part of outputs of Data node to support AIPP, config all "
+    "of the outputs of Data to support AIPP, or config none of them");
 std::unique_ptr<domi::AippOpParams> aippParams(new (std::nothrow) domi::AippOpParams());
 GE_CHECK_NOTNULL(aippParams);
@@ -238,16 +227,19 @@ Status InsertNewOpUtil::CheckGraph(const ComputeGraphPtr &graph) {
 GE_CHK_STATUS(GetAippParams(currAippParam, aippNodes[i]));
 if (aippMode == domi::AippOpParams::static_) {
-GE_CHK_BOOL_RET_STATUS(aippParams->input_format() == currAippParam->input_format(), PARAM_INVALID,
-    "The input_format of all aipp_ops after one Data should be the same");
-GE_CHK_BOOL_RET_STATUS(aippParams->src_image_size_w() == currAippParam->src_image_size_w(), PARAM_INVALID,
-    "The src_image_size_w of all aipp_ops after one Data should be the same");
-GE_CHK_BOOL_RET_STATUS(aippParams->src_image_size_h() == currAippParam->src_image_size_h(), PARAM_INVALID,
-    "The src_image_size_h of all aipp_ops after one Data should be the same");
+GE_CHK_LOG_AND_ERRORMSG(
+    aippParams->input_format() == currAippParam->input_format(),
+    PARAM_INVALID, "The input_format of all aipp_ops after one Data should be the same");
+GE_CHK_LOG_AND_ERRORMSG(
+    aippParams->src_image_size_w() == currAippParam->src_image_size_w(),
+    PARAM_INVALID, "The src_image_size_w of all aipp_ops after one Data should be the same");
+GE_CHK_LOG_AND_ERRORMSG(
+    aippParams->src_image_size_h() == currAippParam->src_image_size_h(),
+    PARAM_INVALID, "The src_image_size_h of all aipp_ops after one Data should be the same");
 } else {
-GE_CHK_BOOL_RET_STATUS(aippParams->max_src_image_size() == currAippParam->max_src_image_size(),
-    PARAM_INVALID,
-    "The max_src_image_size of all aipp_ops after one Data should be the same");
+GE_CHK_LOG_AND_ERRORMSG(
+    aippParams->max_src_image_size() == currAippParam->max_src_image_size(),
+    PARAM_INVALID, "The max_src_image_size of all aipp_ops after one Data should be the same");
 }
 });
 }
@@ -290,7 +282,8 @@ Status InsertNewOpUtil::UpdateDataNodeByAipp(const ComputeGraphPtr &graph) {
 for (auto &switchn : updated_switchn) {
 auto data_iter = switchn_names_to_data.find(switchn->GetName());
 if (data_iter == switchn_names_to_data.end()) {
-GELOGE(INTERNAL_ERROR, "Failed to find relative data node by switchn %s", switchn->GetName().c_str());
+string error_msg = "Failed to find relative data node by switchn[" + switchn->GetName() + "]";
+GE_ERRORLOG_AND_ERRORMSG(INTERNAL_ERROR, error_msg.c_str());
 return INTERNAL_ERROR;
 }
 GE_RETURN_IF_ERROR(UpdateDataBySwitchN(switchn, data_iter->second));
@@ -477,7 +470,8 @@ Status InsertNewOpUtil::UpdateDataBySwitchN(const NodePtr &switchn, const NodePt
 }
 }
 if (max_index >= switchn->GetOpDesc()->GetOutputsSize()) {
-GELOGE(INTERNAL_ERROR, "No max size found from switchn node %s", switchn->GetName().c_str());
+string error_msg = "No max size found from switchn node[" + switchn->GetName() + "]";
+GE_ERRORLOG_AND_ERRORMSG(INTERNAL_ERROR, error_msg.c_str());
 return INTERNAL_ERROR;
 }
 auto output_desc = switchn->GetOpDesc()->MutableOutputDesc(max_index);
@@ -595,6 +595,8 @@ Status MultiBatchGraphCopyer::CheckCopyResult(const std::vector<NodePtr> &start_
 }
 auto dims = NodeUtils::GetOutputDesc(*node, kDataOutIndex).GetShape().GetDims();
 if (!IsAllDimsPositive(dims)) {
+ErrorManager::GetInstance().ATCReportErrMessage("E15004", {"opname", "shape"},
+    {node->GetName(), formats::ShapeToString(dims)});
 GELOGE(INTERNAL_ERROR, "Failed to copy multi batch graph, the node %s still has unknown shape %s",
 node->GetName().c_str(), formats::ShapeToString(dims).c_str());
 return INTERNAL_ERROR;
@@ -124,6 +124,8 @@ Status ParserDataToDynmaicInfo(const vector<vector<int64_t>> &shapes,
 auto tmp_index = cur_data_index;
 for (size_t i = 0; i < static_cast<size_t>(dynamic_dims_num); ++i) {
 if (tmp_index >= dynamic_gear_info.size()) {
+ErrorManager::GetInstance().ATCReportErrMessage(
+    "E10045", {"name", "shape"}, {data_name, formats::JoinToString(data_shape)});
 GELOGE(PARAM_INVALID, "Data: %s shape: %s make dynamic dims overflow", data_name.c_str(),
 formats::JoinToString(data_shape).c_str());
 return FAILED;
@@ -131,6 +133,8 @@ Status ParserDataToDynmaicInfo(const vector<vector<int64_t>> &shapes,
 one_gear.push_back(dynamic_gear_info[tmp_index++]);
 }
 } else {
+ErrorManager::GetInstance().ATCReportErrMessage(
+    "E10046", {"name", "shape"}, {data_name, formats::JoinToString(data_shape)});
 GELOGE(PARAM_INVALID, "Dynamic dims num of data: %s shape: %s can not be more than one gear dynamic info size",
 data_name.c_str(), formats::JoinToString(data_shape).c_str());
 return FAILED;
@@ -28,7 +28,7 @@
 #if !defined(__ANDROID__) && !defined(ANDROID)
 #define DOMI_LOGE(...) GE_LOG_ERROR(GE_MODULE_NAME, ge::FAILED, __VA_ARGS__)
 #else
-#include<android/log.h>
+#include <android/log.h>
 #if defined(BUILD_VERSION_PERF)
 #define DOMI_LOGE(fmt, ...)
 #else
@@ -83,12 +83,12 @@
 } while (0);
 // If expr is not GRAPH_SUCCESS, print the log and return FAILED
-#define GE_CHK_GRAPH_STATUS_RET(expr, ...) \
-do { \
-if ((expr) != ge::GRAPH_SUCCESS) { \
-DOMI_LOGE(__VA_ARGS__); \
-return FAILED; \
-} \
+#define GE_CHK_GRAPH_STATUS_RET(expr, ...) \
+do { \
+if ((expr) != ge::GRAPH_SUCCESS) { \
+DOMI_LOGE(__VA_ARGS__); \
+return FAILED; \
+} \
 } while (0);
 // If expr is not SUCCESS, print the log and execute a custom statement
@@ -99,13 +99,13 @@
 } while (0);
 // If expr is not true, print the log and return the specified status
-#define GE_CHK_BOOL_RET_STATUS(expr, _status, ...) \
-do { \
-bool b = (expr); \
-if (!b) { \
-GELOGE(_status, __VA_ARGS__); \
-return _status; \
-} \
+#define GE_CHK_BOOL_RET_STATUS(expr, _status, ...) \
+do { \
+bool b = (expr); \
+if (!b) { \
+GELOGE(_status, __VA_ARGS__); \
+return _status; \
+} \
 } while (0);
 // If expr is not true, print the log and return the specified status
@@ -253,4 +253,20 @@
 exec_expr1; \
 }
+#define GE_ERRORLOG_AND_ERRORMSG(_status, errormsg) \
+{ \
+GELOGE(_status, "%s", errormsg); \
+ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {errormsg}); \
+}
+#define GE_CHK_LOG_AND_ERRORMSG(expr, _status, errormsg) \
+do { \
+bool b = (expr); \
+if (!b) { \
+GELOGE(_status, "%s", errormsg); \
+ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {errormsg}); \
+return _status; \
+} \
+} while (0)
 #endif  // INC_FRAMEWORK_COMMON_DEBUG_LOG_H_
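Taken together, the two helpers added at the end of this header centralize the log-plus-report pattern used throughout the change: GE_ERRORLOG_AND_ERRORMSG logs through GELOGE and reports error code E10043 with the message as the "reason" field, and GE_CHK_LOG_AND_ERRORMSG additionally evaluates a condition and returns _status from the calling function when the check fails. A compilable sketch of how a call site reads, with simplified stand-ins for Status, GELOGE, and ErrorManager (the real GE definitions differ):

#include <cstdio>
#include <string>
#include <vector>

using Status = int;
constexpr Status SUCCESS = 0;
constexpr Status PARAM_INVALID = 1;  // placeholder value, not the real GE status code

// Stand-in for the GE logging macro.
#define GELOGE(status, fmt, ...) std::printf("[ERROR %d] " fmt "\n", (status), __VA_ARGS__)

// Stand-in for the ErrorManager singleton used by ATC error reporting.
struct ErrorManager {
  static ErrorManager &GetInstance() { static ErrorManager inst; return inst; }
  void ATCReportErrMessage(const std::string &code, const std::vector<std::string> &keys,
                           const std::vector<std::string> &values) {
    std::printf("report %s: %s=%s\n", code.c_str(), keys[0].c_str(), values[0].c_str());
  }
};

// Same bodies as the macros introduced in the diff above.
#define GE_ERRORLOG_AND_ERRORMSG(_status, errormsg)                                     \
  {                                                                                     \
    GELOGE(_status, "%s", errormsg);                                                    \
    ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {errormsg});  \
  }

#define GE_CHK_LOG_AND_ERRORMSG(expr, _status, errormsg)                                  \
  do {                                                                                    \
    bool b = (expr);                                                                      \
    if (!b) {                                                                             \
      GELOGE(_status, "%s", errormsg);                                                    \
      ErrorManager::GetInstance().ATCReportErrMessage("E10043", {"reason"}, {errormsg});  \
      return _status;                                                                     \
    }                                                                                     \
  } while (0)

// Hypothetical caller mirroring the AippOp::ValidateParams() usage above: on a failed
// check the macro logs, reports E10043, and returns PARAM_INVALID from this function.
Status ValidateCropSize(int crop_size_w) {
  GE_CHK_LOG_AND_ERRORMSG(crop_size_w >= 0, PARAM_INVALID,
                          "Crop_size_w must not be configed smaller than 0");
  return SUCCESS;
}

int main() {
  return ValidateCropSize(-1) == SUCCESS ? 0 : 1;  // exercises the failure path
}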