You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

dropout_pass_unittest.cc 4.3 kB

5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108
  1. /**
  2. * Copyright 2019-2020 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include <gtest/gtest.h>
  17. #define protected public
  18. #define private public
  19. #include "graph/passes/dropout_pass.h"
  20. #include "common/op/ge_op_utils.h"
  21. #include "common/types.h"
  22. #include "graph/anchor.h"
  23. #include "graph/attr_value.h"
  24. #include "graph/compute_graph.h"
  25. #include "graph/op_desc.h"
  26. #include "graph/utils/attr_utils.h"
  27. #include "graph/utils/graph_utils.h"
  28. #include "graph/utils/op_desc_utils.h"
  29. #include "graph/utils/tensor_utils.h"
  30. #include "inc/pass_manager.h"
  31. #undef protected
  32. #undef private
  33. using namespace testing;
  34. namespace ge {
  35. class UtestGraphPassesDropoutPass : public Test {
  36. protected:
  37. NodePtr AddNode(ComputeGraphPtr graph, const string &name, const string &type, int32_t in_anchors_num = 1,
  38. int32_t out_anchors_num = 1) {
  39. GeTensorDesc tensor_desc;
  40. OpDescPtr op_desc = make_shared<OpDesc>(name, type);
  41. for (int32_t i = 0; i < in_anchors_num; i++) {
  42. op_desc->AddInputDesc(tensor_desc);
  43. }
  44. for (int32_t i = 0; i < out_anchors_num; i++) {
  45. op_desc->AddOutputDesc(tensor_desc);
  46. }
  47. NodePtr node = graph->AddNode(op_desc);
  48. return node;
  49. }
  50. };
  51. TEST_F(UtestGraphPassesDropoutPass, dropout_remove_succ) {
  52. ComputeGraphPtr graph = std::make_shared<ComputeGraph>("test");
  53. NodePtr dropout_node = AddNode(graph, "dropout", DROPOUT);
  54. NodePtr reduce_min_node = AddNode(graph, "reduceMin", REDUCEMIN);
  55. NodePtr reduce_max_node = AddNode(graph, "reduceMax", REDUCEMAX);
  56. GraphUtils::AddEdge(reduce_max_node->GetOutDataAnchor(0), dropout_node->GetInDataAnchor(0));
  57. GraphUtils::AddEdge(dropout_node->GetOutDataAnchor(0), reduce_min_node->GetInDataAnchor(0));
  58. vector<bool> is_input_const_vec = {true};
  59. reduce_min_node->GetOpDesc()->SetIsInputConst(is_input_const_vec);
  60. DropOutPass drop_out_pass;
  61. Status status = drop_out_pass.Run(dropout_node);
  62. EXPECT_EQ(SUCCESS, status);
  63. is_input_const_vec = reduce_min_node->GetOpDesc()->GetIsInputConst();
  64. EXPECT_EQ(is_input_const_vec[0], true);
  65. NodePtr found_node = graph->FindNode("dropout");
  66. EXPECT_EQ(nullptr, found_node);
  67. NodePtr node = std::make_shared<Node>();
  68. status = drop_out_pass.Run(node);
  69. EXPECT_EQ(PARAM_INVALID, status);
  70. }
  71. TEST_F(UtestGraphPassesDropoutPass, dropout_remove_fail1) {
  72. ComputeGraphPtr graph = std::make_shared<ComputeGraph>("test");
  73. NodePtr dropout_node = AddNode(graph, "dropout", DROPOUT, 0, 1);
  74. NodePtr reduce_min_node = AddNode(graph, "reduceMin", REDUCEMIN);
  75. GraphUtils::AddEdge(dropout_node->GetOutDataAnchor(0), reduce_min_node->GetInDataAnchor(0));
  76. DropOutPass drop_out_pass;
  77. Status status = drop_out_pass.Run(dropout_node);
  78. EXPECT_EQ(FAILED, status);
  79. }
  80. TEST_F(UtestGraphPassesDropoutPass, dropout_square) {
  81. ComputeGraphPtr graph = std::make_shared<ComputeGraph>("test");
  82. NodePtr dropout_node = AddNode(graph, "dropout", DROPOUT);
  83. NodePtr square_node = AddNode(graph, "square", SQUARE);
  84. NodePtr softplus_node = AddNode(graph, "softplus", SOFTPLUS);
  85. NodePtr const_node = AddNode(graph, "const", CONSTANT);
  86. GraphUtils::AddEdge(square_node->GetOutControlAnchor(), dropout_node->GetInControlAnchor());
  87. GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), dropout_node->GetInDataAnchor(0));
  88. GraphUtils::AddEdge(dropout_node->GetOutDataAnchor(0), softplus_node->GetInDataAnchor(0));
  89. DropOutPass drop_out_pass;
  90. Status status = drop_out_pass.Run(dropout_node);
  91. EXPECT_EQ(SUCCESS, status);
  92. EXPECT_EQ(square_node->GetOutControlAnchor()->GetPeerInControlAnchors().at(0), softplus_node->GetInControlAnchor());
  93. EXPECT_EQ(const_node->GetOutDataAnchor(0)->GetPeerInDataAnchors().at(0), softplus_node->GetInDataAnchor(0));
  94. }
  95. } // namespace ge

图引擎模块(GE)是MindSpore的一个子模块,其代码由C++实现,位于前端模块ME和底层硬件之间,起到承接作用。图引擎模块以ME下发的图作为输入,然后进行一系列的深度图优化操作,最后输出一张可以在底层硬件上高效运行的图。GE针对昇腾AI处理器的硬件结构特点,做了特定的优化工作,以此来充分发挥出昇腾AI处理器的强大算力。在进行模型训练/推理时,GE会被自动调用而用户并不感知。GE主要由GE API和GE Core两部分组成,详细的架构图如下所示