You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

test_gradient_method.py 13 kB

5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369
  1. # Copyright 2019 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. """
  15. Gradient-Attack test.
  16. """
  17. import numpy as np
  18. import pytest
  19. import mindspore.nn as nn
  20. from mindspore.nn import Cell, SoftmaxCrossEntropyWithLogits
  21. import mindspore.context as context
  22. from mindspore.ops.composite import GradOperation
  23. from mindarmour.adv_robustness.attacks import FastGradientMethod
  24. from mindarmour.adv_robustness.attacks import FastGradientSignMethod
  25. from mindarmour.adv_robustness.attacks import LeastLikelyClassMethod
  26. from mindarmour.adv_robustness.attacks import RandomFastGradientMethod
  27. from mindarmour.adv_robustness.attacks import RandomFastGradientSignMethod
  28. from mindarmour.adv_robustness.attacks import RandomLeastLikelyClassMethod
  29. # for user
  30. class Net(Cell):
  31. """
  32. Construct the network of target model.
  33. Examples:
  34. >>> net = Net()
  35. """
  36. def __init__(self):
  37. """
  38. Introduce the layers used for network construction.
  39. """
  40. super(Net, self).__init__()
  41. self._relu = nn.ReLU()
  42. def construct(self, inputs):
  43. """
  44. Construct network.
  45. Args:
  46. inputs (Tensor): Input data.
  47. """
  48. out = self._relu(inputs)
  49. return out
  50. class Net2(Cell):
  51. """
  52. Construct the network of target model. A network with multiple input data.
  53. Examples:
  54. >>> net = Net2()
  55. """
  56. def __init__(self):
  57. super(Net2, self).__init__()
  58. self._relu = nn.ReLU()
  59. def construct(self, inputs1, inputs2):
  60. out1 = self._relu(inputs1)
  61. out2 = self._relu(inputs2)
  62. return out1 + out2, out1 - out2
  63. class LossNet(Cell):
  64. """
  65. Loss function for test.
  66. """
  67. def construct(self, loss1, loss2, labels1, labels2):
  68. return loss1 + loss2 - labels1 - labels2
  69. class WithLossCell(Cell):
  70. """Wrap the network with loss function"""
  71. def __init__(self, backbone, loss_fn):
  72. super(WithLossCell, self).__init__(auto_prefix=False)
  73. self._backbone = backbone
  74. self._loss_fn = loss_fn
  75. def construct(self, inputs1, inputs2, labels1, labels2):
  76. out = self._backbone(inputs1, inputs2)
  77. return self._loss_fn(*out, labels1, labels2)
  78. class GradWrapWithLoss(Cell):
  79. """
  80. Construct a network to compute the gradient of loss function in \
  81. input space and weighted by 'weight'.
  82. """
  83. def __init__(self, network):
  84. super(GradWrapWithLoss, self).__init__()
  85. self._grad_all = GradOperation(get_all=True, sens_param=False)
  86. self._network = network
  87. def construct(self, *inputs):
  88. gout = self._grad_all(self._network)(*inputs)
  89. return gout[0]
  90. @pytest.mark.level0
  91. @pytest.mark.platform_arm_ascend_training
  92. @pytest.mark.platform_x86_ascend_training
  93. @pytest.mark.env_card
  94. @pytest.mark.component_mindarmour
  95. def test_fast_gradient_method():
  96. """
  97. Fast gradient method unit test.
  98. """
  99. context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
  100. input_np = np.asarray([[0.1, 0.2, 0.7]], np.float32)
  101. label = np.asarray([2], np.int32)
  102. label = np.eye(3)[label].astype(np.float32)
  103. attack = FastGradientMethod(Net(), loss_fn=SoftmaxCrossEntropyWithLogits(sparse=False))
  104. ms_adv_x = attack.generate(input_np, label)
  105. assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
  106. ' must not be equal to original value.'
  107. @pytest.mark.level0
  108. @pytest.mark.platform_x86_gpu_inference
  109. @pytest.mark.env_card
  110. @pytest.mark.component_mindarmour
  111. def test_fast_gradient_method_gpu():
  112. """
  113. Fast gradient method unit test.
  114. """
  115. context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
  116. input_np = np.asarray([[0.1, 0.2, 0.7]], np.float32)
  117. label = np.asarray([2], np.int32)
  118. label = np.eye(3)[label].astype(np.float32)
  119. attack = FastGradientMethod(Net(), loss_fn=SoftmaxCrossEntropyWithLogits(sparse=False))
  120. ms_adv_x = attack.generate(input_np, label)
  121. assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
  122. ' must not be equal to original value.'
  123. @pytest.mark.level0
  124. @pytest.mark.platform_x86_cpu
  125. @pytest.mark.env_card
  126. @pytest.mark.component_mindarmour
  127. def test_fast_gradient_method_cpu():
  128. """
  129. Fast gradient method unit test.
  130. """
  131. context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
  132. input_np = np.asarray([[0.1, 0.2, 0.7]], np.float32)
  133. label = np.asarray([2], np.int32)
  134. loss = SoftmaxCrossEntropyWithLogits(sparse=True)
  135. attack = FastGradientMethod(Net(), loss_fn=loss)
  136. ms_adv_x = attack.generate(input_np, label)
  137. assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
  138. ' must not be equal to original value.'
  139. @pytest.mark.level0
  140. @pytest.mark.platform_arm_ascend_training
  141. @pytest.mark.platform_x86_ascend_training
  142. @pytest.mark.env_card
  143. @pytest.mark.component_mindarmour
  144. def test_random_fast_gradient_method():
  145. """
  146. Random fast gradient method unit test.
  147. """
  148. context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
  149. input_np = np.asarray([[0.1, 0.2, 0.7]], np.float32)
  150. label = np.asarray([2], np.int32)
  151. label = np.eye(3)[label].astype(np.float32)
  152. attack = RandomFastGradientMethod(Net(), loss_fn=SoftmaxCrossEntropyWithLogits(sparse=False))
  153. ms_adv_x = attack.generate(input_np, label)
  154. assert np.any(ms_adv_x != input_np), 'Random fast gradient method: ' \
  155. 'generate value must not be equal to' \
  156. ' original value.'
  157. @pytest.mark.level0
  158. @pytest.mark.platform_arm_ascend_training
  159. @pytest.mark.platform_x86_ascend_training
  160. @pytest.mark.env_card
  161. @pytest.mark.component_mindarmour
  162. def test_fast_gradient_sign_method():
  163. """
  164. Fast gradient sign method unit test.
  165. """
  166. context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
  167. input_np = np.asarray([[0.1, 0.2, 0.7]], np.float32)
  168. label = np.asarray([2], np.int32)
  169. label = np.eye(3)[label].astype(np.float32)
  170. attack = FastGradientSignMethod(Net(), loss_fn=SoftmaxCrossEntropyWithLogits(sparse=False))
  171. ms_adv_x = attack.generate(input_np, label)
  172. assert np.any(ms_adv_x != input_np), 'Fast gradient sign method: generate' \
  173. ' value must not be equal to' \
  174. ' original value.'
  175. @pytest.mark.level0
  176. @pytest.mark.platform_arm_ascend_training
  177. @pytest.mark.platform_x86_ascend_training
  178. @pytest.mark.env_card
  179. @pytest.mark.component_mindarmour
  180. def test_random_fast_gradient_sign_method():
  181. """
  182. Random fast gradient sign method unit test.
  183. """
  184. context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
  185. input_np = np.random.random((1, 28)).astype(np.float32)
  186. label = np.asarray([2], np.int32)
  187. label = np.eye(28)[label].astype(np.float32)
  188. attack = RandomFastGradientSignMethod(Net(), loss_fn=SoftmaxCrossEntropyWithLogits(sparse=False))
  189. ms_adv_x = attack.generate(input_np, label)
  190. assert np.any(ms_adv_x != input_np), 'Random fast gradient sign method: ' \
  191. 'generate value must not be equal to' \
  192. ' original value.'
  193. @pytest.mark.level0
  194. @pytest.mark.platform_arm_ascend_training
  195. @pytest.mark.platform_x86_ascend_training
  196. @pytest.mark.env_card
  197. @pytest.mark.component_mindarmour
  198. def test_least_likely_class_method():
  199. """
  200. Least likely class method unit test.
  201. """
  202. context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
  203. input_np = np.asarray([[0.1, 0.2, 0.7]], np.float32)
  204. label = np.asarray([2], np.int32)
  205. label = np.eye(3)[label].astype(np.float32)
  206. attack = LeastLikelyClassMethod(Net(), loss_fn=SoftmaxCrossEntropyWithLogits(sparse=False))
  207. ms_adv_x = attack.generate(input_np, label)
  208. assert np.any(ms_adv_x != input_np), 'Least likely class method: generate' \
  209. ' value must not be equal to' \
  210. ' original value.'
  211. @pytest.mark.level0
  212. @pytest.mark.platform_arm_ascend_training
  213. @pytest.mark.platform_x86_ascend_training
  214. @pytest.mark.env_card
  215. @pytest.mark.component_mindarmour
  216. def test_random_least_likely_class_method():
  217. """
  218. Random least likely class method unit test.
  219. """
  220. context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
  221. input_np = np.asarray([[0.1, 0.2, 0.7]], np.float32)
  222. label = np.asarray([2], np.int32)
  223. label = np.eye(3)[label].astype(np.float32)
  224. attack = RandomLeastLikelyClassMethod(Net(), eps=0.1, alpha=0.01, \
  225. loss_fn=SoftmaxCrossEntropyWithLogits(sparse=False))
  226. ms_adv_x = attack.generate(input_np, label)
  227. assert np.any(ms_adv_x != input_np), 'Random least likely class method: ' \
  228. 'generate value must not be equal to' \
  229. ' original value.'
  230. @pytest.mark.level0
  231. @pytest.mark.platform_arm_ascend_training
  232. @pytest.mark.platform_x86_ascend_training
  233. @pytest.mark.env_card
  234. @pytest.mark.component_mindarmour
  235. def test_fast_gradient_method_multi_inputs():
  236. """
  237. Fast gradient method unit test.
  238. """
  239. context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
  240. inputs1 = np.asarray([[0.1, 0.2, 0.7]]).astype(np.float32)
  241. inputs2 = np.asarray([[0.4, 0.8, 0.5]]).astype(np.float32)
  242. labels1 = np.expand_dims(np.eye(3)[1].astype(np.float32), axis=0)
  243. labels2 = np.expand_dims(np.eye(3)[2].astype(np.float32), axis=0)
  244. with_loss_cell = WithLossCell(Net2(), LossNet())
  245. grad_with_loss_net = GradWrapWithLoss(with_loss_cell)
  246. attack = FastGradientMethod(grad_with_loss_net)
  247. ms_adv_x = attack.generate((inputs1, inputs2), (labels1, labels2))
  248. assert np.any(ms_adv_x != inputs1), 'Fast gradient method: generate value' \
  249. ' must not be equal to original value.'
  250. @pytest.mark.level0
  251. @pytest.mark.platform_arm_ascend_training
  252. @pytest.mark.platform_x86_ascend_training
  253. @pytest.mark.env_card
  254. @pytest.mark.component_mindarmour
  255. def test_batch_generate():
  256. """
  257. Fast gradient method unit test.
  258. """
  259. context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
  260. input_np = np.random.random([10, 3]).astype(np.float32)
  261. label = np.random.randint(0, 3, [10])
  262. label = np.eye(3)[label].astype(np.float32)
  263. loss_fn = SoftmaxCrossEntropyWithLogits(sparse=False)
  264. attack = FastGradientMethod(Net(), loss_fn=loss_fn)
  265. ms_adv_x = attack.batch_generate(input_np, label, 4)
  266. assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
  267. ' must not be equal to original value.'
  268. @pytest.mark.level0
  269. @pytest.mark.platform_arm_ascend_training
  270. @pytest.mark.platform_x86_ascend_training
  271. @pytest.mark.env_card
  272. @pytest.mark.component_mindarmour
  273. def test_batch_generate_multi_inputs():
  274. """
  275. Fast gradient method unit test.
  276. """
  277. context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
  278. inputs1 = np.asarray([[0.1, 0.2, 0.7]]).astype(np.float32)
  279. inputs2 = np.asarray([[0.4, 0.8, 0.5]]).astype(np.float32)
  280. labels1 = np.expand_dims(np.eye(3)[1].astype(np.float32), axis=0)
  281. labels2 = np.expand_dims(np.eye(3)[2].astype(np.float32), axis=0)
  282. with_loss_cell = WithLossCell(Net2(), LossNet())
  283. grad_with_loss_net = GradWrapWithLoss(with_loss_cell)
  284. attack = FastGradientMethod(grad_with_loss_net)
  285. ms_adv_x = attack.generate((inputs1, inputs2), (labels1, labels2))
  286. assert np.any(ms_adv_x != inputs1), 'Fast gradient method: generate value' \
  287. ' must not be equal to original value.'
  288. @pytest.mark.level0
  289. @pytest.mark.platform_arm_ascend_training
  290. @pytest.mark.platform_x86_ascend_training
  291. @pytest.mark.env_card
  292. @pytest.mark.component_mindarmour
  293. def test_assert_error():
  294. """
  295. Random least likely class method unit test.
  296. """
  297. context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
  298. with pytest.raises(ValueError) as e:
  299. assert RandomLeastLikelyClassMethod(Net(), eps=0.05, alpha=0.21, \
  300. loss_fn=SoftmaxCrossEntropyWithLogits(sparse=False))
  301. assert str(e.value) == 'eps must be larger than alpha!'

MindArmour关注AI的安全和隐私问题。致力于增强模型的安全可信、保护用户的数据隐私。主要包含3个模块:对抗样本鲁棒性模块、Fuzz Testing模块、隐私保护与评估模块。 对抗样本鲁棒性模块 对抗样本鲁棒性模块用于评估模型对于对抗样本的鲁棒性,并提供模型增强方法用于增强模型抗对抗样本攻击的能力,提升模型鲁棒性。对抗样本鲁棒性模块包含了4个子模块:对抗样本的生成、对抗样本的检测、模型防御、攻防评估。