Merge pull request !302 from 张澍坤/master (tags/v1.6.0)
@@ -14,8 +14,10 @@
"""
Genetic-Attack test.
"""
import gc
import numpy as np
import pytest
import mindspore.ops.operations as M
from mindspore import Tensor
from mindspore import context
@@ -112,6 +114,8 @@ def test_genetic_attack():
sparse=False)
_, adv_data, _ = attack.generate(inputs, labels)
assert np.any(inputs != adv_data)
del inputs, labels, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -136,7 +140,8 @@ def test_supplement():
sparse=False)
# raise error
_, _, _ = attack.generate(inputs, labels)
del inputs, labels
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -163,7 +168,8 @@ def test_value_error():
# raise error
with pytest.raises(ValueError):
assert attack.generate(inputs, labels)
del inputs, labels
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -189,3 +195,5 @@ def test_genetic_attack_detection_cpu():
_, adv_img, _ = attack.generate(img_data, (pre_gt_boxes, pre_gt_labels))
adv_imgs.append(adv_img)
assert np.any(inputs != np.array(adv_imgs))
del inputs, adv_imgs
gc.collect()
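
The pattern this change applies to every attack test is visible above: after the assertion, the large NumPy inputs and adversarial outputs are dropped with `del` and a `gc.collect()` pass is forced so memory is released before the next case runs. For comparison, a hedged sketch (not part of this PR) of how the collection step could instead be centralized in a conftest fixture; the fixture name is hypothetical:

```python
# Hedged sketch, not part of this PR: an autouse fixture in conftest.py could
# trigger the collection pass after every test instead of repeating
# `gc.collect()` in each body. The fixture name is hypothetical.
import gc

import pytest


@pytest.fixture(autouse=True)
def _gc_after_each_test():
    """Yield to the test, then force a garbage-collection pass."""
    yield
    gc.collect()
```

Note that such a fixture only resumes after the test function has returned, at which point the test's locals are already released by reference counting; the explicit `del` statements in this PR matter because `gc.collect()` is called inline, before the locals go out of scope.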
@@ -12,9 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import gc
import numpy as np
import pytest
from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
@@ -153,7 +154,8 @@ def test_hsja_mnist_attack_ascend():
accuracy_adv = np.mean(np.equal(pred_lables_adv, gts))
LOGGER.info(TAG, 'mis-classification rate of adversaries is : %s',
accuracy_adv)
del pred_labels, adv_data, predict_labels, true_labels
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -226,6 +228,8 @@ def test_hsja_mnist_attack_cpu():
accuracy_adv = np.mean(np.equal(pred_lables_adv, gts))
LOGGER.info(TAG, 'mis-classification rate of adversaries is : %s',
accuracy_adv)
del pred_labels, adv_data, predict_labels, true_labels
gc.collect()
@pytest.mark.level0
@@ -15,6 +15,7 @@
PointWise Attack test
"""
import os
import gc
import numpy as np
import pytest
@@ -86,6 +87,8 @@ def test_pointwise_attack_method_ascend():
assert np.any(adv_data[is_adv][0] != input_np[is_adv][0]), 'Pointwise attack method: ' \
'generate value must not be equal' \
' to original value.'
del input_np, labels, adv_data
gc.collect()
@pytest.mark.level0
@@ -126,3 +129,5 @@ def test_pointwise_attack_method_cpu():
assert np.any(adv_data[is_adv][0] != input_np[is_adv][0]), 'Pointwise attack method: ' \
'generate value must not be equal' \
' to original value.'
del input_np, labels, adv_data
gc.collect()
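
The platform markers used throughout these files (`platform_arm_ascend_training`, `platform_x86_gpu_training`, `platform_x86_cpu`) indicate which backend each test targets, and the test bodies (not shown in these hunks) select that backend through the imported `context` module. An illustrative prologue, with example values that are not copied from this diff:

```python
# Illustrative only: a typical device-target prologue for the CPU-marked tests.
# The mode and target values are examples, not taken from the files above.
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
```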
@@ -14,6 +14,7 @@
"""
PSO-Attack test.
"""
import gc
import numpy as np
import pytest
@@ -115,7 +116,8 @@ def test_pso_attack():
attack = PSOAttack(model, bounds=(0.0, 1.0), pm=0.5, sparse=False)
_, adv_data, _ = attack.generate(inputs, labels)
assert np.any(inputs != adv_data)
del inputs, labels, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -141,7 +143,8 @@ def test_pso_attack_targeted():
sparse=False)
_, adv_data, _ = attack.generate(inputs, labels)
assert np.any(inputs != adv_data)
del inputs, labels, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@@ -165,7 +168,8 @@ def test_pso_attack_gpu():
attack = PSOAttack(model, bounds=(0.0, 1.0), pm=0.5, sparse=False)
_, adv_data, _ = attack.generate(inputs, labels)
assert np.any(inputs != adv_data)
del inputs, labels, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -189,7 +193,8 @@ def test_pso_attack_cpu():
attack = PSOAttack(model, bounds=(0.0, 1.0), pm=0.5, sparse=False)
_, adv_data, _ = attack.generate(inputs, labels)
assert np.any(inputs != adv_data)
del inputs, labels, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -15,6 +15,7 @@
SaltAndPepper Attack Test
"""
import os
import gc
import numpy as np
import pytest
@@ -73,6 +74,9 @@ def test_salt_and_pepper_attack_method_ascend():
_, adv_data, _ = attack.generate(inputs, labels)
assert np.any(adv_data[0] != inputs[0]), 'Salt and pepper attack method: generate value must not be equal' \
' to original value.'
del inputs, labels, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -104,3 +108,5 @@ def test_salt_and_pepper_attack_method_cpu():
_, adv_data, _ = attack.generate(inputs, labels)
assert np.any(adv_data[0] != inputs[0]), 'Salt and pepper attack method: generate value must not be equal' \
' to original value.'
del inputs, labels, adv_data
gc.collect()
@@ -14,6 +14,7 @@
"""
Batch-generate-attack test.
"""
import gc
import numpy as np
import pytest
@@ -129,6 +130,9 @@ def test_batch_generate_attack_ascend():
assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
' must not be equal to original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -151,7 +155,8 @@ def test_batch_generate_attack_cpu():
assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
' must not be equal to original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@@ -181,7 +186,8 @@ def test_batch_generate_attack_multi_inputs_ascend():
assert np.any(ms_adv_x != inputs1), 'Fast gradient method: generate value' \
' must not be equal to original value.'
del inputs1, inputs2, labels1, labels2, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -209,3 +215,5 @@ def test_batch_generate_attack_multi_inputs_cpu():
assert np.any(ms_adv_x != inputs1), 'Fast gradient method: generate value' \
' must not be equal to original value.'
del inputs1, inputs2, labels1, labels2, ms_adv_x
gc.collect()
@@ -14,6 +14,7 @@
"""
CW-Attack test.
"""
import gc
import numpy as np
import pytest
@@ -72,7 +73,8 @@ def test_cw_attack_ascend():
attack = CarliniWagnerL2Attack(net, num_classes, targeted=False)
adv_data = attack.generate(input_np, label_np)
assert np.any(input_np != adv_data)
del input_np, label_np, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -93,7 +95,8 @@ def test_cw_attack_cpu():
attack = CarliniWagnerL2Attack(net, num_classes, targeted=False)
adv_data = attack.generate(input_np, label_np)
assert np.any(input_np != adv_data)
del input_np, label_np, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -115,7 +118,8 @@ def test_cw_attack_targeted_ascend():
attack = CarliniWagnerL2Attack(net, num_classes, targeted=True)
adv_data = attack.generate(input_np, target_np)
assert np.any(input_np != adv_data)
del input_np, target_np, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -136,3 +140,5 @@ def test_cw_attack_targeted_cpu():
attack = CarliniWagnerL2Attack(net, num_classes, targeted=True)
adv_data = attack.generate(input_np, target_np)
assert np.any(input_np != adv_data)
del input_np, target_np, adv_data
gc.collect()
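
The point of the repeated `del` plus `gc.collect()` tail is to keep peak memory down when many attack tests run in one process. A hedged way to observe how much a single case releases, assuming NumPy registers its array buffers with `tracemalloc` (NumPy 1.13 and later do); the array shape is made up for illustration:

```python
# Hedged sketch, not from this PR: measure traced memory before and after the
# explicit cleanup inside one case. The array shape is hypothetical.
import gc
import tracemalloc

import numpy as np

tracemalloc.start()
inputs = np.random.random((256, 1, 32, 32)).astype(np.float32)
adv_data = inputs + 0.01                      # stands in for attack.generate()
before, _ = tracemalloc.get_traced_memory()
del inputs, adv_data                          # the pattern added in this PR
gc.collect()
after, _ = tracemalloc.get_traced_memory()
tracemalloc.stop()
print(f"traced bytes before cleanup: {before}, after cleanup: {after}")
```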
@@ -14,6 +14,7 @@
"""
DeepFool-Attack test.
"""
import gc
import numpy as np
import pytest
@@ -97,6 +98,8 @@ def test_deepfool_attack_ascend():
assert np.allclose(adv_data, expect_value), 'mindspore deepfool_method' \
' implementation error, ms_adv_x != expect_value'
del input_np, true_labels, adv_data, expect_value
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -124,7 +127,8 @@ def test_deepfool_attack_cpu():
0.40406296]])
assert np.allclose(adv_data, expect_value), 'mindspore deepfool_method' \
' implementation error, ms_adv_x != expect_value'
del input_np, true_labels, adv_data, expect_value
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -151,7 +155,8 @@ def test_deepfool_attack_detection_ascend():
bounds=(0.0, 1.0))
adv_data = attack.generate((inputs1_np, inputs2_np), (gt_boxes, gt_labels))
assert np.any(adv_data != inputs1_np)
del inputs1_np, inputs2_np, gt_labels, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -177,7 +182,8 @@ def test_deepfool_attack_detection_cpu():
bounds=(0.0, 1.0))
adv_data = attack.generate((inputs1_np, inputs2_np), (gt_boxes, gt_labels))
assert np.any(adv_data != inputs1_np)
del inputs1_np, inputs2_np, gt_labels, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -202,7 +208,8 @@ def test_deepfool_attack_inf_ascend():
bounds=(0.0, 1.0))
adv_data = attack.generate(input_np, true_labels)
assert np.any(input_np != adv_data)
del input_np, true_labels, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -226,7 +233,8 @@ def test_deepfool_attack_inf_cpu():
bounds=(0.0, 1.0))
adv_data = attack.generate(input_np, true_labels)
assert np.any(input_np != adv_data)
del input_np, true_labels, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -251,7 +259,8 @@ def test_value_error_ascend():
attack = DeepFool(net, classes, max_iters=10, norm_level=1,
bounds=(0.0, 1.0))
assert attack.generate(input_np, true_labels)
del input_np, true_labels
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -275,3 +284,5 @@ def test_value_error_cpu():
attack = DeepFool(net, classes, max_iters=10, norm_level=1,
bounds=(0.0, 1.0))
assert attack.generate(input_np, true_labels)
del input_np, true_labels
gc.collect()
@@ -14,6 +14,7 @@
"""
Gradient-Attack test.
"""
import gc
import numpy as np
import pytest
@@ -130,6 +131,8 @@ def test_fast_gradient_method():
assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
' must not be equal to original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@@ -150,7 +153,8 @@ def test_fast_gradient_method_gpu():
assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
' must not be equal to original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -170,7 +174,8 @@ def test_fast_gradient_method_cpu():
assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
' must not be equal to original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -192,7 +197,8 @@ def test_random_fast_gradient_method():
assert np.any(ms_adv_x != input_np), 'Random fast gradient method: ' \
'generate value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -213,8 +219,9 @@ def test_fast_gradient_sign_method():
assert np.any(ms_adv_x != input_np), 'Fast gradient sign method: generate' \
' value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -236,7 +243,8 @@ def test_random_fast_gradient_sign_method():
assert np.any(ms_adv_x != input_np), 'Random fast gradient sign method: ' \
'generate value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -258,7 +266,8 @@ def test_least_likely_class_method():
assert np.any(ms_adv_x != input_np), 'Least likely class method: generate' \
' value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -281,7 +290,8 @@ def test_random_least_likely_class_method():
assert np.any(ms_adv_x != input_np), 'Random least likely class method: ' \
'generate value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -305,7 +315,8 @@ def test_fast_gradient_method_multi_inputs():
assert np.any(ms_adv_x != inputs1), 'Fast gradient method: generate value' \
' must not be equal to original value.'
del inputs1, inputs2, labels1, labels2, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -14,6 +14,7 @@
"""
Iterative-gradient Attack test.
"""
import gc
import numpy as np
import pytest
@@ -79,7 +80,8 @@ def test_basic_iterative_method_ascend():
assert np.any(
ms_adv_x != input_np), 'Basic iterative method: generate value' \
' must not be equal to original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -104,7 +106,8 @@ def test_basic_iterative_method_cpu():
assert np.any(
ms_adv_x != input_np), 'Basic iterative method: generate value' \
' must not be equal to original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -129,7 +132,8 @@ def test_momentum_iterative_method_ascend():
assert np.any(ms_adv_x != input_np), 'Momentum iterative method: generate' \
' value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -153,7 +157,8 @@ def test_momentum_iterative_method_cpu():
assert np.any(ms_adv_x != input_np), 'Momentum iterative method: generate' \
' value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -180,7 +185,8 @@ def test_projected_gradient_descent_method_ascend():
ms_adv_x != input_np), 'Projected gradient descent method: ' \
'generate value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -206,7 +212,8 @@ def test_projected_gradient_descent_method_cpu():
ms_adv_x != input_np), 'Projected gradient descent method: ' \
'generate value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -230,7 +237,8 @@ def test_diverse_input_iterative_method_ascend():
assert np.any(ms_adv_x != input_np), 'Diverse input iterative method: generate' \
' value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -253,7 +261,8 @@ def test_diverse_input_iterative_method_cpu():
assert np.any(ms_adv_x != input_np), 'Diverse input iterative method: generate' \
' value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -277,7 +286,8 @@ def test_momentum_diverse_input_iterative_method_ascend():
assert np.any(ms_adv_x != input_np), 'Momentum diverse input iterative method: ' \
'generate value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -301,7 +311,8 @@ def test_momentum_diverse_input_iterative_method_cpu():
assert np.any(ms_adv_x != input_np), 'Momentum diverse input iterative method: ' \
'generate value must not be equal to' \
' original value.'
del input_np, label, ms_adv_x
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -321,7 +332,8 @@ def test_error_ascend():
label = np.asarray([2], np.int32)
label = np.eye(3)[label].astype(np.float32)
assert attack.generate(input_np, label)
del input_np, label
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -340,3 +352,5 @@ def test_error_cpu():
label = np.asarray([2], np.int32)
label = np.eye(3)[label].astype(np.float32)
assert attack.generate(input_np, label)
del input_np, label
gc.collect()
@@ -14,6 +14,7 @@
"""
JSMA-Attack test.
"""
import gc
import numpy as np
import pytest
@@ -77,7 +78,8 @@ def test_jsma_attack():
attack = JSMAAttack(net, classes, max_iteration=5)
adv_data = attack.generate(input_np, label_np)
assert np.any(input_np != adv_data)
del input_np, label_np, adv_data
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@@ -105,6 +107,8 @@ def test_jsma_attack_2():
attack = JSMAAttack(net, classes, max_iteration=5, increase=False)
adv_data = attack.generate(input_np, label_np)
assert np.any(input_np != adv_data)
del input_np, label_np, adv_data
gc.collect()
@pytest.mark.level0
@@ -132,6 +136,8 @@ def test_jsma_attack_gpu():
attack = JSMAAttack(net, classes, max_iteration=5)
adv_data = attack.generate(input_np, label_np)
assert np.any(input_np != adv_data)
del input_np, label_np, adv_data
gc.collect()
@pytest.mark.level0
@@ -159,3 +165,5 @@ def test_jsma_attack_cpu():
attack = JSMAAttack(net, classes, max_iteration=5)
adv_data = attack.generate(input_np, label_np)
assert np.any(input_np != adv_data)
del input_np, label_np, adv_data
gc.collect()
@@ -15,9 +15,10 @@
LBFGS-Attack test.
"""
import os
import gc
import numpy as np
import pytest
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
@@ -71,6 +72,8 @@ def test_lbfgs_attack_ascend():
LOGGER.debug(TAG, 'target_np is :{}'.format(target_np[0]))
_ = attack.generate(input_np, target_np)
del input_np, label_np, target_np
gc.collect()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@@ -107,3 +110,5 @@ def test_lbfgs_attack_cpu():
attack = LBFGS(net, is_targeted=True)
LOGGER.debug(TAG, 'target_np is :{}'.format(target_np[0]))
_ = attack.generate(input_np, target_np)
del input_np, label_np, target_np
gc.collect()
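
All of the tests touched here carry `level0` plus a platform marker, so subsets can be selected by marker expression. A hedged example of running only the CPU variants programmatically; the test directory is hypothetical, and the markers are assumed to be registered in the project's pytest configuration:

```python
# Hedged example, not part of this PR: select the CPU-marked level0 tests by
# marker expression. The directory path is hypothetical.
import pytest

pytest.main([
    "-m", "level0 and platform_x86_cpu",
    "tests/ut/python/adv_robustness/attacks",
])
```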