Commit febaf7a2 authored by zheng-huanhuan, committed by xue

fix pylint warning.

Parent dc4a6ccf
@@ -12,18 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import sys
 import numpy as np
 from mindspore import Model
 from mindspore import context
-from mindspore.train.serialization import load_checkpoint, load_param_into_net
 from mindspore.nn import SoftmaxCrossEntropyWithLogits
+from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from lenet5_net import LeNet5
 from mindarmour.attacks.gradient_method import FastGradientSignMethod
-from mindarmour.utils.logger import LogUtil
 from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 sys.path.append("..")
 from data_processing import generate_mnist_dataset
...
@@ -12,18 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import sys
 import numpy as np
 from mindspore import Model
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
-from mindspore.nn import SoftmaxCrossEntropyWithLogits
-from mindarmour.attacks.gradient_method import FastGradientSignMethod
-from mindarmour.utils.logger import LogUtil
-from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
-from mindarmour.fuzzing.fuzzing import Fuzzing
 from lenet5_net import LeNet5
+from mindarmour.fuzzing.fuzzing import Fuzzing
+from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
+from mindarmour.utils.logger import LogUtil
 sys.path.append("..")
 from data_processing import generate_mnist_dataset
@@ -81,8 +79,11 @@ def test_lenet_mnist_fuzzing():
 model_fuzz_test = Fuzzing(initial_seeds, model, train_images, 20)
 failed_tests = model_fuzz_test.fuzzing()
+if failed_tests:
 model_coverage_test.test_adequacy_coverage_calculate(np.array(failed_tests).astype(np.float32))
 LOGGER.info(TAG, 'KMNC of this test is : %s', model_coverage_test.get_kmnc())
+else:
+LOGGER.info(TAG, 'Fuzzing test identifies none failed test')
 if __name__ == '__main__':
...
@@ -13,20 +13,19 @@
 # limitations under the License.
 import sys
 import time
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Model
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
+from lenet5_net import LeNet5
 from mindarmour.attacks.carlini_wagner import CarliniWagnerL2Attack
-from mindarmour.utils.logger import LogUtil
 from mindarmour.evaluations.attack_evaluation import AttackEvaluate
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
...
@@ -13,20 +13,19 @@
 # limitations under the License.
 import sys
 import time
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Model
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
+from lenet5_net import LeNet5
 from mindarmour.attacks.deep_fool import DeepFool
-from mindarmour.utils.logger import LogUtil
 from mindarmour.evaluations.attack_evaluation import AttackEvaluate
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
...
@@ -13,21 +13,19 @@
 # limitations under the License.
 import sys
 import time
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Model
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
+from lenet5_net import LeNet5
 from mindarmour.attacks.gradient_method import FastGradientSignMethod
-from mindarmour.utils.logger import LogUtil
 from mindarmour.evaluations.attack_evaluation import AttackEvaluate
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
...
@@ -13,20 +13,19 @@
 # limitations under the License.
 import sys
 import time
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
-from mindarmour.attacks.black.genetic_attack import GeneticAttack
+from lenet5_net import LeNet5
 from mindarmour.attacks.black.black_model import BlackModel
-from mindarmour.utils.logger import LogUtil
+from mindarmour.attacks.black.genetic_attack import GeneticAttack
 from mindarmour.evaluations.attack_evaluation import AttackEvaluate
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -97,8 +96,8 @@ def test_genetic_attack_on_mnist():
 per_bounds=0.1, step_size=0.25, temp=0.1,
 sparse=True)
 targeted_labels = np.random.randint(0, 10, size=len(true_labels))
-for i in range(len(true_labels)):
+for i, true_l in enumerate(true_labels):
-if targeted_labels[i] == true_labels[i]:
+if targeted_labels[i] == true_l:
 targeted_labels[i] = (targeted_labels[i] + 1) % 10
 start_time = time.clock()
 success_list, adv_data, query_list = attack.generate(
...
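The consider-using-enumerate fix seen in the hunk above recurs in several of the attack scripts below (JSMA, LBFGS, pointwise, salt-and-pepper). A minimal standalone sketch of the relabeling idiom, using made-up label arrays rather than the MNIST batches these examples actually load:

import numpy as np

true_labels = np.array([3, 1, 4, 1, 5])    # hypothetical ground-truth classes
targeted_labels = np.random.randint(0, 10, size=len(true_labels))
# Make sure every attack target differs from the true class: bump any colliding
# target to the next class modulo 10, exactly as the diff hunks do.
for i, true_l in enumerate(true_labels):
    if targeted_labels[i] == true_l:
        targeted_labels[i] = (targeted_labels[i] + 1) % 10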
@@ -12,18 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import sys
 import numpy as np
 import pytest
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
-from mindarmour.attacks.black.hop_skip_jump_attack import HopSkipJumpAttack
+from lenet5_net import LeNet5
 from mindarmour.attacks.black.black_model import BlackModel
+from mindarmour.attacks.black.hop_skip_jump_attack import HopSkipJumpAttack
 from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 sys.path.append("..")
 from data_processing import generate_mnist_dataset
@@ -64,9 +63,9 @@ def random_target_labels(true_labels):
 def create_target_images(dataset, data_labels, target_labels):
 res = []
 for label in target_labels:
-for i in range(len(data_labels)):
+for data_label, data in zip(data_labels, dataset):
-if data_labels[i] == label:
+if data_label == label:
-res.append(dataset[i])
+res.append(data)
 break
 return np.array(res)
@@ -126,9 +125,9 @@ def test_hsja_mnist_attack():
 target_images = create_target_images(test_images, predict_labels,
 target_labels)
 attack.set_target_images(target_images)
-success_list, adv_data, query_list = attack.generate(test_images, target_labels)
+success_list, adv_data, _ = attack.generate(test_images, target_labels)
 else:
-success_list, adv_data, query_list = attack.generate(test_images, None)
+success_list, adv_data, _ = attack.generate(test_images, None)
 adv_datas = []
 gts = []
@@ -136,7 +135,7 @@ def test_hsja_mnist_attack():
 if success:
 adv_datas.append(adv)
 gts.append(gt)
-if len(gts) > 0:
+if gts:
 adv_datas = np.concatenate(np.asarray(adv_datas), axis=0)
 gts = np.asarray(gts)
 pred_logits_adv = model.predict(adv_datas)
...
@@ -13,20 +13,19 @@
 # limitations under the License.
 import sys
 import time
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Model
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
+from lenet5_net import LeNet5
 from mindarmour.attacks.jsma import JSMAAttack
-from mindarmour.utils.logger import LogUtil
 from mindarmour.evaluations.attack_evaluation import AttackEvaluate
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -79,8 +78,8 @@ def test_jsma_attack():
 predict_labels = np.concatenate(predict_labels)
 true_labels = np.concatenate(test_labels)
 targeted_labels = np.random.randint(0, 10, size=len(true_labels))
-for i in range(len(true_labels)):
+for i, true_l in enumerate(true_labels):
-if targeted_labels[i] == true_labels[i]:
+if targeted_labels[i] == true_l:
 targeted_labels[i] = (targeted_labels[i] + 1) % 10
 accuracy = np.mean(np.equal(predict_labels, true_labels))
 LOGGER.info(TAG, "prediction accuracy before attacking is : %g", accuracy)
...
@@ -13,20 +13,19 @@
 # limitations under the License.
 import sys
 import time
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Model
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
+from lenet5_net import LeNet5
 from mindarmour.attacks.lbfgs import LBFGS
-from mindarmour.utils.logger import LogUtil
 from mindarmour.evaluations.attack_evaluation import AttackEvaluate
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -85,8 +84,8 @@ def test_lbfgs_attack():
 is_targeted = True
 if is_targeted:
 targeted_labels = np.random.randint(0, 10, size=len(true_labels)).astype(np.int32)
-for i in range(len(true_labels)):
+for i, true_l in enumerate(true_labels):
-if targeted_labels[i] == true_labels[i]:
+if targeted_labels[i] == true_l:
 targeted_labels[i] = (targeted_labels[i] + 1) % 10
 else:
 targeted_labels = true_labels.astype(np.int32)
...
@@ -13,21 +13,20 @@
 # limitations under the License.
 import sys
 import time
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Model
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
-from mindarmour.attacks.iterative_gradient_method import MomentumDiverseInputIterativeMethod
-from mindarmour.utils.logger import LogUtil
-from mindarmour.evaluations.attack_evaluation import AttackEvaluate
 from lenet5_net import LeNet5
+from mindarmour.attacks.iterative_gradient_method import \
+MomentumDiverseInputIterativeMethod
+from mindarmour.evaluations.attack_evaluation import AttackEvaluate
+from mindarmour.utils.logger import LogUtil
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
...
@@ -12,18 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import sys
 import numpy as np
 import pytest
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
-from mindarmour.attacks.black.natural_evolutionary_strategy import NES
+from lenet5_net import LeNet5
 from mindarmour.attacks.black.black_model import BlackModel
+from mindarmour.attacks.black.natural_evolutionary_strategy import NES
 from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 sys.path.append("..")
 from data_processing import generate_mnist_dataset
@@ -73,9 +72,9 @@ def _pseudorandom_target(index, total_indices, true_class):
 def create_target_images(dataset, data_labels, target_labels):
 res = []
 for label in target_labels:
-for i in range(len(data_labels)):
+for data_label, data in zip(data_labels, dataset):
-if data_labels[i] == label:
+if data_label == label:
-res.append(dataset[i])
+res.append(data)
 break
 return np.array(res)
...
@@ -13,21 +13,19 @@
 # limitations under the License.
 import sys
 import time
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Model
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
+from lenet5_net import LeNet5
 from mindarmour.attacks.iterative_gradient_method import ProjectedGradientDescent
-from mindarmour.utils.logger import LogUtil
 from mindarmour.evaluations.attack_evaluation import AttackEvaluate
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
...
@@ -12,20 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import sys
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
-from mindarmour.attacks.black.pointwise_attack import PointWiseAttack
+from lenet5_net import LeNet5
 from mindarmour.attacks.black.black_model import BlackModel
-from mindarmour.utils.logger import LogUtil
+from mindarmour.attacks.black.pointwise_attack import PointWiseAttack
 from mindarmour.evaluations.attack_evaluation import AttackEvaluate
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -99,8 +98,8 @@ def test_pointwise_attack_on_mnist():
 attack = PointWiseAttack(model=model, is_targeted=is_target)
 if is_target:
 targeted_labels = np.random.randint(0, 10, size=len(true_labels))
-for i in range(len(true_labels)):
+for i, true_l in enumerate(true_labels):
-if targeted_labels[i] == true_labels[i]:
+if targeted_labels[i] == true_l:
 targeted_labels[i] = (targeted_labels[i] + 1) % 10
 else:
 targeted_labels = true_labels
...
@@ -13,20 +13,19 @@
 # limitations under the License.
 import sys
 import time
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
-from mindarmour.attacks.black.pso_attack import PSOAttack
+from lenet5_net import LeNet5
 from mindarmour.attacks.black.black_model import BlackModel
-from mindarmour.utils.logger import LogUtil
+from mindarmour.attacks.black.pso_attack import PSOAttack
 from mindarmour.evaluations.attack_evaluation import AttackEvaluate
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
...
@@ -12,20 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import sys
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
-from mindarmour.attacks.black.salt_and_pepper_attack import SaltAndPepperNoiseAttack
+from lenet5_net import LeNet5
 from mindarmour.attacks.black.black_model import BlackModel
-from mindarmour.utils.logger import LogUtil
+from mindarmour.attacks.black.salt_and_pepper_attack import SaltAndPepperNoiseAttack
 from mindarmour.evaluations.attack_evaluation import AttackEvaluate
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -102,8 +101,8 @@ def test_salt_and_pepper_attack_on_mnist():
 sparse=True)
 if is_target:
 targeted_labels = np.random.randint(0, 10, size=len(true_labels))
-for i in range(len(true_labels)):
+for i, true_l in enumerate(true_labels):
-if targeted_labels[i] == true_labels[i]:
+if targeted_labels[i] == true_l:
 targeted_labels[i] = (targeted_labels[i] + 1) % 10
 else:
 targeted_labels = true_labels
...
@@ -12,25 +12,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """defense example using nad"""
-import sys
 import logging
+import sys
 import numpy as np
 import pytest
 from mindspore import Tensor
 from mindspore import context
 from mindspore import nn
 from mindspore.nn import SoftmaxCrossEntropyWithLogits
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from lenet5_net import LeNet5
 from mindarmour.attacks import FastGradientSignMethod
 from mindarmour.defenses import NaturalAdversarialDefense
 from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 sys.path.append("..")
 from data_processing import generate_mnist_dataset
...
@@ -12,30 +12,30 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """evaluate example"""
-import sys
 import os
+import sys
 import time
-import numpy as np
-from scipy.special import softmax
-from lenet5_net import LeNet5
+import numpy as np
 from mindspore import Model
 from mindspore import Tensor
 from mindspore import context
 from mindspore import nn
 from mindspore.nn import Cell
-from mindspore.ops.operations import TensorAdd
 from mindspore.nn import SoftmaxCrossEntropyWithLogits
+from mindspore.ops.operations import TensorAdd
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
+from lenet5_net import LeNet5
 from mindarmour.attacks import FastGradientSignMethod
 from mindarmour.attacks import GeneticAttack
 from mindarmour.attacks.black.black_model import BlackModel
 from mindarmour.defenses import NaturalAdversarialDefense
+from mindarmour.detectors.black.similarity_detector import SimilarityDetector
 from mindarmour.evaluations import BlackDefenseEvaluate
 from mindarmour.evaluations import DefenseEvaluate
 from mindarmour.utils.logger import LogUtil
-from mindarmour.detectors.black.similarity_detector import SimilarityDetector
 sys.path.append("..")
 from data_processing import generate_mnist_dataset
@@ -237,7 +237,7 @@ def test_black_defense():
 # gen black-box adversarial examples of test data
 for idx in range(attacked_size):
 raw_st = time.time()
-raw_sl, raw_a, raw_qc = attack_rm.generate(
+_, raw_a, raw_qc = attack_rm.generate(
 np.expand_dims(attacked_sample[idx], axis=0),
 np.expand_dims(attack_target_label[idx], axis=0))
 raw_t = time.time() - raw_st
@@ -271,7 +271,7 @@ def test_black_defense():
 sparse=False)
 for idx in range(attacked_size):
 def_st = time.time()
-def_sl, def_a, def_qc = attack_dm.generate(
+_, def_a, def_qc = attack_dm.generate(
 np.expand_dims(attacked_sample[idx], axis=0),
 np.expand_dims(attack_target_label[idx], axis=0))
 def_t = time.time() - def_st
...
@@ -12,23 +12,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import sys
 import numpy as np
 import pytest
-from scipy.special import softmax
 from mindspore import Model
-from mindspore import context
 from mindspore import Tensor
+from mindspore import context
 from mindspore.nn import Cell
 from mindspore.ops.operations import TensorAdd
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from scipy.special import softmax
-from mindarmour.utils.logger import LogUtil
+from lenet5_net import LeNet5
-from mindarmour.attacks.black.pso_attack import PSOAttack
 from mindarmour.attacks.black.black_model import BlackModel
+from mindarmour.attacks.black.pso_attack import PSOAttack
 from mindarmour.detectors.black.similarity_detector import SimilarityDetector
+from mindarmour.utils.logger import LogUtil
-from lenet5_net import LeNet5
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
...
@@ -11,20 +11,18 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# ============================================================================
 import os
 import sys
 import mindspore.nn as nn
-from mindspore import context, Tensor
+from mindspore import context
-from mindspore.nn.metrics import Accuracy
-from mindspore.train import Model
 from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from mindspore.train import Model
+from mindspore.nn.metrics import Accuracy
-from mindarmour.utils.logger import LogUtil
 from lenet5_net import LeNet5
+from mindarmour.utils.logger import LogUtil
 sys.path.append("..")
 from data_processing import generate_mnist_dataset
...
@@ -183,8 +183,7 @@ class FastGradientMethod(GradientMethod):
 >>> grad = self._gradient([[0.2, 0.3, 0.4]],
 >>> [[0, 1, 0, 0, 0, 0, 0, 0, 0, 0])
 """
-sens = Tensor(np.array([1.0], self._dtype))
+out_grad = self._grad_all(Tensor(inputs), Tensor(labels))
-out_grad = self._grad_all(Tensor(inputs), Tensor(labels), sens)
 if isinstance(out_grad, tuple):
 out_grad = out_grad[0]
 gradient = out_grad.asnumpy()
@@ -286,8 +285,7 @@ class FastGradientSignMethod(GradientMethod):
 >>> grad = self._gradient([[0.2, 0.3, 0.4]],
 >>> [[0, 1, 0, 0, 0, 0, 0, 0, 0, 0])
 """
-sens = Tensor(np.array([1.0], self._dtype))
+out_grad = self._grad_all(Tensor(inputs), Tensor(labels))
-out_grad = self._grad_all(Tensor(inputs), Tensor(labels), sens)
 if isinstance(out_grad, tuple):
 out_grad = out_grad[0]
 gradient = out_grad.asnumpy()
...
@@ -351,9 +351,8 @@ class MomentumIterativeMethod(IterativeGradientMethod):
 >>> grad = self._gradient([[0.5, 0.3, 0.4]],
 >>> [[0, 0, 0, 1, 0, 0, 0, 0, 0, 0])
 """
-sens = Tensor(np.array([1.0], inputs.dtype))
 # get grad of loss over x
-out_grad = self._loss_grad(Tensor(inputs), Tensor(labels), sens)
+out_grad = self._loss_grad(Tensor(inputs), Tensor(labels))
 if isinstance(out_grad, tuple):
 out_grad = out_grad[0]
 gradient = out_grad.asnumpy()
...
@@ -115,12 +115,11 @@ class LBFGS(Attack):
 def _gradient(self, cur_input, labels, shape):
 """ Return model gradient to minimize loss in l-bfgs-b."""
 label_dtype = labels.dtype
-sens = Tensor(np.array([1], self._dtype))
 labels = np.expand_dims(labels, axis=0).astype(label_dtype)
 # input shape should like original shape
 reshape_input = np.expand_dims(cur_input.reshape(shape),
 axis=0)
-out_grad = self._grad_all(Tensor(reshape_input), Tensor(labels), sens)
+out_grad = self._grad_all(Tensor(reshape_input), Tensor(labels))
 if isinstance(out_grad, tuple):
 out_grad = out_grad[0]
 return out_grad.asnumpy()
@@ -131,8 +130,8 @@ class LBFGS(Attack):
 the cross-entropy loss.
 """
 cur_input = cur_input.astype(self._dtype)
-l2_distance = np.linalg.norm(cur_input.reshape(
+l2_distance = np.linalg.norm(
-(cur_input.shape[0], -1)) - start_input.reshape(
+cur_input.reshape((cur_input.shape[0], -1)) - start_input.reshape(
 (start_input.shape[0], -1)))
 logits = self._forward_one(cur_input.reshape(shape)).flatten()
 logits = logits - np.max(logits)
...
@@ -14,17 +14,17 @@
 """
 Fuzzing.
 """
-import numpy as np
 from random import choice
-from mindspore import Tensor
+import numpy as np
 from mindspore import Model
+from mindspore import Tensor
 from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
-from mindarmour.utils.image_transform import Contrast, Brightness, Blur, Noise, \
-Translate, Scale, Shear, Rotate
 from mindarmour.utils._check_param import check_model, check_numpy_param, \
 check_int_positive
+from mindarmour.utils.image_transform import Contrast, Brightness, Blur, Noise, \
+Translate, Scale, Shear, Rotate
 class Fuzzing:
@@ -40,9 +40,10 @@ class Fuzzing:
 target_model (Model): Target fuzz model.
 train_dataset (numpy.ndarray): Training dataset used for determine
 the neurons' output boundaries.
-const_K (int): The number of mutate tests for a seed.
+const_k (int): The number of mutate tests for a seed.
 mode (str): Image mode used in image transform, 'L' means grey graph.
 Default: 'L'.
+max_seed_num (int): The initial seeds max value. Default: 1000
 """
 def __init__(self, initial_seeds, target_model, train_dataset, const_K,
@@ -50,7 +51,7 @@ class Fuzzing:
 self.initial_seeds = initial_seeds
 self.target_model = check_model('model', target_model, Model)
 self.train_dataset = check_numpy_param('train_dataset', train_dataset)
-self.K = check_int_positive('const_k', const_K)
+self.const_k = check_int_positive('const_k', const_K)
 self.mode = mode
 self.max_seed_num = check_int_positive('max_seed_num', max_seed_num)
 self.coverage_metrics = ModelCoverageMetrics(target_model, 1000, 10,
@@ -73,7 +74,7 @@ class Fuzzing:
 'Noise': Noise,
 'Translate': Translate, 'Scale': Scale, 'Shear': Shear,
 'Rotate': Rotate}
-for _ in range(self.K):
+for _ in range(self.const_k):
 for _ in range(try_num):
 if (info[0] == info[1]).all():
 trans_strage = self._random_pick_mutate(affine_trans,
@@ -91,7 +92,7 @@ class Fuzzing:
 if trans_strage in affine_trans:
 info[1] = mutate_test
 mutate_tests.append(mutate_test)
-if len(mutate_tests) == 0:
+if not mutate_tests:
 mutate_tests.append(seed)
 return np.array(mutate_tests)
@@ -109,7 +110,7 @@ class Fuzzing:
 seed = self._select_next()
 failed_tests = []
 seed_num = 0
-while len(seed) > 0 and seed_num < self.max_seed_num:
+while seed and seed_num < self.max_seed_num:
 mutate_tests = self._metamorphic_mutate(seed[0])
 coverages, results = self._run(mutate_tests, coverage_metric)
 coverage_gains = self._coverage_gains(coverages)
@@ -157,13 +158,13 @@ class Fuzzing:
 beta = 0.2
 diff = np.array(seed - mutate_test).flatten()
 size = np.shape(diff)[0]
-L0 = np.linalg.norm(diff, ord=0)
+l0 = np.linalg.norm(diff, ord=0)
-Linf = np.linalg.norm(diff, ord=np.inf)
+linf = np.linalg.norm(diff, ord=np.inf)
-if L0 > alpha*size:
+if l0 > alpha*size:
-if Linf < 256:
+if linf < 256:
 is_valid = True
 else:
-if Linf < beta*255:
+if linf < beta*255:
 is_valid = True
 return is_valid
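The renamed l0/linf norms above implement the mutation-validity rule: a perturbation that touches many pixels is accepted as long as every change stays inside the 8-bit range, while a perturbation that touches only a few pixels must also be small in magnitude. A self-contained sketch of the same rule (alpha is defined above this hunk and is not visible here, so the 0.02 default below is only a placeholder assumption):

import numpy as np

def is_valid_mutation(seed, mutate_test, alpha=0.02, beta=0.2):
    """Mirror of the l0/linf validity check above (alpha's real value is assumed)."""
    diff = np.array(seed - mutate_test).flatten()
    size = np.shape(diff)[0]
    l0 = np.linalg.norm(diff, ord=0)          # number of changed elements
    linf = np.linalg.norm(diff, ord=np.inf)   # largest single change
    if l0 > alpha * size:
        return linf < 256                     # broad change: any 8-bit magnitude allowed
    return linf < beta * 255                  # narrow change: each step must stay small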
@@ -13,7 +13,6 @@
 # limitations under the License.
 """ Util for MindArmour. """
 import numpy as np
 from mindspore import Tensor
 from mindspore.nn import Cell
 from mindspore.ops.composite import GradOperation
@@ -99,23 +98,21 @@ class GradWrapWithLoss(Cell):
 super(GradWrapWithLoss, self).__init__()
 self._grad_all = GradOperation(name="get_all",
 get_all=True,
-sens_param=True)
+sens_param=False)
 self._network = network
-def construct(self, inputs, labels, weight):
+def construct(self, inputs, labels):
 """
 Compute gradient of `inputs` with labels and weight.
 Args:
 inputs (Tensor): Inputs of network.
 labels (Tensor): Labels of inputs.
-weight (Tensor): Weight of each gradient, `weight` has the same
-shape with labels.
 Returns:
 Tensor, gradient matrix.
 """
-gout = self._grad_all(self._network)(inputs, labels, weight)
+gout = self._grad_all(self._network)(inputs, labels)
 return gout[0]
...
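With sens_param set to False, GradWrapWithLoss computes the gradient of the wrapped loss network with respect to its inputs without a caller-supplied sensitivity tensor, which is why the sens lines disappear from the gradient-based attacks earlier in this commit. A rough usage sketch under that assumption (the network, loss wiring, module path and input shape are illustrative, not taken from this commit):

import numpy as np
from mindspore import Tensor, nn
from mindarmour.utils.util import GradWrapWithLoss   # module path assumed
from lenet5_net import LeNet5                        # stand-in classifier

loss_fn = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=False)
loss_net = nn.WithLossCell(LeNet5(), loss_fn)        # maps (inputs, labels) to a scalar loss
grad_with_loss = GradWrapWithLoss(loss_net)

inputs = Tensor(np.random.rand(1, 1, 32, 32).astype(np.float32))
labels = Tensor(np.eye(10)[[3]].astype(np.float32))
input_grad = grad_with_loss(inputs, labels)          # gradient of the loss w.r.t. inputs only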
@@ -12,12 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import numpy as np
-import math
 from mindspore import nn
-from mindspore.ops import operations as P
 from mindspore.common.tensor import Tensor
-from mindspore import context
+from mindspore.ops import operations as P
 def variance_scaling_raw(shape):
@@ -110,8 +107,7 @@ class ResidualBlock(nn.Cell):
 def __init__(self,
 in_channels,
 out_channels,
-stride=1,
+stride=1):
-down_sample=False):
 super(ResidualBlock, self).__init__()
 out_chls = out_channels // self.expansion
@@ -168,7 +164,7 @@ class ResidualBlockWithDown(nn.Cell):
 self.bn3 = bn_with_initialize_last(out_channels)
 self.relu = P.ReLU()
-self.downSample = down_sample
+self.downsample = down_sample
 self.conv_down_sample = conv1x1(in_channels, out_channels, stride=stride, padding=0)
 self.bn_down_sample = bn_with_initialize(out_channels)
...
@@ -18,7 +18,6 @@ Fuction:
 Usage:
 py.test test_cifar10_attack_fgsm.py
 """
-import os
 import numpy as np
 import pytest
...
@@ -16,15 +16,13 @@ Genetic-Attack test.
 """
 import numpy as np
 import pytest
 import mindspore.ops.operations as M
 from mindspore import Tensor
-from mindspore.nn import Cell
 from mindspore import context
+from mindspore.nn import Cell
-from mindarmour.attacks.black.genetic_attack import GeneticAttack
 from mindarmour.attacks.black.black_model import BlackModel
+from mindarmour.attacks.black.genetic_attack import GeneticAttack
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -115,7 +113,7 @@ def test_supplement():
 adaptive=True,
 sparse=False)
 # raise error
-_, adv_data, _ = attack.generate(inputs, labels)
+_, _, _ = attack.generate(inputs, labels)
 @pytest.mark.level0
@@ -140,5 +138,5 @@ def test_value_error():
 adaptive=True,
 sparse=False)
 # raise error
-with pytest.raises(ValueError) as e:
+with pytest.raises(ValueError):
 assert attack.generate(inputs, labels)
@@ -11,19 +11,19 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import sys
 import os
+import sys
 import numpy as np
 import pytest
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
-from mindarmour.attacks.black.hop_skip_jump_attack import HopSkipJumpAttack
 from mindarmour.attacks.black.black_model import BlackModel
+from mindarmour.attacks.black.hop_skip_jump_attack import HopSkipJumpAttack
 from mindarmour.utils.logger import LogUtil
 sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),
 "../../../../../"))
 from example.mnist_demo.lenet5_net import LeNet5
@@ -135,7 +135,7 @@ def test_hsja_mnist_attack():
 attack.set_target_images(target_images)
 success_list, adv_data, _ = attack.generate(test_images, target_labels)
 else:
-success_list, adv_data, query_list = attack.generate(test_images, None)
+success_list, adv_data, _ = attack.generate(test_images, None)
 assert (adv_data != test_images).any()
 adv_datas = []
@@ -144,7 +144,7 @@ def test_hsja_mnist_attack():
 if success:
 adv_datas.append(adv)
 gts.append(gt)
-if len(gts) > 0:
+if gts:
 adv_datas = np.concatenate(np.asarray(adv_datas), axis=0)
 gts = np.asarray(gts)
 pred_logits_adv = model.predict(adv_datas)
@@ -162,5 +162,5 @@ def test_hsja_mnist_attack():
 def test_value_error():
 model = get_model()
 norm = 'l2'
-with pytest.raises(ValueError) as e:
+with pytest.raises(ValueError):
 assert HopSkipJumpAttack(model, constraint=norm, stepsize_search='bad-search')
@@ -11,19 +11,19 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import os
 import sys
 import numpy as np
-import os
 import pytest
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
-from mindarmour.attacks.black.natural_evolutionary_strategy import NES
 from mindarmour.attacks.black.black_model import BlackModel
+from mindarmour.attacks.black.natural_evolutionary_strategy import NES
 from mindarmour.utils.logger import LogUtil
 sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),
 "../../../../../"))
 from example.mnist_demo.lenet5_net import LeNet5
@@ -156,7 +156,7 @@ def nes_mnist_attack(scene, top_k):
 assert (advs != test_images[:batch_num]).any()
 adv_pred = np.argmax(model.predict(advs), axis=1)
-adv_accuracy = np.mean(np.equal(adv_pred, true_labels[:test_length]))
+_ = np.mean(np.equal(adv_pred, true_labels[:test_length]))
 @pytest.mark.level0
...
@@ -14,19 +14,18 @@
 """
 PointWise Attack test
 """
-import sys
 import os
+import sys
 import numpy as np
 import pytest
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from mindarmour.attacks.black.black_model import BlackModel
 from mindarmour.attacks.black.pointwise_attack import PointWiseAttack
 from mindarmour.utils.logger import LogUtil
-from mindarmour.attacks.black.black_model import BlackModel
 sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),
 "../../../../../"))
@@ -81,7 +80,7 @@ def test_pointwise_attack_method():
 LOGGER.info(TAG, 'original sample predict labels are :{}'.format(pre_label))
 LOGGER.info(TAG, 'true labels are: {}'.format(labels))
 attack = PointWiseAttack(model, sparse=True, is_targeted=False)
-is_adv, adv_data, query_times = attack.generate(input_np, pre_label)
+is_adv, adv_data, _ = attack.generate(input_np, pre_label)
 LOGGER.info(TAG, 'adv sample predict labels are: {}'
 .format(np.argmax(model.predict(adv_data), axis=1)))
...
@@ -233,10 +233,6 @@ def test_assert_error():
 """
 Random least likely class method unit test.
 """
-input_np = np.asarray([[0.1, 0.2, 0.7]], np.float32)
-label = np.asarray([2], np.int32)
-label = np.eye(3)[label].astype(np.float32)
 with pytest.raises(ValueError) as e:
 assert RandomLeastLikelyClassMethod(Net(), eps=0.05, alpha=0.21)
 assert str(e.value) == 'eps must be larger than alpha!'
@@ -134,7 +134,6 @@ def test_diverse_input_iterative_method():
 label = np.asarray([2], np.int32)
 label = np.eye(3)[label].astype(np.float32)
-for i in range(5):
 attack = DiverseInputIterativeMethod(Net())
 ms_adv_x = attack.generate(input_np, label)
 assert np.any(ms_adv_x != input_np), 'Diverse input iterative method: generate' \
@@ -155,7 +154,6 @@ def test_momentum_diverse_input_iterative_method():
 label = np.asarray([2], np.int32)
 label = np.eye(3)[label].astype(np.float32)
-for i in range(5):
 attack = MomentumDiverseInputIterativeMethod(Net())
 ms_adv_x = attack.generate(input_np, label)
 assert np.any(ms_adv_x != input_np), 'Momentum diverse input iterative method: ' \
...
@@ -14,11 +14,11 @@
 """
 LBFGS-Attack test.
 """
+import os
 import sys
 import numpy as np
 import pytest
-import os
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
@@ -69,4 +69,4 @@ def test_lbfgs_attack():
 attack = LBFGS(net, is_targeted=True)
 LOGGER.debug(TAG, 'target_np is :{}'.format(target_np[0]))
-adv_data = attack.generate(input_np, target_np)
+_ = attack.generate(input_np, target_np)
...@@ -18,10 +18,8 @@ import numpy as np ...@@ -18,10 +18,8 @@ import numpy as np
from mindspore import nn from mindspore import nn
from mindspore import Tensor from mindspore import Tensor
from mindspore.nn import Cell
from mindspore.nn import WithLossCell, TrainOneStepCell from mindspore.nn import WithLossCell, TrainOneStepCell
from mindspore.nn.optim.momentum import Momentum from mindspore.nn.optim.momentum import Momentum
from mindspore.ops import operations as P
from mindspore import context from mindspore import context
from mindspore.common.initializer import TruncatedNormal from mindspore.common.initializer import TruncatedNormal
...@@ -58,7 +56,7 @@ class Net(nn.Cell): ...@@ -58,7 +56,7 @@ class Net(nn.Cell):
self.fc3 = fc_with_initialize(84, 10) self.fc3 = fc_with_initialize(84, 10)
self.relu = nn.ReLU() self.relu = nn.ReLU()
self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2) self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
self.reshape = P.Reshape() self.flatten = nn.Flatten()
def construct(self, x): def construct(self, x):
x = self.conv1(x) x = self.conv1(x)
...@@ -67,7 +65,7 @@ class Net(nn.Cell): ...@@ -67,7 +65,7 @@ class Net(nn.Cell):
x = self.conv2(x) x = self.conv2(x)
x = self.relu(x) x = self.relu(x)
x = self.max_pool2d(x) x = self.max_pool2d(x)
x = self.reshape(x, (-1, 16*5*5)) x = self.flatten(x)
x = self.fc1(x) x = self.fc1(x)
x = self.relu(x) x = self.relu(x)
x = self.fc2(x) x = self.fc2(x)
...@@ -75,6 +73,7 @@ class Net(nn.Cell): ...@@ -75,6 +73,7 @@ class Net(nn.Cell):
x = self.fc3(x) x = self.fc3(x)
return x return x
if __name__ == '__main__': if __name__ == '__main__':
num_classes = 10 num_classes = 10
batch_size = 32 batch_size = 32
...@@ -104,4 +103,3 @@ if __name__ == '__main__': ...@@ -104,4 +103,3 @@ if __name__ == '__main__':
train_net.set_train() train_net.set_train()
train_net(Tensor(inputs_np), Tensor(labels_np)) train_net(Tensor(inputs_np), Tensor(labels_np))
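The mock LeNet now flattens activations with nn.Flatten instead of P.Reshape, so the construct no longer hard-codes the (-1, 16*5*5) shape. A minimal hedged sketch of just that tail of the network (layer names and sizes assumed from the LeNet-5 definition in the diff; this is not the full mock network):

from mindspore import nn

class LeNetTail(nn.Cell):
    """Hypothetical fragment: only the flatten step and the first dense layer."""
    def __init__(self):
        super(LeNetTail, self).__init__()
        self.flatten = nn.Flatten()           # replaces self.reshape = P.Reshape()
        self.fc1 = nn.Dense(16 * 5 * 5, 120)  # same fan-in the old reshape produced

    def construct(self, x):
        x = self.flatten(x)                   # was: self.reshape(x, (-1, 16*5*5))
        return self.fc1(x)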
...@@ -14,20 +14,19 @@ ...@@ -14,20 +14,19 @@
""" """
Adversarial defense test. Adversarial defense test.
""" """
import numpy as np
import pytest
import logging import logging
from mindspore import nn import numpy as np
import pytest
from mindspore import Tensor from mindspore import Tensor
from mindspore import context from mindspore import context
from mindspore import nn
from mindspore.nn.optim.momentum import Momentum from mindspore.nn.optim.momentum import Momentum
from mock_net import Net
from mindarmour.defenses.adversarial_defense import AdversarialDefense from mindarmour.defenses.adversarial_defense import AdversarialDefense
from mindarmour.utils.logger import LogUtil from mindarmour.utils.logger import LogUtil
from mock_net import Net
LOGGER = LogUtil.get_instance() LOGGER = LogUtil.get_instance()
TAG = 'Ad_Test' TAG = 'Ad_Test'
......
...@@ -14,22 +14,21 @@ ...@@ -14,22 +14,21 @@
""" """
ensemble adversarial defense test. ensemble adversarial defense test.
""" """
import numpy as np
import pytest
import logging import logging
from mindspore import nn import numpy as np
import pytest
from mindspore import context from mindspore import context
from mindspore import nn
from mindspore.nn.optim.momentum import Momentum from mindspore.nn.optim.momentum import Momentum
from mock_net import Net
from mindarmour.attacks.gradient_method import FastGradientSignMethod from mindarmour.attacks.gradient_method import FastGradientSignMethod
from mindarmour.attacks.iterative_gradient_method import \ from mindarmour.attacks.iterative_gradient_method import \
ProjectedGradientDescent ProjectedGradientDescent
from mindarmour.defenses.adversarial_defense import EnsembleAdversarialDefense from mindarmour.defenses.adversarial_defense import EnsembleAdversarialDefense
from mindarmour.utils.logger import LogUtil from mindarmour.utils.logger import LogUtil
from mock_net import Net
LOGGER = LogUtil.get_instance() LOGGER = LogUtil.get_instance()
TAG = 'Ead_Test' TAG = 'Ead_Test'
...@@ -54,7 +53,7 @@ def test_ead(): ...@@ -54,7 +53,7 @@ def test_ead():
if not sparse: if not sparse:
labels = np.eye(num_classes)[labels].astype(np.float32) labels = np.eye(num_classes)[labels].astype(np.float32)
net = Net() net = SimpleNet()
loss_fn = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=sparse) loss_fn = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=sparse)
optimizer = Momentum(net.trainable_params(), 0.001, 0.9) optimizer = Momentum(net.trainable_params(), 0.001, 0.9)
......
...@@ -14,20 +14,19 @@ ...@@ -14,20 +14,19 @@
""" """
Natural adversarial defense test. Natural adversarial defense test.
""" """
import numpy as np
import pytest
import logging import logging
from mindspore import nn import numpy as np
import pytest
from mindspore import context from mindspore import context
from mindspore import nn
from mindspore.nn.optim.momentum import Momentum from mindspore.nn.optim.momentum import Momentum
from mock_net import Net
from mindarmour.defenses.natural_adversarial_defense import \ from mindarmour.defenses.natural_adversarial_defense import \
NaturalAdversarialDefense NaturalAdversarialDefense
from mindarmour.utils.logger import LogUtil from mindarmour.utils.logger import LogUtil
from mock_net import Net
LOGGER = LogUtil.get_instance() LOGGER = LogUtil.get_instance()
TAG = 'Nad_Test' TAG = 'Nad_Test'
......
...@@ -14,20 +14,19 @@ ...@@ -14,20 +14,19 @@
""" """
Projected adversarial defense test. Projected adversarial defense test.
""" """
import numpy as np
import pytest
import logging import logging
from mindspore import nn import numpy as np
import pytest
from mindspore import context from mindspore import context
from mindspore import nn
from mindspore.nn.optim.momentum import Momentum from mindspore.nn.optim.momentum import Momentum
from mock_net import Net
from mindarmour.defenses.projected_adversarial_defense import \ from mindarmour.defenses.projected_adversarial_defense import \
ProjectedAdversarialDefense ProjectedAdversarialDefense
from mindarmour.utils.logger import LogUtil from mindarmour.utils.logger import LogUtil
from mock_net import Net
LOGGER = LogUtil.get_instance() LOGGER = LogUtil.get_instance()
TAG = 'Pad_Test' TAG = 'Pad_Test'
......
...@@ -98,4 +98,3 @@ def test_similarity_detector(): ...@@ -98,4 +98,3 @@ def test_similarity_detector():
1561, 1612, 1663, 1714, 1765, 1561, 1612, 1663, 1714, 1765,
1816, 1867, 1918, 1969] 1816, 1867, 1918, 1969]
assert np.all(detector.get_detected_queries() == expected_value) assert np.all(detector.get_detected_queries() == expected_value)
...@@ -111,6 +111,3 @@ def test_spatial_smoothing_diff(): ...@@ -111,6 +111,3 @@ def test_spatial_smoothing_diff():
0.38254014, 0.543059, 0.06452079, 0.36902517, 0.38254014, 0.543059, 0.06452079, 0.36902517,
1.1845329, 0.3870097]) 1.1845329, 0.3870097])
assert np.allclose(diffs, expected_value, 0.0001, 0.0001) assert np.allclose(diffs, expected_value, 0.0001, 0.0001)
...@@ -81,7 +81,7 @@ def test_value_error(): ...@@ -81,7 +81,7 @@ def test_value_error():
@pytest.mark.platform_x86_ascend_training @pytest.mark.platform_x86_ascend_training
@pytest.mark.env_card @pytest.mark.env_card
@pytest.mark.component_mindarmour @pytest.mark.component_mindarmour
def test_value_error(): def test_empty_input_error():
# prepare test data # prepare test data
np.random.seed(1024) np.random.seed(1024)
inputs = np.array([]) inputs = np.array([])
......
...@@ -30,7 +30,7 @@ def test_radar_metric(): ...@@ -30,7 +30,7 @@ def test_radar_metric():
metrics_labels = ['before', 'after'] metrics_labels = ['before', 'after']
# create obj # create obj
rm = RadarMetric(metrics_name, metrics_data, metrics_labels, title='', _ = RadarMetric(metrics_name, metrics_data, metrics_labels, title='',
scale='sparse') scale='sparse')
...@@ -54,4 +54,3 @@ def test_value_error(): ...@@ -54,4 +54,3 @@ def test_value_error():
with pytest.raises(ValueError): with pytest.raises(ValueError):
assert RadarMetric(['MR', 'ACAC', 'ASS'], metrics_data, metrics_labels, assert RadarMetric(['MR', 'ACAC', 'ASS'], metrics_data, metrics_labels,
title='', scale='bad_s') title='', scale='bad_s')
...@@ -16,18 +16,15 @@ Model-fuzz coverage test. ...@@ -16,18 +16,15 @@ Model-fuzz coverage test.
""" """
import numpy as np import numpy as np
import pytest import pytest
import sys
from mindspore.train import Model
from mindspore import nn
from mindspore.ops import operations as P
from mindspore import context from mindspore import context
from mindspore import nn
from mindspore.common.initializer import TruncatedNormal from mindspore.common.initializer import TruncatedNormal
from mindspore.ops import operations as P
from mindspore.train import Model
from mindarmour.utils.logger import LogUtil
from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
from mindarmour.fuzzing.fuzzing import Fuzzing from mindarmour.fuzzing.fuzzing import Fuzzing
from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
from mindarmour.utils.logger import LogUtil
LOGGER = LogUtil.get_instance() LOGGER = LogUtil.get_instance()
TAG = 'Fuzzing test' TAG = 'Fuzzing test'
...@@ -116,17 +113,18 @@ def test_fuzzing_ascend(): ...@@ -116,17 +113,18 @@ def test_fuzzing_ascend():
model_fuzz_test = Fuzzing(initial_seeds, model, training_data, 5, model_fuzz_test = Fuzzing(initial_seeds, model, training_data, 5,
max_seed_num=10) max_seed_num=10)
failed_tests = model_fuzz_test.fuzzing() failed_tests = model_fuzz_test.fuzzing()
model_coverage_test.test_adequacy_coverage_calculate( if failed_tests:
np.array(failed_tests).astype(np.float32)) model_coverage_test.test_adequacy_coverage_calculate(np.array(failed_tests).astype(np.float32))
LOGGER.info(TAG, 'KMNC of this test is : %s', LOGGER.info(TAG, 'KMNC of this test is : %s', model_coverage_test.get_kmnc())
model_coverage_test.get_kmnc()) else:
LOGGER.info(TAG, 'Fuzzing test identifies none failed test')
@pytest.mark.level0 @pytest.mark.level0
@pytest.mark.platform_x86_cpu @pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard @pytest.mark.env_onecard
@pytest.mark.component_mindarmour @pytest.mark.component_mindarmour
def test_fuzzing_ascend(): def test_fuzzing_CPU():
context.set_context(mode=context.GRAPH_MODE, device_target="CPU") context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
# load network # load network
net = Net() net = Net()
...@@ -155,7 +153,8 @@ def test_fuzzing_ascend(): ...@@ -155,7 +153,8 @@ def test_fuzzing_ascend():
model_fuzz_test = Fuzzing(initial_seeds, model, training_data, 5, model_fuzz_test = Fuzzing(initial_seeds, model, training_data, 5,
max_seed_num=10) max_seed_num=10)
failed_tests = model_fuzz_test.fuzzing() failed_tests = model_fuzz_test.fuzzing()
model_coverage_test.test_adequacy_coverage_calculate( if failed_tests:
np.array(failed_tests).astype(np.float32)) model_coverage_test.test_adequacy_coverage_calculate(np.array(failed_tests).astype(np.float32))
LOGGER.info(TAG, 'KMNC of this test is : %s', LOGGER.info(TAG, 'KMNC of this test is : %s', model_coverage_test.get_kmnc())
model_coverage_test.get_kmnc()) else:
LOGGER.info(TAG, 'Fuzzing test identifies none failed test')
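Both fuzzing tests now guard the coverage computation, because Fuzzing.fuzzing() may return no failed tests, and feeding an empty array into the KMNC calculation would fail. A hedged sketch of that guard, factored into a hypothetical helper (report_coverage is not part of the commit; the arguments are assumed to be the Fuzzing, ModelCoverageMetrics, and LogUtil objects built earlier in the tests):

import numpy as np

def report_coverage(model_fuzz_test, model_coverage_test, logger, tag):
    """Compute KMNC only when fuzzing actually produced failed tests."""
    failed_tests = model_fuzz_test.fuzzing()
    if failed_tests:
        samples = np.array(failed_tests).astype(np.float32)
        model_coverage_test.test_adequacy_coverage_calculate(samples)
        logger.info(tag, 'KMNC of this test is : %s', model_coverage_test.get_kmnc())
    else:
        logger.info(tag, 'Fuzzing test identifies none failed test')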
...@@ -35,7 +35,7 @@ def test_contrast(): ...@@ -35,7 +35,7 @@ def test_contrast():
mode = 'L' mode = 'L'
trans = Contrast(image, mode) trans = Contrast(image, mode)
trans.random_param() trans.random_param()
trans_image = trans.transform() _ = trans.transform()
@pytest.mark.level0 @pytest.mark.level0
...@@ -47,7 +47,7 @@ def test_brightness(): ...@@ -47,7 +47,7 @@ def test_brightness():
mode = 'L' mode = 'L'
trans = Brightness(image, mode) trans = Brightness(image, mode)
trans.random_param() trans.random_param()
trans_image = trans.transform() _ = trans.transform()
@pytest.mark.level0 @pytest.mark.level0
...@@ -61,7 +61,7 @@ def test_blur(): ...@@ -61,7 +61,7 @@ def test_blur():
mode = 'L' mode = 'L'
trans = Blur(image, mode) trans = Blur(image, mode)
trans.random_param() trans.random_param()
trans_image = trans.transform() _ = trans.transform()
@pytest.mark.level0 @pytest.mark.level0
...@@ -75,7 +75,7 @@ def test_noise(): ...@@ -75,7 +75,7 @@ def test_noise():
mode = 'L' mode = 'L'
trans = Noise(image, mode) trans = Noise(image, mode)
trans.random_param() trans.random_param()
trans_image = trans.transform() _ = trans.transform()
@pytest.mark.level0 @pytest.mark.level0
...@@ -89,7 +89,7 @@ def test_translate(): ...@@ -89,7 +89,7 @@ def test_translate():
mode = 'L' mode = 'L'
trans = Translate(image, mode) trans = Translate(image, mode)
trans.random_param() trans.random_param()
trans_image = trans.transform() _ = trans.transform()
@pytest.mark.level0 @pytest.mark.level0
...@@ -103,7 +103,7 @@ def test_shear(): ...@@ -103,7 +103,7 @@ def test_shear():
mode = 'L' mode = 'L'
trans = Shear(image, mode) trans = Shear(image, mode)
trans.random_param() trans.random_param()
trans_image = trans.transform() _ = trans.transform()
@pytest.mark.level0 @pytest.mark.level0
...@@ -117,7 +117,7 @@ def test_scale(): ...@@ -117,7 +117,7 @@ def test_scale():
mode = 'L' mode = 'L'
trans = Scale(image, mode) trans = Scale(image, mode)
trans.random_param() trans.random_param()
trans_image = trans.transform() _ = trans.transform()
@pytest.mark.level0 @pytest.mark.level0
...@@ -131,6 +131,4 @@ def test_rotate(): ...@@ -131,6 +131,4 @@ def test_rotate():
mode = 'L' mode = 'L'
trans = Rotate(image, mode) trans = Rotate(image, mode)
trans.random_param() trans.random_param()
trans_image = trans.transform() _ = trans.transform()
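Finally, every transform test now assigns the transformed image to `_`, the conventional way to tell pylint that a return value is discarded on purpose while the call is still exercised. A hedged sketch of the pattern, reusing the Contrast calls from the test above (the import path and image shape are assumptions, since the diff does not show them):

import numpy as np
from mindarmour.fuzzing.image_transform import Contrast  # path assumed, not shown in the diff

image = np.random.rand(32, 32).astype(np.float32)  # hypothetical stand-in for the test image
trans = Contrast(image, 'L')
trans.random_param()
_ = trans.transform()  # result intentionally discarded; silences pylint's unused-variable warning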