Commit 929ef67b authored by mindspore-ci-bot, committed by Gitee

!1411 pylint warning clean

Merge pull request !1411 from liubuyu/master
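The diff below touches only test files and clears several pylint warning classes: unused function arguments (W0613), unused locals (W0612), unused imports (W0611), singleton comparisons in asserts (C0121), a module reimport (W0404), wrong import order (C0411), and a local name shadowing an outer scope (W0621). A minimal sketch of reproducing such a pass locally; the exact message set enabled by the project's CI is an assumption:

from pylint.lint import Run

# Check only the warning classes cleaned up in this commit.
# Run() analyzes the given paths and exits with pylint's status code.
Run([
    "--disable=all",
    "--enable=W0613,W0612,W0611,C0121,W0404,C0411,W0621",
    "tests/",
])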
@@ -21,7 +21,7 @@ from mindspore.common import ms_function
 from mindspore.common.tensor import Tensor
-def setup_module(module):
+def setup_module():
     context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
@@ -33,7 +33,7 @@ c5 = Tensor([14], mstype.int32)
 @ms_function
-def simple_if(x, y, z):
+def simple_if(x, y):
     if x < y:
         x = x + 1
     else:
@@ -43,7 +43,7 @@ def simple_if(x, y, z):
 @ms_function
-def if_by_if(x, y, z):
+def if_by_if(x, y):
     if x < y:
         x = x + 1
     if y > x:
@@ -66,7 +66,7 @@ def if_in_if(x, y, z):
 @ms_function
-def simple_while(x, y, z):
+def simple_while(x, y):
     y = y + 4
     while x < y:
         x = x + 1
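Most of the changes in this file silence pylint's unused-argument warning (W0613): the `z` parameter is accepted but never read, so it is dropped from each `@ms_function` definition. A minimal pure-Python sketch of the warning and its fix; the names here are illustrative, not from the diff:

def simple_branch(x, y, z):    # pylint W0613: unused argument 'z'
    return x + 1 if x < y else x - 1

def simple_branch_fixed(x, y):
    # Same behavior; every call site must drop its third argument in the same change.
    return x + 1 if x < y else x - 1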
@@ -137,13 +137,13 @@ def while_in_while_in_while(x, y, z):
 @pytest.mark.platform_arm_ascend_training
 @pytest.mark.env_onecard
 def test_simple_if():
-    output = simple_if(c1, c2, c3)
+    output = simple_if(c1, c2)
     expect = Tensor([6], mstype.int32)
     assert output == expect
 def test_if_by_if():
-    output = if_by_if(c1, c2, c3)
+    output = if_by_if(c1, c2)
     expect = Tensor([8], mstype.int32)
     assert output == expect
@@ -163,7 +163,7 @@ def test_if_in_if():
 @pytest.mark.platform_arm_ascend_training
 @pytest.mark.env_onecard
 def test_simple_while():
-    output = simple_while(c1, c2, c3)
+    output = simple_while(c1, c2)
     expect = Tensor([21], mstype.int32)
     assert output == expect
......
@@ -18,7 +18,7 @@ from mindspore.common import dtype as mstype
 @ms_function
-def t1_while(x, y, z):
+def t1_while(x, y):
     y = y + 4
     while x < y:
         x = x + 1
@@ -30,9 +30,8 @@ def test_net():
     context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
     c1 = Tensor([2], mstype.int32)
     c2 = Tensor([14], mstype.int32)
-    c3 = Tensor([1], mstype.int32)
     expect = Tensor([21], mstype.int32)
-    ret = t1_while(c1, c2, c3)
+    ret = t1_while(c1, c2)
     assert ret == expect
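Once `t1_while` loses its `z` parameter, the `c3` tensor in `test_net` has no remaining use, so the same change also clears pylint's unused-variable warning (W0612). A hypothetical minimal reproduction:

def test_net_sketch():
    c3 = 1    # pylint W0612: local variable 'c3' is never used once the callee drops 'z'
    ret = 2 + 19
    assert ret == 21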
......
@@ -19,8 +19,8 @@ curr_path = os.path.abspath(os.curdir)
 file_memreuse = curr_path + "/mem_reuse_check/memreuse.ir"
 file_normal = curr_path + "/mem_reuse_check/normal_mem.ir"
 checker = os.path.exists(file_memreuse)
-assert checker == True
+assert checker, True
 checker = os.path.exists(file_normal)
-assert checker == True
+assert checker, True
 checker = filecmp.cmp(file_memreuse, file_normal)
-assert checker == True
+assert checker, True
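The replacement here deserves a close look. Pylint's singleton-comparison warning (C0121) flags `assert checker == True`, but `assert checker, True` is the two-expression assert form: `True` becomes the message attached to the AssertionError, not part of the condition. It silences the warning, yet the conventional fix is a bare truth test, ideally with a message that helps when the check fails. A self-contained sketch:

checker = True    # stand-in for os.path.exists(...)
assert checker == True    # pylint C0121: comparison to a singleton
assert checker, True      # the diff's form: True is merely the failure message
assert checker, "expected file was not generated"    # idiomatic: bare test, descriptive message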
@@ -99,8 +99,7 @@ class ResidualBlock(nn.Cell):
     def __init__(self,
                  in_channels,
                  out_channels,
-                 stride=1,
-                 down_sample=False):
+                 stride=1):
         super(ResidualBlock, self).__init__()
         out_chls = out_channels // self.expansion
@@ -188,7 +187,7 @@ class ResidualBlockWithDown(nn.Cell):
 class MakeLayer0(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer0, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=1, down_sample=True)
         self.b = block(out_channels, out_channels, stride=stride)
@@ -204,7 +203,7 @@ class MakeLayer0(nn.Cell):
 class MakeLayer1(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer1, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True)
         self.b = block(out_channels, out_channels, stride=1)
@@ -222,7 +221,7 @@ class MakeLayer1(nn.Cell):
 class MakeLayer2(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer2, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True)
         self.b = block(out_channels, out_channels, stride=1)
@@ -244,7 +243,7 @@ class MakeLayer2(nn.Cell):
 class MakeLayer3(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer3, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True)
         self.b = block(out_channels, out_channels, stride=1)
@@ -260,7 +259,7 @@ class MakeLayer3(nn.Cell):
 class ResNet(nn.Cell):
-    def __init__(self, block, layer_num, num_classes=100, batch_size=32):
+    def __init__(self, block, num_classes=100, batch_size=32):
         super(ResNet, self).__init__()
         self.batch_size = batch_size
         self.num_classes = num_classes
@@ -271,10 +270,10 @@ class ResNet(nn.Cell):
         self.relu = P.ReLU()
         self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode="same")
-        self.layer1 = MakeLayer0(block, layer_num[0], in_channels=64, out_channels=256, stride=1)
-        self.layer2 = MakeLayer1(block, layer_num[1], in_channels=256, out_channels=512, stride=2)
-        self.layer3 = MakeLayer2(block, layer_num[2], in_channels=512, out_channels=1024, stride=2)
-        self.layer4 = MakeLayer3(block, layer_num[3], in_channels=1024, out_channels=2048, stride=2)
+        self.layer1 = MakeLayer0(block, in_channels=64, out_channels=256, stride=1)
+        self.layer2 = MakeLayer1(block, in_channels=256, out_channels=512, stride=2)
+        self.layer3 = MakeLayer2(block, in_channels=512, out_channels=1024, stride=2)
+        self.layer4 = MakeLayer3(block, in_channels=1024, out_channels=2048, stride=2)
         self.pool = P.ReduceMean(keep_dims=True)
         self.squeeze = P.Squeeze(axis=(2, 3))
@@ -298,4 +297,4 @@ class ResNet(nn.Cell):
 def resnet50(batch_size, num_classes):
-    return ResNet(ResidualBlock, [3, 4, 6, 3], num_classes, batch_size)
+    return ResNet(ResidualBlock, num_classes, batch_size)
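In this test-only ResNet, each MakeLayerN class hard-codes its own block count, so both the `layer_num` parameter and the `[3, 4, 6, 3]` literal were dead weight (W0613 again). Dropping a positional parameter is safe only because every caller changes in the same commit; a stale call fails loudly rather than silently, as this hypothetical sketch shows:

class MakeLayer0Sketch:
    def __init__(self, block, in_channels, out_channels, stride):
        # The block count is now hard-coded inside the class.
        self.blocks = [block, block, block]
        self.channels = (in_channels, out_channels)
        self.stride = stride

MakeLayer0Sketch("block", in_channels=64, out_channels=256, stride=1)    # updated caller: fine
# MakeLayer0Sketch("block", 3, in_channels=64, out_channels=256, stride=1)    # stale caller:
#     TypeError: __init__() got multiple values for argument 'in_channels'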
@@ -114,9 +114,9 @@ class CrossEntropyLoss(nn.Cell):
     def construct(self, logits, label):
         label = self.one_hot(label, F.shape(logits)[1], self.one, self.zero)
-        loss = self.cross_entropy(logits, label)[0]
-        loss = self.mean(loss, (-1,))
-        return loss
+        loss_func = self.cross_entropy(logits, label)[0]
+        loss_func = self.mean(loss_func, (-1,))
+        return loss_func
 if __name__ == '__main__':
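The `loss` → `loss_func` rename most plausibly addresses pylint's redefined-outer-name warning (W0621), assuming the script's `__main__` block binds a module-level `loss`; the diff does not show the outer definition, so this is an inference. A minimal illustration:

def total_loss(values):
    loss = sum(values)    # pylint W0621: redefining name 'loss' from outer scope
    return loss

if __name__ == '__main__':
    loss = total_loss([1, 2, 3])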
@@ -146,4 +146,4 @@ if __name__ == '__main__':
     res = model.eval(eval_dataset)
     print("result: ", res)
     checker = os.path.exists("./memreuse.ir")
-    assert checker == True
+    assert checker, True
@@ -114,9 +114,9 @@ class CrossEntropyLoss(nn.Cell):
     def construct(self, logits, label):
         label = self.one_hot(label, F.shape(logits)[1], self.one, self.zero)
-        loss = self.cross_entropy(logits, label)[0]
-        loss = self.mean(loss, (-1,))
-        return loss
+        loss_func = self.cross_entropy(logits, label)[0]
+        loss_func = self.mean(loss_func, (-1,))
+        return loss_func
 if __name__ == '__main__':
@@ -146,4 +146,4 @@ if __name__ == '__main__':
     res = model.eval(eval_dataset)
     print("result: ", res)
     checker = os.path.exists("./normal_memreuse.ir")
-    assert checker == True
+    assert checker, True
@@ -95,8 +95,7 @@ class ResidualBlock(nn.Cell):
     def __init__(self,
                  in_channels,
                  out_channels,
-                 stride=1,
-                 down_sample=False):
+                 stride=1):
         super(ResidualBlock, self).__init__()
         out_chls = out_channels // self.expansion
@@ -184,7 +183,7 @@ class ResidualBlockWithDown(nn.Cell):
 class MakeLayer0(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer0, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=1, down_sample=True)
         self.b = block(out_channels, out_channels, stride=stride)
@@ -200,7 +199,7 @@ class MakeLayer0(nn.Cell):
 class MakeLayer1(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer1, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True)
         self.b = block(out_channels, out_channels, stride=1)
@@ -218,7 +217,7 @@ class MakeLayer1(nn.Cell):
 class MakeLayer2(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer2, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True)
         self.b = block(out_channels, out_channels, stride=1)
@@ -240,7 +239,7 @@ class MakeLayer2(nn.Cell):
 class MakeLayer3(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer3, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True)
         self.b = block(out_channels, out_channels, stride=1)
@@ -256,7 +255,7 @@ class MakeLayer3(nn.Cell):
 class ResNet(nn.Cell):
-    def __init__(self, block, layer_num, num_classes=100, batch_size=32):
+    def __init__(self, block, num_classes=100, batch_size=32):
         super(ResNet, self).__init__()
         self.batch_size = batch_size
         self.num_classes = num_classes
@@ -267,14 +266,10 @@ class ResNet(nn.Cell):
         self.relu = P.ReLU()
         self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode="SAME")
-        self.layer1 = MakeLayer0(
-            block, layer_num[0], in_channels=64, out_channels=256, stride=1)
-        self.layer2 = MakeLayer1(
-            block, layer_num[1], in_channels=256, out_channels=512, stride=2)
-        self.layer3 = MakeLayer2(
-            block, layer_num[2], in_channels=512, out_channels=1024, stride=2)
-        self.layer4 = MakeLayer3(
-            block, layer_num[3], in_channels=1024, out_channels=2048, stride=2)
+        self.layer1 = MakeLayer0(block, in_channels=64, out_channels=256, stride=1)
+        self.layer2 = MakeLayer1(block, in_channels=256, out_channels=512, stride=2)
+        self.layer3 = MakeLayer2(block, in_channels=512, out_channels=1024, stride=2)
+        self.layer4 = MakeLayer3(block, in_channels=1024, out_channels=2048, stride=2)
         self.pool = P.ReduceMean(keep_dims=True)
         self.fc = fc_with_initialize(512 * block.expansion, num_classes)
@@ -298,4 +293,4 @@ class ResNet(nn.Cell):
 def resnet50(batch_size, num_classes):
-    return ResNet(ResidualBlock, [3, 4, 6, 3], num_classes, batch_size)
+    return ResNet(ResidualBlock, num_classes, batch_size)
@@ -18,7 +18,7 @@ import numpy as np
 from apply_momentum import ApplyMomentum
 import mindspore.context as context
 import mindspore.nn as nn
-import mindspore.nn as wrap
+from mindspore.nn import wrap
 from mindspore import Tensor, Model
 from mindspore.common.api import ms_function
 from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits
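With `import mindspore.nn as nn` already present, `import mindspore.nn as wrap` bound the same module object to a second, misleading name, which pylint reports as a reimport (W0404). The replacement imports the actual `mindspore.nn.wrap` submodule instead. A sketch of the difference, assuming a MindSpore build of this era where nn.wrap is a public submodule:

import mindspore.nn as nn
# import mindspore.nn as wrap    # pylint W0404 (reimported): 'wrap' would just be nn again
from mindspore.nn import wrap    # binds the nn.wrap submodule, a distinct object
assert wrap is not nn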
......
@@ -13,12 +13,10 @@
 # limitations under the License.
 # ============================================================================
 import numpy as np
 from resnet_torch import resnet50
 from mindspore import Tensor
-from mindspore.train.serialization import save, load, _check_filedir_or_create, _chg_model_file_name_if_same_exist, \
-    _read_file_last_line, context, export
+from mindspore.train.serialization import context, export
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -26,6 +24,4 @@ context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
 def test_resnet50_export(batch_size=1, num_classes=5):
     input_np = np.random.uniform(0.0, 1.0, size=[batch_size, 3, 224, 224]).astype(np.float32)
     net = resnet50(batch_size, num_classes)
-    # param_dict = load_checkpoint("./resnet50-1_103.ckpt")
-    # load_param_into_net(net, param_dict)
     export(net, Tensor(input_np), file_name="./me_resnet50.pb", file_format="GEIR")
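Two cleanups ride together here: the unused-import warning (W0611), since the test only calls `export` and `context.set_context`, and the deletion of the commented-out `load_checkpoint` lines, which is ordinary dead-code hygiene rather than a specific pylint message. A minimal W0611 sketch:

import os
import sys    # pylint W0611: 'sys' imported but unused

print(os.getcwd())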
@@ -99,8 +99,7 @@ class ResidualBlock(nn.Cell):
     def __init__(self,
                  in_channels,
                  out_channels,
-                 stride=1,
-                 down_sample=False):
+                 stride=1):
         super(ResidualBlock, self).__init__()
         out_chls = out_channels // self.expansion
@@ -188,7 +187,7 @@ class ResidualBlockWithDown(nn.Cell):
 class MakeLayer0(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer0, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=1, down_sample=True)
         self.b = block(out_channels, out_channels, stride=stride)
@@ -204,7 +203,7 @@ class MakeLayer0(nn.Cell):
 class MakeLayer1(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer1, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True)
         self.b = block(out_channels, out_channels, stride=1)
@@ -222,7 +221,7 @@ class MakeLayer1(nn.Cell):
 class MakeLayer2(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer2, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True)
         self.b = block(out_channels, out_channels, stride=1)
@@ -244,7 +243,7 @@ class MakeLayer2(nn.Cell):
 class MakeLayer3(nn.Cell):
-    def __init__(self, block, layer_num, in_channels, out_channels, stride):
+    def __init__(self, block, in_channels, out_channels, stride):
         super(MakeLayer3, self).__init__()
         self.a = ResidualBlockWithDown(in_channels, out_channels, stride=stride, down_sample=True)
         self.b = block(out_channels, out_channels, stride=1)
@@ -260,7 +259,7 @@ class MakeLayer3(nn.Cell):
 class ResNet(nn.Cell):
-    def __init__(self, block, layer_num, num_classes=100, batch_size=32):
+    def __init__(self, block, num_classes=100, batch_size=32):
         super(ResNet, self).__init__()
         self.batch_size = batch_size
         self.num_classes = num_classes
@@ -271,10 +270,10 @@ class ResNet(nn.Cell):
         self.relu = P.ReLU()
         self.maxpool = P.MaxPoolWithArgmax(ksize=3, strides=2, padding="SAME")
-        self.layer1 = MakeLayer0(block, layer_num[0], in_channels=64, out_channels=256, stride=1)
-        self.layer2 = MakeLayer1(block, layer_num[1], in_channels=256, out_channels=512, stride=2)
-        self.layer3 = MakeLayer2(block, layer_num[2], in_channels=512, out_channels=1024, stride=2)
-        self.layer4 = MakeLayer3(block, layer_num[3], in_channels=1024, out_channels=2048, stride=2)
+        self.layer1 = MakeLayer0(block, in_channels=64, out_channels=256, stride=1)
+        self.layer2 = MakeLayer1(block, in_channels=256, out_channels=512, stride=2)
+        self.layer3 = MakeLayer2(block, in_channels=512, out_channels=1024, stride=2)
+        self.layer4 = MakeLayer3(block, in_channels=1024, out_channels=2048, stride=2)
         self.pool = P.ReduceMean(keep_dims=True)
         self.squeeze = P.Squeeze(axis=(2, 3))
@@ -298,4 +297,4 @@ class ResNet(nn.Cell):
 def resnet50(batch_size, num_classes):
-    return ResNet(ResidualBlock, [3, 4, 6, 3], num_classes, batch_size)
+    return ResNet(ResidualBlock, num_classes, batch_size)
@@ -116,9 +116,9 @@ class CrossEntropyLoss(nn.Cell):
     def construct(self, logits, label):
         label = self.one_hot(label, F.shape(logits)[1], self.one, self.zero)
-        loss = self.cross_entropy(logits, label)[0]
-        loss = self.mean(loss, (-1,))
-        return loss
+        loss_func = self.cross_entropy(logits, label)[0]
+        loss_func = self.mean(loss_func, (-1,))
+        return loss_func
 if __name__ == '__main__':
......
@@ -15,7 +15,7 @@
 import os
 import random
-import time
+import pytest
 import numpy as np
 from resnet import resnet50
@@ -30,9 +30,8 @@ from mindspore import Tensor
 from mindspore import context
 from mindspore.nn.optim.momentum import Momentum
 from mindspore.ops import operations as P
-from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, Callback
+from mindspore.train.callback import Callback
 from mindspore.train.model import Model
-from mindspore.train.serialization import load_checkpoint, load_param_into_net
 random.seed(1)
 np.random.seed(1)
......
@@ -15,11 +15,10 @@
 import os
 import random
+from multiprocessing import Process, Queue
 import numpy as np
 import pytest
-from multiprocessing import Process, Queue
 from resnet import resnet50
 import mindspore.common.dtype as mstype
 import mindspore.dataset as ds
 import mindspore.dataset.transforms.c_transforms as C
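This hunk fixes pylint's wrong-import-order warning (C0411): `multiprocessing` is standard library, so it belongs in the first import group, ahead of third-party packages like numpy and pytest. The expected grouping, per PEP 8 (`resnet` here is the test-local module from the diff):

# standard library
import os
import random
from multiprocessing import Process, Queue

# third party
import numpy as np
import pytest

# local
from resnet import resnet50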
......