Commit c82a8bf4 in magicwindyyd / mindspore (forked from MindSpore / mindspore)
Authored June 08, 2020 by mindspore-ci-bot; committed by Gitee on June 08, 2020.
!1678 modify print
Merge pull request !1678 from jinyaohui/print
Parents: 0a897b0c, 5e43edc4
Showing 21 changed files with 43 additions and 49 deletions (+43, -49):
mindspore/dataset/text/utils.py                                   +1  -1
mindspore/nn/layer/lstm.py                                        +8  -7
tests/mindspore_test_framework/utils/check_gradient.py            +1  -1
tests/perf_test/bert/test_bert_train.py                           +1  -1
tests/st/nccl/test_nccl_all_reduce_op.py                          +3  -3
tests/st/networks/models/bert/bert_tdt_lossscale.py               +4  -3
tests/st/networks/test_gpu_lenet.py                               +2  -1
tests/st/networks/test_gpu_resnet.py                              +1  -1
tests/st/networks/test_network_main.py                            +1  -2
tests/ut/python/nn/test_clip_by_norm.py                           +2  -2
tests/ut/python/ops/test_ops.py                                   +4  -4
tests/ut/python/optimizer/test_debug_location.py                  +2  -2
tests/ut/python/pynative_mode/nn/test_dropout.py                  +0  -1
tests/ut/python/pynative_mode/test_graph_return_const_param.py    +3  -3
tests/ut/python/pynative_mode/test_hook.py                        +2  -2
tests/ut/python/pynative_mode/test_insert_grad_of.py              +0  -1
tests/ut/python/pynative_mode/test_parse_method.py                +2  -1
tests/ut/python/pynative_mode/test_training.py                    +0  -2
tests/ut/python/train/quant/test_quant.py                         +0  -5
tests/ut/python/utils/test_callback.py                            +3  -3
tests/ut/python/utils/test_serialize.py                           +3  -3
mindspore/dataset/text/utils.py
@@ -16,8 +16,8 @@ Some basic function for nlp
 """
 from enum import IntEnum
-import mindspore._c_dataengine as cde
 import numpy as np
+import mindspore._c_dataengine as cde
 from .validators import check_from_file, check_from_list, check_from_dict
mindspore/nn/layer/lstm.py
@@ -13,15 +13,16 @@
 # limitations under the License.
 # ============================================================================
 """lstm"""
-from mindspore.ops import operations as P
-import numpy as np
-from mindspore.nn.cell import Cell
-from mindspore.common.parameter import Parameter, ParameterTuple
-from mindspore.common.initializer import initializer
-from mindspore._checkparam import Validator as validator
-from mindspore import context
 import mindspore.nn as nn
+from mindspore import context
+from mindspore._checkparam import Validator as validator
+from mindspore.common.initializer import initializer
+from mindspore.common.parameter import Parameter, ParameterTuple
 from mindspore.common.tensor import Tensor
+import numpy as np
+from mindspore.nn.cell import Cell
+from mindspore.ops import operations as P

 __all__ = ['LSTM', 'LSTMCell']
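Many of the hunks in this commit only regroup imports. A minimal sketch of the grouping they appear to move toward, standard library first, then third-party, then package-local, is shown below; the exact target order is an assumption inferred from the reordered lines, and the modules named here are only examples:

import os                  # standard library

import numpy as np         # third party

import mindspore.nn as nn  # package / first party
from mindspore import context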
tests/mindspore_test_framework/utils/check_gradient.py
@@ -18,8 +18,8 @@
 from typing import Callable, List, Any
-import mindspore._c_expression as _c_expression
 import numpy as np
+import mindspore._c_expression as _c_expression
 from mindspore import ParameterTuple
 from mindspore import Tensor
tests/perf_test/bert/test_bert_train.py
@@ -22,9 +22,9 @@ import os
 import mindspore.common.dtype as mstype
 import mindspore.context as context
 from mindspore import Tensor
-from model_zoo.bert.src import BertConfig, BertNetworkWithLoss, BertTrainOneStepCell, BertTrainOneStepWithLossScaleCell
 from mindspore.nn.optim import AdamWeightDecayDynamicLR
 from mindspore.train.loss_scale_manager import DynamicLossScaleManager
+from model_zoo.bert.src import BertConfig, BertNetworkWithLoss, BertTrainOneStepCell, BertTrainOneStepWithLossScaleCell
 from ...dataset_mock import MindData
 from ...ops_common import nn, np, batch_tuple_tensor, build_construct_graph
tests/st/nccl/test_nccl_all_reduce_op.py
@@ -91,10 +91,10 @@ class Net2(nn.Cell):
         self.all_reduce3 = P.AllReduce(self.op2, group=NCCL_WORLD_COMM_GROUP)

     def construct(self):
-        x = self.all_reduce1(self.x1)
-        y = self.all_reduce2(x)
+        x_ = self.all_reduce1(self.x1)
+        y = self.all_reduce2(x_)
         z = self.all_reduce3(y)
-        return (x, y, z)
+        return (x_, y, z)


 def test_AllReduce2():
tests/st/networks/models/bert/bert_tdt_lossscale.py
@@ -17,8 +17,11 @@
 import os
 import time
-import pytest
 import numpy as np
+import pytest
+
+from src.bert_for_pre_training import BertNetworkWithLoss, BertTrainOneStepWithLossScaleCell
+from src.bert_model import BertConfig
 import mindspore.common.dtype as mstype
 import mindspore.dataset.engine.datasets as de
@@ -26,8 +29,6 @@ import mindspore.dataset.transforms.c_transforms as C
 from mindspore import context
 from mindspore import log as logger
 from mindspore.common.tensor import Tensor
-from src.bert_model import BertConfig
-from src.bert_for_pre_training import BertNetworkWithLoss, BertTrainOneStepWithLossScaleCell
 from mindspore.nn.optim import Lamb
 from mindspore.train.callback import Callback
 from mindspore.train.loss_scale_manager import DynamicLossScaleManager
tests/st/networks/test_gpu_lenet.py
@@ -13,8 +13,9 @@
 # limitations under the License.
 # ============================================================================
-import numpy as np
 import os
+
+import numpy as np
 import pytest

 import mindspore.context as context
tests/st/networks/test_gpu_resnet.py
@@ -397,5 +397,5 @@ def test_trainTensor_amp(num_classes=10, epoch=18, batch_size=16):
         loss = train_network(data, label)
         losses.append(loss)
     assert (losses[-1][0].asnumpy() < 1)
-    assert (losses[-1][1].asnumpy() == False)
+    assert not losses[-1][1].asnumpy()
     assert (losses[-1][2].asnumpy() > 1)
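The assert rewrite above drops the comparison against False in favour of testing the value directly, which is the form lint tools such as pycodestyle (E712) and pylint (singleton-comparison) recommend. A minimal sketch of the two styles, using a hypothetical flag value:

flag = (3 > 5)

assert flag == False   # flagged style: explicit comparison against a singleton
assert not flag        # preferred style: test the value directly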
tests/st/networks/test_network_main.py
@@ -19,9 +19,8 @@ Usage:
     python test_network_main.py --net lenet --target Ascend
 """
 import argparse
 import numpy as np
-import os
-import time
+
 from models.alexnet import AlexNet
 from models.lenet import LeNet
 from models.resnetv1_5 import resnet50
tests/ut/python/nn/test_clip_by_norm.py
@@ -35,10 +35,10 @@ def test_clip_by_norm_const():
             super(Network, self).__init__()
             self.norm_value = Tensor(np.array([1]).astype(np.float32))
             self.clip = nn.ClipByNorm()

         def construct(self, x):
             return self.clip(x, self.norm_value)

     net = Network()
     x = Tensor(np.array([[-2, 0, 0], [0, 3, 4]]).astype(np.float32))
-    output = net(x)
+    net(x)
tests/ut/python/ops/test_ops.py
@@ -14,6 +14,7 @@
 # ============================================================================
 """ test ops """
 import functools
+
 import numpy as np

 import mindspore.nn as nn
@@ -907,7 +908,7 @@ test_case_nn_ops = [
         'skip': ['backward']}),
     ('BasicLSTMCell', {
         'block': P.BasicLSTMCell(keep_prob=1.0, forget_bias=1.0, state_is_tuple=True, activation='tanh'),
-        'desc_inputs': [[128, 128], [128, 128], [128, 128], [512, 256, 1, 1],[512, 1, 1, 1]],
+        'desc_inputs': [[128, 128], [128, 128], [128, 128], [512, 256, 1, 1], [512, 1, 1, 1]],
         'desc_bprop': [[128, 128], [128, 128], [128, 128], [128, 128], [128, 128], [128, 128], [128, 128]],
         'skip': []}),
     ('TopK', {
@@ -1333,7 +1334,7 @@ test_case_array_ops = [
         'desc_inputs': [(Tensor(np.array([1], np.float32)),
                          Tensor(np.array([1], np.float32)),
                          Tensor(np.array([1], np.float32)))],
-        'desc_bprop': [[3, ]]}),
+        'desc_bprop': [[3,]]}),
     ('Pack_0', {
         'block': NetForPackInput(P.Pack()),
         'desc_inputs': [[2, 2], [2, 2], [2, 2]],
@@ -1499,8 +1500,7 @@ test_case = functools.reduce(lambda x, y: x + y, test_case_lists)
 test_exec_case = test_case

 test_backward_exec_case = filter(lambda x: 'skip' not in x[1] or
                                  'backward' not in x[1]['skip'], test_case)

-

 @non_graph_engine
tests/ut/python/optimizer/test_debug_location.py
@@ -144,7 +144,7 @@ def test_op_forward_infererror():
     input_np = np.random.randn(2, 3, 4, 5).astype(np.float32)
     input_me = Tensor(input_np)
     net = Net3()
-    with pytest.raises(TypeError) as e:
+    with pytest.raises(TypeError):
         net(input_me)
@@ -162,7 +162,7 @@ def test_sequential_resolve_error():
     input_np = np.random.randn(2, 3, 4, 5).astype(np.float32)
     input_me = Tensor(input_np)
     net = SequenceNet()
-    with pytest.raises(RuntimeError) as e:
+    with pytest.raises(RuntimeError):
         net(input_me)
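Both hunks above drop an "as e" binding that was never read; pytest.raises still fails the test if the expected exception is not raised. When the exception object does need to be inspected, the standard pytest context-manager form keeps it available, as in this small illustrative test (the divide function is hypothetical):

import pytest

def divide(a, b):
    return a / b

def test_divide_by_zero_message():
    with pytest.raises(ZeroDivisionError) as excinfo:
        divide(1, 0)
    # excinfo wraps the raised exception; its message is checked here.
    assert "division" in str(excinfo.value)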
tests/ut/python/pynative_mode/nn/test_dropout.py
@@ -19,7 +19,6 @@ import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore import context
 from mindspore import dtype as mstype
-from mindspore.common.api import _executor

 context.set_context(device_target="Ascend")
tests/ut/python/pynative_mode/test_graph_return_const_param.py
@@ -44,7 +44,7 @@ class ChooseInitParameterWithInput(nn.Cell):
         self.x = Parameter(Tensor(np.ones(2), dtype=mstype.int32), name='x')

     @ms_function
-    def construct(self, input):
+    def construct(self, input_data):
         return self.x
@@ -57,7 +57,7 @@ def test_choose_init_param():
 def test_choose_param_with_input():
     choose = ChooseInitParameterWithInput()
-    input = Tensor(np.zeros(2), dtype=mstype.int32)
+    input_data = Tensor(np.zeros(2), dtype=mstype.int32)
     expect = Tensor(np.ones(2), dtype=mstype.int32)
-    out = choose(input)
+    out = choose(input_data)
     assert np.allclose(expect.asnumpy(), out.asnumpy())
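The rename from input to input_data in both hunks avoids shadowing the Python builtin input, which pylint reports as redefined-builtin. A minimal illustration with hypothetical functions:

def echo(input):  # shadows the builtin input(); pylint warns about this parameter name
    return input


def echo_fixed(input_data):  # renamed parameter, the builtin remains reachable
    return input_data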
tests/ut/python/pynative_mode/test_hook.py
 import numpy as np
 import mindspore.nn as nn
 import mindspore.ops.operations as P
-from mindspore.ops import composite as C
 from mindspore import context, Tensor, ParameterTuple
 from mindspore.common.initializer import TruncatedNormal
 from mindspore.nn import WithLossCell, Momentum
+from mindspore.ops import composite as C
+
 context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU")
@@ -45,7 +46,6 @@ class LeNet5(nn.Cell):
     Lenet network
     Args:
         num_class (int): Num classes. Default: 10.
-
     Returns:
         Tensor, output tensor
tests/ut/python/pynative_mode/test_insert_grad_of.py
@@ -21,7 +21,6 @@ from mindspore import Tensor
 from mindspore import context
 from mindspore.common.api import ms_function
 from mindspore.ops import composite as C
-from mindspore.ops import functional as F
 from mindspore.ops import operations as P
 from ....mindspore_test_framework.utils.bprop_util import bprop
 from ....mindspore_test_framework.utils.debug_util import PrintShapeTypeCell, PrintGradShapeTypeCell
tests/ut/python/pynative_mode/test_parse_method.py
@@ -19,9 +19,10 @@
 @Desc : test parse the object's method
 """
 import logging
+from dataclasses import dataclass
+
 import numpy as np
 import pytest
-from dataclasses import dataclass
 import mindspore.nn as nn
 from mindspore import context
tests/ut/python/pynative_mode/test_training.py
@@ -19,9 +19,7 @@ import mindspore.nn as nn
 from mindspore import context
 from mindspore.common.tensor import Tensor
 from mindspore.nn import WithGradCell, WithLossCell
-from mindspore.nn.optim import Momentum
 from mindspore.ops import operations as P
-from mindspore.train.model import Model
 from ..ut_filter import non_graph_engine
tests/ut/python/train/quant/test_quant.py
@@ -13,14 +13,9 @@
 # limitations under the License.
 # ============================================================================
 """ tests for quant """
-import numpy as np
-from mobilenetv2_combined import MobileNetV2
-
 import mindspore.context as context
-from mindspore import Tensor
 from mindspore import nn
 from mindspore.nn.layer import combined
-from mindspore.train.quant import quant as qat

 context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
tests/ut/python/utils/test_callback.py
@@ -74,7 +74,7 @@ class LossNet(nn.Cell):
         return out


-def test_Model_Checkpoint_prefix_invalid():
+def test_model_checkpoint_prefix_invalid():
     """Test ModelCheckpoint prefix invalid."""
     with pytest.raises(ValueError):
         ModelCheckpoint(123)
@@ -338,9 +338,9 @@ def test_step_end_save_graph():
     ckpoint_cb.begin(run_context)
     # import pdb;pdb.set_trace()
     ckpoint_cb.step_end(run_context)
-    assert os.path.exists('./test_files/test-graph.meta') == True
+    assert os.path.exists('./test_files/test-graph.meta')
     if os.path.exists('./test_files/test-graph.meta'):
         os.chmod('./test_files/test-graph.meta', stat.S_IWRITE)
         os.remove('./test_files/test-graph.meta')
     ckpoint_cb.step_end(run_context)
-    assert os.path.exists('./test_files/test-graph.meta') == False
+    assert not os.path.exists('./test_files/test-graph.meta')
tests/ut/python/utils/test_serialize.py
@@ -71,16 +71,16 @@ def setup_module():
 def test_save_graph():
     """ test_exec_save_graph """

-    class Net(nn.Cell):
+    class Net1(nn.Cell):
         def __init__(self):
-            super(Net, self).__init__()
+            super(Net1, self).__init__()
             self.add = P.TensorAdd()

         def construct(self, x, y):
             z = self.add(x, y)
             return z

-    net = Net()
+    net = Net1()
     net.set_train()
     out_me_list = []
     x = Tensor(np.random.rand(2, 1, 2, 3).astype(np.float32))