magicwindyyd / mindspore (forked from MindSpore / mindspore)
Commit a04d4971
Authored on Jul 02, 2020 by chentingting

fix gcn import error

Parent: 14868eb2
Changed files: 4
Showing 4 changed files with 122 additions and 121 deletions (+122, -121).
model_zoo/gcn/src/gcn.py         +0    -117
model_zoo/gcn/src/metrics.py     +116  -0
model_zoo/gcn/train.py           +4    -3
tests/st/gnn/gcn/test_gcn.py     +2    -1
model_zoo/gcn/src/gcn.py

@@ -15,13 +15,9 @@
 """GCN."""
 import numpy as np
 from mindspore import nn
-from mindspore.common.parameter import ParameterTuple
-from mindspore.ops import composite as C
-from mindspore.ops import functional as F
 from mindspore.ops import operations as P
 from mindspore import Tensor
 from mindspore.nn.layer.activation import get_activation
-from model_zoo.gcn.src.metrics import Loss, Accuracy


 def glorot(shape):
@@ -105,116 +101,3 @@ class GCN(nn.Cell):
         output0 = self.layer0(self.adj, self.feature)
         output1 = self.layer1(self.adj, output0)
         return output1
-
-
-class LossAccuracyWrapper(nn.Cell):
-    """
-    Wraps the GCN model with loss and accuracy cell.
-
-    Args:
-        network (Cell): GCN network.
-        label (numpy.ndarray): Dataset labels.
-        mask (numpy.ndarray): Mask for training, evaluation or test.
-        weight_decay (float): Weight decay parameter for weight of the first convolution layer.
-    """
-
-    def __init__(self, network, label, mask, weight_decay):
-        super(LossAccuracyWrapper, self).__init__()
-        self.network = network
-        self.loss = Loss(label, mask, weight_decay, network.trainable_params()[0])
-        self.accuracy = Accuracy(label, mask)
-
-    def construct(self):
-        preds = self.network()
-        loss = self.loss(preds)
-        accuracy = self.accuracy(preds)
-        return loss, accuracy
-
-
-class LossWrapper(nn.Cell):
-    """
-    Wraps the GCN model with loss.
-
-    Args:
-        network (Cell): GCN network.
-        label (numpy.ndarray): Dataset labels.
-        mask (numpy.ndarray): Mask for training.
-        weight_decay (float): Weight decay parameter for weight of the first convolution layer.
-    """
-
-    def __init__(self, network, label, mask, weight_decay):
-        super(LossWrapper, self).__init__()
-        self.network = network
-        self.loss = Loss(label, mask, weight_decay, network.trainable_params()[0])
-
-    def construct(self):
-        preds = self.network()
-        loss = self.loss(preds)
-        return loss
-
-
-class TrainOneStepCell(nn.Cell):
-    r"""
-    Network training package class.
-
-    Wraps the network with an optimizer. The resulting Cell be trained without inputs.
-    Backward graph will be created in the construct function to do parameter updating. Different
-    parallel modes are available to run the training.
-
-    Args:
-        network (Cell): The training network.
-        optimizer (Cell): Optimizer for updating the weights.
-        sens (Number): The scaling number to be filled as the input of backpropagation. Default value is 1.0.
-
-    Outputs:
-        Tensor, a scalar Tensor with shape :math:`()`.
-
-    Examples:
-        >>> net = Net()
-        >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits()
-        >>> optim = nn.Momentum(net.trainable_params(), learning_rate=0.1, momentum=0.9)
-        >>> loss_net = nn.WithLossCell(net, loss_fn)
-        >>> train_net = nn.TrainOneStepCell(loss_net, optim)
-    """
-
-    def __init__(self, network, optimizer, sens=1.0):
-        super(TrainOneStepCell, self).__init__(auto_prefix=False)
-        self.network = network
-        self.network.add_flags(defer_inline=True)
-        self.weights = ParameterTuple(network.trainable_params())
-        self.optimizer = optimizer
-        self.grad = C.GradOperation('grad', get_by_list=True, sens_param=True)
-        self.sens = sens
-
-    def construct(self):
-        weights = self.weights
-        loss = self.network()
-        sens = P.Fill()(P.DType()(loss), P.Shape()(loss), self.sens)
-        grads = self.grad(self.network, weights)(sens)
-        return F.depend(loss, self.optimizer(grads))
-
-
-class TrainNetWrapper(nn.Cell):
-    """
-    Wraps the GCN model with optimizer.
-
-    Args:
-        network (Cell): GCN network.
-        label (numpy.ndarray): Dataset labels.
-        mask (numpy.ndarray): Mask for training, evaluation or test.
-        config (ConfigGCN): Configuration for GCN.
-    """
-
-    def __init__(self, network, label, mask, config):
-        super(TrainNetWrapper, self).__init__(auto_prefix=True)
-        self.network = network
-        loss_net = LossWrapper(network, label, mask, config.weight_decay)
-        optimizer = nn.Adam(loss_net.trainable_params(),
-                            learning_rate=config.learning_rate)
-        self.loss_train_net = TrainOneStepCell(loss_net, optimizer)
-        self.accuracy = Accuracy(label, mask)
-
-    def construct(self):
-        loss = self.loss_train_net()
-        accuracy = self.accuracy(self.network())
-        return loss, accuracy
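The wrapper classes removed here (and re-added to metrics.py below) all follow the same pattern: labels, masks, and hyperparameters are stored on the Cell at construction time, so construct() takes no arguments and one training or evaluation step is simply a call to the cell. A minimal, self-contained sketch of that pattern, using a toy dense layer and a mean-squared error instead of the GCN itself (everything in this sketch is illustrative and not part of this commit):

import numpy as np
from mindspore import Tensor, nn
from mindspore.ops import operations as P

class NoInputLossCell(nn.Cell):
    """Toy stand-in for LossWrapper: data and targets are baked into the cell."""
    def __init__(self):
        super(NoInputLossCell, self).__init__()
        self.dense = nn.Dense(4, 2)            # toy network in place of GCN
        self.square = P.Square()
        self.mean = P.ReduceMean()
        self.data = Tensor(np.random.randn(8, 4).astype(np.float32))
        self.target = Tensor(np.random.randn(8, 2).astype(np.float32))

    def construct(self):                       # no inputs, like LossWrapper.construct
        pred = self.dense(self.data)
        return self.mean(self.square(pred - self.target))

loss_net = NoInputLossCell()
print(loss_net())   # calling the cell runs the whole graph, the same way TrainOneStepCell calls self.network()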
model_zoo/gcn/src/metrics.py

@@ -17,6 +17,9 @@ from mindspore import nn
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
 from mindspore.ops import operations as P
+from mindspore.common.parameter import ParameterTuple
+from mindspore.ops import composite as C
+from mindspore.ops import functional as F


 class Loss(nn.Cell):
@@ -68,3 +71,116 @@ class Accuracy(nn.Cell):
         mask = mask / mask_reduce
         accuracy_all *= mask
         return self.mean(accuracy_all)
+
+
+class LossAccuracyWrapper(nn.Cell):
+    """
+    Wraps the GCN model with loss and accuracy cell.
+
+    Args:
+        network (Cell): GCN network.
+        label (numpy.ndarray): Dataset labels.
+        mask (numpy.ndarray): Mask for training, evaluation or test.
+        weight_decay (float): Weight decay parameter for weight of the first convolution layer.
+    """
+
+    def __init__(self, network, label, mask, weight_decay):
+        super(LossAccuracyWrapper, self).__init__()
+        self.network = network
+        self.loss = Loss(label, mask, weight_decay, network.trainable_params()[0])
+        self.accuracy = Accuracy(label, mask)
+
+    def construct(self):
+        preds = self.network()
+        loss = self.loss(preds)
+        accuracy = self.accuracy(preds)
+        return loss, accuracy
+
+
+class LossWrapper(nn.Cell):
+    """
+    Wraps the GCN model with loss.
+
+    Args:
+        network (Cell): GCN network.
+        label (numpy.ndarray): Dataset labels.
+        mask (numpy.ndarray): Mask for training.
+        weight_decay (float): Weight decay parameter for weight of the first convolution layer.
+    """
+
+    def __init__(self, network, label, mask, weight_decay):
+        super(LossWrapper, self).__init__()
+        self.network = network
+        self.loss = Loss(label, mask, weight_decay, network.trainable_params()[0])
+
+    def construct(self):
+        preds = self.network()
+        loss = self.loss(preds)
+        return loss
+
+
+class TrainOneStepCell(nn.Cell):
+    r"""
+    Network training package class.
+
+    Wraps the network with an optimizer. The resulting Cell be trained without inputs.
+    Backward graph will be created in the construct function to do parameter updating. Different
+    parallel modes are available to run the training.
+
+    Args:
+        network (Cell): The training network.
+        optimizer (Cell): Optimizer for updating the weights.
+        sens (Number): The scaling number to be filled as the input of backpropagation. Default value is 1.0.
+
+    Outputs:
+        Tensor, a scalar Tensor with shape :math:`()`.
+
+    Examples:
+        >>> net = Net()
+        >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits()
+        >>> optim = nn.Momentum(net.trainable_params(), learning_rate=0.1, momentum=0.9)
+        >>> loss_net = nn.WithLossCell(net, loss_fn)
+        >>> train_net = nn.TrainOneStepCell(loss_net, optim)
+    """
+
+    def __init__(self, network, optimizer, sens=1.0):
+        super(TrainOneStepCell, self).__init__(auto_prefix=False)
+        self.network = network
+        self.network.add_flags(defer_inline=True)
+        self.weights = ParameterTuple(network.trainable_params())
+        self.optimizer = optimizer
+        self.grad = C.GradOperation('grad', get_by_list=True, sens_param=True)
+        self.sens = sens
+
+    def construct(self):
+        weights = self.weights
+        loss = self.network()
+        sens = P.Fill()(P.DType()(loss), P.Shape()(loss), self.sens)
+        grads = self.grad(self.network, weights)(sens)
+        return F.depend(loss, self.optimizer(grads))
+
+
+class TrainNetWrapper(nn.Cell):
+    """
+    Wraps the GCN model with optimizer.
+
+    Args:
+        network (Cell): GCN network.
+        label (numpy.ndarray): Dataset labels.
+        mask (numpy.ndarray): Mask for training, evaluation or test.
+        config (ConfigGCN): Configuration for GCN.
+    """
+
+    def __init__(self, network, label, mask, config):
+        super(TrainNetWrapper, self).__init__(auto_prefix=True)
+        self.network = network
+        loss_net = LossWrapper(network, label, mask, config.weight_decay)
+        optimizer = nn.Adam(loss_net.trainable_params(),
+                            learning_rate=config.learning_rate)
+        self.loss_train_net = TrainOneStepCell(loss_net, optimizer)
+        self.accuracy = Accuracy(label, mask)
+
+    def construct(self):
+        loss = self.loss_train_net()
+        accuracy = self.accuracy(self.network())
+        return loss, accuracy
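One detail worth noting in TrainOneStepCell.construct above: because the wrapped network takes no inputs, the only value fed into the backward graph is the sensitivity tensor, built with Fill so that it has the loss's dtype and shape and is filled with sens (1.0 by default). A standalone sketch of just that operator pattern, with a made-up scalar loss value for illustration (not part of this commit):

import numpy as np
from mindspore import Tensor
from mindspore.ops import operations as P

loss = Tensor(np.array(0.7, np.float32))   # stand-in for the scalar loss returned by the network
sens_value = 1.0                           # same default as TrainOneStepCell's `sens` argument

# Fill(dtype, shape, value): the shape is () because the loss is a scalar,
# so `sens` becomes a scalar tensor holding 1.0 that seeds backpropagation.
sens = P.Fill()(P.DType()(loss), P.Shape()(loss), sens_value)
print(sens)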
model_zoo/gcn/train.py

@@ -26,9 +26,10 @@ from matplotlib import animation
 from sklearn import manifold
 from mindspore import context
-from model_zoo.gcn.src.gcn import GCN, LossAccuracyWrapper, TrainNetWrapper
-from model_zoo.gcn.src.config import ConfigGCN
-from model_zoo.gcn.src.dataset import get_adj_features_labels, get_mask
+from src.gcn import GCN
+from src.metrics import LossAccuracyWrapper, TrainNetWrapper
+from src.config import ConfigGCN
+from src.dataset import get_adj_features_labels, get_mask


 def t_SNE(out_feature, dim):
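The train.py change above is the import-error fix named in the commit message: the script now imports its own package as src.* instead of model_zoo.gcn.src.*, which only resolves when the repository root happens to be on sys.path, and it picks up the relocated wrappers from src.metrics. A hedged sketch of how the new imports are expected to resolve when working from a repository checkout (the path handling below is an assumption for illustration, not something this commit adds):

import sys

# Assumption: the current directory is the repository root; putting model_zoo/gcn
# on the path makes the `src` package importable, mirroring what happens
# automatically when train.py is launched from inside model_zoo/gcn.
sys.path.insert(0, "model_zoo/gcn")

from src.gcn import GCN
from src.metrics import LossAccuracyWrapper, TrainNetWrapper
from src.config import ConfigGCN
from src.dataset import get_adj_features_labels, get_mask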
tests/st/gnn/gcn/test_gcn.py

@@ -17,7 +17,8 @@ import time
 import pytest
 import numpy as np
 from mindspore import context
-from model_zoo.gcn.src.gcn import GCN, LossAccuracyWrapper, TrainNetWrapper
+from model_zoo.gcn.src.gcn import GCN
+from model_zoo.gcn.src.metrics import LossAccuracyWrapper, TrainNetWrapper
 from model_zoo.gcn.src.config import ConfigGCN
 from model_zoo.gcn.src.dataset import get_adj_features_labels, get_mask