Unverified  Commit 1c858591  Authored by: 小飞猪  Committed by: GitHub

[xdoctest][task 239] reformat example code with google style in `python/paddle/incubate/asp/asp.py` (#56731)

* [Doctest]fix No.239, test=docs_preview

* fix style
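
For context, the `>>>` / `...` prompts introduced by this change follow the Google-style sample-code convention that Paddle's xdoctest-based checker can execute: `>>>` starts a statement, `...` continues a block, and an unprefixed line that follows is compared against the statement's actual output. Below is a minimal, self-contained sketch of the convention using the standard-library `doctest` module; the `add` function and the file name are illustrative only and are not part of this PR.

    # sample_prompt_demo.py -- illustrative only, not part of this PR
    import doctest

    def add(a, b):
        """Add two numbers.

        Examples:
            .. code-block:: python

                >>> add(1, 2)        # '>>>' starts a new statement
                3
                >>> total = 0
                >>> for i in range(3):
                ...     total += i   # '...' continues the block above
                >>> total
                3
        """
        return a + b

    if __name__ == "__main__":
        # doctest scans the docstring, runs every '>>>' example and
        # compares the printed result with the expected output lines.
        doctest.testmod()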
Parent 5f3c7ba4
@@ -51,71 +51,71 @@ def set_excluded_layers(param_names, main_program=None):

            .. code-block:: python

                >>> import paddle

                >>> class MyLayer(paddle.nn.Layer):
                ...     def __init__(self):
                ...         super().__init__()
                ...         self.conv1 = paddle.nn.Conv2D(
                ...             in_channels=3, out_channels=4, kernel_size=3, padding=2)
                ...         self.linear1 = paddle.nn.Linear(4624, 100)
                ...
                ...     def forward(self, img):
                ...         hidden = self.conv1(img)
                ...         hidden = paddle.flatten(hidden, start_axis=1)
                ...         prediction = self.linear1(hidden)
                ...         return prediction

                >>> my_layer = MyLayer()
                >>> optimizer = paddle.optimizer.SGD(
                ...     learning_rate=0.01, parameters=my_layer.parameters())

                >>> # Need to set excluded layers before calling decorate
                >>> paddle.incubate.asp.set_excluded_layers([my_layer.linear1.full_name()])

                >>> optimizer = paddle.incubate.asp.decorate(optimizer)

        2. Usage of Static Graph

            .. code-block:: python

                >>> import paddle

                >>> paddle.enable_static()

                >>> class MyLayer(paddle.nn.Layer):
                ...     def __init__(self):
                ...         super().__init__()
                ...         self.conv1 = paddle.nn.Conv2D(
                ...             in_channels=3, out_channels=4, kernel_size=3, padding=2)
                ...         self.linear1 = paddle.nn.Linear(4624, 100)
                ...
                ...     def forward(self, img):
                ...         hidden = self.conv1(img)
                ...         hidden = paddle.flatten(hidden, start_axis=1)
                ...         prediction = self.linear1(hidden)
                ...         return prediction

                >>> main_program = paddle.static.Program()
                >>> startup_program = paddle.static.Program()

                >>> with paddle.static.program_guard(main_program, startup_program):
                ...     input_data = paddle.static.data(name='data', shape=[None, 3, 224, 224])
                ...     label = paddle.static.data(name='label', shape=[None, 100])
                ...     my_layer = MyLayer()
                ...     prob = my_layer(input_data)
                ...     loss = paddle.mean(paddle.nn.functional.square_error_cost(prob, label))
                ...
                ...     # Setup excluded layers out from ASP workflow.
                ...     # Please note, excluded_layers must be set before calling optimizer.minimize().
                ...     paddle.incubate.asp.set_excluded_layers([my_layer.linear1.full_name()], main_program)
                ...
                ...     optimizer = paddle.optimizer.SGD(learning_rate=0.1)
                ...     optimizer = paddle.static.amp.decorate(optimizer)
                ...     # Calling paddle.incubate.asp.decorate() to wrap minimize() in optimizer, which
                ...     # will insert necessary masking operations for ASP workflow.
                ...     optimizer = paddle.incubate.asp.decorate(optimizer)
                ...     optimizer.minimize(loss, startup_program)
    """
    if main_program is None:
        main_program = paddle.static.default_main_program()
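
A small follow-up sketch (illustrative, not part of the diff) for the dynamic-graph example above: because `linear1` was excluded before `decorate()`, its weights are expected to stay dense after pruning, and freshly initialized weights contain essentially no exact zeros, so the check below should print True.

    >>> paddle.incubate.asp.prune_model(my_layer)     # allowed here: decorate() was already called
    >>> w = my_layer.linear1.weight.numpy()           # the excluded layer's weight
    >>> print(float((w == 0).mean()) < 0.5)           # still dense, not 2:4 pruned
    True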
@@ -138,77 +138,77 @@ def reset_excluded_layers(main_program=None):

            .. code-block:: python

                >>> import paddle

                >>> class MyLayer(paddle.nn.Layer):
                ...     def __init__(self):
                ...         super().__init__()
                ...         self.conv1 = paddle.nn.Conv2D(
                ...             in_channels=3, out_channels=4, kernel_size=3, padding=2)
                ...         self.linear1 = paddle.nn.Linear(4624, 100)
                ...
                ...     def forward(self, img):
                ...         hidden = self.conv1(img)
                ...         hidden = paddle.flatten(hidden, start_axis=1)
                ...         prediction = self.linear1(hidden)
                ...         return prediction

                >>> my_layer = MyLayer()
                >>> optimizer = paddle.optimizer.SGD(
                ...     learning_rate=0.01, parameters=my_layer.parameters())

                >>> # Need to set excluded layers before calling decorate
                >>> paddle.incubate.asp.set_excluded_layers([my_layer.linear1.full_name()])
                >>> # Reset excluded_layers, all supported layers would be included into Automatic SParsity's workflow.
                >>> # Please note, reset_excluded_layers also must be called before calling asp.decorate().
                >>> paddle.incubate.asp.reset_excluded_layers()

                >>> optimizer = paddle.incubate.asp.decorate(optimizer)

        2. Usage of Static Graph

            .. code-block:: python

                >>> import paddle

                >>> paddle.enable_static()

                >>> class MyLayer(paddle.nn.Layer):
                ...     def __init__(self):
                ...         super().__init__()
                ...         self.conv1 = paddle.nn.Conv2D(
                ...             in_channels=3, out_channels=4, kernel_size=3, padding=2)
                ...         self.linear1 = paddle.nn.Linear(4624, 100)
                ...
                ...     def forward(self, img):
                ...         hidden = self.conv1(img)
                ...         hidden = paddle.flatten(hidden, start_axis=1)
                ...         prediction = self.linear1(hidden)
                ...         return prediction

                >>> main_program = paddle.static.Program()
                >>> startup_program = paddle.static.Program()

                >>> with paddle.static.program_guard(main_program, startup_program):
                ...     input_data = paddle.static.data(name='data', shape=[None, 3, 224, 224])
                ...     label = paddle.static.data(name='label', shape=[None, 100])
                ...     my_layer = MyLayer()
                ...     prob = my_layer(input_data)
                ...     loss = paddle.mean(paddle.nn.functional.square_error_cost(prob, label))
                ...
                ...     # Setup excluded layers out from ASP workflow.
                ...     # Please note, excluded_layers must be set before calling optimizer.minimize().
                ...     paddle.incubate.asp.set_excluded_layers([my_layer.linear1.full_name()], main_program)
                ...     # Reset excluded_layers, all supported layers would be included into Automatic SParsity's workflow.
                ...     # Please note, reset_excluded_layers also must be called before calling optimizer.minimize().
                ...     paddle.incubate.asp.reset_excluded_layers(main_program)
                ...
                ...     optimizer = paddle.optimizer.SGD(learning_rate=0.1)
                ...     optimizer = paddle.static.amp.decorate(optimizer)
                ...     # Calling paddle.incubate.asp.decorate() to wrap minimize() in optimizer, which
                ...     # will insert necessary masking operations for ASP workflow.
                ...     optimizer = paddle.incubate.asp.decorate(optimizer)
                ...     optimizer.minimize(loss, startup_program)
    """
    ASPHelper.reset_excluded_layers(main_program=main_program)
@@ -229,72 +229,72 @@ def decorate(optimizer):

            .. code-block:: python

                >>> import paddle

                >>> class MyLayer(paddle.nn.Layer):
                ...     def __init__(self):
                ...         super().__init__()
                ...         self.conv1 = paddle.nn.Conv2D(
                ...             in_channels=3, out_channels=4, kernel_size=3, padding=2)
                ...         self.linear1 = paddle.nn.Linear(4624, 32)
                ...         self.linear2 = paddle.nn.Linear(32, 32)
                ...         self.linear3 = paddle.nn.Linear(32, 10)
                ...
                ...     def forward(self, img):
                ...         hidden = self.conv1(img)
                ...         hidden = paddle.flatten(hidden, start_axis=1)
                ...         hidden = self.linear1(hidden)
                ...         hidden = self.linear2(hidden)
                ...         prediction = self.linear3(hidden)
                ...         return prediction

                >>> my_layer = MyLayer()
                >>> optimizer = paddle.optimizer.SGD(
                ...     learning_rate=0.01, parameters=my_layer.parameters())

                >>> # Calling paddle.incubate.asp.decorate() to wrap step() in optimizer, which
                >>> # will apply necessary masking operations for ASP workflow.
                >>> # In dynamic graph mode, ASP would create related mask variables during decoration.
                >>> optimizer = paddle.incubate.asp.decorate(optimizer)

        2. Usage of Static Graph

            .. code-block:: python

                >>> import paddle

                >>> paddle.enable_static()

                >>> class MyLayer(paddle.nn.Layer):
                ...     def __init__(self):
                ...         super().__init__()
                ...         self.conv1 = paddle.nn.Conv2D(
                ...             in_channels=3, out_channels=4, kernel_size=3, padding=2)
                ...         self.linear1 = paddle.nn.Linear(4624, 100)
                ...
                ...     def forward(self, img):
                ...         hidden = self.conv1(img)
                ...         hidden = paddle.flatten(hidden, start_axis=1)
                ...         prediction = self.linear1(hidden)
                ...         return prediction

                >>> main_program = paddle.static.Program()
                >>> startup_program = paddle.static.Program()

                >>> with paddle.static.program_guard(main_program, startup_program):
                ...     input_data = paddle.static.data(name='data', shape=[None, 3, 224, 224])
                ...     label = paddle.static.data(name='label', shape=[None, 100])
                ...     my_layer = MyLayer()
                ...     prob = my_layer(input_data)
                ...     loss = paddle.mean(paddle.nn.functional.square_error_cost(prob, label))
                ...
                ...     optimizer = paddle.optimizer.SGD(learning_rate=0.1)
                ...     # Calling paddle.incubate.asp.decorate() to wrap minimize() in optimizer, which
                ...     # will insert necessary masking operations for ASP workflow.
                ...     # In static graph mode, ASP creates related mask variables
                ...     # during minimize().
                ...     optimizer = paddle.incubate.asp.decorate(optimizer)
                ...     optimizer.minimize(loss, startup_program)
    """
    return ASPHelper.decorate(optimizer)
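
As a rough end-to-end sketch (illustrative, not part of the diff) of how the decorated optimizer is meant to be used in dynamic graph mode, combining the calls documented above: `MyLayer` refers to the dynamic-graph layer defined in the first example, and the 3x32x32 input shape is chosen so the flattened feature size matches `Linear(4624, ...)`.

    >>> import paddle
    >>> import numpy as np

    >>> layer = MyLayer()                          # dynamic-graph layer from the example above
    >>> opt = paddle.optimizer.SGD(learning_rate=0.01, parameters=layer.parameters())
    >>> opt = paddle.incubate.asp.decorate(opt)    # 1. wrap the optimizer first
    >>> paddle.incubate.asp.prune_model(layer)     # 2. then prune with the default 2:4 setting
    >>> x = paddle.to_tensor(np.random.randn(4, 3, 32, 32), dtype='float32')
    >>> loss = paddle.mean(layer(x))
    >>> loss.backward()
    >>> opt.step()                                 # 3. step() applies the sparsity masks again
    >>> opt.clear_grad()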
@@ -326,112 +326,112 @@ def prune_model(model, n=2, m=4, mask_algo='mask_1d', with_mask=True):

            .. code-block:: python

                >>> import paddle
                >>> import numpy as np

                >>> class MyLayer(paddle.nn.Layer):
                ...     def __init__(self):
                ...         super().__init__()
                ...         self.conv1 = paddle.nn.Conv2D(
                ...             in_channels=3, out_channels=4, kernel_size=3, padding=2)
                ...         self.linear1 = paddle.nn.Linear(4624, 32)
                ...         self.linear2 = paddle.nn.Linear(32, 32)
                ...         self.linear3 = paddle.nn.Linear(32, 10)
                ...
                ...     def forward(self, img):
                ...         hidden = self.conv1(img)
                ...         hidden = paddle.flatten(hidden, start_axis=1)
                ...         hidden = self.linear1(hidden)
                ...         hidden = self.linear2(hidden)
                ...         prediction = self.linear3(hidden)
                ...         return prediction

                >>> my_layer = MyLayer()
                >>> loss_fn = paddle.nn.MSELoss(reduction='mean')

                >>> optimizer = paddle.optimizer.SGD(
                ...     learning_rate=0.01, parameters=my_layer.parameters())

                >>> # Calling paddle.incubate.asp.decorate() to wrap step() in optimizer, which
                >>> # will apply necessary masking operations for ASP workflow.
                >>> # In dynamic graph mode, ASP would create related mask variables during decoration.
                >>> optimizer = paddle.incubate.asp.decorate(optimizer)

                >>> # Must call paddle.incubate.asp.decorate() first before calling paddle.incubate.asp.prune_model()
                >>> paddle.incubate.asp.prune_model(my_layer, mask_algo='mask_2d_best')

                >>> for i in range(10):
                ...     imgs = paddle.to_tensor(
                ...         np.random.randn(64, 3, 32, 32),
                ...         dtype='float32', stop_gradient=False)
                ...     labels = paddle.to_tensor(
                ...         np.random.randint(10, size=(64, 1)),
                ...         dtype='float32', stop_gradient=False)
                ...     output = my_layer(imgs)
                ...     loss = loss_fn(output, labels)
                ...     loss.backward()
                ...     optimizer.step()
                ...     optimizer.clear_grad()

        2. Usage of Static Graph

            .. code-block:: python

                >>> import paddle
                >>> import numpy as np

                >>> paddle.enable_static()

                >>> class MyLayer(paddle.nn.Layer):
                ...     def __init__(self):
                ...         super().__init__()
                ...         self.conv1 = paddle.nn.Conv2D(
                ...             in_channels=3, out_channels=4, kernel_size=3, padding=2)
                ...         self.linear1 = paddle.nn.Linear(4624, 32)
                ...         self.linear2 = paddle.nn.Linear(32, 32)
                ...         self.linear3 = paddle.nn.Linear(32, 10)
                ...
                ...     def forward(self, img):
                ...         hidden = self.conv1(img)
                ...         hidden = paddle.flatten(hidden, start_axis=1)
                ...         hidden = self.linear1(hidden)
                ...         hidden = self.linear2(hidden)
                ...         prediction = self.linear3(hidden)
                ...         return prediction

                >>> main_program = paddle.static.Program()
                >>> startup_program = paddle.static.Program()

                >>> with paddle.static.program_guard(main_program, startup_program):
                ...     input_data = paddle.static.data(name='data', shape=[None, 3, 32, 32])
                ...     label = paddle.static.data(name='label', shape=[None, 1])
                ...     my_layer = MyLayer()
                ...     prob = my_layer(input_data)
                ...     loss = paddle.mean(paddle.nn.functional.square_error_cost(prob, label))
                ...
                ...     optimizer = paddle.optimizer.SGD(learning_rate=0.1)
                ...     # Calling paddle.incubate.asp.decorate() to wrap minimize() in optimizer, which
                ...     # will insert necessary masking operations for ASP workflow.
                ...     # In static graph mode, ASP creates related mask variables
                ...     # during minimize().
                ...     optimizer = paddle.incubate.asp.decorate(optimizer)
                ...     optimizer.minimize(loss, startup_program)

                >>> device = paddle.device.get_device()
                >>> place = paddle.set_device(device)

                >>> exe = paddle.static.Executor(place)
                >>> exe.run(startup_program)

                >>> # Must call exe.run(startup_program) first before calling paddle.incubate.asp.prune_model()
                >>> paddle.incubate.asp.prune_model(my_layer, mask_algo='mask_2d_best')
                >>> # It is also acceptable to call
                >>> # paddle.incubate.asp.prune_model(main_program, mask_algo='mask_2d_best')

                >>> for i in range(10):
                ...     imgs = np.random.randn(64, 3, 32, 32).astype('float32')
                ...     labels = np.random.randint(10, size=(64, 1)).astype('float32')
                ...     exe.run(main_program, feed={'data': imgs, 'label': labels})
    """
    device = paddle.device.get_device()
    place = paddle.set_device(device)
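
A quick sanity check (illustrative, not part of the diff) for the dynamic-graph example above: after `prune_model` has run with n=2, m=4, at least half of the entries of each pruned weight are zero. This assumes `my_layer.linear1` is among the layers ASP prunes.

    >>> import numpy as np
    >>> w = my_layer.linear1.weight.numpy()      # a weight ASP is expected to have pruned
    >>> print(float((w == 0).mean()) >= 0.5)     # 2:4 pruning zeroes 2 out of every 4 entries
    True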
@@ -726,19 +726,20 @@ class ASPHelper:

        Examples:
            .. code-block:: python

                >>> import paddle
                >>> from paddle.incubate.asp import ASPHelper

                >>> paddle.enable_static()

                >>> main_program = paddle.static.Program()
                >>> startup_program = paddle.static.Program()

                >>> with paddle.static.program_guard(main_program, startup_program):
                ...     input_data = paddle.static.data(name='data', shape=[None, 128])
                ...     fc = paddle.static.nn.fc(x=input_data, num_flatten_dims=-1, size=32, activation=None)

                >>> for param in main_program.global_block().all_parameters():
                ...     print(param.name, '->', ASPHelper._is_supported_layer(main_program, param.name))
                fc_0.w_0 -> True
                fc_0.b_0 -> False
        """
        param_name_list = param_name.split('.')
...