Commit 2ad3c4c0 authored by Hongsheng Zeng, committed by Bo Zhou

fix bug of ParamAttr (#126)

* fix bug of ParamAttr

* refine imports of unittest
Parent 84c94cc4
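In short: the layer wrappers' `update_attr_name` previously assumed its `attr` argument was already a `ParamAttr` (or `None`/`False`), so passing a raw `Initializer` such as `fluid.initializer.Xavier()` as `param_attr` failed. The fix normalizes the argument with Paddle's `ParamAttr._to_attr` first, and the commit also unifies the unit tests around the public `parl` API (`from parl import layers`, `parl.Model`). Below is a minimal sketch of the usage the fix enables; the model class and layer size are illustrative, mirroring the new `fc6` test case further down:

```python
import paddle.fluid as fluid
import parl
from parl import layers


class MyModel(parl.Model):
    def __init__(self):
        # Before this commit, a raw Initializer here raised an error inside
        # update_attr_name; now it is wrapped into a ParamAttr automatically.
        self.fc = layers.fc(100, param_attr=fluid.initializer.Xavier())
```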
@@ -20,7 +20,7 @@ https://github.com/deepmind/scalable_agent/blob/master/vtrace_test.py
 import copy
 import numpy as np
 import unittest
-from parl.core.fluid import layers
+from parl import layers
 from paddle import fluid
 from parameterized import parameterized
 from parl.algorithms.fluid.impala import vtrace
......
@@ -16,7 +16,7 @@ Wrappers for fluid.layers. It helps to easily share parameters between layers.
 Here is an example:
 ```python
-import parl.layers as layers
+from parl import layers

 class MLPModel(Model):
     def __init__(self):
@@ -67,6 +67,7 @@ def update_attr_name(name, default_name, attr, is_bias):
         attr.name = name
         return attr

+    attr = ParamAttr._to_attr(attr)
     name = (default_name if name is None else name)
     suffix = "b" if is_bias else "w"
     new_name = unique_name.generate(name + "." + suffix)
......
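For context, `ParamAttr._to_attr` is Paddle's own static helper in `paddle.fluid.param_attr` for coercing the accepted attribute forms into `ParamAttr` instances. The assertions below are an illustrative sketch of the behavior the fix relies on, not Paddle source; the exact branches may vary across Paddle versions:

```python
from paddle.fluid import ParamAttr
from paddle.fluid.initializer import Xavier

assert isinstance(ParamAttr._to_attr(None), ParamAttr)  # None -> default attr
assert ParamAttr._to_attr("my_w").name == "my_w"        # str -> named attr

attr = ParamAttr._to_attr(Xavier())                     # Initializer -> wrapped attr
assert isinstance(attr, ParamAttr)
assert attr.name is None  # the name is filled in later by update_attr_name

pa = ParamAttr(name="w0")
assert ParamAttr._to_attr(pa) is pa                     # ParamAttr passes through
```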
@@ -12,18 +12,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import parl.core.fluid.layers as layers
+import parl
+from paddle import fluid
+from parl import layers
 import unittest
-from parl.core.fluid.model import Model


-class MyNetWork(Model):
+class MyNetWork(parl.Model):
     def __init__(self):
         self.fc1 = layers.fc(100)
         self.fc2 = layers.fc(100)
         self.fc3 = layers.fc(100, bias_attr=False)
         self.fc4 = layers.fc(100, param_attr=False)
         self.fc5 = layers.fc(100, name="fc", bias_attr=False)
+        self.fc6 = layers.fc(100, param_attr=fluid.initializer.Xavier())
         self.embedding = layers.embedding((100, 128))
         self.embedding_custom = layers.embedding((100, 128),
                                                  name="embedding_custom")
@@ -55,6 +57,8 @@ class TestParamName(unittest.TestCase):
         self.assertEqual(net.fc5.param_name, "fc.w_4")
         self.assertEqual(net.fc5.bias_name, None)

+        self.assertEqual(net.fc6.param_name, "fc.w_5")
+
         ## embedding layer has no bias
         self.assertEqual(net.embedding.param_name, "embedding.w_0")
         self.assertEqual(net.embedding.bias_name, None)
......
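The new `fc6` assertion verifies the fix end to end: a layer created with `param_attr=fluid.initializer.Xavier()` now goes through the same auto-naming path as its siblings, so its weight continues the `fc.w_*` sequence as `fc.w_5` instead of raising an error.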
@@ -14,12 +14,12 @@
 import numpy as np
 import paddle.fluid as fluid
+import parl
 import unittest
 from parl import layers
-from parl.core.fluid.model import Model


-class MyNetWork(Model):
+class MyNetWork(parl.Model):
     def __init__(self):
         self.fc1 = layers.fc(64, bias_attr=False)
         self.fc2 = layers.fc(64, bias_attr=False)
......
@@ -14,16 +14,16 @@
 import numpy as np
 import paddle.fluid as fluid
-import parl.core.fluid.layers as layers
+import parl
 import unittest
 from copy import deepcopy
 from paddle.fluid import ParamAttr
-from parl.core.fluid.model import Model
+from parl import layers
 from parl.utils import get_gpu_count
 from parl.core.fluid.plutils import fetch_value


-class TestModel(Model):
+class TestModel(parl.Model):
     def __init__(self):
         self.fc1 = layers.fc(
             size=256,
@@ -48,7 +48,7 @@ class TestModel(Model):
         return out


-class TestModel2(Model):
+class TestModel2(parl.Model):
     def __init__(self):
         self.created_param = layers.create_parameter(
             shape=[100],
@@ -60,7 +60,7 @@ class TestModel2(Model):
         return out


-class TestModel3(Model):
+class TestModel3(parl.Model):
     def __init__(self):
         self.fc1 = layers.fc(64, bias_attr=False)
         self.batch_norm = layers.batch_norm()
@@ -71,7 +71,7 @@ class TestModel3(Model):
         return out


-class TestModel4(Model):
+class TestModel4(parl.Model):
     def __init__(self):
         self.fc1 = layers.fc(size=256)
         self.fc2 = layers.fc(size=128)
......