Unverified commit 1d9cb932, authored by Qiyang Min, committed by GitHub

Merge pull request #12989 from velconia/fix_last_of_py3

Fix random diff between python2 and python3
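Note: with the same integer seed, Python 2 computes random.randint and random.shuffle from int(random() * n), while Python 3 draws from the getrandbits-based _randbelow, so the two interpreters produce different values. NumPy's Mersenne Twister is implemented in C and behaves identically under both, which is why this patch moves every affected call site to np.random. A minimal illustration of the incompatibility (not part of the patch itself):

    import random
    import numpy as np

    random.seed(1)
    print(random.randint(0, 100))   # differs between Python 2 and Python 3

    rng = np.random.RandomState(1)
    print(rng.randint(0, 101))      # identical on Python 2 and Python 3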
@@ -24,6 +24,7 @@ set and test set into paddle reader creators.
 from __future__ import print_function
+import numpy as np
 import zipfile
 import paddle.dataset.common
 import re
@@ -150,12 +151,12 @@ def __initialize_meta_info__():
 def __reader__(rand_seed=0, test_ratio=0.1, is_test=False):
     fn = __initialize_meta_info__()
-    rand = random.Random(x=rand_seed)
+    np.random.seed(rand_seed)
     with zipfile.ZipFile(file=fn) as package:
         with package.open('ml-1m/ratings.dat') as rating:
             for line in rating:
                 line = cpt.to_text(line, encoding='latin')
-                if (rand.random() < test_ratio) == is_test:
+                if (np.random.random() < test_ratio) == is_test:
                     uid, mov_id, rating, _ = line.strip().split("::")
                     uid = int(uid)
                     mov_id = int(mov_id)
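Note: the predicate (np.random.random() < test_ratio) == is_test draws exactly one random number per line in both the train pass and the test pass, so re-seeding with the same rand_seed makes the two passes carve the ratings file into complementary subsets. A rough standalone sketch of the idea (the split helper is illustrative, not from the dataset module):

    import numpy as np

    def split(lines, rand_seed=0, test_ratio=0.1, is_test=False):
        np.random.seed(rand_seed)        # same seed => same draw sequence
        for line in lines:
            # one draw per line, consumed identically in both passes
            if (np.random.random() < test_ratio) == is_test:
                yield line

    lines = ['%d::1::5::0' % i for i in range(20)]
    train = list(split(lines, is_test=False))
    test = list(split(lines, is_test=True))
    assert sorted(train + test) == sorted(lines)   # complementary split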
......
@@ -17,6 +17,7 @@ All layers just related to the neural network.
 from __future__ import print_function
+import numpy as np
 from ..layer_helper import LayerHelper
 from ..initializer import Normal, Constant
 from ..framework import Variable
@@ -24,7 +25,6 @@ from ..param_attr import ParamAttr
 from .layer_function_generator import autodoc, templatedoc
 from .tensor import concat
 from . import utils
-import random
 from .. import unique_name
 from functools import reduce
@@ -5102,7 +5102,7 @@ def random_crop(x, shape, seed=None):
     dtype = x.dtype
     out = helper.create_tmp_variable(dtype)
     if seed is None:
-        seed = random.randint(-65536, 65535)
+        seed = np.random.randint(-65536, 65536)
     op_attrs = {"shape": shape}
     if isinstance(seed, int):
         op_attrs["startup_seed"] = seed
@@ -5416,7 +5416,7 @@ def prelu(x, mode, param_attr=None, name=None):
         channel:elements in a channel share same weight
         element:each element has a weight
         name(str|None): A name for this layer(optional). If set None, the layer
-            will be named automatically.
+            will be named automatically.

     Returns:
         Variable: The output tensor with the same shape as input.
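Note: the prelu hunk above is a whitespace-only docstring change; the layer itself computes max(0, x) + alpha * min(0, x). A rough NumPy sketch of the 'channel' mode, assuming NCHW layout (prelu_channel is an illustrative name, not the Fluid API):

    import numpy as np

    def prelu_channel(x, alpha):
        # one learnable slope per channel, broadcast over H and W
        a = alpha.reshape(1, -1, 1, 1)
        return np.maximum(0, x) + a * np.minimum(0, x)

    x = np.random.randn(2, 3, 4, 4)
    y = prelu_channel(x, np.full(3, 0.25))
    assert y.shape == x.shape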
@@ -5530,23 +5530,23 @@ def sequence_mask(x, maxlen=None, dtype='int64', name=None):
     Supposing :code:`x` is a Tensor with shape [d_1, d_2, ..., d_n], the
     :code:`y` is a mask with shape [d_1, d_2, ..., d_n, maxlen], where:

     .. math::

        y(i_1, i_2,..., i_n, j) = (j < x(i_1, i_2,..., i_n))

     Args:
-        x (Variable): Input tensor of sequence_mask layer,
+        x (Variable): Input tensor of sequence_mask layer,
                       whose elements are integers less than :code:`maxlen`.
         maxlen (int|None): Maximum length of the sequence. If :code:`maxlen`
                            is None, it would be replace with :math:`max(x)`.
         dtype (np.dtype|core.VarDesc.VarType|str): Data type of the output.
-        name (str|None): A name for this layer(optional). If set None, the
-                         layer will be named automatically.
+        name (str|None): A name for this layer(optional). If set None, the
+                         layer will be named automatically.

     Returns:
         Variable: The output sequence mask.
     """
     helper = LayerHelper('sequence_mask', **locals())
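Note: the docstring's formula y(i_1, ..., i_n, j) = (j < x(i_1, ..., i_n)) is easy to check against a NumPy model of the layer (sequence_mask_np is an illustrative helper, not the Fluid API):

    import numpy as np

    def sequence_mask_np(x, maxlen=None, dtype='int64'):
        maxlen = int(x.max()) if maxlen is None else maxlen
        j = np.arange(maxlen)
        # broadcasting compares every position j against every length in x
        return (j < np.expand_dims(x, -1)).astype(dtype)

    print(sequence_mask_np(np.array([1, 3, 2])))
    # [[1 0 0]
    #  [1 1 1]
    #  [1 1 0]]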
......@@ -5571,23 +5571,23 @@ def stack(x, axis=0):
**Stack Layer**
This layer stacks all of the input :code:`x` along axis.
Input :code:`x` can be a single variable, a :code:`list` of variables,
or a :code:`tuple` of variables. If :code:`x` is a :code:`list` or
:code:`tuple`, the shapes of all these variables must be the same.
Supposing the shape of each input is :math:`[d_0, d_1, ..., d_{n-1}]`,
the shape of the output variable would be
:math:`[d_0, d_1, ..., d_{axis}=len(x), ..., d_{n-1}]`.
Input :code:`x` can be a single variable, a :code:`list` of variables,
or a :code:`tuple` of variables. If :code:`x` is a :code:`list` or
:code:`tuple`, the shapes of all these variables must be the same.
Supposing the shape of each input is :math:`[d_0, d_1, ..., d_{n-1}]`,
the shape of the output variable would be
:math:`[d_0, d_1, ..., d_{axis}=len(x), ..., d_{n-1}]`.
If :code:`axis` < 0, it would be replaced with :code:`axis+rank(x[0])+1`.
If :code:`axis` is None, it would be replaced with 0.
If :code:`axis` is None, it would be replaced with 0.
Args:
x (Variable|list(Variable)|tuple(Variable)): Input variables.
x (Variable|list(Variable)|tuple(Variable)): Input variables.
axis (int|None): The axis along which all inputs are stacked.
Returns:
Variable: The stacked variable.
"""
helper = LayerHelper('stack', **locals())
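Note: the shape rule in the stack docstring matches NumPy's np.stack, which works as a quick mental model for the output shape:

    import numpy as np

    xs = [np.zeros((2, 4)) for _ in range(3)]   # three inputs of shape [2, 4]
    y = np.stack(xs, axis=1)
    assert y.shape == (2, 3, 4)                 # d_axis == len(x)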
......
@@ -31,7 +31,6 @@ Steps to transpile pserver:
 """
 import math
-import random
 import numpy as np
 import collections
 import six
@@ -239,8 +238,8 @@ class DistributeTranspiler(object):
         grad_var_mapping_items = list(six.iteritems(self.grad_var_mapping))

         if not self.config.slice_var_up:
-            random.seed(self.origin_program.random_seed)
-            random.shuffle(grad_var_mapping_items)
+            np.random.seed(self.origin_program.random_seed)
+            np.random.shuffle(grad_var_mapping_items)

         grad_name_to_send_dummy_out = dict()
         for grad_varname, splited_vars in grad_var_mapping_items:
......
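Note: every trainer must shuffle the gradient variables into the same order, or gradients would be routed to different pservers on different trainers; seeding from the program-level random_seed guarantees that, and switching to np.random additionally keeps the order identical whether a trainer runs Python 2 or Python 3. A minimal sketch of the invariant (assign_order is an illustrative name):

    import numpy as np

    def assign_order(names, seed):
        items = list(names)
        np.random.seed(seed)        # same seed on every process
        np.random.shuffle(items)    # => same shuffled order on every process
        return items

    assert assign_order(['w@GRAD', 'b@GRAD'], 90) == assign_order(['w@GRAD', 'b@GRAD'], 90)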