diff --git a/python/paddle/fluid/clip.py b/python/paddle/fluid/clip.py
index dce4b53c132f30fba228b084e48f6321861c341f..4b0a792f784fffcce3f911d3e7448b472d39f8e1 100644
--- a/python/paddle/fluid/clip.py
+++ b/python/paddle/fluid/clip.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import copy
+import six
 import functools
 
 from . import layers
@@ -246,8 +247,8 @@ class GradientClipByGlobalNorm(BaseGradientClipAttr):
     """
 
     def __init__(self, clip_norm, group_name="default_group"):
-        if not isinstance(group_name, str):
-            raise TypeError("'group_name' must be a basestring.")
+        if not isinstance(group_name, six.string_types):
+            raise TypeError("'group_name' must be a %s." % (six.string_types))
 
         self.clip_norm = clip_norm
         self.group_name = group_name
@@ -312,7 +313,7 @@ def set_gradient_clip(clip, param_list=None, program=None):
         program = framework.default_main_program()
     if param_list is None:
         param_list = program.block(0).all_parameters()
-    if all(isinstance(elem, str) for elem in param_list):
+    if all(isinstance(elem, six.string_types) for elem in param_list):
         param_list = [program.block(0).var(elem) for elem in param_list]
     if not all(isinstance(elem, framework.Parameter) for elem in param_list):
         raise TypeError(
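
Note on the pattern this hunk establishes for the rest of the patch: six.string_types is (basestring,) on Python 2 and (str,) on Python 3, so a single isinstance() check covers both str and unicode under Python 2. One subtlety, as a side observation rather than part of the patch: string_types is itself a tuple, so the bare "% (six.string_types)" above unpacks it into the format arguments and only works because the tuple has exactly one element on both interpreters; wrapping it in another tuple is the defensive spelling. A minimal sketch, with a hypothetical check_group_name helper standing in for GradientClipByGlobalNorm.__init__:

    import six

    def check_group_name(group_name):
        # Accepts str and unicode on Python 2, str on Python 3.
        if not isinstance(group_name, six.string_types):
            # The trailing comma keeps %-formatting from unpacking the
            # string_types tuple itself.
            raise TypeError("'group_name' must be a %s." % (six.string_types,))

    check_group_name(u"default_group")  # fine on both interpreters
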
""" converter = [] - for lod_level, shape, dtype in six.zip( + for lod_level, shape, dtype in six.moves.zip( self.feed_lod_level, self.feed_shapes, self.feed_dtypes): converter.append( DataToLoDTensorConverter( @@ -186,10 +187,12 @@ class DataFeeder(object): assert len(each_sample) == len(converter), ( "The number of fields in data (%s) does not match " + "len(feed_list) (%s)") % (len(each_sample), len(converter)) - for each_converter, each_slot in six.zip(converter, each_sample): + for each_converter, each_slot in six.moves.zip(converter, + each_sample): each_converter.feed(each_slot) ret_dict = {} - for each_name, each_converter in six.zip(self.feed_names, converter): + for each_name, each_converter in six.moves.zip(self.feed_names, + converter): ret_dict[each_name] = each_converter.done() return ret_dict @@ -211,12 +214,14 @@ class DataFeeder(object): if isinstance(self.place, core.CUDAPlace): places = [ core.CUDAPlace(i) - for i in six.xrange(self._get_number_of_places_(num_places)) + for i in six.moves.xrange( + self._get_number_of_places_(num_places)) ] else: places = [ core.CPUPlace() - for _ in six.xrange(self._get_number_of_places_(num_places)) + for _ in six.moves.xrange( + self._get_number_of_places_(num_places)) ] if len(iterable) != len(places): @@ -226,7 +231,7 @@ class DataFeeder(object): "must be same.") place = self.place - for p, batch in six.zip(places, iterable): + for p, batch in six.moves.zip(places, iterable): self.place = p yield self.feed(batch) self.place = place diff --git a/python/paddle/fluid/executor.py b/python/paddle/fluid/executor.py index d2f130b86da4918b216994e674b4ec5b7b7f4a1d..35da1d06a2c1da8ba663ea0f0b9a0e58ea7c4470 100644 --- a/python/paddle/fluid/executor.py +++ b/python/paddle/fluid/executor.py @@ -14,6 +14,7 @@ import numpy as np import contextlib +import six from .framework import Program, default_main_program, Variable from . import core @@ -211,7 +212,7 @@ def _get_program_cache_key(feed, fetch_list): return var.desc.name() elif isinstance(var, str): return var - elif isinstance(var, str): + elif isinstance(var, six.string_types): return str(var) else: raise TypeError(str(var) + " should be Variable or str") @@ -229,8 +230,8 @@ class Executor(object): to feed map and fetch_list. Feed map provides input data for the program. fetch_list provides the variables(or names) that user want to get after program run. Note: the executor will run all operators in the program but not only the operators dependent by the fetch_list. - It store the global variables into the global scope, and create a local scope for the temporary - variables. The local scope contents will be discarded after every minibatch forward/backward finished. + It store the global variables into the global scope, and create a local scope for the temporary + variables. The local scope contents will be discarded after every minibatch forward/backward finished. But the global scope variables will be persistent through different runs. All of ops in program will be running in sequence. 
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index 6dc979dd5b0b5a348f6f6ea3da2259844d5a1602..f0653a43ce607b2f321bb5c4ac49a8fa56c81e99 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -524,12 +524,12 @@ class Operator(object):
                             % (in_proto.name, len(in_args)))
                     in_arg_names = []
                     for arg in in_args:
-                        if issubclass(arg.__class__, six.string_types):
+                        if isinstance(arg, six.string_types):
                             in_arg_names.append(arg)
                         elif isinstance(arg, six.binary_type):
                             in_arg_names.append(arg.decode())
                         else:
-                            if issubclass(arg.name.__class__, six.string_types):
+                            if isinstance(arg.name, six.string_types):
                                 in_arg_names.append(arg.name)
                             elif isinstance(arg.name, six.binary_type):
                                 in_arg_names.append(arg.name.decode())
@@ -561,7 +561,7 @@ class Operator(object):
                             (out_proto.name, len(out_args)))
                     out_arg_names = []
                     for arg in out_args:
-                        if issubclass(arg.name.__class__, six.string_types):
+                        if isinstance(arg.name, six.string_types):
                             out_arg_names.append(arg.name)
                         elif isinstance(arg.name, six.binary_type):
                             out_arg_names.append(arg.name.decode())
@@ -911,7 +911,7 @@ class Block(object):
         Returns:
             Variable: the Variable with the giving name.
         """
-        if not issubclass(name.__class__, six.string_types):
+        if not isinstance(name, six.string_types):
             if not isinstance(name, six.binary_type):
                 raise TypeError(
                     "var require string as parameter, but get %s instead." %
diff --git a/python/paddle/fluid/graphviz.py b/python/paddle/fluid/graphviz.py
index b72dd7bb01c5d3f39b9b035a7f8e1a6204446fba..ba67bf5ae6fe44ea23414d444a270c436c195326 100644
--- a/python/paddle/fluid/graphviz.py
+++ b/python/paddle/fluid/graphviz.py
@@ -14,12 +14,13 @@
 
 import os
 import random
+import six
 import subprocess
 import logging
 
 
 def crepr(v):
-    if type(v) is str or type(v) is str:
+    if isinstance(v, six.string_types):
         return '"%s"' % v
     return str(v)
 
diff --git a/python/paddle/fluid/io.py b/python/paddle/fluid/io.py
index 599a7782eecf8d7190ce5ed034863641bf4ab4d6..38001e841b23306f2578b68f8af2d5ba6861138d 100644
--- a/python/paddle/fluid/io.py
+++ b/python/paddle/fluid/io.py
@@ -612,9 +612,6 @@ def save_inference_model(dirname,
         if not (all(
                 isinstance(name, six.text_type)
                 for name in feeded_var_names)):
-            import sys
-            print([type(name) for name in feeded_var_names])
-            sys.stdout.flush()
             raise ValueError(
                 "'feed_var_names' should be a list of str.")
     else:
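
The framework.py hunks swap issubclass(arg.__class__, six.string_types) for isinstance(arg, six.string_types); the two agree for ordinary instances, and isinstance is the idiomatic, faster spelling used everywhere else in the patch. Bytes still need their own branch, because six.binary_type (bytes) is not among string_types on Python 3. A hedged sketch of the normalization pattern, with a hypothetical to_text helper:

    import six

    def to_text(arg):
        # Pass text through, decode bytes, reject everything else,
        # mirroring the argument-name handling in Operator.__init__.
        if isinstance(arg, six.string_types):
            return arg
        elif isinstance(arg, six.binary_type):
            return arg.decode()
        raise TypeError("expected str or bytes, got %s" % type(arg))

    assert to_text(u"fc_0.w") == u"fc_0.w"
    assert to_text(b"fc_0.b") == "fc_0.b"
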
diff --git a/python/paddle/fluid/layer_helper.py b/python/paddle/fluid/layer_helper.py
index 715c562516bacd760792bca673f2389fd6fbd7a3..64337465ed0778edf89c5c4fda06cd27b3f386f2 100644
--- a/python/paddle/fluid/layer_helper.py
+++ b/python/paddle/fluid/layer_helper.py
@@ -14,6 +14,7 @@
 
 import copy
 import itertools
+import six
 
 from .framework import Variable, Parameter, default_main_program, default_startup_program, dtype_is_floating
 from . import unique_name
@@ -398,7 +399,7 @@ class LayerHelper(object):
         act = self.kwargs.get('act', None)
         if act is None:
             return input_var
-        if isinstance(act, str):
+        if isinstance(act, six.string_types):
             act = {'type': act}
 
         if 'use_cudnn' in self.kwargs and self.kwargs.get('use_cudnn'):
diff --git a/python/paddle/fluid/op.py b/python/paddle/fluid/op.py
index 37ba8d9f09d2523eb0648d79541b1d5167a51494..93f021a360ac61f64e769d057df188d79f6f2bb6 100644
--- a/python/paddle/fluid/op.py
+++ b/python/paddle/fluid/op.py
@@ -32,7 +32,7 @@ def get_all_op_protos():
 
 
 def is_str(s):
-    return isinstance(s, str) or isinstance(s, str)
+    return isinstance(s, six.string_types)
 
 
 class OpDescCreationMethod(object):
diff --git a/python/paddle/fluid/param_attr.py b/python/paddle/fluid/param_attr.py
index 04e0c9e6317e26c21fd1749273472acdfaf8e1fd..afae577656c8970338f3b02208fcb4c738628ab6 100644
--- a/python/paddle/fluid/param_attr.py
+++ b/python/paddle/fluid/param_attr.py
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import six
+
 from .initializer import Initializer, Xavier, Constant
 from .regularizer import WeightDecayRegularizer
 
@@ -134,7 +136,7 @@ class ParamAttr(object):
             return [ParamAttr._to_attr(a) for a in arg]
         elif isinstance(arg, ParamAttr):
             return arg
-        elif isinstance(arg, str) or isinstance(arg, str):
+        elif isinstance(arg, six.string_types):
             return ParamAttr(name=arg)
         elif isinstance(arg, Initializer):
             return ParamAttr(initializer=arg)
diff --git a/python/paddle/fluid/tests/unittests/benchmark.py b/python/paddle/fluid/tests/unittests/benchmark.py
index f3c48d3d7f4b8613f9f98f468aa6232971994196..b98a92dcbe5626c6cca93b3f5894302399793bf9 100644
--- a/python/paddle/fluid/tests/unittests/benchmark.py
+++ b/python/paddle/fluid/tests/unittests/benchmark.py
@@ -16,6 +16,7 @@ import numpy as np
 import unittest
 import time
 import itertools
+import six
 
 import paddle.fluid as fluid
 import paddle.fluid.core as core
@@ -40,7 +41,8 @@ class BenchmarkSuite(OpTest):
             expect_t = np.array(item_cpu_out)
             actual = item_gpu_out
             actual_t = np.array(item_gpu_out)
-            var_name = variable if isinstance(variable, str) else variable.name
+            var_name = variable if isinstance(
+                variable, six.string_types) else variable.name
             self.assertTrue(
                 np.allclose(
                     actual_t, expect_t, atol=atol),
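
The doubled checks removed above in graphviz.py, op.py, and param_attr.py, such as "isinstance(arg, str) or isinstance(arg, str)", are 2to3 artifacts: the Python 2 source tested str and unicode separately, and 2to3 rewrote both names to str. One six.string_types test restores the original intent. A small sketch; note the bytes caveat in the comment:

    import six

    def is_str(s):
        # True for str and unicode on Python 2, str on Python 3.
        # Caveat: bytes satisfies this on Python 2 (bytes is str there)
        # but not on Python 3.
        return isinstance(s, six.string_types)

    assert is_str("fc") and is_str(u"fc")
    assert not is_str(3)
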
diff --git a/python/paddle/fluid/tests/unittests/test_parallel_op.py b/python/paddle/fluid/tests/unittests/test_parallel_op.py
index fb2c9d41e41b6eef5ca54cc6a3f368bb89c1afd7..c9617e36778740ce9620c3ad495c64c17277fde1 100644
--- a/python/paddle/fluid/tests/unittests/test_parallel_op.py
+++ b/python/paddle/fluid/tests/unittests/test_parallel_op.py
@@ -18,6 +18,7 @@ import paddle.fluid as fluid
 from paddle.fluid.layers.device import get_places
 import paddle.fluid.profiler as profiler
 import numpy
+import six
 
 
 class BaseParallelForTest(unittest.TestCase):
@@ -25,20 +26,20 @@ class BaseParallelForTest(unittest.TestCase):
         """
         Run the unittest for parallel.for
         Args:
-            callback(callable): A callable function returns a generator. There 
-            are two yields in the generator function. The first yield 
-            returns the data layers, and the second yield returns the loss. 
-            The modified data variables will be sent back during the first 
+            callback(callable): A callable function returns a generator. There
+            are two yields in the generator function. The first yield
+            returns the data layers, and the second yield returns the loss.
+            The modified data variables will be sent back during the first
             yield.
 
             feed(dict): The executor feeding dictionary.
-            fetch(list|basestr): The fetch name lists. 
+            fetch(list|basestr): The fetch name lists.
 
         Returns:
             None
 
         Raises:
-            AssertionError when the computation of cpu, parallel.for in cpu, 
+            AssertionError when the computation of cpu, parallel.for in cpu,
             gpu, parallel.for in gpu are different.
 
         """
@@ -95,14 +96,14 @@ class BaseParallelForTest(unittest.TestCase):
         """
         Run a single test, returns the fetch values
         Args:
-            place(Place): the computation place. 
-            use_parallel(bool): Whether use parallel.for or not. 
+            place(Place): the computation place.
+            use_parallel(bool): Whether use parallel.for or not.
 
         Returns:
             Fetched numpy arrays.
 
         """
-        if isinstance(fetch, str):
+        if isinstance(fetch, six.string_types):
             fetch = [fetch]
         main = fluid.Program()
         startup = fluid.Program()
@@ -156,7 +157,7 @@ class BaseParallelForTest(unittest.TestCase):
 
         Returns:
             None
-        
+
         Raises:
             AssertionError
 
diff --git a/python/paddle/fluid/unique_name.py b/python/paddle/fluid/unique_name.py
index 9b661746e9d84edc8738663ecf7552e61d6f24cb..d4e28f4b9193d0b233344c0acb39ef956a7007c7 100644
--- a/python/paddle/fluid/unique_name.py
+++ b/python/paddle/fluid/unique_name.py
@@ -14,6 +14,7 @@
 
 import collections
 import contextlib
+import six
 import sys
 
 __all__ = ['generate', 'switch', 'guard']
@@ -67,7 +68,7 @@ def switch(new_generator=None):
 
 @contextlib.contextmanager
 def guard(new_generator=None):
-    if isinstance(new_generator, str):
+    if isinstance(new_generator, six.string_types):
         new_generator = UniqueNameGenerator(new_generator)
     old = switch(new_generator)
     yield
diff --git a/python/paddle/reader/creator.py b/python/paddle/reader/creator.py
index 025338a2ae021cbeba1c21fdf74efc19d8320d12..369ff5e22b75cf33054d51f8075507d613d93468 100644
--- a/python/paddle/reader/creator.py
+++ b/python/paddle/reader/creator.py
@@ -67,10 +67,11 @@ def recordio(paths, buf_size=100):
 
     import recordio as rec
     import paddle.reader.decorator as dec
+    import six
    import six.moves.cPickle as pickle
 
     def reader():
-        if isinstance(paths, str):
+        if isinstance(paths, six.string_types):
             path = paths
         else:
             path = ",".join(paths)
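
Finally, the creator.py change preserves recordio()'s convenience of taking either a single path or a sequence of paths. A hedged sketch of that normalization, using a hypothetical normalize_paths helper:

    import six

    def normalize_paths(paths):
        # A lone path (str or unicode) passes through; a sequence collapses
        # to the comma-separated form the recordio reader expects.
        if isinstance(paths, six.string_types):
            return paths
        return ",".join(paths)

    assert normalize_paths("/data/part-0") == "/data/part-0"
    assert normalize_paths(["/data/part-0", "/data/part-1"]) == "/data/part-0,/data/part-1"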