Unverified commit e40ed0f4 authored by ceci3, committed by GitHub

fix python2 (#660)

Parent: e0e40863
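For context: f-strings are Python 3.6+ syntax and raise a SyntaxError at parse time under Python 2, while str.format() works on both, so every hunk below rewrites a logging or assertion message the same way. A minimal sketch of the pattern (the variable names here are illustrative, not taken from the diff):

```python
value = {"key": 1}

# Python 3.6+ only -- fails to even parse under Python 2:
# msg = f"got an element with type({type(value)})"

# Portable equivalent used throughout this commit:
msg = "got an element with type({})".format(type(value))
print(msg)  # e.g. got an element with type(<class 'dict'>)
```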
@@ -61,13 +61,13 @@ def extract_vars(inputs):
                 vars.append(_value)
             else:
                 _logger.warn(
-                    f"Variable is excepted, but get an element with type({type(_value)}) from inputs whose type is dict. And the key of element is {_key}."
+                    "Variable is excepted, but get an element with type({}) from inputs whose type is dict. And the key of element is {}.".format(type(_value), _key)
                 )
     elif isinstance(inputs, (tuple, list)):
         for _value in inputs:
             vars.extend(extract_vars(_value))
     if len(vars) == 0:
-        _logger.warn(f"Extract none variables from inputs.")
+        _logger.warn("Extract none variables from inputs.")
     return vars
...
@@ -320,7 +320,7 @@ class FilterPruner(Pruner):
         """
         if var_name in self.skip_vars:
             _logger.warn(
-                f"{var_name} is skiped beacause it is not support for pruning derectly."
+                "{} is skiped beacause it is not support for pruning derectly.".format(var_name)
             )
             return
         if isinstance(pruned_dims, int):
@@ -340,7 +340,7 @@ class FilterPruner(Pruner):
                 'var': param,
                 'value': np.array(param.value().get_tensor())
             })
-            _logger.debug(f"set value of {param.name} into group")
+            _logger.debug("set value of {} into group".format(param.name))
         mask = self.cal_mask(var_name, pruned_ratio, group_dict)
         for _name in group_dict:
@@ -356,7 +356,7 @@ class FilterPruner(Pruner):
                     src_mask = self._transform_mask(src_mask, trans)
             current_mask = src_mask
             assert len(current_mask) == var_shape[dims[
-                0]], f"The length of current_mask must be equal to the size of dimension to be pruned on. But get: len(current_mask): {len(current_mask)}; var_shape: {var_shape}; dims: {dims}; var name: {_name}; len(mask): {len(mask)}"
+                0]], "The length of current_mask must be equal to the size of dimension to be pruned on. But get: len(current_mask): {}; var_shape: {}; dims: {}; var name: {}; len(mask): {}".format(len(current_mask), var_shape, dims, _name, len(mask))
             plan.add(_name, PruningMask(dims, current_mask, pruned_ratio))
         if apply == "lazy":
             plan.apply(self.model, lazy=True)
...
@@ -60,7 +60,7 @@ def collect_convs(params, graph, visited={}):
         param = graph.var(_param)
         if param is None:
             _logger.warning(
-                f"Cann't found relative variables of {_param} because {_param} is not in target program or model. Please make sure {_param} is in your program if you are using static API of PaddlePaddle. And make sure your model in correctly mode and contains {_param} if you are using dynamic API of PaddlePaddle."
+                "Cann't found relative variables of {} because {} is not in target program or model. Please make sure {} is in your program if you are using static API of PaddlePaddle. And make sure your model in correctly mode and contains {} if you are using dynamic API of PaddlePaddle.".format(_param, _param, _param, _param)
             )
             groups.append([])
             continue
...
@@ -95,7 +95,7 @@ class PruneWorker(object):
         _logger.debug("\nfrom: {}\nto: {}\npruned_axis: {}; var: {}".format(
             self.op, op, pruned_axis, var.name()))
         _logger.debug(
-            f"visit {op.type()} by var [{var.name()}] on axis [{pruned_axis}];\t visited={self.visited}\n"
+            "visit {} by var [{}] on axis [{}];\t visited={}\n".format(op.type(), var.name(), pruned_axis, self.visited)
         )
         walker = cls(op, pruned_params=self.pruned_params, visited=self.visited)
         walker.prune(var, pruned_axis, pruned_idx)
@@ -123,7 +123,7 @@ class conv2d(PruneWorker):
     def _prune(self, var, pruned_axis, pruned_idx):
         if self._is_depthwise_conv(self.op):
-            _logger.debug(f"Meet conv2d who is depthwise conv2d actually.")
+            _logger.debug("Meet conv2d who is depthwise conv2d actually.")
             walker = depthwise_conv2d(
                 self.op, self.pruned_params, visited=self.visited)
             walker._prune(var, pruned_axis, pruned_idx)
...
@@ -144,7 +144,7 @@ class Pruner():
                     op.attr("groups") * new_shape[pruned_axis] /
                     origin_shape[pruned_axis])
                 _logger.debug(
-                    f"change groups of conv({param.name()}) from {op.attr('groups')} to {new_groups}; origin_shape: {origin_shape}; new_shape: {new_shape}"
+                    "change groups of conv({}) from {} to {}; origin_shape: {}; new_shape: {}".format(param.name(), op.attr('groups'), new_groups, origin_shape, new_shape)
                 )
                 op.set_attr("groups", new_groups)
...
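A quick way to verify a change like this is a syntax-only compile under a Python 2 interpreter, since any remaining f-string fails to parse. A sketch using the stdlib py_compile module; the path is hypothetical and should point at whichever modules this commit actually touches:

```python
import py_compile

# Under Python 2 this raises py_compile.PyCompileError (wrapping the
# SyntaxError) if the module still contains any f-strings.
py_compile.compile("paddleslim/prune/prune_worker.py", doraise=True)
```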