From 7c13292cff454fdf0f12d965268bffeb5cad5aed Mon Sep 17 00:00:00 2001
From: dangqingqing
Date: Thu, 27 Jul 2017 12:53:30 +0800
Subject: [PATCH] Fix bug for multi-GPU inference.

---
 python/paddle/v2/inference.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/python/paddle/v2/inference.py b/python/paddle/v2/inference.py
index 40134a3270c..4dcc3ab57e7 100644
--- a/python/paddle/v2/inference.py
+++ b/python/paddle/v2/inference.py
@@ -35,6 +35,13 @@ class Inference(object):
             name = param.getName()
             assert isinstance(val, api.Vector)
             val.copyFromNumpyArray(parameters.get(name).flatten())
+            # setValueUpdated is called by the randomize, zeroMem, and
+            # load functions in paddle/parameter/Parameter.cpp. In
+            # inference mode setValueUpdated is never called, so the
+            # parameter would not be dispatched by MultiGradientMachine
+            # for multi-GPU runs. Therefore setValueUpdated is called
+            # here, though it would be better to call it in one place.
+            param.setValueUpdated()
         self.__gradient_machine__ = gm
         self.__data_types__ = topo.data_type()
--
GitLab
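
For context, a minimal usage sketch of the multi-GPU inference path this patch affects, assuming the legacy paddle.v2 API. The network topology, the parameter file name (params.tar), and the input batch are hypothetical placeholders, not part of the patch.

# A minimal sketch, assuming the legacy paddle.v2 API; the topology,
# parameter file, and input data below are hypothetical placeholders.
import paddle.v2 as paddle

# trainer_count > 1 with use_gpu=True selects MultiGradientMachine,
# which (per the patch comment) only dispatches a parameter to the
# worker GPUs after setValueUpdated() marks it as changed.
paddle.init(use_gpu=True, trainer_count=2)

# Hypothetical topology: a single fully connected softmax layer.
x = paddle.layer.data(name='x', type=paddle.data_type.dense_vector(784))
y = paddle.layer.fc(input=x, size=10, act=paddle.activation.Softmax())

# Hypothetical pre-trained parameters serialized with Parameters.to_tar().
with open('params.tar', 'r') as f:
    parameters = paddle.parameters.Parameters.from_tar(f)

# Dummy input batch: eight 784-dimensional vectors.
test_batch = [([0.0] * 784, ) for _ in range(8)]

# paddle.infer constructs an Inference object from the output layer and
# parameters; with this patch, each parameter copied from `parameters` is
# also marked updated, so every GPU sees the loaded weights.
probs = paddle.infer(output_layer=y, parameters=parameters, input=test_batch)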