# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..wrapped_decorator import signature_safe_contextmanager
import numpy as np

from paddle.fluid import core
from paddle.fluid import framework

__all__ = ['enabled', 'guard', 'to_variable']


def enabled():
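    """Return True when the program is running in imperative (eager) mode."""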
    return framework._in_imperative_mode()


@signature_safe_contextmanager
def guard(place=None):
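    """Context manager that switches execution into imperative mode.

    Creates a fresh main/startup program pair and a tracer, picks
    ``CUDAPlace(0)`` when Paddle is compiled with CUDA (``CPUPlace``
    otherwise), and activates the imperative tracer and place for the
    enclosed block.

    A minimal usage sketch (assuming this module is re-exported as
    ``paddle.fluid.imperative``)::

        import numpy as np
        import paddle.fluid as fluid

        with fluid.imperative.guard():  # assumed re-export of this module
            x = fluid.imperative.to_variable(
                np.ones([2, 2], dtype='float32'))
    """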
    train = framework.Program()
    startup = framework.Program()
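    # The tracer is bound to the fresh main program's top-level block and
    # records the ops executed while imperative mode is active.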
    tracer = core.Tracer(train.current_block().desc)

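    # Default to the first CUDA device when Paddle is built with CUDA
    # support; fall back to the CPU otherwise.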
    if place is None:
        if core.is_compiled_with_cuda():
            place = core.CUDAPlace(0)
        else:
            place = core.CPUPlace()

    with framework.program_guard(train, startup):
        with framework.unique_name.guard():
            with framework._imperative_guard(tracer):
                with framework._imperative_place_guard(place):
                    yield


def to_variable(value, block=None):
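    """Convert a ``numpy.ndarray`` into an imperative ``Variable``.

    The array's contents are copied into a new LoDTensor-typed variable in
    ``block`` (the current block of the default main program when ``block``
    is None). Must be called inside imperative mode. A ``framework.Variable``
    passed in is returned unchanged.
    """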
    if isinstance(value, np.ndarray):
        assert enabled(), "to_variable could only be called in imperative mode"

        if not block:
            block = framework.default_main_program().current_block()
        py_var = framework.Variable(
            block,
            type=core.VarDesc.VarType.LOD_TENSOR,
            name=None,
            shape=value.shape,
            dtype=value.dtype)
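        # Copy the numpy data into the LoDTensor that backs the newly
        # created imperative variable, on the currently expected place.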
        var = py_var._ivar.value()
        tensor = var.get_tensor()
        tensor.set(value, framework._current_expected_place())
        return py_var
    elif isinstance(value, framework.Variable):
        return value