/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#ifndef __PADDLE_CAPI_GRADIENT_MACHINE_H__
#define __PADDLE_CAPI_GRADIENT_MACHINE_H__
#include <stdbool.h>
#include <stdint.h>

#include "arguments.h"
#include "config.h"
#include "error.h"

#ifdef __cplusplus
extern "C" {
#endif
/**
 * @brief GradientMachine means a neural network.
 *
 * Opaque handle to a gradient machine instance. The underlying object is
 * managed by the library; callers only pass the handle to the
 * paddle_gradient_machine_* functions and must release it with
 * paddle_gradient_machine_destroy.
 */
typedef void* paddle_gradient_machine;

/**
 * @brief Create a gradient machine used for model inference.
 * @param [out] machine receives the newly created gradient machine handle.
 * @param [in] modelConfigProtobuf buffer holding the serialized model
 *             configuration protobuf message.
 * @param [in] size length of modelConfigProtobuf in bytes.
 * @return paddle_error
 */
PD_API paddle_error paddle_gradient_machine_create_for_inference(
    paddle_gradient_machine* machine, void* modelConfigProtobuf, int size);

/**
 * @brief Load parameter from disk.
 * @param machine Gradient Machine to load the parameters into.
 * @param path local directory path containing the saved parameter files.
 * @return paddle_error
 */
PD_API paddle_error paddle_gradient_machine_load_parameter_from_disk(
    paddle_gradient_machine machine, const char* path);

/**
 * @brief Load parameter from an in-memory buffer.
 * @param machine Gradient Machine to load the parameters into.
 * @param buf buffer containing all parameters.
 * @param length size of buf in bytes.
 * @return paddle_error
 */
PD_API paddle_error paddle_gradient_machine_load_parameter_from_buffer(
    paddle_gradient_machine machine, const char* buf, uint64_t length);

/**
 * @brief Run a forward pass of the gradient machine.
 * @param machine Gradient machine to evaluate.
 * @param inArgs input arguments fed to the network.
 * @param outArgs output arguments produced by the network; must be created
 *        by the caller beforehand and is filled in by this call.
 * @param isTrain true to run in training mode, false for inference
 *        (NOTE(review): presumably affects layers such as dropout — confirm
 *        against the implementation).
 * @return paddle_error
 */
PD_API paddle_error
paddle_gradient_machine_forward(paddle_gradient_machine machine,
                                paddle_arguments inArgs,
                                paddle_arguments outArgs,
                                bool isTrain);

/**
 * @brief Create a gradient machine, which parameters are shared from another
 *        gradient machine.
 * @param [in] origin gradient machine whose parameters are shared.
 * @param [in] modelConfigProtobuf buffer holding the serialized model
 *             configuration protobuf message.
 * @param [in] size length of the model config buffer in bytes.
 * @param [out] slave receives the newly created gradient machine handle
 *              that shares parameters with origin.
 * @return paddle_error
 */
PD_API paddle_error
paddle_gradient_machine_create_shared_param(paddle_gradient_machine origin,
                                            void* modelConfigProtobuf,
                                            int size,
                                            paddle_gradient_machine* slave);

/**
 * @brief Randomize the parameters of a gradient machine.
 * @param machine Gradient Machine whose parameters are re-initialized.
 * @return paddle_error
 */
PD_API paddle_error
paddle_gradient_machine_randomize_param(paddle_gradient_machine machine);

/**
 * @brief Destroy a gradient machine and release its resources.
 * @param machine handle to destroy; must not be used after this call.
 * @return paddle_error
 */
PD_API paddle_error
paddle_gradient_machine_destroy(paddle_gradient_machine machine);

#ifdef __cplusplus
}
#endif
#endif