/* Copyright (c) 2016 Baidu, Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */


#ifndef HL_CNN_STUB_H_
#define HL_CNN_STUB_H_

#include "hl_cnn.h"
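
// No-op stub implementations of the CNN kernels declared in hl_cnn.h.
// Every function body is intentionally empty; presumably these stubs are
// compiled in when the CUDA implementations are unavailable, so that
// CPU-only builds can still compile and link against the same API.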

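// Feature-map <-> column-buffer conversion (col2im / im2col) used by convolution layers.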
inline void hl_shrink_col2feature(
    const real * dataCol, size_t channels,
    size_t height, size_t width,
    size_t blockH, size_t blockW,
    size_t strideH, size_t strideW,
    size_t paddingH, size_t paddingW,
    size_t outputH, size_t outputW,
    real* dataIm,
    real alpha, real beta) {}

inline void hl_expand_feature2col(
    const real* dataIm, size_t channels,
    size_t height, size_t width,
    size_t blockH, size_t blockW,
    size_t strideH, size_t strideW,
    size_t paddingH, size_t paddingW,
    size_t outputH, size_t outputW,
    real* dataCol) {}

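// Max pooling, forward and backward passes.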
inline void hl_maxpool_forward(
    const int frameCnt, const real* inputData,
    const int channels,
    const int height, const int width,
    const int pooledH, const int pooledW,
    const int sizeX, const int sizeY,
    const int strideH, const int strideW,
    const int paddingH, const int paddingW, real* tgtData) {}

inline void hl_maxpool_backward(
    const int frameCnt, const real* inputData,
    const real* outData, const real* outGrad,
    const int channels, const int height,
    const int width,
    const int pooledH, const int pooledW,
    const int sizeX, const int sizeY,
    const int strideH, const int strideW,
    const int paddingH, const int paddingW,
    real scaleA, real scaleB,
    real* targetGrad) {}

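// Average pooling, forward and backward passes.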
inline void hl_avgpool_forward(
    const int frameCnt, const real* inputData,
    const int channels,
    const int height, const int width,
    const int pooledH, const int pooledW,
    const int sizeX, const int sizeY,
    const int strideH, const int strideW,
    const int paddingH, const int paddingW, real* tgtData) {}

inline void hl_avgpool_backward(
    const int frameCnt, const real* outGrad,
    const int channels, const int height,
    const int width,
    const int pooledH, const int pooledW,
    const int sizeX, const int sizeY,
    const int strideH, const int strideW,
    int paddingH, int paddingW,
    real scaleA, real scaleB,
    real* backGrad) {}

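// Cross-map response normalization (LRN across channels), forward and backward passes.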
inline void hl_CMRNorm_forward(
    size_t frameCnt, const real* in, real* scale, real* out,
    size_t channels, size_t height, size_t width, size_t sizeX,
    real alpha, real beta) {}

inline void hl_CMRNorm_backward(
    size_t frameCnt, const real* inV, const real* scale,
    const real* outV, const real* outDiff, real *inDiff,
    size_t channels, size_t height, size_t width, size_t sizeX,
    real alpha, real beta) {}

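// Bilinear interpolation (image up/down-sampling), forward and backward passes.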
inline void hl_bilinear_forward(const real* inData,
                                const size_t inImgH,
                                const size_t inImgW,
                                const size_t inputH,
                                const size_t inputW,
                                real* outData,
                                const size_t outImgH,
                                const size_t outImgW,
                                const size_t outputH,
                                const size_t outputW,
                                const size_t numChannels) {}

inline void hl_bilinear_backward(real* inGrad,
                                const size_t inImgH,
                                const size_t inImgW,
                                const size_t inputH,
                                const size_t inputW,
                                const real* outGrad,
                                const size_t outImgH,
                                const size_t outImgW,
                                const size_t outputH,
                                const size_t outputW,
                                const size_t numChannels) {}

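// Maxout activation (maximum over groups of feature channels), forward and backward passes.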
inline void hl_maxout_forward(
    const real* inData, real* outData, int* idData,
    size_t batchSize, size_t size, size_t featLen, size_t group) {}

inline void hl_maxout_backward(
    real* inGrad, const real* outGrad, const int* idData,
    size_t batchSize, size_t size, size_t featLen, size_t group) {}

#endif  // HL_CNN_STUB_H_