/* Copyright (c) 2016 Baidu, Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */


#ifndef HL_CNN_STUB_H_
#define HL_CNN_STUB_H_

#include "hl_cnn.h"

/* col2im-style fold of the column buffer back into the feature map. */
inline void hl_shrink_col2feature(
    const real * dataCol, size_t channels,
    size_t height, size_t width,
    size_t blockH, size_t blockW,
    size_t strideH, size_t strideW,
    size_t paddingH, size_t paddingW,
    size_t outputH, size_t outputW,
    real* dataIm,
    real alpha, real beta) {}

/* im2col-style expansion of the feature map into a column buffer. */
inline void hl_expand_feature2col(
    const real* dataIm, size_t channels,
    size_t height, size_t width,
    size_t blockH, size_t blockW,
    size_t strideH, size_t strideW,
    size_t paddingH, size_t paddingW,
    size_t outputH, size_t outputW,
    real* dataCol) {}

/* Max-pooling forward pass. */
inline void hl_maxpool_forward(
    const int frameCnt, const real* inputData,
    const int channels,
    const int height, const int width,
    const int pooledH, const int pooledW,
    const int sizeX, const int sizeY,
    const int strideH, const int strideW,
    const int paddingH, const int paddingW,
    real* tgtData, const int tgtStride) {}

/* Max-pooling backward pass (gradient w.r.t. the input). */
inline void hl_maxpool_backward(
    const int frameCnt, const real* inputData,
    const real* outData, const real* outGrad,
    const int channels, const int height,
    const int width,
    const int pooledH, const int pooledW,
    const int sizeX, const int sizeY,
    const int strideH, const int strideW,
    const int paddingH, const int paddingW,
    real scaleA, real scaleB,
    real* targetGrad, const int outStride) {}

/* Average-pooling forward pass. */
inline void hl_avgpool_forward(
    const int frameCnt, const real* inputData,
    const int channels,
    const int height, const int width,
    const int pooledH, const int pooledW,
    const int sizeX, const int sizeY,
    const int strideH, const int strideW,
    const int paddingH, const int paddingW,
    real* tgtData, const int tgtStride) {}

/* Average-pooling backward pass (gradient w.r.t. the input). */
inline void hl_avgpool_backward(
    const int frameCnt, const real* outGrad,
    const int channels, const int height,
    const int width,
    const int pooledH, const int pooledW,
    const int sizeX, const int sizeY,
    const int strideH, const int strideW,
    const int paddingH, const int paddingW,
    real scaleA, real scaleB,
    real* backGrad, const int outStride) {}

/* Cross-map response normalization (LRN across channels), forward pass. */
inline void hl_CMRNorm_forward(
    size_t frameCnt, const real* in, real* scale, real* out,
    size_t channels, size_t height, size_t width, size_t sizeX,
    real alpha, real beta) {}

/* Cross-map response normalization, backward pass. */
inline void hl_CMRNorm_backward(
    size_t frameCnt, const real* inV, const real* scale,
    const real* outV, const real* outDiff, real *inDiff,
    size_t channels, size_t height, size_t width, size_t sizeX,
    real alpha, real beta) {}

/* Bilinear interpolation (image resize) forward pass. */
inline void hl_bilinear_forward(const real* inData,
                                const size_t inImgH,
                                const size_t inImgW,
                                const size_t inputH,
                                const size_t inputW,
                                real* outData,
                                const size_t outImgH,
                                const size_t outImgW,
                                const size_t outputH,
                                const size_t outputW,
                                const size_t numChannels,
                                const real ratioH,
                                const real ratioW) {}

/* Bilinear interpolation (image resize) backward pass. */
inline void hl_bilinear_backward(real* inGrad,
                                const size_t inImgH,
                                const size_t inImgW,
                                const size_t inputH,
                                const size_t inputW,
                                const real* outGrad,
                                const size_t outImgH,
                                const size_t outImgW,
                                const size_t outputH,
                                const size_t outputW,
                                const size_t numChannels,
                                const real ratioH,
                                const real ratioW) {}

/* Maxout forward pass: max over each group of feature slices. */
inline void hl_maxout_forward(
    const real* inData, real* outData, int* idData,
    size_t batchSize, size_t size, size_t featLen, size_t group) {}

/* Maxout backward pass, routing gradients via the recorded indices. */
inline void hl_maxout_backward(
    real* inGrad, const real* outGrad, const int* idData,
    size_t batchSize, size_t size, size_t featLen, size_t group) {}

#endif  // HL_CNN_STUB_H_