提交 cdae0c75 编写于 作者: C chengduoZH

fix Conv3d, DeConv3d (bias shape)

上级 2d31ab5f
@@ -42,10 +42,10 @@ bool Conv3DLayer::init(const LayerMap &layerMap,
   if (sharedBiases_) {
     CHECK_EQ((size_t)numFilters_, biasParameter_->getSize());
     biases_ =
-        std::unique_ptr<Weight>(new Weight(1, numFilters_, biasParameter_));
+        std::unique_ptr<Weight>(new Weight(numFilters_, 1, biasParameter_));
   } else {
     biases_ =
-        std::unique_ptr<Weight>(new Weight(1, getSize(), biasParameter_));
+        std::unique_ptr<Weight>(new Weight(getSize(), 1, biasParameter_));
   }
 }
 return true;
@@ -224,20 +224,31 @@ void Conv3DLayer::bpropData(int i) {
 }
 void Conv3DLayer::bpropBiases() {
+  MatrixPtr biases = Matrix::create(biases_->getWGrad()->getData(),
+                                    1,
+                                    biases_->getWGrad()->getElementCnt(),
+                                    false,
+                                    useGpu_);
   MatrixPtr outGradMat = getOutputGrad();
   if (this->sharedBiases_) {
-    biases_->getWGrad()->collectSharedBias(*outGradMat, 1.0f);
+    biases->collectSharedBias(*outGradMat, 1.0f);
   } else {
-    biases_->getWGrad()->collectBias(*outGradMat, 1.0f);
+    biases->collectBias(*outGradMat, 1.0f);
   }
 }
 void Conv3DLayer::addBias() {
   MatrixPtr outMat = getOutputValue();
+  MatrixPtr bias = Matrix::create(biases_->getW()->getData(),
+                                  1,
+                                  biases_->getW()->getElementCnt(),
+                                  false,
+                                  useGpu_);
   if (this->sharedBiases_) {
-    outMat->addSharedBias(*(biases_->getW()), 1.0f);
+    outMat->addSharedBias(*(bias), 1.0f);
   } else {
-    outMat->addBias(*(biases_->getW()), 1.0f);
+    outMat->addBias(*(bias), 1.0f);
   }
 }
......
@@ -42,10 +42,10 @@ bool DeConv3DLayer::init(const LayerMap &layerMap,
   if (sharedBiases_) {
     CHECK_EQ((size_t)numFilters_, biasParameter_->getSize());
     biases_ =
-        std::unique_ptr<Weight>(new Weight(1, numFilters_, biasParameter_));
+        std::unique_ptr<Weight>(new Weight(numFilters_, 1, biasParameter_));
   } else {
     biases_ =
-        std::unique_ptr<Weight>(new Weight(1, getSize(), biasParameter_));
+        std::unique_ptr<Weight>(new Weight(getSize(), 1, biasParameter_));
   }
 }
 return true;
@@ -191,21 +191,31 @@ void DeConv3DLayer::bpropWeights(int i) {}
 void DeConv3DLayer::bpropData(int i) {}
 void DeConv3DLayer::bpropBiases() {
+  MatrixPtr biases = Matrix::create(biases_->getWGrad()->getData(),
+                                    1,
+                                    biases_->getWGrad()->getElementCnt(),
+                                    false,
+                                    useGpu_);
   const MatrixPtr &outGradMat = getOutputGrad();
   if (this->sharedBiases_) {
-    biases_->getWGrad()->collectSharedBias(*outGradMat, 1.0f);
+    biases->collectSharedBias(*outGradMat, 1.0f);
   } else {
-    biases_->getWGrad()->collectBias(*outGradMat, 1.0f);
+    biases->collectBias(*outGradMat, 1.0f);
   }
 }
 void DeConv3DLayer::addBias() {
   MatrixPtr outMat = getOutputValue();
+  MatrixPtr bias = Matrix::create(biases_->getW()->getData(),
+                                  1,
+                                  biases_->getW()->getElementCnt(),
+                                  false,
+                                  useGpu_);
   if (this->sharedBiases_) {
-    outMat->addSharedBias(*(biases_->getW()), 1.0f);
+    outMat->addSharedBias(*(bias), 1.0f);
   } else {
-    outMat->addBias(*(biases_->getW()), 1.0f);
+    outMat->addBias(*(bias), 1.0f);
   }
 }
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册