Unverified · Commit b25804c3 · Author: dzhwinter · Committer: GitHub

"fix unsigned compare problem" (#5359)

* "fix unsigned compare problem"

* "remove gtest from CMakeList"

* "remove namespace"
Parent: 4d728934
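For readers unfamiliar with the namesake problem: when a signed int is compared against an unsigned size_t, the usual arithmetic conversions turn the signed operand into an unsigned one, the compiler flags it (-Wsign-compare under -Wall -Wextra), and a negative value silently becomes a huge positive one. A minimal standalone sketch of the pitfall and of the cast-based fix this commit applies in the test hunk below (not code from this commit; assumes a typical 64-bit size_t):

#include <cstddef>
#include <iostream>

int main() {
  int s = -1;
  size_t kSize = 5;
  // s is converted to size_t here; on a 64-bit platform
  // (size_t)-1 == 18446744073709551615, so this prints 0
  // even though -1 < 5 mathematically.
  std::cout << (s < kSize) << "\n";
  // The fix pattern used in parameter_optimizer_test below:
  // cast explicitly once the value is known to be non-negative.
  s = 5;
  std::cout << (static_cast<size_t>(s) == kSize) << "\n";  // prints 1
  return 0;
}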
CMakeLists.txt:
-include_directories(${CMAKE_CURRENT_BINARY_DIR})
 set(OPITMIZER_SRCS
     adadelta_optimizer.cc
     adagrad_optimizer.cc
@@ -9,11 +7,6 @@ set(OPITMIZER_SRCS
     sgd_optimizer.cc
 )
-add_library(paddle_optimizer STATIC ${OPITMIZER_SRCS})
-add_dependencies(paddle_optimizer paddle_proto ${external_project_dependencies})
-if(WITH_TESTING)
-  add_simple_unittest(serialization_test)
-  add_simple_unittest(parameter_optimizer_test)
-endif()
+cc_library(paddle_optimizer STATIC SRCS ${OPITMIZER_SRCS} DEPS paddle_proto glog)
+cc_test(serialization_test SRCS serialization_test.cc DEPS paddle_proto)
+cc_test(parameter_optimizer_test SRCS parameter_optimizer_test.cc DEPS paddle_optimizer)
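Context for the "remove gtest from CMakeList" bullet: cc_library and cc_test are PaddlePaddle's own CMake helper functions (defined in the repo's cmake/generic.cmake at the time); cc_test wires in gtest and registers the binary with ctest by itself, which is why the explicit WITH_TESTING / add_simple_unittest block and any direct gtest dependency can be dropped. A rough sketch, under that assumption, of what the calls above expand to (not Paddle's actual generic.cmake):

# cc_library(paddle_optimizer STATIC SRCS ${OPITMIZER_SRCS} DEPS paddle_proto glog)
add_library(paddle_optimizer STATIC ${OPITMIZER_SRCS})
target_link_libraries(paddle_optimizer paddle_proto glog)
add_dependencies(paddle_optimizer paddle_proto glog)

# cc_test(serialization_test SRCS serialization_test.cc DEPS paddle_proto)
add_executable(serialization_test serialization_test.cc)
target_link_libraries(serialization_test paddle_proto gtest gtest_main)
add_test(NAME serialization_test COMMAND serialization_test)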
adadelta_optimizer.cc:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #include "adadelta_optimizer.h"
 #include <algorithm>
 #include <cmath>
...
adadelta_optimizer.h:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #pragma once
 #include "parameter_optimizer.h"
...
adagrad_optimizer.cc:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #include <cmath>
 #include "adagrad_optimizer.h"
...
adagrad_optimizer.h:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #pragma once
 #include "parameter_optimizer.h"
...
adam_optimizer.cc:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #include "adam_optimizer.h"
 #include <cmath>
...
adam_optimizer.h:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #pragma once
 #include "parameter_optimizer.h"
...
optimizer.cc:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #include "optimizer.h"
 #include <glog/logging.h>
 #include <cstdlib>
@@ -6,8 +20,8 @@
 #include "parameter_optimizer.h"
-using namespace paddle;
-using namespace paddle::optimizer;
+using paddle::optimizer::ParameterOptimizer;
+using paddle::optimizer::Tensor;
 template <paddle_element_type VALUE>
 struct EnumToType {};
@@ -15,22 +29,21 @@ struct EnumToType {};
 template <class T>
 struct TypeToEnum {};
 #define MATCH_ENUM_TYPE(TYPE, ENUM)                   \
   template <>                                         \
   struct TypeToEnum<TYPE> {                           \
-    static paddle_element_type v() { return ENUM; };  \
+    static paddle_element_type v() { return ENUM; }   \
     static constexpr TYPE value = ENUM;               \
   };                                                  \
   template <>                                         \
   struct EnumToType<ENUM> {                           \
     typedef TYPE Type;                                \
   }
 MATCH_ENUM_TYPE(int32_t, PADDLE_ELEMENT_TYPE_INT32);
 MATCH_ENUM_TYPE(uint32_t, PADDLE_ELEMENT_TYPE_UINT32);
 MATCH_ENUM_TYPE(int64_t, PADDLE_ELEMENT_TYPE_INT64);
 MATCH_ENUM_TYPE(uint64_t, PADDLE_ELEMENT_TYPE_UINT64);
-// TODO(zhihong): only implement below type, need to fix
 MATCH_ENUM_TYPE(float, PADDLE_ELEMENT_TYPE_FLOAT32);
 MATCH_ENUM_TYPE(double, PADDLE_ELEMENT_TYPE_FLOAT64);
...
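Two things are worth noting in optimizer.cc above. First, the blanket using namespace directives are replaced with two targeted using-declarations, so only ParameterOptimizer and Tensor are pulled into the file's global scope. Second, the semicolon after the body of v() is dropped: a semicolon after a member-function definition is redundant and some compilers warn about it (e.g. Clang's -Wextra-semi). The MATCH_ENUM_TYPE macro itself builds a two-way, compile-time mapping between C++ types and paddle_element_type values; a self-contained sketch of how it is used (the enum here is a stand-in with assumed values; the real one lives in optimizer.h):

#include <cstdint>
#include <iostream>

// Stand-in for the enum declared in optimizer.h (values assumed):
typedef enum {
  PADDLE_ELEMENT_TYPE_INT32 = 0,
  PADDLE_ELEMENT_TYPE_FLOAT32 = 4,
} paddle_element_type;

template <paddle_element_type VALUE>
struct EnumToType {};

template <class T>
struct TypeToEnum {};

#define MATCH_ENUM_TYPE(TYPE, ENUM)                 \
  template <>                                       \
  struct TypeToEnum<TYPE> {                         \
    static paddle_element_type v() { return ENUM; } \
  };                                                \
  template <>                                       \
  struct EnumToType<ENUM> {                         \
    typedef TYPE Type;                              \
  }

MATCH_ENUM_TYPE(int32_t, PADDLE_ELEMENT_TYPE_INT32);
MATCH_ENUM_TYPE(float, PADDLE_ELEMENT_TYPE_FLOAT32);

int main() {
  std::cout << TypeToEnum<float>::v() << "\n";       // type -> enum: prints 4
  EnumToType<PADDLE_ELEMENT_TYPE_INT32>::Type x = 7; // enum -> type: x is int32_t
  std::cout << x << "\n";
  return 0;
}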
optimizer.h:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #pragma once
 #include <stdbool.h>
...
parameter_optimizer.cc:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #include <glog/logging.h>
 #include "adadelta_optimizer.h"
 #include "adagrad_optimizer.h"
...
parameter_optimizer.h:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #pragma once
 #include <glog/logging.h>
...
parameter_optimizer_test.cc:
@@ -110,7 +110,7 @@ public:
     int s = 0;
     float* newp = (float*)opts_[i]->get_weight(&s);
-    EXPECT_EQ(s, kSize);
+    EXPECT_EQ(static_cast<size_t>(s), kSize);
     for (size_t j = 0; j < kSize; ++j) {
       EXPECT_EQ(newp[j], (*p)[j]);
     }
...
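The hunk above is the commit's namesake fix: kSize is a size_t while get_weight reports the buffer length through an int out-parameter, so the original EXPECT_EQ(s, kSize) compared signed with unsigned and tripped -Wsign-compare (gtest's EXPECT_EQ compares its arguments with ==, so the warning surfaces inside the macro expansion). Casting s, which has just been filled with a non-negative length, is the minimal fix; the other obvious option, comparing s against static_cast<int>(kSize), would also silence the warning.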
sgd_optimizer.cc:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #include "sgd_optimizer.h"
 #include "serialization.h"
...
sgd_optimizer.h:
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. */
 #pragma once
 #include "parameter_optimizer.h"
@@ -15,7 +29,6 @@ public:
       nesterov_(n) {
     if (momentum_ != 0.0) {
       size_t size = parameter->size();
-      // TODO: fix it with align aware allocator bind to Tensor
       momentums_ = new Tensor(size);
     }
   }
...
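A note on the sgd_optimizer.h hunk: momentums_ is the per-parameter velocity buffer of momentum SGD, which is why it is only allocated when momentum_ is non-zero. A sketch of the update this buffer supports (standard momentum/Nesterov formulation; the actual Paddle kernel may differ in details such as weight decay):

#include <cstddef>
#include <vector>

// velocity plays the role of the momentums_ tensor above.
void sgd_momentum_step(std::vector<float>& w, const std::vector<float>& grad,
                       std::vector<float>& velocity, float lr, float mu,
                       bool nesterov) {
  for (size_t i = 0; i < w.size(); ++i) {
    velocity[i] = mu * velocity[i] - lr * grad[i];
    // Plain momentum applies the velocity; Nesterov looks one step ahead.
    w[i] += nesterov ? mu * velocity[i] - lr * grad[i] : velocity[i];
  }
}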