Commit 4af4883f authored by hedaoyuan

Fix some tests that only need paddle::initMain in main()

Parent cf498cfa
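
All of the deleted main functions below are variants of the same boilerplate: initialize Paddle via paddle::initMain (sometimes plus hl_start/hl_init for GPU tests), initialize GoogleTest, and run all tests. A minimal sketch of a shared test entry point that such tests could link against instead is shown here; the file name test_main.cpp and the header path are assumptions for illustration, not taken from this commit.

// test_main.cpp -- hypothetical shared GoogleTest entry point (illustrative
// sketch only; not necessarily the file this commit links the tests against).
#include <gtest/gtest.h>

#include "paddle/utils/Util.h"  // assumed location of paddle::initMain

int main(int argc, char** argv) {
  // Set up Paddle's global state (command-line flags, logging) once.
  paddle::initMain(argc, argv);
  // Let GoogleTest consume its own flags, then run every registered test.
  testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}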
@@ -65,9 +65,3 @@ TEST(LinearChainCRF, decoding) {
}
}
}
int main(int argc, char** argv) {
initMain(argc, argv);
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
@@ -730,9 +730,3 @@ TEST(ProtoSequenceDataProvider, test) {
} // end for (int numIdSlots : numSlotsArray)
} // end for (int numSparseNonValueVecSlots : numSlotsArray)
}
int main(int argc, char** argv) {
initMain(argc, argv);
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
@@ -242,9 +242,3 @@ TEST(Layer, WarpCTCLayer) {
}
}
}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
initMain(argc, argv);
return RUN_ALL_TESTS();
}
@@ -77,11 +77,4 @@ TEST(CpuGpuVector, subCreate) {
checkDataEqual(v1Check->getData() + offset, v2Check->getData(), size2);
}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
initMain(argc, argv);
int ret = RUN_ALL_TESTS();
return ret;
}
#endif
@@ -114,9 +114,3 @@ TEST(ExecViaCpu, test1) {
testWrapper(functor);
}
#endif
int main(int argc, char** argv) {
paddle::initMain(argc, argv);
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
@@ -169,9 +169,3 @@ TEST(SIMDFunction, decayL1_WithoutLR) {
ASSERT_NEAR(dest[i], simd_dest[i], EPSILON);
}
}
int main(int argc, char** argv) {
paddle::initMain(argc, argv);
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
@@ -561,9 +561,3 @@ TEST(Matrix, SparseMatrixCSCFormatTrimFrom) {
checkSMatrixEqual2(matA, matD);
#endif
}
int main(int argc, char** argv) {
paddle::initMain(argc, argv);
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
@@ -1163,11 +1163,3 @@ TEST(Quaternary, CompareOp) {
TestQuaternaryMatrix<GpuMatrix> testGpu(testQuaternaryCompareOp<GpuMatrix>);
#endif
}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
hl_start();
hl_init(0);
return RUN_ALL_TESTS();
}
@@ -459,11 +459,3 @@ void testSparseMomentum(size_t size, bool useGpu) {
}
TEST(Training, SparseMomentum) { testCase(testSparseMomentum); }
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
initMain(argc, argv);
hl_start();
hl_init(FLAGS_gpu_id);
return RUN_ALL_TESTS();
}
@@ -53,9 +53,3 @@ TEST(MatrixBatchTransTest, test_batch_matrix_transpose) {
checkMatrixEqual(cBatchTransMat, cMat_d2h);
}
#endif
int main(int argc, char** argv) {
paddle::initMain(argc, argv);
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
@@ -139,11 +139,3 @@ TEST(sgdUpdate, GPU) {
testMatrixCase(testSgdUpdate<GpuMatrix>);
}
#endif
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
hl_start();
hl_init(0);
return RUN_ALL_TESTS();
}
@@ -1262,10 +1262,4 @@ TEST(Matrix, MaxOutFwdBwd) {
}
}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
initMain(argc, argv);
return RUN_ALL_TESTS();
}
#endif
@@ -171,11 +171,4 @@ TEST(SMatrix, sMatrixCollectBias) {
}
}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
initMain(argc, argv);
int ret = RUN_ALL_TESTS();
return ret;
}
#endif
@@ -23,15 +23,6 @@ limitations under the License. */
using namespace paddle; // NOLINT
int main(int argc, char** argv) {
paddle::initMain(argc, argv);
testing::InitGoogleTest(&argc, argv);
int ret = RUN_ALL_TESTS();
return ret;
}
class CommonTest : public ::testing::Test {
protected:
CommonTest() : testStat_("test") {}
@@ -96,9 +96,3 @@ TEST(CustomStackTrace, normalTest) {
}
});
}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
paddle::initMain(argc, argv);
return RUN_ALL_TESTS();
}
@@ -44,8 +44,3 @@ TEST(SIMDFlags, normalPrint) {
LOG(INFO) << "Has AVX2: " << std::boolalpha << HAS_AVX2;
LOG(INFO) << "Has AVX512: " << std::boolalpha << HAS_AVX512;
}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
@@ -53,9 +53,3 @@ TEST(ThreadSpinLock, normalTest) {
});
}
}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
paddle::initMain(argc, argv);
return RUN_ALL_TESTS();
}
@@ -79,8 +79,3 @@ TEST(AsyncThreadPool, addBatchJobWithResults) {
ASSERT_EQ(res[i], i);
}
}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
@@ -64,9 +64,3 @@ TEST(ThreadBarrier, normalTest) {
});
}
}
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
paddle::initMain(argc, argv);
return RUN_ALL_TESTS();
}