Unverified commit 229e91bf authored by Allen Guo, committed by GitHub

[IPU] remove paddle_ipu shared library (#41307)

* remove paddle_ipu shared library

* fix unique_name
Parent 5ae8babb
......@@ -199,13 +199,6 @@ IF(WITH_XPU)
DSTS ${dst_dir} ${dst_dir})
ENDIF()
IF(WITH_IPU)
set(dst_dir "${PADDLE_INFERENCE_INSTALL_DIR}/third_party/install/ipu")
copy(inference_lib_dist
SRCS ${CMAKE_BINARY_DIR}/paddle/fluid/platform/device/ipu/libpaddle_ipu.so
DSTS ${dst_dir})
ENDIF()
# CMakeCache Info
copy(inference_lib_dist
SRCS ${CMAKE_CURRENT_BINARY_DIR}/CMakeCache.txt
......
......@@ -150,7 +150,7 @@ if(WITH_IPU)
pass_library(ipu_runtime_replacer_pass base DIR ipu)
pass_library(inference_process_pass base DIR ipu)
pass_library(inference_postprocess_pass base DIR ipu)
pass_library(popart_canonicalization_pass base DIR ipu DEPS paddle_ipu)
pass_library(popart_canonicalization_pass base DIR ipu)
pass_library(ipu_inplace_pass base DIR ipu)
pass_library(infer_shape_pass base DIR ipu)
pass_library(delete_scale_op_pass base DIR ipu)
......
......@@ -53,8 +53,6 @@ endif()
#TODO(wilber, T8T9): Do we still need to support windows gpu static library?
if(WIN32 AND WITH_GPU)
cc_library(paddle_inference DEPS ${fluid_modules} ${phi_modules} ${STATIC_INFERENCE_API} ${utils_modules})
elseif(WITH_IPU)
cc_library(paddle_inference DEPS ${fluid_modules} ${phi_modules} ${STATIC_INFERENCE_API} ${utils_modules} paddle_ipu)
else()
create_static_lib(paddle_inference ${fluid_modules} ${phi_modules} ${STATIC_INFERENCE_API} ${utils_modules})
endif()
......
......@@ -74,6 +74,10 @@
#include "paddle/fluid/inference/tensorrt/trt_int8_calibrator.h"
#endif
#ifdef PADDLE_WITH_IPU
#include "paddle/fluid/platform/device/ipu/paddle_ipu_handler.h"
#endif
namespace paddle {
using inference::Singleton;
......
IF(WITH_IPU)
FILE(GLOB POPART_CANONICALIZATION_SRC ${PADDLE_SOURCE_DIR}/paddle/fluid/platform/device/ipu/popart_canonicalization/*.cc)
list(APPEND PADDLE_IPU_SRC ${POPART_CANONICALIZATION_SRC})
if(WITH_IPU)
set(paddle_ipu_handler ${CMAKE_CURRENT_BINARY_DIR}/paddle_ipu_handler.h.tmp)
set(paddle_ipu_handler_final ${CMAKE_CURRENT_BINARY_DIR}/paddle_ipu_handler.h)
file(WRITE ${paddle_ipu_handler} "// Auto generated from CMake. DO NOT EDIT!\n\n")
file(APPEND ${paddle_ipu_handler} "\#pragma once\n")
file(APPEND ${paddle_ipu_handler} "\#include \"paddle/fluid/platform/device/ipu/popart_canonicalization/canonicalization_utils.h\"\n\n")
file(GLOB POPART_CANONICALIZATION_SRC ${CMAKE_CURRENT_SOURCE_DIR}/popart_canonicalization/*.cc)
copy_if_different(${paddle_ipu_handler} ${paddle_ipu_handler_final})
foreach(file_path ${POPART_CANONICALIZATION_SRC})
file(READ ${file_path} file_content)
string(REGEX MATCHALL "(REGISTER_HANDLER)(\\()([A-Za-z0-9_]+)(,)" op_handlers ${file_content})
string(REPLACE "REGISTER_HANDLER(" "" op_handlers "${op_handlers}")
string(REPLACE "," "" op_handlers "${op_handlers}")
foreach(op_handler ${op_handlers})
file(APPEND ${paddle_ipu_handler} "USE_HANDLER(${op_handler});\n")
endforeach()
endforeach()
set(IPU_BACKEND_SRC
"ipu_strategy.cc"
"ipu_executor.cc"
......@@ -13,10 +29,7 @@ IF(WITH_IPU)
"ipu_device.cc"
)
cc_library(ipu_backend SRCS ${IPU_BACKEND_SRC} DEPS popart-only graph graph_helper popdist)
cc_library(popart_canonicalization SRCS ${POPART_CANONICALIZATION_SRC} DEPS graph)
cc_library(ipu_backend SRCS ${IPU_BACKEND_SRC} DEPS popart-only graph graph_helper popdist popart_canonicalization)
cc_library(ipu_info SRCS ${IPU_INFO_SRC} DEPS popart-only enforce)
add_library(paddle_ipu SHARED ${PADDLE_IPU_SRC})
add_dependencies(paddle_ipu ipu_backend)
set(PADDLE_IPU_LIB "${CMAKE_CURRENT_BINARY_DIR}/libpaddle_ipu.so" CACHE STRING "")
set(PADDLE_IPU_LIB_DIR "${CMAKE_CURRENT_BINARY_DIR}" CACHE STRING "")
ENDIF()
endif()
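
For reference, a rough sketch of what the generated paddle_ipu_handler.h would contain after this hunk runs. This is illustrative only; the actual op list depends on the REGISTER_HANDLER calls found in the scanned popart_canonicalization/*.cc sources (relu, tanh and log_softmax are taken from the activation handlers touched later in this diff):

    // Auto generated from CMake. DO NOT EDIT!

    #pragma once
    #include "paddle/fluid/platform/device/ipu/popart_canonicalization/canonicalization_utils.h"

    USE_HANDLER(relu);
    USE_HANDLER(tanh);
    USE_HANDLER(log_softmax);
    // ... one USE_HANDLER line per handler registered in the scanned sources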
......@@ -88,6 +88,11 @@ Node *log_softmax_handler(Graph *graph, Node *node) {
node->outputs);
}
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
REGISTER_HANDLER(relu, relu_handler);
REGISTER_HANDLER(tanh, tanh_handler);
REGISTER_HANDLER(log, log_handler);
......@@ -95,8 +100,3 @@ REGISTER_HANDLER(sigmoid, sigmoid_handler);
REGISTER_HANDLER(sqrt, sqrt_handler);
REGISTER_HANDLER(gelu, gelu_handler);
REGISTER_HANDLER(log_softmax, log_softmax_handler);
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
......@@ -23,9 +23,36 @@ namespace paddle {
namespace platform {
namespace ipu {
#define REGISTER_HANDLER(name, func) \
static bool __UNUSED_##name = \
paddle::platform::ipu::RegisterHandler(#name, func)
#define STATIC_ASSERT_GLOBAL_NAMESPACE(uniq_name, msg) \
struct __test_global_namespace_##uniq_name##__ {}; \
static_assert(std::is_same<::__test_global_namespace_##uniq_name##__, \
__test_global_namespace_##uniq_name##__>::value, \
msg)
#define REGISTER_HANDLER(op_type, handler) \
STATIC_ASSERT_GLOBAL_NAMESPACE( \
__reg_ipu_op_handler__##op_type, \
"REGISTER_HANDLER must be called in global namespace"); \
struct __PaddleRegisterIpuOpHandler_##op_type { \
__PaddleRegisterIpuOpHandler_##op_type() { \
::paddle::platform::ipu::RegisterHandler( \
#op_type, paddle::platform::ipu::handler); \
} \
int Touch() const { return 0; } \
}; \
static __PaddleRegisterIpuOpHandler_##op_type \
__PaddleRegisterIpuOpHandler_instance##op_type; \
int TouchPaddleIpuOpHandlerRegister_##op_type() { \
return __PaddleRegisterIpuOpHandler_instance##op_type.Touch(); \
}
#define USE_HANDLER(op_type) \
STATIC_ASSERT_GLOBAL_NAMESPACE( \
__use_ipu_op_handler__##op_type, \
"USE_HANDLER must be called in global namespace"); \
extern int TouchPaddleIpuOpHandlerRegister_##op_type(); \
UNUSED static int use_handler__itself_##op_type##_ = \
TouchPaddleIpuOpHandlerRegister_##op_type()
using SymbolHandler = std::function<Node *(Graph *, Node *)>;
......
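
To illustrate the new registration scheme, here is a rough manual expansion of REGISTER_HANDLER(relu, relu_handler) under the macro above, together with the matching expansion of USE_HANDLER(relu) that the generated paddle_ipu_handler.h relies on (whitespace adjusted, STATIC_ASSERT_GLOBAL_NAMESPACE check omitted for brevity). Referencing the Touch function from another translation unit forces the linker to keep the handler's object file, which is presumably why the separate libpaddle_ipu.so is no longer needed once the inference library is linked statically:

    // Expansion of REGISTER_HANDLER(relu, relu_handler) in the handler .cc file:
    struct __PaddleRegisterIpuOpHandler_relu {
      __PaddleRegisterIpuOpHandler_relu() {
        ::paddle::platform::ipu::RegisterHandler("relu",
                                                 paddle::platform::ipu::relu_handler);
      }
      int Touch() const { return 0; }
    };
    static __PaddleRegisterIpuOpHandler_relu __PaddleRegisterIpuOpHandler_instancerelu;
    int TouchPaddleIpuOpHandlerRegister_relu() {
      return __PaddleRegisterIpuOpHandler_instancerelu.Touch();
    }

    // Expansion of USE_HANDLER(relu) in the including translation unit:
    extern int TouchPaddleIpuOpHandlerRegister_relu();
    UNUSED static int use_handler__itself_relu_ = TouchPaddleIpuOpHandlerRegister_relu();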
......@@ -93,6 +93,11 @@ Node *elementwise_mod_handler(Graph *graph, Node *node) {
return elementwise_op_handler(graph, node, "popart_mod");
}
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
REGISTER_HANDLER(elementwise_add, elementwise_add_handler);
REGISTER_HANDLER(elementwise_sub, elementwise_sub_handler);
REGISTER_HANDLER(elementwise_div, elementwise_div_handler);
......@@ -101,8 +106,3 @@ REGISTER_HANDLER(elementwise_min, elementwise_min_handler);
REGISTER_HANDLER(elementwise_max, elementwise_max_handler);
REGISTER_HANDLER(elementwise_pow, elementwise_pow_handler);
REGISTER_HANDLER(elementwise_mod, elementwise_mod_handler);
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
......@@ -58,14 +58,14 @@ Node *less_than_handler(Graph *graph, Node *node) {
{GetOutputVarNode("Out", node)}, {});
}
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
REGISTER_HANDLER(equal, equal_handler);
REGISTER_HANDLER(logical_not, logical_not_handler);
REGISTER_HANDLER(logical_or, logical_or_handler);
REGISTER_HANDLER(logical_and, logical_and_handler);
REGISTER_HANDLER(greater_than, greater_than_handler);
REGISTER_HANDLER(less_than, less_than_handler);
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
......@@ -366,6 +366,11 @@ Node *arg_max_handler(Graph *graph, Node *node) {
{{"axis", axis}, {"keepdims", int64_t{0}}});
}
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
REGISTER_HANDLER(mean, mean_handler);
REGISTER_HANDLER(pow, pow_handler);
REGISTER_HANDLER(mul, mul_handler);
......@@ -377,8 +382,3 @@ REGISTER_HANDLER(cross_entropy2, cross_entropy2_handler);
REGISTER_HANDLER(cumsum, cumsum_handler);
REGISTER_HANDLER(matmul_v2, matmul_v2_handler);
REGISTER_HANDLER(arg_max, arg_max_handler);
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
......@@ -298,6 +298,11 @@ Node *dropout_handler(Graph *graph, Node *node) {
}
}
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
REGISTER_HANDLER(pool2d, pool2d_handler);
REGISTER_HANDLER(batch_norm, batch_norm_handler);
REGISTER_HANDLER(group_norm, group_norm_handler);
......@@ -305,8 +310,3 @@ REGISTER_HANDLER(instance_norm, instance_norm_handler);
REGISTER_HANDLER(layer_norm, layer_norm_handler);
REGISTER_HANDLER(conv2d, conv2d_handler);
REGISTER_HANDLER(dropout, dropout_handler);
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
......@@ -77,6 +77,11 @@ Node *detach_handler(Graph *graph, Node *node) {
node->outputs);
}
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
REGISTER_HANDLER(custom_op, custom_op_handler);
REGISTER_HANDLER(print, print_handler);
REGISTER_HANDLER(popart_optimizer, popart_optimizer_handler);
......@@ -84,8 +89,3 @@ REGISTER_HANDLER(checkpointoutput, checkpointoutput_handler);
REGISTER_HANDLER(custom_nll_loss, custom_nll_loss_handler);
REGISTER_HANDLER(identity, identity_handler);
REGISTER_HANDLER(detach, detach_handler);
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
......@@ -56,13 +56,13 @@ Node *reduce_prod_handler(Graph *graph, Node *node) {
return reduce_op_handler(graph, node, "popart_reduceprod");
}
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
REGISTER_HANDLER(reduce_mean, reduce_mean_handler);
REGISTER_HANDLER(reduce_min, reduce_min_handler);
REGISTER_HANDLER(reduce_sum, reduce_sum_handler);
REGISTER_HANDLER(reduce_max, reduce_max_handler);
REGISTER_HANDLER(reduce_prod, reduce_prod_handler);
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
......@@ -86,10 +86,10 @@ Node *topk_handler(Graph *graph, Node *node) {
static_cast<int>(framework::proto::VarType::INT32));
}
REGISTER_HANDLER(top_k, topk_handler);
REGISTER_HANDLER(top_k_v2, topk_handler);
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
REGISTER_HANDLER(top_k, topk_handler);
REGISTER_HANDLER(top_k_v2, topk_handler);
......@@ -570,6 +570,11 @@ Node *split_handler(Graph *graph, Node *node) {
{"split", std::vector<int64_t>{sections.begin(), sections.end()}}});
}
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
REGISTER_HANDLER(fill_constant, fill_constant_handler);
REGISTER_HANDLER(gaussian_random, gaussian_random_handler);
REGISTER_HANDLER(uniform_random, uniform_random_handler);
......@@ -593,8 +598,3 @@ REGISTER_HANDLER(lookup_table_v2, lookup_table_v2_handler);
REGISTER_HANDLER(split, split_handler);
REGISTER_HANDLER(one_hot, one_hot_handler);
REGISTER_HANDLER(one_hot_v2, one_hot_v2_handler);
} // namespace
} // namespace ipu
} // namespace platform
} // namespace paddle
......@@ -369,10 +369,6 @@ if(WITH_PYTHON)
target_link_libraries(paddle_pybind ${ROCM_HIPRTC_LIB})
endif()
if(WITH_IPU)
target_link_libraries(paddle_pybind paddle_ipu)
endif()
get_property (os_dependency_modules GLOBAL PROPERTY OS_DEPENDENCY_MODULES)
target_link_libraries(paddle_pybind ${os_dependency_modules})
add_dependencies(paddle_pybind op_function_generator_cmd)
......
......@@ -547,10 +547,6 @@ if '${WITH_XPU_BKCL}' == 'ON':
shutil.copy('${XPU_BKCL_LIB}', libs_path)
package_data['paddle.libs']+=['${XPU_BKCL_LIB_NAME}']
if '${WITH_IPU}' == 'ON':
shutil.copy('${PADDLE_IPU_LIB}', libs_path)
package_data['paddle.libs'] += ['libpaddle_ipu' + ext_name]
# remove unused paddle/libs/__init__.py
if os.path.isfile(libs_path+'/__init__.py'):
os.remove(libs_path+'/__init__.py')
......