未验证 提交 29ab75b6 编写于 作者: R RedContritio 提交者: GitHub

move test_*tokenizer to /test/tokenizer (#52658)

上级 6366cffe
...@@ -552,8 +552,6 @@ if((WITH_GPU) AND (CUDA_VERSION GREATER_EQUAL 11.6)) ...@@ -552,8 +552,6 @@ if((WITH_GPU) AND (CUDA_VERSION GREATER_EQUAL 11.6))
endif() endif()
set_tests_properties(test_conv2d_op PROPERTIES LABELS "RUN_TYPE=EXCLUSIVE") set_tests_properties(test_conv2d_op PROPERTIES LABELS "RUN_TYPE=EXCLUSIVE")
set_tests_properties(test_faster_tokenizer_op PROPERTIES LABELS
"RUN_TYPE=EXCLUSIVE")
set_tests_properties(test_conv2d_op_depthwise_conv set_tests_properties(test_conv2d_op_depthwise_conv
PROPERTIES LABELS "RUN_TYPE=EXCLUSIVE") PROPERTIES LABELS "RUN_TYPE=EXCLUSIVE")
set_tests_properties(test_conv2d_api PROPERTIES LABELS "RUN_TYPE=EXCLUSIVE") set_tests_properties(test_conv2d_api PROPERTIES LABELS "RUN_TYPE=EXCLUSIVE")
......
...@@ -126,7 +126,7 @@ if(WITH_TESTING) ...@@ -126,7 +126,7 @@ if(WITH_TESTING)
add_subdirectory(rpc) add_subdirectory(rpc)
# add_subdirectory(sequence) # add_subdirectory(sequence)
add_subdirectory(standalone_executor) add_subdirectory(standalone_executor)
# add_subdirectory(tokenizer) add_subdirectory(tokenizer)
# add_subdirectory(white_list) # add_subdirectory(white_list)
add_subdirectory(xpu) add_subdirectory(xpu)
endif() endif()
......
# Test registration for this directory: discover every test_*.py file and
# expose each one as its own Python test target.
file(
GLOB TEST_OPS
RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}"
"test_*.py")
# Strip the ".py" suffix so each list entry doubles as a test target name.
string(REPLACE ".py" "" TEST_OPS "${TEST_OPS}")
# Register one py_test per discovered file, sourced from the matching .py.
foreach(src ${TEST_OPS})
py_test(${src} SRCS ${src}.py)
endforeach()
# Label the faster-tokenizer test RUN_TYPE=EXCLUSIVE so the CTest harness
# does not run it concurrently with other tests (presumably because it is
# resource-heavy — NOTE(review): confirm against the CI runner policy).
set_tests_properties(test_faster_tokenizer_op PROPERTIES LABELS
"RUN_TYPE=EXCLUSIVE")
...@@ -13,20 +13,17 @@ ...@@ -13,20 +13,17 @@
# limitations under the License. # limitations under the License.
import os import os
import sys
import tempfile import tempfile
import unittest import unittest
import numpy as np import numpy as np
from bert_tokenizer import BertTokenizer
import paddle import paddle
from paddle import _legacy_C_ops, nn from paddle import _legacy_C_ops, nn
from paddle.fluid.framework import _non_static_mode, core from paddle.fluid.framework import _non_static_mode, core
from paddle.fluid.layer_helper import LayerHelper from paddle.fluid.layer_helper import LayerHelper
sys.path.append("./tokenizer")
from tokenizer.bert_tokenizer import BertTokenizer
def to_string_tensor(string_values, name): def to_string_tensor(string_values, name):
""" """
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册