Crayon鑫 / Paddle (forked from PaddlePaddle / Paddle)
Commit 1d2bd35e (unverified)
Authored on Feb 23, 2021 by Shang Zhizhou; committed via GitHub on Feb 23, 2021
update merge pr #31060(update trt int8 calibrator to IEntropyCalibratorV2) (#31121)
Parent: a0fa0d9e
Showing 4 changed files with 21 additions and 19 deletions (+21, -19):
paddle/fluid/inference/tensorrt/trt_int8_calibrator.h (+1, -1)
paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h (+1, -18)
paddle/fluid/inference/tests/api/trt_split_converter_test.cc (+3, -0)
paddle/fluid/inference/tests/api/trt_test_helper.h (+16, -0)
paddle/fluid/inference/tensorrt/trt_int8_calibrator.h

@@ -34,7 +34,7 @@ namespace tensorrt {
 class TensorRTEngine;

-struct TRTInt8Calibrator : public nvinfer1::IInt8EntropyCalibrator {
+struct TRTInt8Calibrator : public nvinfer1::IInt8EntropyCalibrator2 {
  public:
   TRTInt8Calibrator(const std::unordered_map<std::string, size_t>& buffers,
                     int batch_size, std::string engine_name,
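The one-line change above swaps the base class of TRTInt8Calibrator from nvinfer1::IInt8EntropyCalibrator to nvinfer1::IInt8EntropyCalibrator2, i.e. from TensorRT's original entropy calibration to the "entropy calibration 2" algorithm that NVIDIA's documentation generally recommends. Both interfaces expect the same set of overridden methods, which is why no other edit to the struct is needed. As a reference point only, a minimal calibrator written directly against the pre-TensorRT-8 headers could look like the sketch below; the class name DummyCalibrator and the no-op bodies are illustrative, not Paddle code, and TensorRT 8+ additionally marks these methods noexcept and uses int32_t, so the overrides would need adjusting there.

// A minimal IInt8EntropyCalibrator2 sketch (illustrative only; not Paddle's
// TRTInt8Calibrator). Signatures follow the pre-TensorRT-8 headers.
#include <NvInfer.h>

#include <cstddef>

class DummyCalibrator : public nvinfer1::IInt8EntropyCalibrator2 {
 public:
  explicit DummyCalibrator(int batch_size) : batch_size_(batch_size) {}

  // Batch size used while feeding calibration data.
  int getBatchSize() const override { return batch_size_; }

  // Fill `bindings` with device pointers for the inputs listed in `names` and
  // return true; return false once the calibration data set is exhausted.
  bool getBatch(void* bindings[], const char* names[],
                int nb_bindings) override {
    return false;  // this sketch provides no calibration data
  }

  // Return a previously written calibration table, or nullptr to recalibrate.
  const void* readCalibrationCache(std::size_t& length) override {
    length = 0;
    return nullptr;
  }

  // Persist the calibration table that TensorRT produced.
  void writeCalibrationCache(const void* cache, std::size_t length) override {}

 private:
  int batch_size_;
};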
paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h

@@ -12,7 +12,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License. */
 #pragma once
-#include <dirent.h>
 #include <gflags/gflags.h>
 #include <glog/logging.h>
 #include <gtest/gtest.h>
@@ -27,22 +26,6 @@ limitations under the License. */
 namespace paddle {
 namespace inference {

-static int DeleteCache(std::string path) {
-  DIR* dir = opendir(path.c_str());
-  if (dir == NULL) return 0;
-  struct dirent* ptr;
-  while ((ptr = readdir(dir)) != NULL) {
-    if (std::strcmp(ptr->d_name, ".") == 0 ||
-        std::strcmp(ptr->d_name, "..") == 0) {
-      continue;
-    } else if (ptr->d_type == 8) {
-      std::string file_rm = path + "/" + ptr->d_name;
-      return remove(file_rm.c_str());
-    }
-  }
-  return 0;
-}
-
 static void run(const AnalysisConfig& config, std::vector<float>* out_data) {
   auto predictor = CreatePaddlePredictor(config);
   auto input_names = predictor->GetInputNames();
@@ -111,7 +94,7 @@ static void trt_ernie(bool with_fp16, std::vector<float> result) {
   // Delete serialization cache to perform serialization first rather than
   // deserialization.
   std::string opt_cache_dir = FLAGS_infer_model + "/_opt_cache";
-  DeleteCache(opt_cache_dir);
+  delete_cache_files(opt_cache_dir);

   SetConfig(&config, model_dir, true /* use_gpu */);
paddle/fluid/inference/tests/api/trt_split_converter_test.cc

@@ -23,6 +23,9 @@ namespace inference {
 TEST(TensorRT, split_converter) {
   std::string model_dir = FLAGS_infer_model + "/split_converter";
+  std::string opt_cache_dir = model_dir + "/_opt_cache";
+  delete_cache_files(opt_cache_dir);
+
   AnalysisConfig config;
   int batch_size = 4;
   config.EnableUseGpu(100, 0);
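The three added lines clear the `_opt_cache` directory before the predictor is created, so the test always exercises engine building and serialization instead of deserializing a possibly stale engine left over from an earlier run. For orientation only, a test of this kind typically turns on TensorRT INT8 with online calibration through AnalysisConfig roughly as sketched below; the helper name, the include path, and all argument values are illustrative assumptions, not copied from trt_split_converter_test.cc.

// Hedged sketch: enabling TensorRT INT8 calibration via AnalysisConfig.
// ConfigureTrtInt8 and every argument value here are illustrative.
#include <string>

#include "paddle/fluid/inference/api/paddle_analysis_config.h"

void ConfigureTrtInt8(paddle::AnalysisConfig* config,
                      const std::string& model_dir, int batch_size) {
  config->SetModel(model_dir);
  config->EnableUseGpu(100 /* initial GPU memory in MB */, 0 /* device id */);
  // Precision::kInt8 together with use_calib_mode=true is what routes
  // inference through the TRTInt8Calibrator touched by this commit when no
  // calibration table has been generated yet.
  config->EnableTensorRtEngine(1 << 30 /* workspace_size */,
                               batch_size /* max_batch_size */,
                               3 /* min_subgraph_size */,
                               paddle::AnalysisConfig::Precision::kInt8,
                               false /* use_static */,
                               true /* use_calib_mode */);
}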
paddle/fluid/inference/tests/api/trt_test_helper.h

@@ -12,6 +12,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License. */
 #pragma once
+#include <dirent.h>
 #include <string>
 #include <vector>
@@ -134,5 +135,20 @@ void compare_continuous_input(std::string model_dir, bool use_tensorrt) {
   }
 }

+void delete_cache_files(std::string path) {
+  DIR* dir = opendir(path.c_str());
+  if (dir == NULL) return;
+  struct dirent* ptr;
+  while ((ptr = readdir(dir)) != NULL) {
+    if (std::strcmp(ptr->d_name, ".") == 0 ||
+        std::strcmp(ptr->d_name, "..") == 0) {
+      continue;
+    } else if (ptr->d_type == 8) {
+      std::string file_rm = path + "/" + ptr->d_name;
+      remove(file_rm.c_str());
+    }
+  }
+}
+
 }  // namespace inference
 }  // namespace paddle
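A small note on the new helper: `ptr->d_type == 8` compares against the raw value of DT_REG, so only regular files directly inside the cache directory are removed (and d_type is not populated on every filesystem, which is acceptable for a Linux-only test helper). Purely as an illustration and not part of this commit, the same cleanup written with C++17 <filesystem> avoids the magic number; the name delete_cache_files_fs is hypothetical.

// Illustrative alternative (not part of the commit): the same cache cleanup
// with C++17 <filesystem>.
#include <filesystem>
#include <string>

void delete_cache_files_fs(const std::string& path) {
  namespace fs = std::filesystem;
  std::error_code ec;
  // An unreadable or missing directory yields an end iterator, mirroring the
  // "if (dir == NULL) return;" early exit in the dirent version.
  for (const auto& entry : fs::directory_iterator(path, ec)) {
    if (entry.is_regular_file(ec)) {  // same role as ptr->d_type == 8 (DT_REG)
      fs::remove(entry.path(), ec);   // errors ignored, like remove() above
    }
  }
}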