From e9354ecc7fa5cf9e9994e82809ae18ba2886fcdf Mon Sep 17 00:00:00 2001 From: wjj19950828 Date: Mon, 28 Mar 2022 20:59:23 +0800 Subject: [PATCH] Add convert states statistics --- x2paddle/convert.py | 17 +++++++++++++++++ x2paddle/utils.py | 43 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 60 insertions(+) diff --git a/x2paddle/convert.py b/x2paddle/convert.py index dd0b28c..d4f12f9 100644 --- a/x2paddle/convert.py +++ b/x2paddle/convert.py @@ -14,6 +14,7 @@ from six import text_type as _text_type from x2paddle import program +from x2paddle.utils import ConverterCheck import argparse import sys import logging @@ -131,6 +132,7 @@ def tf2paddle(model_path, convert_to_lite=False, lite_valid_places="arm", lite_model_type="naive_buffer"): + ConverterCheck(task="TensorFlow", convert_state="Start").start() # check tensorflow installation and version try: import os @@ -159,8 +161,11 @@ def tf2paddle(model_path, graph_opt = GraphOptimizer(source_frame="tf") graph_opt.optimize(mapper.paddle_graph) mapper.paddle_graph.gen_model(save_dir) + ConverterCheck(task="TensorFlow", convert_state="Success").start() if convert_to_lite: + ConverterCheck(task="TensorFlow", lite_state="Start").start() convert2lite(save_dir, lite_valid_places, lite_model_type) + ConverterCheck(task="TensorFlow", lite_state="Success").start() def caffe2paddle(proto_file, @@ -170,6 +175,7 @@ def caffe2paddle(proto_file, convert_to_lite=False, lite_valid_places="arm", lite_model_type="naive_buffer"): + ConverterCheck(task="Caffe", convert_state="Start").start() from x2paddle.decoder.caffe_decoder import CaffeDecoder from x2paddle.op_mapper.caffe2paddle.caffe_op_mapper import CaffeOpMapper import google.protobuf as gpb @@ -189,8 +195,11 @@ def caffe2paddle(proto_file, graph_opt.optimize(mapper.paddle_graph) logging.info("Model optimized.") mapper.paddle_graph.gen_model(save_dir) + ConverterCheck(task="Caffe", convert_state="Success").start() if convert_to_lite: + ConverterCheck(task="Caffe", 
lite_state="Start").start() convert2lite(save_dir, lite_valid_places, lite_model_type) + ConverterCheck(task="Caffe", lite_state="Success").start() def onnx2paddle(model_path, @@ -198,6 +207,7 @@ def onnx2paddle(model_path, convert_to_lite=False, lite_valid_places="arm", lite_model_type="naive_buffer"): + ConverterCheck(task="ONNX", convert_state="Start").start() # check onnx installation and version try: import onnx @@ -224,8 +234,11 @@ def onnx2paddle(model_path, graph_opt.optimize(mapper.paddle_graph) logging.info("Model optimized.") mapper.paddle_graph.gen_model(save_dir) + ConverterCheck(task="ONNX", convert_state="Success").start() if convert_to_lite: + ConverterCheck(task="ONNX", lite_state="Start").start() convert2lite(save_dir, lite_valid_places, lite_model_type) + ConverterCheck(task="ONNX", lite_state="Success").start() def pytorch2paddle(module, @@ -236,6 +249,7 @@ def pytorch2paddle(module, convert_to_lite=False, lite_valid_places="arm", lite_model_type="naive_buffer"): + ConverterCheck(task="PyTorch", convert_state="Start").start() # check pytorch installation and version try: import torch @@ -275,8 +289,11 @@ def pytorch2paddle(module, logging.info("Model optimized.") mapper.paddle_graph.gen_model( save_dir, jit_type=jit_type, enable_code_optim=enable_code_optim) + ConverterCheck(task="PyTorch", convert_state="Success").start() if convert_to_lite: + ConverterCheck(task="PyTorch", lite_state="Start").start() convert2lite(save_dir, lite_valid_places, lite_model_type) + ConverterCheck(task="PyTorch", lite_state="Success").start() def main(): diff --git a/x2paddle/utils.py b/x2paddle/utils.py index 3747916..fb8d48f 100644 --- a/x2paddle/utils.py +++ b/x2paddle/utils.py @@ -14,6 +14,10 @@ # limitations under the License. 
import paddle +import requests +import threading +from x2paddle import __version__ +stats_api = "http://paddlepaddle.org.cn/paddlehub/stat" def string(param): @@ -32,6 +36,45 @@ def check_version(): return True +class ConverterCheck(threading.Thread): + """ + Count the number of calls to model conversion + """ + + def __init__(self, + task="onnx", + version=__version__, + convert_state=None, + lite_state=None, + extra_info=None): + threading.Thread.__init__(self) + self._task = task + self._version = version + self._convert_state = convert_state + self._lite_state = lite_state + self._extra_info = extra_info + + def run(self): + params = { + 'task': self._task, + 'version': self._version, + 'paddle_version': paddle.__version__, + 'convert_state': self._convert_state, + 'from': 'x2paddle' + } + if self._lite_state is not None: + params.update({'lite_state': self._lite_state}) + if self._extra_info is not None: + params.update(self._extra_info) + + try: + requests.get(stats_api, params) + except Exception: + pass + + return + + class PaddleDtypes(): def __init__(self, is_new_version=True): if is_new_version: -- GitLab