Unverified commit 5f25183e authored by Shuangchi He, committed by GitHub

[CodeStyle][F401] remove unused imports in unittests/r_cmake_paddle_tools. (#46712)

Parent 4bbb0b38
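For context, F401 is the pyflakes check (surfaced through flake8) for names that are imported but never used. Below is a minimal, illustrative sketch of how such a check can be implemented with the standard-library `ast` module; it is a toy written for this note, not the tool used for this PR, and it deliberately ignores star imports, `__all__` re-exports, and names referenced only inside string annotations.

```python
# Minimal F401-style checker (illustrative only; flake8/pyflakes does
# the real work for this PR and handles many more cases).
import ast
import sys


def unused_imports(source):
    tree = ast.parse(source)
    imported = {}  # bound name -> line number of its import
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                # "import a.b" binds "a"; "import a.b as c" binds "c"
                imported[alias.asname or alias.name.split('.')[0]] = node.lineno
        elif isinstance(node, ast.ImportFrom):
            for alias in node.names:
                imported[alias.asname or alias.name] = node.lineno
    # Import statements create bindings but no ast.Name nodes, so every
    # Name seen in the tree is a genuine use.
    used = {node.id for node in ast.walk(tree) if isinstance(node, ast.Name)}
    return sorted(
        (lineno, name) for name, lineno in imported.items() if name not in used)


if __name__ == '__main__':
    for path in sys.argv[1:]:
        with open(path) as f:
            for lineno, name in unused_imports(f.read()):
                print("%s:%d: F401 '%s' imported but unused" % (path, lineno, name))
```

In practice the offending lines usually come from something like `flake8 --select=F401`, and a fixer such as autoflake (`autoflake --in-place --remove-all-unused-imports`) does the mechanical removal; the hunks below are the result of that kind of cleanup plus formatter-driven line rewrapping.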
@@ -20,6 +20,7 @@ res = sys.argv[1]
out = sys.argv[2]
var = re.sub(r'[ .-]', '_', os.path.basename(res))
open(out, "w").write("const unsigned char " + var + "[] = {" + ",".join([
"0x%02x" % ord(c) for c in open(res).read()
]) + ",0};\n" + "const unsigned " + var + "_size = sizeof(" + var + ");\n")
open(out, "w").write("const unsigned char " + var + "[] = {" +
",".join(["0x%02x" % ord(c)
for c in open(res).read()]) + ",0};\n" +
"const unsigned " + var + "_size = sizeof(" + var + ");\n")
@@ -12,11 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import yaml
import re
import argparse
import os
########################
### Global Variables ###
@@ -12,23 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
import re
import argparse
import os
import logging
from codegen_utils import core_ops_returns_info, core_ops_args_info, core_ops_args_type_info
from codegen_utils import yaml_types_mapping
from codegen_utils import ReadFwdFile, ReadBwdFile
from codegen_utils import FindGradName, FindForwardName, GetSavedName, GetGradNodeName
from codegen_utils import ReadBwdFile
from codegen_utils import FindForwardName, GetGradNodeName, GetSavedName
from codegen_utils import IsPlainTensorType, IsVectorTensorType
from codegen_utils import GetConstReference, RemoveConstAndReference
from codegen_utils import GetDygraphForwardFunctionName, GetIntermediateAPIFunctionName, GetDygraphLogName
from codegen_utils import GetDygraphForwardFunctionName, GetIntermediateAPIFunctionName
from codegen_utils import GetAutoGradMetaName, GetAutoGradMetaVectorName
from codegen_utils import RemoveSpecialSymbolsInName, RecoverBaseNameOfInplaceFunction
from codegen_utils import GetInplacedFunctionName
from codegen_utils import ParseYamlArgs, ParseYamlReturns, ParseYamlForwardFromBackward
from codegen_utils import ParseYamlForward, ParseYamlBackward
from codegen_utils import ParseYamlForwardFromBackward
from codegen_utils import ParseYamlBackward
from codegen_utils import ParseYamlInplaceInfo
from codegen_utils import FunctionGeneratorBase, GeneratorBase
from codegen_utils import ops_to_fill_zero_for_empty_grads
@@ -14,11 +14,9 @@
import os
import argparse
import logging
from codegen_utils import FunctionGeneratorBase, GeneratorBase
from codegen_utils import yaml_types_mapping
from codegen_utils import ReadFwdFile, IsVectorTensorType, GetForwardFunctionName
from codegen_utils import ParseYamlForward, GetInplacedFunctionName
from codegen_utils import GetForwardFunctionName, IsVectorTensorType
from codegen_utils import GetInplacedFunctionName
###########################
## Global Configurations ##
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import tarfile, os
import tarfile
import sys
@@ -10,18 +10,11 @@
# without warranties or conditions of any kind, either express or implied.
# see the license for the specific language governing permissions and
# limitations under the license.
import hashlib
import unittest
import os
import io
import numpy as np
import time
import sys
import random
import functools
import contextlib
from PIL import Image
import math
from paddle.dataset.common import download
import tarfile
import argparse
@@ -163,12 +156,12 @@ def run_convert():
retry = 0
try_limit = 3
while not (os.path.exists(output_file) and
os.path.getsize(output_file) == FULL_SIZE_BYTES):
while not (os.path.exists(output_file)
and os.path.getsize(output_file) == FULL_SIZE_BYTES):
if os.path.exists(output_file):
sys.stderr.write(
"\n\nThe existing binary file[{}] is broken. Start to generate new one...\n\n".
format(output_file))
"\n\nThe existing binary file[{}] is broken. Start to generate new one...\n\n"
.format(output_file))
os.remove(output_file)
if retry < try_limit:
retry = retry + 1
@@ -204,15 +197,16 @@ def convert_Imagenet_local2bin(args):
img = Image.open(img_path)
img = process_image(img)
np_img = np.array(img)
of.seek(SIZE_INT64 + SIZE_FLOAT32 * DATA_DIM * DATA_DIM * 3 *
idx)
of.seek(SIZE_INT64 +
SIZE_FLOAT32 * DATA_DIM * DATA_DIM * 3 * idx)
of.write(np_img.astype('float32').tobytes())
#save label(int64_t) to file
label_int = (int)(label)
np_label = np.array(label_int)
of.seek(SIZE_INT64 + SIZE_FLOAT32 * DATA_DIM * DATA_DIM * 3 *
num_images + idx * SIZE_INT64)
of.seek(SIZE_INT64 +
SIZE_FLOAT32 * DATA_DIM * DATA_DIM * 3 * num_images +
idx * SIZE_INT64)
of.write(np_label.astype('int64').tobytes())
# The bin file should contain
@@ -221,39 +215,41 @@ def convert_Imagenet_local2bin(args):
target_size = SIZE_INT64 + num_images * 3 * args.data_dim * args.data_dim * SIZE_FLOAT32 + num_images * SIZE_INT64
if (os.path.getsize(bin_file_path) == target_size):
print(
"Success! The user data output binary file can be found at: {0}".
format(bin_file_path))
"Success! The user data output binary file can be found at: {0}"
.format(bin_file_path))
else:
print("Conversion failed!")
def main_preprocess_Imagenet(args):
parser = argparse.ArgumentParser(
description="Convert the full Imagenet val set or local data to binary file.",
description=
"Convert the full Imagenet val set or local data to binary file.",
usage=None,
add_help=True)
parser.add_argument(
'--local',
action="store_true",
help="If used, user need to set --data_dir and then convert file")
parser.add_argument(
"--data_dir", default="", type=str, help="Dataset root directory")
parser.add_argument("--data_dir",
default="",
type=str,
help="Dataset root directory")
parser.add_argument(
"--label_list",
type=str,
default="val_list.txt",
help="List of object labels with same sequence as denoted in the annotation file"
help=
"List of object labels with same sequence as denoted in the annotation file"
)
parser.add_argument(
"--output_file",
type=str,
default="imagenet_small.bin",
help="File path of the output binary file")
parser.add_argument(
"--data_dim",
type=int,
default=DATA_DIM,
help="Image preprocess with data_dim width and height")
parser.add_argument("--output_file",
type=str,
default="imagenet_small.bin",
help="File path of the output binary file")
parser.add_argument("--data_dim",
type=int,
default=DATA_DIM,
help="Image preprocess with data_dim width and height")
args = parser.parse_args()
if args.local:
@@ -12,10 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from full_pascalvoc_test_preprocess import main_pascalvoc_preprocess
import numpy as np
import paddle.fluid.core as core
import paddle.fluid as fluid
import unittest
import os
@@ -13,7 +13,6 @@
# limitations under the License.
import paddle
from paddle.nn import Layer
from paddle.static import InputSpec
from paddle.jit import to_static
import sys
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import math
from functools import partial
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
import argparse
import re
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
import argparse
import re
@@ -12,17 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List, Dict
import itertools
import re
from jinja2.filters import do_xmlattr
from type_mapping import (input_types_map, optional_input_types_map,
attr_types_map, opmaker_attr_types_map,
output_type_map)
from type_mapping import (dense_input_types_map, dense_optional_input_types_map,
dense_output_types_map, sr_input_types_map,
sr_optional_input_types_map, sr_output_types_map,
dense_output_types_map, sr_output_types_map,
phi_attr_types_map)
@@ -14,8 +14,6 @@
import argparse
import os
import re
from itertools import chain
from pathlib import Path
import yaml
@@ -12,10 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
import argparse
import re
from api_gen import ForwardAPI
from sparse_api_gen import SparseAPI
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
import re
import argparse
@@ -13,7 +13,6 @@
# limitations under the License.
import argparse
from pathlib import Path
import yaml
@@ -13,7 +13,6 @@
# limitations under the License.
import re
import yaml
from copy import copy
from typing import Dict, Any, List, Tuple
from tests import is_attr, is_input, is_output, is_vec
@@ -12,10 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
import argparse
import re
from api_gen import ForwardAPI
from api_base import PREFIX_TENSOR_NAME
@@ -12,10 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
import argparse
import re
from sparse_api_gen import SparseAPI
from backward_api_gen import BackwardAPI
@@ -12,10 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
import argparse
import re
from api_gen import ForwardAPI
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
import argparse
@@ -16,10 +16,8 @@
#
import platform
from sys import argv
import argparse
import os
import time
def parse_args():
@@ -16,10 +16,8 @@
# pylint: skip-file
import functools
import numpy as np
from paddle.fluid.core import AnalysisConfig
from paddle.fluid.core import AnalysisPredictor
from paddle.fluid.core import create_paddle_predictor
@@ -12,18 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import six
import sys
import re
import os
import glob
import unittest
import pandas
import tempfile
import platform
import pandas as pd
class ChromeTraceFormatter(object):
@@ -17,8 +17,7 @@ import time
import json
import glob
import logging
import pandas as pd
from multiprocessing import Process, Lock
from multiprocessing import Lock
""" Some terms to clarify the code
in most cases, one or more parameters may be set as input args for a class or a function
in the form of a single variable or a k-v dict
@@ -14,21 +14,16 @@
import os
import glob
import logging
import argparse
import multiprocessing
import pandas as pd
from multiprocessing import Process
from NetFileReader import netFileReader
from DCGMFileReader import dcgmFileReader
from ProfileFileReader import profileFileReader
from CspFileReader import getLogger
from CspFileReader import TIME_PATH, DCGM_PATH, NET_PATH, PROFILE_PATH
from CspFileReader import NETINFO_TRACE_NUM, DCGMINFO_TRACE_NUM, PIPELINEINFO_TRACE_NUM
from CspFileReader import FILEORGANIZEFORM_BYRANK, FILEORGANIZEFORM_BYTRAINER, FILEORGANIZEFORM_BYOTHER, FILEORGANIZEFORM
from CspFileReader import FILEORGANIZEFORM_BYRANK, FILEORGANIZEFORM_BYTRAINER
def get_argparse():
@@ -14,23 +14,17 @@
import os
import re
import json
import glob
import logging
import tempfile
import argparse
import pandas as pd
import multiprocessing
from multiprocessing import Process
from CspChromeTraceFormatter import ChromeTraceFormatter
from CspFileReader import FileReader
from CspFileReader import getLogger
from CspFileReader import dcgmMetricParameterMap
from CspFileReader import TIME_PATH, DCGM_PATH, NET_PATH, PROFILE_PATH
from CspFileReader import NETINFO_TRACE_NUM, DCGMINFO_TRACE_NUM, PIPELINEINFO_TRACE_NUM
from CspFileReader import FILEORGANIZEFORM_BYRANK, FILEORGANIZEFORM_BYTRAINER, FILEORGANIZEFORM_BYOTHER, FILEORGANIZEFORM
from CspFileReader import PIPELINEINFO_TRACE_NUM
from CspFileReader import FILEORGANIZEFORM_BYTRAINER
class dcgmFileReader(FileReader):
@@ -12,22 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import json
import glob
import logging
import pandas as pd
import multiprocessing
from multiprocessing import Process
from CspChromeTraceFormatter import ChromeTraceFormatter
from CspFileReader import FileReader
from CspFileReader import getLogger
from CspFileReader import TIME_PATH, DCGM_PATH, NET_PATH, PROFILE_PATH
from CspFileReader import NETINFO_TRACE_NUM, DCGMINFO_TRACE_NUM, PIPELINEINFO_TRACE_NUM
from CspFileReader import FILEORGANIZEFORM_BYRANK, FILEORGANIZEFORM_BYTRAINER, FILEORGANIZEFORM_BYOTHER, FILEORGANIZEFORM
from CspFileReader import PIPELINEINFO_TRACE_NUM
from CspFileReader import FILEORGANIZEFORM_BYTRAINER
class netFileReader(FileReader):
@@ -12,26 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import six
import glob
import json
import logging
import argparse
import pandas as pd
import multiprocessing
from multiprocessing import Process
import google.protobuf.text_format as text_format
import paddle.fluid.proto.profiler.profiler_pb2 as profiler_pb2
from CspChromeTraceFormatter import ChromeTraceFormatter
from CspFileReader import FileReader
from CspFileReader import getLogger
from CspFileReader import TIME_PATH, DCGM_PATH, NET_PATH, PROFILE_PATH
from CspFileReader import NETINFO_TRACE_NUM, DCGMINFO_TRACE_NUM, PIPELINEINFO_TRACE_NUM
from CspFileReader import FILEORGANIZEFORM_BYRANK, FILEORGANIZEFORM_BYTRAINER, FILEORGANIZEFORM_BYOTHER, FILEORGANIZEFORM
from CspFileReader import FILEORGANIZEFORM_BYRANK
class profileFileReader(FileReader):
@@ -15,11 +15,7 @@
import commands
from xml.etree import ElementTree
import re
import time
import queue
import threading
import os
import json
import sys
@@ -14,8 +14,6 @@
import difflib
import sys
import importlib
import os
import count_api_without_core_ops
with open(sys.argv[1], 'r') as f:
@@ -15,7 +15,6 @@
""" Get pull requests. """
import os
import time
import os.path
from github import Github
@@ -13,10 +13,9 @@
# limitations under the License.
"""DocstringChecker is used to check python doc string's style."""
import six
import astroid
from pylint.checkers import BaseChecker, utils
from pylint.checkers import BaseChecker
from pylint.interfaces import IAstroidChecker
from collections import defaultdict
@@ -15,8 +15,6 @@
import docstring_checker
import pylint.testutils
import astroid
import pytest
import sys
class TestDocstring(pylint.testutils.CheckerTestCase):
@@ -16,11 +16,7 @@ import importlib
import inspect
import collections
import sys
import pydoc
import hashlib
import functools
import platform
from paddle import _C_ops, _legacy_C_ops
__all__ = [
'get_apis_with_and_without_core_ops',
@@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import difflib
import sys
try:
@@ -15,8 +15,6 @@
import ssl
import re
import urllib.request
import json
import collections
import sys
import getopt
import external_error_pb2
@@ -15,10 +15,7 @@
# limitations under the License.
import os
import time
import json
import datetime
import codecs
import sys
@@ -16,7 +16,6 @@
import os
import json
import re
import sys
import time
import subprocess
import requests
@@ -13,8 +13,6 @@
# limitations under the License.
import os
import json
import time
import sys
import re
@@ -14,7 +14,6 @@
import os
import sys
import re
import json
@@ -15,7 +15,6 @@
import queue
import threading
import os
import json
import time
import sys
@@ -14,9 +14,6 @@
"""
A fake model with multiple FC layers to test CINN on a more complex model.
"""
import numpy
import sys, os
import numpy as np
import paddle
import paddle.fluid as fluid
@@ -14,7 +14,6 @@
import json
import yaml
import sys
import os
from get_compat_kernel_signature import get_compat_kernels_info
@@ -13,8 +13,6 @@
# limitations under the License.
import os
import re
import json
skip_list = ["adam_sig.cc", "adamw_sig.cc"]
@@ -17,7 +17,7 @@
import argparse
import json
import yaml
from typing import List, Dict, Any
from typing import Dict, List
skipped_phi_api_list_file = "/tools/infrt/skipped_phi_api.json"
api_yaml_file = "/paddle/phi/api/yaml/api.yaml"
@@ -14,7 +14,6 @@
import os
import re
import json
skip_list = []
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import platform
@@ -25,6 +25,7 @@ import hashlib
import pkgutil
import logging
import argparse
import paddle
member_dict = collections.OrderedDict()
@@ -126,7 +127,6 @@ def get_all_api(root_path='paddle', attr="__all__"):
"""
walk through the paddle package to collect all the apis.
"""
import paddle
global api_info_dict
api_counter = 0
for filefinder, name, ispkg in pkgutil.walk_packages(
@@ -140,7 +140,6 @@ def get_all_api(root_path='paddle', attr="__all__"):
continue
except AttributeError:
logger.warning("AttributeError occurred when `eval(%s)`", name)
pass
else:
api_counter += process_module(m, attr)
@@ -159,7 +158,6 @@ def insert_api_into_dict(full_name, gen_doc_anno=None):
Return:
api_info object or None
"""
import paddle
try:
obj = eval(full_name)
fc_id = id(obj)
@@ -222,7 +220,6 @@ def process_module(m, attr="__all__"):
def check_public_api():
import paddle
modulelist = [ #npqa
paddle, paddle.amp, paddle.nn, paddle.nn.functional,
paddle.nn.initializer, paddle.nn.utils, paddle.static, paddle.static.nn,
@@ -271,7 +268,6 @@ def check_public_api():
def check_allmodule_callable():
import paddle
modulelist = [paddle]
for m in modulelist:
visit_all_module(m)
@@ -12,13 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from xml.etree import ElementTree
import re
import time
import queue
import threading
import os
import json
import sys
taskQueue = queue.Queue()
@@ -448,8 +448,6 @@ def get_filenames(full_test=False):
'''
global whl_error
import paddle
import paddle.fluid.contrib.slim.quantization
whl_error = []
if full_test:
get_full_api_from_pr_spec()
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import distro
import platform
@@ -17,10 +17,7 @@
TestCases for check_api_compatible.py
"""
import unittest
import sys
import os
import tempfile
import inspect
from check_api_compatible import read_argspec_from_file
from check_api_compatible import check_compatible
@@ -451,7 +451,6 @@ class Test_get_api_md5(unittest.TestCase):
def tearDown(self):
os.remove(self.api_pr_spec_filename)
pass
def test_get_api_md5(self):
res = get_api_md5('paddle/fluid/API_PR.spec')
@@ -15,10 +15,7 @@
import argparse
import json
import six
import sys
import unittest
import google.protobuf.text_format as text_format
import paddle.fluid.proto.profiler.profiler_pb2 as profiler_pb2
parser = argparse.ArgumentParser(description=__doc__)
@@ -13,9 +13,7 @@
# limitations under the License.
"""To get a list of prec ut """
import sys
import os
import platform
def get_prec_ut_list(all_test_cases, prec_test_cases):