Unverified commit d77c236f authored by Manuel Garcia, committed by GitHub

Replace deprecated methods 'warn' and 'getargspec' (#3627)

Parent 2ecc6525
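The change is mechanical but worth spelling out: `Logger.warn` is a deprecated alias of `Logger.warning`, and `inspect.getargspec` (deprecated since Python 3.0, removed in Python 3.11) is replaced by `inspect.getfullargspec`, whose result names the `**kwargs` slot `varkw` rather than the old `keywords` field. Below is a minimal, self-contained sketch of the before/after pattern; the function and class names are illustrative only and do not come from the PaddleDetection sources.

```python
import inspect
import logging

logger = logging.getLogger(__name__)


def report_missing(path):
    # Old: logger.warn(...) -- still works but is a deprecated alias that
    # emits a DeprecationWarning on recent Python versions.
    # New: logger.warning(...) is the supported spelling.
    logger.warning('path of %s not exists', path)


def accepts_var_keyword(func):
    # Old: inspect.getargspec(func).keywords
    # New: inspect.getfullargspec(func).varkw
    # getargspec() was removed in Python 3.11; getfullargspec() is the
    # drop-in replacement used throughout this commit.
    return inspect.getfullargspec(func).varkw is not None


class Example:
    def __init__(self, size=1, **kwargs):
        self.size = size


if __name__ == '__main__':
    logging.basicConfig(level=logging.WARNING)
    report_missing('/tmp/missing.txt')
    print(accepts_var_keyword(Example.__init__))  # True
```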
@@ -93,7 +93,7 @@ def dota_2_coco(image_dir,
         # annotations
         anno_txt_path = osp.join(txt_dir, osp.splitext(basename)[0] + '.txt')
         if not osp.exists(anno_txt_path):
-            logger.warn('path of {} not exists'.format(anno_txt_path))
+            logger.warning('path of {} not exists'.format(anno_txt_path))
         for line in open(anno_txt_path):
             line = line.strip()
@@ -185,12 +185,12 @@ def extract_schema(cls):
         annotations = argspec.annotations
         has_kwargs = argspec.varkw is not None
     else:
-        argspec = inspect.getargspec(ctor)
+        argspec = inspect.getfullargspec(ctor)
         # python 2 type hinting workaround, see pep-3107
         # however, since `typeguard` does not support python 2, type checking
         # is still python 3 only for now
         annotations = getattr(ctor, '__annotations__', {})
-        has_kwargs = argspec.keywords is not None
+        has_kwargs = argspec.varkw is not None
 
     names = [arg for arg in argspec.args if arg != 'self']
     defaults = argspec.defaults
@@ -52,7 +52,7 @@ def _make_python_representer(cls):
     if hasattr(inspect, 'getfullargspec'):
         argspec = inspect.getfullargspec(cls)
     else:
-        argspec = inspect.getargspec(cls.__init__)
+        argspec = inspect.getfullargspec(cls.__init__)
     argnames = [arg for arg in argspec.args if arg != 'self']
 
     def python_representer(dumper, obj):
@@ -56,9 +56,9 @@ class Compose(object):
                 data = f(data)
             except Exception as e:
                 stack_info = traceback.format_exc()
-                logger.warn("fail to map sample transform [{}] "
-                            "with error: {} and stack:\n{}".format(
-                                f, e, str(stack_info)))
+                logger.warning("fail to map sample transform [{}] "
+                               "with error: {} and stack:\n{}".format(
+                                   f, e, str(stack_info)))
                 raise e
         return data
@@ -75,9 +75,9 @@ class BatchCompose(Compose):
                 data = f(data)
             except Exception as e:
                 stack_info = traceback.format_exc()
-                logger.warn("fail to map batch transform [{}] "
-                            "with error: {} and stack:\n{}".format(
-                                f, e, str(stack_info)))
+                logger.warning("fail to map batch transform [{}] "
+                               "with error: {} and stack:\n{}".format(
+                                   f, e, str(stack_info)))
                 raise e
         # remove keys which is not needed by model
@@ -185,8 +185,8 @@ class BaseDataLoader(object):
         if use_shared_memory:
             shm_size = _get_shared_memory_size_in_M()
             if shm_size is not None and shm_size < 1024.:
-                logger.warn("Shared memory size is less than 1G, "
-                            "disable shared_memory in DataLoader")
+                logger.warning("Shared memory size is less than 1G, "
+                               "disable shared_memory in DataLoader")
                 use_shared_memory = False
         self.dataloader = DataLoader(
@@ -77,7 +77,7 @@ def get_categories(metric_type, anno_file=None, arch=None):
     elif metric_type.lower() == 'oid':
         if anno_file and os.path.isfile(anno_file):
-            logger.warn("only default categories support for OID19")
+            logger.warning("only default categories support for OID19")
         return _oid19_category()
     elif metric_type.lower() == 'widerface':
@@ -175,12 +175,12 @@ class MOTDataSet(DetDataset):
             lbl_file = self.label_files[data_name][img_index - start_index]
             if not os.path.exists(img_file):
-                logger.warn('Illegal image file: {}, and it will be ignored'.
-                            format(img_file))
+                logger.warning('Illegal image file: {}, and it will be ignored'.
+                               format(img_file))
                 continue
             if not os.path.isfile(lbl_file):
-                logger.warn('Illegal label file: {}, and it will be ignored'.
-                            format(lbl_file))
+                logger.warning('Illegal label file: {}, and it will be ignored'.
+                               format(lbl_file))
                 continue
             labels = np.loadtxt(lbl_file, dtype=np.float32).reshape(-1, 6)
@@ -89,13 +89,14 @@ class VOCDataSet(DetDataset):
                 img_file, xml_file = [os.path.join(image_dir, x) \
                         for x in line.strip().split()[:2]]
                 if not os.path.exists(img_file):
-                    logger.warn(
+                    logger.warning(
                         'Illegal image file: {}, and it will be ignored'.format(
                             img_file))
                     continue
                 if not os.path.isfile(xml_file):
-                    logger.warn('Illegal xml file: {}, and it will be ignored'.
-                                format(xml_file))
+                    logger.warning(
+                        'Illegal xml file: {}, and it will be ignored'.format(
+                            xml_file))
                     continue
                 tree = ET.parse(xml_file)
                 if tree.find('id') is None:
@@ -107,7 +108,7 @@ class VOCDataSet(DetDataset):
                 im_w = float(tree.find('size').find('width').text)
                 im_h = float(tree.find('size').find('height').text)
                 if im_w < 0 or im_h < 0:
-                    logger.warn(
+                    logger.warning(
                         'Illegal width: {} or height: {} in annotation, '
                         'and {} will be ignored'.format(im_w, im_h, xml_file))
                     continue
@@ -137,7 +138,7 @@ class VOCDataSet(DetDataset):
                         gt_score.append([1.])
                         difficult.append([_difficult])
                     else:
-                        logger.warn(
+                        logger.warning(
                             'Found an invalid bbox in annotations: xml_file: {}'
                             ', x1: {}, y1: {}, x2: {}, y2: {}.'.format(
                                 xml_file, x1, y1, x2, y2))
@@ -139,9 +139,9 @@ class WIDERFaceDataSet(DetDataset):
                 h = float(split_str[3])
                 # Filter out wrong labels
                 if w < 0 or h < 0:
-                    logger.warn('Illegal box with w: {}, h: {} in '
-                                'img: {}, and it will be ignored'.format(
-                                    w, h, file_dict[num_class][0]))
+                    logger.warning('Illegal box with w: {}, h: {} in '
+                                   'img: {}, and it will be ignored'.format(
+                                       w, h, file_dict[num_class][0]))
                     continue
                 xmin = max(0, xmin)
                 ymin = max(0, ymin)
@@ -131,7 +131,7 @@ class Decode(BaseOperator):
         if 'h' not in sample:
             sample['h'] = im.shape[0]
         elif sample['h'] != im.shape[0]:
-            logger.warn(
+            logger.warning(
                 "The actual image height: {} is not equal to the "
                 "height: {} in annotation, and update sample['h'] by actual "
                 "image height.".format(im.shape[0], sample['h']))
@@ -139,7 +139,7 @@ class Decode(BaseOperator):
         if 'w' not in sample:
             sample['w'] = im.shape[1]
         elif sample['w'] != im.shape[1]:
-            logger.warn(
+            logger.warning(
                 "The actual image width: {} is not equal to the "
                 "width: {} in annotation, and update sample['w'] by actual "
                 "image width.".format(im.shape[1], sample['w']))
@@ -726,7 +726,7 @@ class Resize(BaseOperator):
         # apply rbox
         if 'gt_rbox2poly' in sample:
             if np.array(sample['gt_rbox2poly']).shape[1] != 8:
-                logger.warn(
+                logger.warning(
                     "gt_rbox2poly's length shoule be 8, but actually is {}".
                     format(len(sample['gt_rbox2poly'])))
             sample['gt_rbox2poly'] = self.apply_bbox(sample['gt_rbox2poly'],
@@ -175,7 +175,7 @@ class Checkpointer(Callback):
                     else:
                         key = 'mask'
                     if key not in map_res:
-                        logger.warn("Evaluation results empty, this may be due to " \
+                        logger.warning("Evaluation results empty, this may be due to " \
                                     "training iterations being too few or not " \
                                     "loading the correct weights.")
                         return
@@ -75,7 +75,7 @@ class Tracker(object):
         if self.cfg.metric == 'MOT':
             self._metrics = [MOTMetric(), ]
         else:
-            logger.warn("Metric not support for metric type {}".format(
+            logger.warning("Metric not support for metric type {}".format(
                 self.cfg.metric))
             self._metrics = []
@@ -246,7 +246,7 @@ class Trainer(object):
         elif self.cfg.metric == 'MOTDet':
             self._metrics = [JDEDetMetric(), ]
         else:
-            logger.warn("Metric not support for metric type {}".format(
+            logger.warning("Metric not support for metric type {}".format(
                 self.cfg.metric))
             self._metrics = []
@@ -185,12 +185,12 @@ def extract_schema(cls):
         annotations = argspec.annotations
         has_kwargs = argspec.varkw is not None
     else:
-        argspec = inspect.getargspec(ctor)
+        argspec = inspect.getfullargspec(ctor)
         # python 2 type hinting workaround, see pep-3107
        # however, since `typeguard` does not support python 2, type checking
         # is still python 3 only for now
         annotations = getattr(ctor, '__annotations__', {})
-        has_kwargs = argspec.keywords is not None
+        has_kwargs = argspec.varkw is not None
 
     names = [arg for arg in argspec.args if arg != 'self']
     defaults = argspec.defaults
@@ -52,7 +52,7 @@ def _make_python_representer(cls):
     if hasattr(inspect, 'getfullargspec'):
         argspec = inspect.getfullargspec(cls)
     else:
-        argspec = inspect.getargspec(cls.__init__)
+        argspec = inspect.getfullargspec(cls.__init__)
     argnames = [arg for arg in argspec.args if arg != 'self']
 
     def python_representer(dumper, obj):
@@ -166,8 +166,8 @@ def make_partial(cls):
     if not hasattr(op_module, op_name):
         import logging
         logger = logging.getLogger(__name__)
-        logger.warn('{} OP not found, maybe a newer version of paddle '
-                    'is required.'.format(cls.__op__))
+        logger.warning('{} OP not found, maybe a newer version of paddle '
+                       'is required.'.format(cls.__op__))
         return cls
 
     op = getattr(op_module, op_name)
@@ -211,10 +211,10 @@ class ParallelMap(object):
             else:
                 errmsg = "consumer[{}] exit abnormally".format(w.ident)
-                logger.warn(errmsg)
+                logger.warning(errmsg)
         if abnormal_num > 0:
-            logger.warn("{} consumers have exited abnormally!!!" \
+            logger.warning("{} consumers have exited abnormally!!!" \
                 .format(abnormal_num))
         return abnormal_num == 0
@@ -239,7 +239,7 @@ class ParallelMap(object):
                 if isinstance(sample, EndSignal):
                     self._consumer_endsig[sample.id] = sample
-                    logger.warn("recv endsignal from outq with errmsg[{}]" \
+                    logger.warning("recv endsignal from outq with errmsg[{}]" \
                         .format(sample.errmsg))
                     if len(self._consumer_endsig.keys()) < len(self._consumers):
@@ -268,7 +268,7 @@ class ParallelMap(object):
                 " for some consumers exited abnormally before!!!"
             if not self.drained():
-                logger.warn("reset before epoch[{}] finishes".format(
+                logger.warning("reset before epoch[{}] finishes".format(
                     self._epoch))
                 self._produced = self._produced - self._consumed
             else:
@@ -46,8 +46,9 @@ class Compose(object):
                 data = f(data, ctx)
             except Exception as e:
                 stack_info = traceback.format_exc()
-                logger.warn("fail to map op [{}] with error: {} and stack:\n{}".
-                            format(f, e, str(stack_info)))
+                logger.warning(
+                    "fail to map op [{}] with error: {} and stack:\n{}".format(
+                        f, e, str(stack_info)))
                 raise e
         return data
@@ -75,7 +75,7 @@ class SharedQueue(Queue):
             stack_info = traceback.format_exc()
             err_msg = 'failed to put a element to SharedQueue '\
                 'with stack info[%s]' % (stack_info)
-            logger.warn(err_msg)
+            logger.warning(err_msg)
             if buff is not None:
                 buff.free()
@@ -95,7 +95,7 @@ class SharedQueue(Queue):
             stack_info = traceback.format_exc()
             err_msg = 'failed to get element from SharedQueue '\
                 'with stack info[%s]' % (stack_info)
-            logger.warn(err_msg)
+            logger.warning(err_msg)
             raise e
         finally:
             if buff is not None:
@@ -233,7 +233,7 @@ class PageAllocator(object):
         fname = fname + '.' + str(uuid.uuid4())[:6]
         with open(fname, 'wb') as f:
             f.write(pickle.dumps(info, -1))
-        logger.warn('dump alloc info to file[%s]' % (fname))
+        logger.warning('dump alloc info to file[%s]' % (fname))
 
     def _reset(self):
         alloc_page_pos = self._header_pages
@@ -460,7 +460,7 @@ class SharedMemoryMgr(object):
             if start is None:
                 time.sleep(0.1)
                 if ct % 100 == 0:
-                    logger.warn('not enough space for reason[%s]' % (errmsg))
+                    logger.warning('not enough space for reason[%s]' % (errmsg))
                 ct += 1
             else:
@@ -97,8 +97,8 @@ class COCODataSet(DataSet):
         if 'annotations' not in coco.dataset:
             self.load_image_only = True
-            logger.warn('Annotation file: {} does not contains ground truth '
-                        'and load image information only.'.format(anno_path))
+            logger.warning('Annotation file: {} does not contains ground truth '
+                           'and load image information only.'.format(anno_path))
         for img_id in img_ids:
             img_anno = coco.loadImgs([img_id])[0]
@@ -109,14 +109,14 @@ class COCODataSet(DataSet):
             im_path = os.path.join(image_dir,
                                    im_fname) if image_dir else im_fname
             if not os.path.exists(im_path):
-                logger.warn('Illegal image file: {}, and it will be '
-                            'ignored'.format(im_path))
+                logger.warning('Illegal image file: {}, and it will be '
+                               'ignored'.format(im_path))
                 continue
             if im_w < 0 or im_h < 0:
-                logger.warn('Illegal width: {} or height: {} in annotation, '
-                            'and im_id: {} will be ignored'.format(im_w, im_h,
-                                                                   img_id))
+                logger.warning('Illegal width: {} or height: {} in annotation, '
+                               'and im_id: {} will be ignored'.format(
+                                   im_w, im_h, img_id))
                 continue
             coco_rec = {
@@ -141,7 +141,7 @@ class COCODataSet(DataSet):
                     inst['clean_bbox'] = [x1, y1, x2, y2]
                     bboxes.append(inst)
                 else:
-                    logger.warn(
+                    logger.warning(
                         'Found an invalid bbox in annotations: im_id: {}, '
                         'x1: {}, y1: {}, x2: {}, y2: {}.'.format(
                             img_id, x1, y1, x2, y2))
@@ -111,13 +111,14 @@ class VOCDataSet(DataSet):
                 img_file, xml_file = [os.path.join(image_dir, x) \
                         for x in line.strip().split()[:2]]
                 if not os.path.exists(img_file):
-                    logger.warn(
+                    logger.warning(
                         'Illegal image file: {}, and it will be ignored'.format(
                             img_file))
                     continue
                 if not os.path.isfile(xml_file):
-                    logger.warn('Illegal xml file: {}, and it will be ignored'.
-                                format(xml_file))
+                    logger.warning(
+                        'Illegal xml file: {}, and it will be ignored'.format(
+                            xml_file))
                     continue
                 tree = ET.parse(xml_file)
                 if tree.find('id') is None:
@@ -129,7 +130,7 @@ class VOCDataSet(DataSet):
                 im_w = float(tree.find('size').find('width').text)
                 im_h = float(tree.find('size').find('height').text)
                 if im_w < 0 or im_h < 0:
-                    logger.warn(
+                    logger.warning(
                         'Illegal width: {} or height: {} in annotation, '
                         'and {} will be ignored'.format(im_w, im_h, xml_file))
                     continue
@@ -156,7 +157,7 @@ class VOCDataSet(DataSet):
                         is_crowd.append([0])
                         difficult.append([_difficult])
                     else:
-                        logger.warn(
+                        logger.warning(
                             'Found an invalid bbox in annotations: xml_file: {}'
                             ', x1: {}, y1: {}, x2: {}, y2: {}.'.format(
                                 xml_file, x1, y1, x2, y2))
@@ -133,9 +133,9 @@ class WIDERFaceDataSet(DataSet):
                 h = float(split_str[3])
                 # Filter out wrong labels
                 if w < 0 or h < 0:
-                    logger.warn('Illegal box with w: {}, h: {} in '
-                                'img: {}, and it will be ignored'.format(
-                                    w, h, file_dict[num_class][0]))
+                    logger.warning('Illegal box with w: {}, h: {} in '
+                                   'img: {}, and it will be ignored'.format(
+                                       w, h, file_dict[num_class][0]))
                     continue
                 xmin = max(0, xmin)
                 ymin = max(0, ymin)
@@ -1453,19 +1453,19 @@ def _parse_policy_info(name, prob, level, replace_value, augmentation_hparams):
     # Check to see if prob is passed into function. This is used for operations
     # where we alter bboxes independently.
     # pytype:disable=wrong-arg-types
-    if 'prob' in inspect.getargspec(func)[0]:
+    if 'prob' in inspect.getfullargspec(func)[0]:
         args = tuple([prob] + list(args))
     # pytype:enable=wrong-arg-types
 
     # Add in replace arg if it is required for the function that is being called.
-    if 'replace' in inspect.getargspec(func)[0]:
+    if 'replace' in inspect.getfullargspec(func)[0]:
         # Make sure replace is the final argument
-        assert 'replace' == inspect.getargspec(func)[0][-1]
+        assert 'replace' == inspect.getfullargspec(func)[0][-1]
         args = tuple(list(args) + [replace_value])
 
     # Add bboxes as the second positional argument for the function if it does
     # not already exist.
-    if 'bboxes' not in inspect.getargspec(func)[0]:
+    if 'bboxes' not in inspect.getfullargspec(func)[0]:
         func = bbox_wrapper(func)
     return (func, prob, args)
@@ -1473,11 +1473,11 @@ def _parse_policy_info(name, prob, level, replace_value, augmentation_hparams):
 def _apply_func_with_prob(func, image, args, prob, bboxes):
     """Apply `func` to image w/ `args` as input with probability `prob`."""
     assert isinstance(args, tuple)
-    assert 'bboxes' == inspect.getargspec(func)[0][1]
+    assert 'bboxes' == inspect.getfullargspec(func)[0][1]
 
     # If prob is a function argument, then this randomness is being handled
     # inside the function, so make sure it is always called.
-    if 'prob' in inspect.getargspec(func)[0]:
+    if 'prob' in inspect.getfullargspec(func)[0]:
         prob = 1.0
 
     # Apply the function with probability `prob`.
@@ -125,7 +125,7 @@ class DecodeImage(BaseOperator):
         if 'h' not in sample:
             sample['h'] = im.shape[0]
         elif sample['h'] != im.shape[0]:
-            logger.warn(
+            logger.warning(
                 "The actual image height: {} is not equal to the "
                 "height: {} in annotation, and update sample['h'] by actual "
                 "image height.".format(im.shape[0], sample['h']))
@@ -133,7 +133,7 @@ class DecodeImage(BaseOperator):
         if 'w' not in sample:
             sample['w'] = im.shape[1]
         elif sample['w'] != im.shape[1]:
-            logger.warn(
+            logger.warning(
                 "The actual image width: {} is not equal to the "
                 "width: {} in annotation, and update sample['w'] by actual "
                 "image width.".format(im.shape[1], sample['w']))
@@ -67,7 +67,7 @@ class YOLOv3Loss(object):
         self.match_score = match_score
         if batch_size != -1:
-            logger.warn(
+            logger.warning(
                 "config YOLOv3Loss.batch_size is deprecated, "
                 "training batch size should be set by TrainReader.batch_size")
@@ -321,7 +321,7 @@ def _download(url, path, md5sum=None):
             shutil.move(tmp_fullname, fullname)
             return fullname
         else:
-            logger.warn(
+            logger.warning(
                 "Download from url imcomplete, try downloading again...")
             os.remove(tmp_fullname)
             continue
@@ -111,7 +111,7 @@ def main():
     if cfg.weights:
         checkpoint.load_params(exe, eval_prog, cfg.weights)
     else:
-        logger.warn("Please set cfg.weights to load trained model.")
+        logger.warning("Please set cfg.weights to load trained model.")
     # whether output bbox is normalized in model output layer
     is_bbox_normalized = False
@@ -265,9 +265,9 @@ class YOLOv5AnchorCluster(BaseAnchorCluster):
         wh0 = self.whs
         i = (wh0 < 3.0).any(1).sum()
         if i:
-            logger.warn('Extremely small objects found. %d of %d'
-                        'labels are < 3 pixels in width or height' %
-                        (i, len(wh0)))
+            logger.warning('Extremely small objects found. %d of %d'
+                           'labels are < 3 pixels in width or height' %
+                           (i, len(wh0)))
         wh = wh0[(wh0 >= 2.0).any(1)]
         logger.info('Running kmeans for %g anchors on %g points...' %
@@ -252,9 +252,9 @@ class YOLOv5AnchorCluster(BaseAnchorCluster):
         wh0 = self.whs
         i = (wh0 < 3.0).any(1).sum()
         if i:
-            logger.warn('Extremely small objects found. %d of %d'
-                        'labels are < 3 pixels in width or height' %
-                        (i, len(wh0)))
+            logger.warning('Extremely small objects found. %d of %d'
+                           'labels are < 3 pixels in width or height' %
+                           (i, len(wh0)))
         wh = wh0[(wh0 >= 2.0).any(1)]
         logger.info('Running kmeans for %g anchors on %g points...' %
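One subtlety worth noting: the augmentation helpers above index the argspec tuple directly, e.g. `inspect.getfullargspec(func)[0]`, rather than using attribute access. That keeps working after the switch because `FullArgSpec`, like the removed `ArgSpec`, is a named tuple whose first field is `args`. A small sketch, using a hypothetical `cutout`-style op in place of the real augmentation functions:

```python
import inspect


def cutout(image, bboxes, prob, replace):
    """Hypothetical augmentation op, used only to illustrate introspection."""
    return image, bboxes


# FullArgSpec fields are (args, varargs, varkw, defaults, kwonlyargs,
# kwonlydefaults, annotations), so index [0] is the list of positional
# argument names -- the same thing getargspec(func)[0] used to return.
argspec = inspect.getfullargspec(cutout)
print(argspec[0])                    # ['image', 'bboxes', 'prob', 'replace']
print('prob' in argspec[0])          # True: a prob value would be prepended
print('replace' == argspec[0][-1])   # True: replace_value would be appended
```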