Unverified commit d77c236f, authored by Manuel Garcia and committed by GitHub

Replace deprecated methods 'warn' and 'getargspec' (#3627)

Parent 2ecc6525
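
Background on the two deprecations this commit addresses: `logging.Logger.warn` is an undocumented alias of `warning()` that emits a `DeprecationWarning`, and `inspect.getargspec` was deprecated in favor of `inspect.getfullargspec` (and later removed in Python 3.11). A minimal sketch of both replacements on a hypothetical function, independent of the code changed below:

    import inspect
    import logging

    logger = logging.getLogger(__name__)

    def example(a, b=1, **kwargs):      # hypothetical function, for illustration only
        pass

    # warning() is the supported spelling; warn() still works but is deprecated.
    logger.warning('path of {} not exists'.format('some.txt'))

    # getfullargspec() is the drop-in replacement for getargspec(); note that
    # the **kwargs field of the returned named tuple is `varkw`, not `keywords`.
    spec = inspect.getfullargspec(example)
    print(spec.args)    # ['a', 'b']
    print(spec.varkw)   # 'kwargs'
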
@@ -93,7 +93,7 @@ def dota_2_coco(image_dir,
        # annotations
        anno_txt_path = osp.join(txt_dir, osp.splitext(basename)[0] + '.txt')
        if not osp.exists(anno_txt_path):
-            logger.warn('path of {} not exists'.format(anno_txt_path))
+            logger.warning('path of {} not exists'.format(anno_txt_path))
        for line in open(anno_txt_path):
            line = line.strip()
......
@@ -185,12 +185,12 @@ def extract_schema(cls):
        annotations = argspec.annotations
        has_kwargs = argspec.varkw is not None
    else:
-        argspec = inspect.getargspec(ctor)
+        argspec = inspect.getfullargspec(ctor)
        # python 2 type hinting workaround, see pep-3107
        # however, since `typeguard` does not support python 2, type checking
        # is still python 3 only for now
        annotations = getattr(ctor, '__annotations__', {})
-        has_kwargs = argspec.keywords is not None
+        has_kwargs = argspec.varkw is not None
    names = [arg for arg in argspec.args if arg != 'self']
    defaults = argspec.defaults
......
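
Note that the second change in the extract_schema hunk above is not just the function name: `getfullargspec()` returns a `FullArgSpec` named tuple whose `**kwargs` field is `varkw`, whereas the old `ArgSpec` called it `keywords`, so the attribute access has to change together with the call. A small sketch with a hypothetical constructor signature:

    import inspect

    def ctor(self, anchors, num_classes=80, **kwargs):   # hypothetical signature
        pass

    argspec = inspect.getfullargspec(ctor)
    # FullArgSpec(args=['self', 'anchors', 'num_classes'], varargs=None,
    #             varkw='kwargs', defaults=(80,), kwonlyargs=[],
    #             kwonlydefaults=None, annotations={})
    has_kwargs = argspec.varkw is not None    # previously: argspec.keywords is not None
    names = [arg for arg in argspec.args if arg != 'self']
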
@@ -52,7 +52,7 @@ def _make_python_representer(cls):
    if hasattr(inspect, 'getfullargspec'):
        argspec = inspect.getfullargspec(cls)
    else:
-        argspec = inspect.getargspec(cls.__init__)
+        argspec = inspect.getfullargspec(cls.__init__)
    argnames = [arg for arg in argspec.args if arg != 'self']
    def python_representer(dumper, obj):
......
@@ -56,9 +56,9 @@ class Compose(object):
                data = f(data)
            except Exception as e:
                stack_info = traceback.format_exc()
-                logger.warn("fail to map sample transform [{}] "
-                            "with error: {} and stack:\n{}".format(
-                                f, e, str(stack_info)))
+                logger.warning("fail to map sample transform [{}] "
+                               "with error: {} and stack:\n{}".format(
+                                   f, e, str(stack_info)))
                raise e
        return data
@@ -75,9 +75,9 @@ class BatchCompose(Compose):
                data = f(data)
            except Exception as e:
                stack_info = traceback.format_exc()
-                logger.warn("fail to map batch transform [{}] "
-                            "with error: {} and stack:\n{}".format(
-                                f, e, str(stack_info)))
+                logger.warning("fail to map batch transform [{}] "
+                               "with error: {} and stack:\n{}".format(
+                                   f, e, str(stack_info)))
                raise e
        # remove keys which is not needed by model
@@ -185,8 +185,8 @@ class BaseDataLoader(object):
        if use_shared_memory:
            shm_size = _get_shared_memory_size_in_M()
            if shm_size is not None and shm_size < 1024.:
-                logger.warn("Shared memory size is less than 1G, "
-                            "disable shared_memory in DataLoader")
+                logger.warning("Shared memory size is less than 1G, "
+                               "disable shared_memory in DataLoader")
                use_shared_memory = False
        self.dataloader = DataLoader(
......
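
For reference, the Compose/BatchCompose hunks above keep the existing pattern of formatting `traceback.format_exc()` into the warning message before re-raising; a self-contained sketch of that pattern (names are illustrative, not the repository's exact code):

    import logging
    import traceback

    logger = logging.getLogger(__name__)

    def apply_transforms(transforms, data):
        for f in transforms:
            try:
                data = f(data)
            except Exception as e:
                stack_info = traceback.format_exc()
                # warning() replaces the deprecated warn() alias
                logger.warning("fail to map sample transform [{}] "
                               "with error: {} and stack:\n{}".format(
                                   f, e, str(stack_info)))
                raise e
        return data
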
@@ -77,7 +77,7 @@ def get_categories(metric_type, anno_file=None, arch=None):
    elif metric_type.lower() == 'oid':
        if anno_file and os.path.isfile(anno_file):
-            logger.warn("only default categories support for OID19")
+            logger.warning("only default categories support for OID19")
        return _oid19_category()
    elif metric_type.lower() == 'widerface':
......
@@ -175,12 +175,12 @@ class MOTDataSet(DetDataset):
                lbl_file = self.label_files[data_name][img_index - start_index]
                if not os.path.exists(img_file):
-                    logger.warn('Illegal image file: {}, and it will be ignored'.
-                                format(img_file))
+                    logger.warning('Illegal image file: {}, and it will be ignored'.
+                                   format(img_file))
                    continue
                if not os.path.isfile(lbl_file):
-                    logger.warn('Illegal label file: {}, and it will be ignored'.
-                                format(lbl_file))
+                    logger.warning('Illegal label file: {}, and it will be ignored'.
+                                   format(lbl_file))
                    continue
                labels = np.loadtxt(lbl_file, dtype=np.float32).reshape(-1, 6)
......
@@ -89,13 +89,14 @@ class VOCDataSet(DetDataset):
                img_file, xml_file = [os.path.join(image_dir, x) \
                        for x in line.strip().split()[:2]]
                if not os.path.exists(img_file):
-                    logger.warn(
+                    logger.warning(
                        'Illegal image file: {}, and it will be ignored'.format(
                            img_file))
                    continue
                if not os.path.isfile(xml_file):
-                    logger.warn('Illegal xml file: {}, and it will be ignored'.
-                                format(xml_file))
+                    logger.warning(
+                        'Illegal xml file: {}, and it will be ignored'.format(
+                            xml_file))
                    continue
                tree = ET.parse(xml_file)
                if tree.find('id') is None:
@@ -107,7 +108,7 @@ class VOCDataSet(DetDataset):
                im_w = float(tree.find('size').find('width').text)
                im_h = float(tree.find('size').find('height').text)
                if im_w < 0 or im_h < 0:
-                    logger.warn(
+                    logger.warning(
                        'Illegal width: {} or height: {} in annotation, '
                        'and {} will be ignored'.format(im_w, im_h, xml_file))
                    continue
@@ -137,7 +138,7 @@ class VOCDataSet(DetDataset):
                        gt_score.append([1.])
                        difficult.append([_difficult])
                    else:
-                        logger.warn(
+                        logger.warning(
                            'Found an invalid bbox in annotations: xml_file: {}'
                            ', x1: {}, y1: {}, x2: {}, y2: {}.'.format(
                                xml_file, x1, y1, x2, y2))
......
@@ -139,9 +139,9 @@ class WIDERFaceDataSet(DetDataset):
                    h = float(split_str[3])
                    # Filter out wrong labels
                    if w < 0 or h < 0:
-                        logger.warn('Illegal box with w: {}, h: {} in '
-                                    'img: {}, and it will be ignored'.format(
-                                        w, h, file_dict[num_class][0]))
+                        logger.warning('Illegal box with w: {}, h: {} in '
+                                       'img: {}, and it will be ignored'.format(
+                                           w, h, file_dict[num_class][0]))
                        continue
                    xmin = max(0, xmin)
                    ymin = max(0, ymin)
......
@@ -131,7 +131,7 @@ class Decode(BaseOperator):
        if 'h' not in sample:
            sample['h'] = im.shape[0]
        elif sample['h'] != im.shape[0]:
-            logger.warn(
+            logger.warning(
                "The actual image height: {} is not equal to the "
                "height: {} in annotation, and update sample['h'] by actual "
                "image height.".format(im.shape[0], sample['h']))
@@ -139,7 +139,7 @@ class Decode(BaseOperator):
        if 'w' not in sample:
            sample['w'] = im.shape[1]
        elif sample['w'] != im.shape[1]:
-            logger.warn(
+            logger.warning(
                "The actual image width: {} is not equal to the "
                "width: {} in annotation, and update sample['w'] by actual "
                "image width.".format(im.shape[1], sample['w']))
@@ -726,7 +726,7 @@ class Resize(BaseOperator):
        # apply rbox
        if 'gt_rbox2poly' in sample:
            if np.array(sample['gt_rbox2poly']).shape[1] != 8:
-                logger.warn(
+                logger.warning(
                    "gt_rbox2poly's length shoule be 8, but actually is {}".
                    format(len(sample['gt_rbox2poly'])))
            sample['gt_rbox2poly'] = self.apply_bbox(sample['gt_rbox2poly'],
......
@@ -175,7 +175,7 @@ class Checkpointer(Callback):
                else:
                    key = 'mask'
                if key not in map_res:
-                    logger.warn("Evaluation results empty, this may be due to " \
+                    logger.warning("Evaluation results empty, this may be due to " \
                                "training iterations being too few or not " \
                                "loading the correct weights.")
                    return
......
@@ -75,7 +75,7 @@ class Tracker(object):
        if self.cfg.metric == 'MOT':
            self._metrics = [MOTMetric(), ]
        else:
-            logger.warn("Metric not support for metric type {}".format(
+            logger.warning("Metric not support for metric type {}".format(
                self.cfg.metric))
            self._metrics = []
......
@@ -246,7 +246,7 @@ class Trainer(object):
        elif self.cfg.metric == 'MOTDet':
            self._metrics = [JDEDetMetric(), ]
        else:
-            logger.warn("Metric not support for metric type {}".format(
+            logger.warning("Metric not support for metric type {}".format(
                self.cfg.metric))
            self._metrics = []
......
@@ -185,12 +185,12 @@ def extract_schema(cls):
        annotations = argspec.annotations
        has_kwargs = argspec.varkw is not None
    else:
-        argspec = inspect.getargspec(ctor)
+        argspec = inspect.getfullargspec(ctor)
        # python 2 type hinting workaround, see pep-3107
        # however, since `typeguard` does not support python 2, type checking
        # is still python 3 only for now
        annotations = getattr(ctor, '__annotations__', {})
-        has_kwargs = argspec.keywords is not None
+        has_kwargs = argspec.varkw is not None
    names = [arg for arg in argspec.args if arg != 'self']
    defaults = argspec.defaults
......
@@ -52,7 +52,7 @@ def _make_python_representer(cls):
    if hasattr(inspect, 'getfullargspec'):
        argspec = inspect.getfullargspec(cls)
    else:
-        argspec = inspect.getargspec(cls.__init__)
+        argspec = inspect.getfullargspec(cls.__init__)
    argnames = [arg for arg in argspec.args if arg != 'self']
    def python_representer(dumper, obj):
......
@@ -166,8 +166,8 @@ def make_partial(cls):
    if not hasattr(op_module, op_name):
        import logging
        logger = logging.getLogger(__name__)
-        logger.warn('{} OP not found, maybe a newer version of paddle '
-                    'is required.'.format(cls.__op__))
+        logger.warning('{} OP not found, maybe a newer version of paddle '
+                       'is required.'.format(cls.__op__))
        return cls
    op = getattr(op_module, op_name)
......
@@ -211,10 +211,10 @@ class ParallelMap(object):
            else:
                errmsg = "consumer[{}] exit abnormally".format(w.ident)
-                logger.warn(errmsg)
+                logger.warning(errmsg)
        if abnormal_num > 0:
-            logger.warn("{} consumers have exited abnormally!!!" \
+            logger.warning("{} consumers have exited abnormally!!!" \
                .format(abnormal_num))
        return abnormal_num == 0
@@ -239,7 +239,7 @@ class ParallelMap(object):
        if isinstance(sample, EndSignal):
            self._consumer_endsig[sample.id] = sample
-            logger.warn("recv endsignal from outq with errmsg[{}]" \
+            logger.warning("recv endsignal from outq with errmsg[{}]" \
                .format(sample.errmsg))
            if len(self._consumer_endsig.keys()) < len(self._consumers):
@@ -268,7 +268,7 @@ class ParallelMap(object):
            " for some consumers exited abnormally before!!!"
        if not self.drained():
-            logger.warn("reset before epoch[{}] finishes".format(
+            logger.warning("reset before epoch[{}] finishes".format(
                self._epoch))
            self._produced = self._produced - self._consumed
        else:
@@ -46,8 +46,9 @@ class Compose(object):
                data = f(data, ctx)
            except Exception as e:
                stack_info = traceback.format_exc()
-                logger.warn("fail to map op [{}] with error: {} and stack:\n{}".
-                            format(f, e, str(stack_info)))
+                logger.warning(
+                    "fail to map op [{}] with error: {} and stack:\n{}".format(
+                        f, e, str(stack_info)))
                raise e
        return data
......
@@ -75,7 +75,7 @@ class SharedQueue(Queue):
            stack_info = traceback.format_exc()
            err_msg = 'failed to put a element to SharedQueue '\
                'with stack info[%s]' % (stack_info)
-            logger.warn(err_msg)
+            logger.warning(err_msg)
            if buff is not None:
                buff.free()
@@ -95,7 +95,7 @@ class SharedQueue(Queue):
            stack_info = traceback.format_exc()
            err_msg = 'failed to get element from SharedQueue '\
                'with stack info[%s]' % (stack_info)
-            logger.warn(err_msg)
+            logger.warning(err_msg)
            raise e
        finally:
            if buff is not None:
......
@@ -233,7 +233,7 @@ class PageAllocator(object):
        fname = fname + '.' + str(uuid.uuid4())[:6]
        with open(fname, 'wb') as f:
            f.write(pickle.dumps(info, -1))
-        logger.warn('dump alloc info to file[%s]' % (fname))
+        logger.warning('dump alloc info to file[%s]' % (fname))
    def _reset(self):
        alloc_page_pos = self._header_pages
@@ -460,7 +460,7 @@ class SharedMemoryMgr(object):
            if start is None:
                time.sleep(0.1)
                if ct % 100 == 0:
-                    logger.warn('not enough space for reason[%s]' % (errmsg))
+                    logger.warning('not enough space for reason[%s]' % (errmsg))
                ct += 1
            else:
......
@@ -97,8 +97,8 @@ class COCODataSet(DataSet):
        if 'annotations' not in coco.dataset:
            self.load_image_only = True
-            logger.warn('Annotation file: {} does not contains ground truth '
-                        'and load image information only.'.format(anno_path))
+            logger.warning('Annotation file: {} does not contains ground truth '
+                           'and load image information only.'.format(anno_path))
        for img_id in img_ids:
            img_anno = coco.loadImgs([img_id])[0]
@@ -109,14 +109,14 @@ class COCODataSet(DataSet):
            im_path = os.path.join(image_dir,
                                   im_fname) if image_dir else im_fname
            if not os.path.exists(im_path):
-                logger.warn('Illegal image file: {}, and it will be '
-                            'ignored'.format(im_path))
+                logger.warning('Illegal image file: {}, and it will be '
+                               'ignored'.format(im_path))
                continue
            if im_w < 0 or im_h < 0:
-                logger.warn('Illegal width: {} or height: {} in annotation, '
-                            'and im_id: {} will be ignored'.format(im_w, im_h,
-                                                                   img_id))
+                logger.warning('Illegal width: {} or height: {} in annotation, '
+                               'and im_id: {} will be ignored'.format(
+                                   im_w, im_h, img_id))
                continue
            coco_rec = {
@@ -141,7 +141,7 @@ class COCODataSet(DataSet):
                        inst['clean_bbox'] = [x1, y1, x2, y2]
                        bboxes.append(inst)
                    else:
-                        logger.warn(
+                        logger.warning(
                            'Found an invalid bbox in annotations: im_id: {}, '
                            'x1: {}, y1: {}, x2: {}, y2: {}.'.format(
                                img_id, x1, y1, x2, y2))
......
@@ -111,13 +111,14 @@ class VOCDataSet(DataSet):
                img_file, xml_file = [os.path.join(image_dir, x) \
                        for x in line.strip().split()[:2]]
                if not os.path.exists(img_file):
-                    logger.warn(
+                    logger.warning(
                        'Illegal image file: {}, and it will be ignored'.format(
                            img_file))
                    continue
                if not os.path.isfile(xml_file):
-                    logger.warn('Illegal xml file: {}, and it will be ignored'.
-                                format(xml_file))
+                    logger.warning(
+                        'Illegal xml file: {}, and it will be ignored'.format(
+                            xml_file))
                    continue
                tree = ET.parse(xml_file)
                if tree.find('id') is None:
@@ -129,7 +130,7 @@ class VOCDataSet(DataSet):
                im_w = float(tree.find('size').find('width').text)
                im_h = float(tree.find('size').find('height').text)
                if im_w < 0 or im_h < 0:
-                    logger.warn(
+                    logger.warning(
                        'Illegal width: {} or height: {} in annotation, '
                        'and {} will be ignored'.format(im_w, im_h, xml_file))
                    continue
@@ -156,7 +157,7 @@ class VOCDataSet(DataSet):
                        is_crowd.append([0])
                        difficult.append([_difficult])
                    else:
-                        logger.warn(
+                        logger.warning(
                            'Found an invalid bbox in annotations: xml_file: {}'
                            ', x1: {}, y1: {}, x2: {}, y2: {}.'.format(
                                xml_file, x1, y1, x2, y2))
......
@@ -133,9 +133,9 @@ class WIDERFaceDataSet(DataSet):
                    h = float(split_str[3])
                    # Filter out wrong labels
                    if w < 0 or h < 0:
-                        logger.warn('Illegal box with w: {}, h: {} in '
-                                    'img: {}, and it will be ignored'.format(
-                                        w, h, file_dict[num_class][0]))
+                        logger.warning('Illegal box with w: {}, h: {} in '
+                                       'img: {}, and it will be ignored'.format(
+                                           w, h, file_dict[num_class][0]))
                        continue
                    xmin = max(0, xmin)
                    ymin = max(0, ymin)
......
@@ -1453,19 +1453,19 @@ def _parse_policy_info(name, prob, level, replace_value, augmentation_hparams):
    # Check to see if prob is passed into function. This is used for operations
    # where we alter bboxes independently.
    # pytype:disable=wrong-arg-types
-    if 'prob' in inspect.getargspec(func)[0]:
+    if 'prob' in inspect.getfullargspec(func)[0]:
        args = tuple([prob] + list(args))
    # pytype:enable=wrong-arg-types
    # Add in replace arg if it is required for the function that is being called.
-    if 'replace' in inspect.getargspec(func)[0]:
+    if 'replace' in inspect.getfullargspec(func)[0]:
        # Make sure replace is the final argument
-        assert 'replace' == inspect.getargspec(func)[0][-1]
+        assert 'replace' == inspect.getfullargspec(func)[0][-1]
        args = tuple(list(args) + [replace_value])
    # Add bboxes as the second positional argument for the function if it does
    # not already exist.
-    if 'bboxes' not in inspect.getargspec(func)[0]:
+    if 'bboxes' not in inspect.getfullargspec(func)[0]:
        func = bbox_wrapper(func)
    return (func, prob, args)
@@ -1473,11 +1473,11 @@ def _parse_policy_info(name, prob, level, replace_value, augmentation_hparams):
def _apply_func_with_prob(func, image, args, prob, bboxes):
    """Apply `func` to image w/ `args` as input with probability `prob`."""
    assert isinstance(args, tuple)
-    assert 'bboxes' == inspect.getargspec(func)[0][1]
+    assert 'bboxes' == inspect.getfullargspec(func)[0][1]
    # If prob is a function argument, then this randomness is being handled
    # inside the function, so make sure it is always called.
-    if 'prob' in inspect.getargspec(func)[0]:
+    if 'prob' in inspect.getfullargspec(func)[0]:
        prob = 1.0
    # Apply the function with probability `prob`.
......
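
The _parse_policy_info hunks above index the argspec tuple instead of using attributes. Since both the old `ArgSpec` and the new `FullArgSpec` are named tuples whose first element is the list of positional parameter names, `inspect.getfullargspec(func)[0]` is a drop-in for `inspect.getargspec(func)[0]`. A quick sketch with a hypothetical augmentation op:

    import inspect

    def rotate_with_bboxes(image, bboxes, degrees, prob, replace):   # hypothetical op
        return image, bboxes

    spec = inspect.getfullargspec(rotate_with_bboxes)
    assert 'prob' in spec[0]            # spec[0] is the positional-arg name list
    assert 'bboxes' == spec[0][1]
    assert 'replace' == spec[0][-1]
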
@@ -125,7 +125,7 @@ class DecodeImage(BaseOperator):
        if 'h' not in sample:
            sample['h'] = im.shape[0]
        elif sample['h'] != im.shape[0]:
-            logger.warn(
+            logger.warning(
                "The actual image height: {} is not equal to the "
                "height: {} in annotation, and update sample['h'] by actual "
                "image height.".format(im.shape[0], sample['h']))
@@ -133,7 +133,7 @@ class DecodeImage(BaseOperator):
        if 'w' not in sample:
            sample['w'] = im.shape[1]
        elif sample['w'] != im.shape[1]:
-            logger.warn(
+            logger.warning(
                "The actual image width: {} is not equal to the "
                "width: {} in annotation, and update sample['w'] by actual "
                "image width.".format(im.shape[1], sample['w']))
......
@@ -67,7 +67,7 @@ class YOLOv3Loss(object):
        self.match_score = match_score
        if batch_size != -1:
-            logger.warn(
+            logger.warning(
                "config YOLOv3Loss.batch_size is deprecated, "
                "training batch size should be set by TrainReader.batch_size")
......
@@ -321,7 +321,7 @@ def _download(url, path, md5sum=None):
            shutil.move(tmp_fullname, fullname)
            return fullname
        else:
-            logger.warn(
+            logger.warning(
                "Download from url imcomplete, try downloading again...")
            os.remove(tmp_fullname)
            continue
......
@@ -111,7 +111,7 @@ def main():
    if cfg.weights:
        checkpoint.load_params(exe, eval_prog, cfg.weights)
    else:
-        logger.warn("Please set cfg.weights to load trained model.")
+        logger.warning("Please set cfg.weights to load trained model.")
    # whether output bbox is normalized in model output layer
    is_bbox_normalized = False
......
@@ -265,9 +265,9 @@ class YOLOv5AnchorCluster(BaseAnchorCluster):
        wh0 = self.whs
        i = (wh0 < 3.0).any(1).sum()
        if i:
-            logger.warn('Extremely small objects found. %d of %d'
-                        'labels are < 3 pixels in width or height' %
-                        (i, len(wh0)))
+            logger.warning('Extremely small objects found. %d of %d'
+                           'labels are < 3 pixels in width or height' %
+                           (i, len(wh0)))
        wh = wh0[(wh0 >= 2.0).any(1)]
        logger.info('Running kmeans for %g anchors on %g points...' %
......
@@ -252,9 +252,9 @@ class YOLOv5AnchorCluster(BaseAnchorCluster):
        wh0 = self.whs
        i = (wh0 < 3.0).any(1).sum()
        if i:
-            logger.warn('Extremely small objects found. %d of %d'
-                        'labels are < 3 pixels in width or height' %
-                        (i, len(wh0)))
+            logger.warning('Extremely small objects found. %d of %d'
+                           'labels are < 3 pixels in width or height' %
+                           (i, len(wh0)))
        wh = wh0[(wh0 >= 2.0).any(1)]
        logger.info('Running kmeans for %g anchors on %g points...' %
......