Commit 8432b73d authored by Evgeny Izutov

pylint fixes

Parent 59c1dde1
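The diff below applies two recurring pylint-driven patterns: Python 2's `xrange` becomes either `range` (imported from `builtins` so the lazy behavior is kept on Python 2) or an `enumerate` loop where the body indexes a sequence, and a `print` call with a missing closing parenthesis is repaired. A minimal sketch of the compatibility idiom, assuming the `future` package is installed (it is what provides the `builtins` module on Python 2):

```python
# Minimal sketch of the range idiom used throughout this commit.
# Assumes `pip install future`, which supplies `builtins` on Python 2;
# on Python 3 the import resolves to the stdlib builtins module.
from builtins import range


def make_ids(count):
    """Return doubled ids; range is lazy on both Python 2 and 3."""
    return [2 * i for i in range(count)]


print(make_ids(3))  # [0, 2, 4]
```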
@@ -147,7 +147,9 @@ disable=print-statement,
 attribute-defined-outside-init,
 too-many-boolean-expressions,
 too-many-locals,
-assignment-from-no-return
+assignment-from-no-return,
+unnecessary-pass,
+useless-object-inheritance

 # Enable the message, report, category or checker with the given id(s). You can
 # either give multiple identifier separated by comma (,) or put this option
...
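For reference, a hypothetical snippet that would trigger the two checks newly added to the disable list; both are stylistic on Python 3 code, which is presumably why they are silenced globally rather than fixed file by file:

```python
# Hypothetical code that pylint would flag with the two newly disabled checks.

class NoOpLayer(object):  # useless-object-inheritance: Python 3 classes subclass object implicitly
    """A layer that intentionally does nothing."""

    def setup(self):
        """Setup hook left empty on purpose."""
        pass  # unnecessary-pass: the docstring alone already forms a valid body
```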
@@ -14,6 +14,7 @@
 import time
 import traceback
 import signal
+from builtins import range
 from os import listdir
 from os.path import exists, basename, isfile, join
 from collections import namedtuple
@@ -221,7 +222,7 @@ class SampleDataFromDisk(object):
 frame_id = int(bbox.attrib['frame'])
 action_name = None
-for bbox_attr_id in xrange(len(bbox)):
+for bbox_attr_id, _ in enumerate(bbox):
 attribute_name = bbox[bbox_attr_id].attrib['name']
 if attribute_name != 'action':
 continue
@@ -390,7 +391,7 @@ class ActionsDataLayer(BaseLayer):
 neighbours.sort(key=lambda t: t[1])
 support_size = np.random.randint(2, len(valid_objects) + 1)
-support_set = [neighbours[i][0] for i in xrange(support_size)]
+support_set = [neighbours[i][0] for i in range(support_size)]
 support_bbox = _find_limits(support_set)
 crop_aspect_ratio = np.random.uniform(self.crop_ratio_limits_[0], self.crop_ratio_limits_[1])
@@ -817,7 +818,7 @@ class ActionsDataLayer(BaseLayer):
 final_num_classes = len(class_subsets)
 final_ids = np.zeros([final_num_classes * min_queue_size], dtype=np.int32)
-for i in xrange(final_num_classes):
+for i in range(final_num_classes):
 final_ids[i::final_num_classes] = class_subsets[i]
 for i in final_ids:
@@ -845,7 +846,7 @@ class ActionsDataLayer(BaseLayer):
 signal.signal(signal.SIGINT, signal.SIG_IGN)
 if single_run:
-for _ in xrange(max_size):
+for _ in range(max_size):
 _step()
 else:
 while True:
@@ -866,7 +867,7 @@ class ActionsDataLayer(BaseLayer):
 self._ids_filler_process.start()
 self._data_fillers_pool = []
-for _ in xrange(self.num_data_fillers_):
+for _ in range(self.num_data_fillers_):
 new_data_filler = Process(target=self._data_queue_filler,
 args=(self.annotated_images_queue, self.frame_ids_queue,
 self.data_queue_size_, self.single_iter_))
...
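The `xrange(len(...))` loops above are rewritten with `enumerate`. The change is mechanical: the loop bodies still index the sequence directly, so the element that `enumerate` yields is discarded as `_`. A small before/after sketch with hypothetical names:

```python
# Hypothetical before/after for the enumerate rewrite used in this commit.
actions = ['walking', 'standing', 'sitting']

# Python 2 style that pylint flags (and that breaks on Python 3, where
# xrange no longer exists):
#     for i in xrange(len(actions)):
#         print(i, actions[i])

# Rewritten form: enumerate yields (index, element); the element is unused
# because the loop body keeps indexing the list directly.
for i, _ in enumerate(actions):
    print(i, actions[i])
```

Binding the element to a name and dropping the explicit indexing would be more idiomatic still, but the minimal rewrite keeps the diff small.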
@@ -12,6 +12,7 @@
 """
 import traceback
+from builtins import range
 from collections import namedtuple
 import numpy as np
@@ -238,7 +239,7 @@ class ActionsDetectionOutputLayer(BaseLayer):
 detections_data = np.array(bottom[0].data)
 anchors_data = []
-for i in xrange(self._num_anchors):
+for i in range(self._num_anchors):
 anchors_data.append(np.array(bottom[i + 1].data))
 all_detections = self._parse_detections(
...
@@ -12,6 +12,7 @@
 """
 import traceback
+from builtins import range
 import numpy as np
@@ -76,7 +77,7 @@ class AdaptiveWeightingLossLayer(BaseLayer):
 samples = []
 losses = []
-for i in xrange(num_variables):
+for i in range(num_variables):
 loss_value = np.array(bottom[i].data, dtype=np.float32).reshape([-1])
 assert len(loss_value) == 1
@@ -93,7 +94,7 @@ class AdaptiveWeightingLossLayer(BaseLayer):
 top[0].data[...] = np.sum(losses) if len(losses) > 0 else 0.0
 if len(top) == 1 + num_variables:
-for i in xrange(num_variables):
+for i in range(num_variables):
 top[i + 1].data[...] = np.copy(bottom[i].data)
 self._samples = samples
@@ -112,7 +113,7 @@ class AdaptiveWeightingLossLayer(BaseLayer):
 try:
 num_variables = len(bottom)
-for i in xrange(num_variables):
+for i in range(num_variables):
 bottom[i].diff[...] = 0.0
 top_diff_value = top[0].diff[0]
@@ -136,5 +137,5 @@ class AdaptiveWeightingLossLayer(BaseLayer):
 num_variables = len(bottom)
 if len(top) == 1 + num_variables:
-for i in xrange(num_variables):
+for i in range(num_variables):
 top[i + 1].reshape(1)
...
@@ -12,6 +12,7 @@
 """
 import traceback
+from builtins import range
 from collections import namedtuple
 import numpy as np
@@ -205,7 +206,7 @@ class CenterLossLayer(BaseLayer):
 self._centers = np.array(bottom[1].data)
 self._embeddings = []
-for i in xrange(self._num_anchors):
+for i in range(self._num_anchors):
 self._embeddings.append(np.array(bottom[i + 2].data))
 if self._adaptive_weights:
@@ -266,8 +267,7 @@ class CenterLossLayer(BaseLayer):
 instance_weights = [class_weights[det.action] for det in valid_detections]
 num_instances = len(valid_detections)
-weighted_sum_losses = np.sum([instance_weights[i] * losses[i]
-for i in xrange(len(valid_detections))])
+weighted_sum_losses = np.sum([instance_weights[i] * losses[i] for i, _ in enumerate(valid_detections)])
 top[0].data[...] = weighted_sum_losses / float(num_instances) if num_instances > 0 else 0.0
 if len(top) > 1:
@@ -301,14 +301,14 @@ class CenterLossLayer(BaseLayer):
 centers_diff_data = np.zeros(bottom[1].data.shape) if propagate_down[1] else None
 anchor_diff_data = {}
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 2]:
 anchor_diff_data[anchor_id] = np.zeros(bottom[anchor_id + 2].data.shape)
 if len(self._valid_detections) > 0:
 factor = top[0].diff[0] / float(self._num_instances)
-for i in xrange(len(self._valid_detections)):
+for i, _ in enumerate(self._valid_detections):
 det = self._valid_detections[i]
 weight = -factor * self._weights[i]
@@ -323,7 +323,7 @@ class CenterLossLayer(BaseLayer):
 if centers_diff_data is not None:
 bottom[1].diff[...] = centers_diff_data
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 2]:
 bottom[anchor_id + 2].diff[...] = anchor_diff_data[anchor_id]
 except Exception:
...
@@ -12,6 +12,7 @@
 """
 import traceback
+from builtins import range
 from collections import namedtuple
 import numpy as np
@@ -59,7 +60,7 @@ class DetMatcherLayer(BaseLayer):
 return {}
 batch_size = int(np.max(records[:, 0])) + 1
-batch_data = {i: [] for i in xrange(batch_size)}
+batch_data = {i: [] for i in range(batch_size)}
 for record in records:
 item_id = int(record[0])
@@ -103,7 +104,7 @@ class DetMatcherLayer(BaseLayer):
 return {}
 batch_size = int(np.max(records[:, 0])) + 1
-batch_data = {i: [] for i in xrange(batch_size)}
+batch_data = {i: [] for i in range(batch_size)}
 for record in records:
 item_id = int(record[0])
@@ -239,7 +240,7 @@ class DetMatcherLayer(BaseLayer):
 # First: match gt to predictions 1:1 with IoU > min_gt_iou
 gt_inds, pred_inds = linear_sum_assignment(distance_matrix)
-for i in xrange(len(gt_inds)):
+for i, _ in enumerate(gt_inds):
 gt_id = gt_inds[i]
 predicted_id = pred_inds[i]
@@ -277,7 +278,7 @@ class DetMatcherLayer(BaseLayer):
 valid_matched_gt_bboxes = [gt_bboxes[i] for i in valid_matched_gt_ids]
 valid_predicted_bboxes = [predicted_bboxes[i] for i in valid_prediction_ids]
-for i in xrange(len(valid_matched_gt_bboxes)):
+for i, _ in enumerate(valid_matched_gt_bboxes):
 gt_bbox = valid_matched_gt_bboxes[i]
 predicted_bbox = valid_predicted_bboxes[i]
 matches.append(InputDetection(item_id=item_id,
...
@@ -12,6 +12,7 @@
 """
 import traceback
+from builtins import range
 from collections import namedtuple
 import numpy as np
@@ -136,7 +137,7 @@ class GlobPushLossLayer(BaseLayer):
 num_centers = centers_data.shape[0]
 self._embeddings = []
-for i in xrange(self._num_anchors):
+for i in range(self._num_anchors):
 self._embeddings.append(np.array(bottom[i + 2].data))
 height, width = self._embeddings[0].shape[2:]
@@ -153,11 +154,11 @@ class GlobPushLossLayer(BaseLayer):
 total_num_pairs += self._num_anchors * num_centers * int(np.sum(outer_mask))
 anchor_masks = []
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 anchor_embeddings = self._embeddings[anchor_id][item_id]
 center_masks = []
-for center_id in xrange(num_centers):
+for center_id in range(num_centers):
 center_embedding = centers_data[center_id].reshape([-1, 1, 1])
 distances = 1.0 - np.sum(anchor_embeddings * center_embedding, axis=0)
@@ -202,22 +203,22 @@ class GlobPushLossLayer(BaseLayer):
 centers_diff_data = np.zeros(bottom[1].data.shape) if propagate_down[1] else None
 anchor_diff_data = {}
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 2]:
 anchor_diff_data[anchor_id] = np.zeros(bottom[anchor_id + 2].data.shape)
 if self._valid_num_pairs > 0:
 factor = top[0].diff[0] / float(self._valid_num_pairs)
-for item_id in xrange(len(self._masks)):
+for item_id, _ in enumerate(self._masks):
 anchor_masks = self._masks[item_id]
-for anchor_id in xrange(len(anchor_masks)):
+for anchor_id, _ in enumerate(anchor_masks):
 embeddings = self._embeddings[anchor_id][item_id]
 diff_data = anchor_diff_data[anchor_id][item_id]
 embedding_size = embeddings.shape[0]
 center_masks = anchor_masks[anchor_id]
-for center_id in xrange(len(center_masks)):
+for center_id, _ in enumerate(center_masks):
 mask = center_masks[center_id]
 num_pairs = int(np.sum(mask))
 mask = np.tile(np.expand_dims(mask, axis=0), reps=[embedding_size, 1, 1])
@@ -233,7 +234,7 @@ class GlobPushLossLayer(BaseLayer):
 if centers_diff_data is not None:
 bottom[1].diff[...] = centers_diff_data
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 2]:
 bottom[anchor_id + 2].diff[...] = anchor_diff_data[anchor_id]
 except Exception:
...
@@ -12,6 +12,7 @@
 """
 import traceback
+from builtins import range
 from collections import namedtuple
 import numpy as np
@@ -88,7 +89,7 @@ class LocalPushLossLayer(BaseLayer):
 :return: mask of valid pixels
 """
-masks = [np.zeros([height, width], dtype=np.bool) for _ in xrange(num_classes)]
+masks = [np.zeros([height, width], dtype=np.bool) for _ in range(num_classes)]
 for det in detections:
 masks[det.action][det.y, det.x] = True
 return masks
@@ -188,7 +189,7 @@ class LocalPushLossLayer(BaseLayer):
 num_centers = centers_data.shape[0]
 self._embeddings = []
-for i in xrange(self._num_anchors):
+for i in range(self._num_anchors):
 self._embeddings.append(np.array(bottom[i + 2].data))
 if self._adaptive_weights:
@@ -220,7 +221,7 @@ class LocalPushLossLayer(BaseLayer):
 pos_distance = 1.0 - np.sum(det_embedding * center_embedding)
-for center_id in xrange(num_centers):
+for center_id in range(num_centers):
 if center_id == det.action:
 continue
@@ -248,13 +249,13 @@ class LocalPushLossLayer(BaseLayer):
 if self._instance_norm:
 instance_weights = \
 [instance_weights[i] / float(instance_counts[pos_matches[i][0].item][pos_matches[i][0].id])
-for i in xrange(len(pos_matches))]
+for i in range(len(pos_matches))]
 num_instances = np.sum([len(counts) for counts in instance_counts.values()])
 else:
-instance_weights = [instance_weights[i] for i in xrange(len(pos_matches))]
+instance_weights = [instance_weights[i] for i, _ in enumerate(pos_matches)]
 num_instances = len(pos_matches)
-weighted_sum_losses = np.sum([instance_weights[i] * losses[i] for i in xrange(len(pos_matches))])
+weighted_sum_losses = np.sum([instance_weights[i] * losses[i] for i, _ in enumerate(pos_matches)])
 top[0].data[...] = weighted_sum_losses / float(num_instances) if num_instances > 0 else 0.0
 if len(top) == 4:
@@ -286,14 +287,14 @@ class LocalPushLossLayer(BaseLayer):
 centers_diff_data = np.zeros(bottom[1].data.shape) if propagate_down[1] else None
 anchor_diff_data = {}
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 2]:
 anchor_diff_data[anchor_id] = np.zeros(bottom[anchor_id + 2].data.shape)
 if len(self._pos_matches) > 0:
 factor = top[0].diff[0] / float(self._num_instances)
-for i in xrange(len(self._pos_matches)):
+for i, _ in enumerate(self._pos_matches):
 det, center_id = self._pos_matches[i]
 loss_weight = self._weights[i]
@@ -310,7 +311,7 @@ class LocalPushLossLayer(BaseLayer):
 if centers_diff_data is not None:
 bottom[1].diff[...] = centers_diff_data
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 2]:
 bottom[anchor_id + 2].diff[...] = anchor_diff_data[anchor_id]
 except Exception:
...
@@ -12,6 +12,7 @@
 """
 import traceback
+from builtins import range
 from collections import namedtuple
 import numpy as np
@@ -65,7 +66,7 @@ class PushLossLayer(BaseLayer):
 assert data.size % record_size == 0, 'incorrect record_size'
 records = data.reshape([-1, record_size])
-detections = {i: [] for i in xrange(len(valid_action_ids))}
+detections = {i: [] for i, _ in enumerate(valid_action_ids)}
 for record in records:
 detection = converter(record)
@@ -168,10 +169,10 @@ class PushLossLayer(BaseLayer):
 detections = self._filter_detections(all_detections, self._max_num_samples)
 classes = detections.keys()
-class_pairs = [(classes[i], classes[j]) for i in xrange(len(classes)) for j in xrange(i + 1, len(classes))]
+class_pairs = [(classes[i], classes[j]) for i, _ in enumerate(classes) for j in range(i + 1, len(classes))]
 self._embeddings = []
-for i in xrange(self._num_anchors):
+for i in range(self._num_anchors):
 self._embeddings.append(np.array(bottom[i + 1].data))
 all_candidates = []
@@ -182,12 +183,12 @@ class PushLossLayer(BaseLayer):
 if len(detections_i) == 0 or len(detections_j) == 0:
 continue
-for i in xrange(len(detections_i)):
+for i, _ in enumerate(detections_i):
 anchor_det = detections_i[i]
 anchor_embed = self._embeddings[anchor_det.anchor][anchor_det.item, :,
 anchor_det.y, anchor_det.x]
-for j in xrange(len(detections_j)):
+for j, _ in enumerate(detections_j):
 ref_det = detections_j[j]
 ref_embed = self._embeddings[ref_det.anchor][ref_det.item, :, ref_det.y, ref_det.x]
@@ -236,7 +237,7 @@ class PushLossLayer(BaseLayer):
 raise Exception('Cannot propagate down through the matched detections')
 diff_data = {}
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 1]:
 diff_data[anchor_id] = np.zeros(bottom[anchor_id + 1].data.shape)
@@ -250,7 +251,7 @@ class PushLossLayer(BaseLayer):
 diff_data[ref_det.anchor][ref_det.item, :, ref_det.y, ref_det.x] \
 += factor * self._embeddings[anchor_det.anchor][anchor_det.item, :, anchor_det.y, anchor_det.x]
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 1]:
 bottom[anchor_id + 1].diff[...] = diff_data[anchor_id]
 except Exception:
...
@@ -12,6 +12,7 @@
 """
 import traceback
+from builtins import range
 from collections import namedtuple
 import numpy as np
@@ -61,7 +62,7 @@ class SamplingExtractorLayer(BaseLayer):
 assert data.size % record_size == 0, 'incorrect record_size'
 records = data.reshape([-1, record_size])
-detections = {i: [] for i in xrange(len(valid_action_ids))}
+detections = {i: [] for i, _ in enumerate(valid_action_ids)}
 for record in records:
 detection = converter(record)
@@ -140,7 +141,7 @@ class SamplingExtractorLayer(BaseLayer):
 self._valid_action_ids, self._min_conf)
 self._embeddings = []
-for i in xrange(self._num_anchors):
+for i in range(self._num_anchors):
 self._embeddings.append(np.array(bottom[i + 1].data))
 valid_class_ids = all_detections.keys()
@@ -169,7 +170,7 @@ class SamplingExtractorLayer(BaseLayer):
 sample_ids1 = []
 sample_ids2 = []
-for _ in xrange(self._num_steps):
+for _ in range(self._num_steps):
 ids_pair = np.random.choice(valid_ids, 2, replace=False)
 sample_ids1.append(ids_pair[0])
 sample_ids2.append(ids_pair[1])
@@ -184,7 +185,7 @@ class SamplingExtractorLayer(BaseLayer):
 sampled_labels.append(np.full([self._num_steps], float(class_id), dtype=np.float32))
 self._samples += [(alpha[i], betta[i], detections[sample_ids1[i]], detections[sample_ids2[i]])
-for i in xrange(len(sample_ids1))]
+for i, _ in enumerate(sample_ids1)]
 assert len(self._samples) == len(sampled_vectors) * self._num_steps
@@ -220,14 +221,14 @@ class SamplingExtractorLayer(BaseLayer):
 raise Exception('Cannot propagate down through the matched detections')
 anchor_diff_data = {}
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 1]:
 anchor_diff_data[anchor_id] = np.zeros(bottom[anchor_id + 1].data.shape)
 if len(self._samples) > 0:
 diff_data = np.array(top[0].diff)
-for out_sample_id in xrange(len(self._samples)):
+for out_sample_id, _ in enumerate(self._samples):
 alpha, betta, det_i, det_j = self._samples[out_sample_id]
 current_diff = diff_data[out_sample_id]
@@ -240,7 +241,7 @@ class SamplingExtractorLayer(BaseLayer):
 anchor_diff_data[det_j.anchor][det_j.item, :, det_j.y, det_j.x] \
 += betta * current_diff
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 1]:
 bottom[anchor_id + 1].diff[...] = anchor_diff_data[anchor_id]
 except Exception:
...
@@ -12,6 +12,7 @@
 """
 import traceback
+from builtins import range
 from collections import namedtuple
 import numpy as np
@@ -150,7 +151,7 @@ class SplitLossLayer(BaseLayer):
 centers = np.array(bottom[1].data)
 self._embeddings = []
-for i in xrange(self._num_anchors):
+for i in range(self._num_anchors):
 self._embeddings.append(np.array(bottom[i + 2].data))
 all_candidates = []
@@ -160,9 +161,9 @@ class SplitLossLayer(BaseLayer):
 for item_id in batch_detections.keys():
 detections = batch_detections[item_id]
-for i in xrange(len(detections)):
+for i, _ in enumerate(detections):
 anchor_det = detections[i]
-for j in xrange(i + 1, len(detections)):
+for j in range(i + 1, len(detections)):
 ref_det = detections[j]
 # exclude same class predictions
@@ -250,7 +251,7 @@ class SplitLossLayer(BaseLayer):
 centers_diff_data = np.zeros(bottom[1].data.shape) if propagate_down[1] else None
 diff_data = {}
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 2]:
 diff_data[anchor_id] = np.zeros(bottom[anchor_id + 2].data.shape)
@@ -273,7 +274,7 @@ class SplitLossLayer(BaseLayer):
 if centers_diff_data is not None:
 bottom[1].diff[...] = centers_diff_data
-for anchor_id in xrange(self._num_anchors):
+for anchor_id in range(self._num_anchors):
 if propagate_down[anchor_id + 2]:
 bottom[anchor_id + 2].diff[...] = diff_data[anchor_id]
 except Exception:
...
@@ -136,7 +136,7 @@ class ResizeCropImagesMapper(mapreducer.BasicMapper):
 FLAGS.output_side_length)
 except Exception as e:
 # we ignore the exception (maybe the image is corrupted?)
-print('{}: {}'.format(line, e)
+print('{}: {}'.format(line, e))
 yield value, FLAGS.output_folder
 mapreducer.REGISTER_DEFAULT_MAPPER(ResizeCropImagesMapper)
...