retrieval.py 13.2 KB
Newer Older
D
dongshuilong 已提交
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
D
dongshuilong 已提交
17

18
from collections import defaultdict
H
HydrogenSulfate 已提交
19 20

import numpy as np
D
dongshuilong 已提交
21
import paddle
22
import scipy
23

T
Tingquan Gao 已提交
24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75
from ppcls.utils import all_gather, logger


def retrieval_eval(engine, epoch_id=0):
    """Run retrieval evaluation for one epoch.

    Extracts query/gallery features, computes the query-gallery similarity
    (optionally with k-reciprocal re-ranking), evaluates the configured
    metric and logs the averaged result.

    Args:
        engine: Engine object holding the model, the dataloaders
            (gallery/query or combined gallery_query), the config dict,
            eval_metric_func and eval_loss_func.
        epoch_id (int, optional): Epoch number used only in the log message.
            Defaults to 0.

    Returns:
        float: Value of the first metric in the metric dict (0.0 when no
            metric is computed).
    """
    engine.model.eval()
    # step1. prepare query and gallery features
    if engine.gallery_query_dataloader is not None:
        # a single combined dataloader serves as both gallery and query set
        gallery_feat, gallery_label, gallery_camera = compute_feature(
            engine, "gallery_query")
        query_feat, query_label, query_camera = gallery_feat, gallery_label, gallery_camera
    else:
        gallery_feat, gallery_label, gallery_camera = compute_feature(
            engine, "gallery")
        query_feat, query_label, query_camera = compute_feature(engine,
                                                                "query")

    # step2. split features into feature blocks for saving memory
    num_query = len(query_feat)
    block_size = engine.config["Global"].get("sim_block_size", 64)
    sections = [block_size] * (num_query // block_size)
    if num_query % block_size > 0:
        # last, smaller block holds the remainder
        sections.append(num_query % block_size)

    query_feat_blocks = paddle.split(query_feat, sections)
    query_label_blocks = paddle.split(query_label, sections)
    query_camera_blocks = paddle.split(
        query_camera, sections) if query_camera is not None else None
    metric_key = None

    # step3. compute metric
    # NOTE(review): this checks eval_loss_func but then calls
    # eval_metric_func — possibly intended to check eval_metric_func; confirm.
    if engine.eval_loss_func is None:
        metric_dict = {metric_key: 0.0}
    else:
        use_reranking = engine.config["Global"].get("re_ranking", False)
        logger.info(f"re_ranking={use_reranking}")
        if use_reranking:
            # compute distance matrix
            distmat = compute_re_ranking_dist(
                query_feat, gallery_feat, engine.config["Global"].get(
                    "feature_normalize", True), 20, 6, 0.3)
            # exclude illegal distance
            if query_camera is not None:
                # a gallery sample is a valid match candidate only when it
                # differs from the query in label or camera id; push invalid
                # pairs beyond the current maximum distance
                camera_mask = query_camera != gallery_camera.t()
                label_mask = query_label != gallery_label.t()
                keep_mask = label_mask | camera_mask
                distmat = keep_mask.astype(query_feat.dtype) * distmat + (
                    ~keep_mask).astype(query_feat.dtype) * (distmat.max() + 1)
            else:
                keep_mask = None
            # compute metric with all samples; negated because the other
            # branch feeds a similarity matrix (higher is better)
            metric_dict = engine.eval_metric_func(-distmat, query_label,
                                                  gallery_label, keep_mask)
        else:
            metric_dict = defaultdict(float)
            for block_idx, block_feat in enumerate(query_feat_blocks):
                # compute distance matrix
                distmat = paddle.matmul(
                    block_feat, gallery_feat, transpose_y=True)
                # exclude illegal distance
                if query_camera is not None:
                    camera_mask = query_camera_blocks[
                        block_idx] != gallery_camera.t()
                    label_mask = query_label_blocks[
                        block_idx] != gallery_label.t()
                    keep_mask = label_mask | camera_mask
                    # zero out similarities of invalid pairs
                    distmat = keep_mask.astype(query_feat.dtype) * distmat
                else:
                    keep_mask = None
                # compute metric by block
                metric_block = engine.eval_metric_func(
                    distmat, query_label_blocks[block_idx], gallery_label,
                    keep_mask)
                # accumulate metric, weighted by the block's share of queries
                for key in metric_block:
                    metric_dict[key] += metric_block[key] * block_feat.shape[
                        0] / num_query

    metric_info_list = []
    for key, value in metric_dict.items():
        metric_info_list.append(f"{key}: {value:.5f}")
        if metric_key is None:
            # the first metric key is the one returned to the caller
            metric_key = key
    metric_msg = ", ".join(metric_info_list)
    logger.info(f"[Eval][Epoch {epoch_id}][Avg]{metric_msg}")

    return metric_dict[metric_key]


def compute_feature(engine, name="gallery"):
    """Extract features, labels and (optional) camera ids for a whole dataset.

    Iterates the selected dataloader, runs the model, post-processes the
    features (normalize / binarize per config), gathers results across
    devices when running distributed, and trims padding samples at the end.

    Args:
        engine: Engine object holding the model, dataloaders and config.
        name (str, optional): Which dataloader to use: "gallery", "query"
            or "gallery_query". Defaults to "gallery".

    Returns:
        tuple: (all_feat, all_label, all_camera); all_camera is None when
            batches carry no camera id.

    Raises:
        ValueError: If ``name`` is not one of the supported dataset names.
    """
    if name == "gallery":
        dataloader = engine.gallery_dataloader
    elif name == "query":
        dataloader = engine.query_dataloader
    elif name == "gallery_query":
        dataloader = engine.gallery_query_dataloader
    else:
        raise ValueError(
            f"Only support gallery or query or gallery_query dataset, but got {name}"
        )

    all_feat = []
    all_label = []
    all_camera = []
    has_camera = False
    for idx, batch in enumerate(dataloader):  # load is very time-consuming
        if idx % engine.config["Global"]["print_batch_step"] == 0:
            logger.info(
                f"{name} feature calculation process: [{idx}/{len(dataloader)}]"
            )

        batch = [paddle.to_tensor(x) for x in batch]
        # batch[1]: labels reshaped to an int64 column vector
        batch[1] = batch[1].reshape([-1, 1]).astype("int64")
        if len(batch) >= 3:
            # batch[2]: camera ids (e.g. for ReID datasets)
            has_camera = True
            batch[2] = batch[2].reshape([-1, 1]).astype("int64")
        with engine.auto_cast(is_eval=True):
            out = engine.model(batch[0])
        if "Student" in out:
            # distillation model: evaluate on the student branch
            out = out["Student"]

        # get features
        if engine.config["Global"].get("retrieval_feature_from",
                                       "features") == "features":
            # use output from neck as feature
            batch_feat = out["features"]
        else:
            # use output from backbone as feature
            batch_feat = out["backbone"]

        # do norm(optional): L2-normalize each feature vector
        if engine.config["Global"].get("feature_normalize", True):
            batch_feat = paddle.nn.functional.normalize(batch_feat, p=2)

        # do binarize(optional): map features to {-1, +1}
        if engine.config["Global"].get("feature_binarize") == "round":
            batch_feat = paddle.round(batch_feat).astype("float32") * 2.0 - 1.0
        elif engine.config["Global"].get("feature_binarize") == "sign":
            batch_feat = paddle.sign(batch_feat).astype("float32")

        if paddle.distributed.get_world_size() > 1:
            # multi-card evaluation: collect results from every rank
            all_feat.append(all_gather(batch_feat))
            all_label.append(all_gather(batch[1]))
            if has_camera:
                all_camera.append(all_gather(batch[2]))
        else:
            all_feat.append(batch_feat)
            all_label.append(batch[1])
            if has_camera:
                all_camera.append(batch[2])

    if engine.use_dali:
        dataloader.reset()

    all_feat = paddle.concat(all_feat)
    all_label = paddle.concat(all_label)
    if has_camera:
        all_camera = paddle.concat(all_camera)
    else:
        all_camera = None
    # discard redundant padding sample(s) at the end
    total_samples = dataloader.size if engine.use_dali else len(
        dataloader.dataset)
    all_feat = all_feat[:total_samples]
    all_label = all_label[:total_samples]
    if has_camera:
        all_camera = all_camera[:total_samples]

    logger.info(f"Build {name} done, all feat shape: {all_feat.shape}")
    return all_feat, all_label, all_camera
H
HydrogenSulfate 已提交
193 194


195 196
def k_reciprocal_neighbor(rank: np.ndarray, p: int, k: int) -> np.ndarray:
    """Implementation of k-reciprocal nearest neighbors, i.e. R(p, k)
H
HydrogenSulfate 已提交
197 198

    Args:
199 200 201
        rank (np.ndarray): Rank mat with shape of [N, N].
        p (int): Probe index.
        k (int): Parameter k for k-reciprocal nearest neighbors algorithm.
H
HydrogenSulfate 已提交
202 203

    Returns:
204
        np.ndarray: K-reciprocal nearest neighbors of probe p with shape of [M, ].
H
HydrogenSulfate 已提交
205
    """
206 207 208 209 210 211 212
    # use k+1 for excluding probe index itself
    forward_k_neigh_index = rank[p, :k + 1]
    backward_k_neigh_index = rank[forward_k_neigh_index, :k + 1]
    candidate = np.where(backward_k_neigh_index == p)[0]
    return forward_k_neigh_index[candidate]


213 214
def compute_re_ranking_dist(query_feat: paddle.Tensor,
                            gallery_feat: paddle.Tensor,
                            feature_normed: bool=True,
                            k1: int=20,
                            k2: int=6,
                            lamb: float=0.5) -> paddle.Tensor:
    """
    Re-ranking Person Re-identification with k-reciprocal Encoding
    Reference: https://arxiv.org/abs/1701.08398
    Code reference: https://github.com/michuanhaohao/reid-strong-baseline/blob/master/utils/re_ranking.py

    Args:
        query_feat (paddle.Tensor): Query features with shape of [num_query, feature_dim].
        gallery_feat (paddle.Tensor):  Gallery features with shape of [num_gallery, feature_dim].
        feature_normed (bool, optional):  Whether input features are normalized.
        k1 (int, optional): Parameter for K-reciprocal nearest neighbors. Defaults to 20.
        k2 (int, optional): Parameter for K-nearest neighbors. Defaults to 6.
        lamb (float, optional): Penalty factor. Defaults to 0.5.

    Returns:
        paddle.Tensor: (1 - lamb) x Dj + lamb x D, with shape of [num_query, num_gallery].
    """
    num_query = query_feat.shape[0]
    num_gallery = gallery_feat.shape[0]
    num_all = num_query + num_gallery
    feat = paddle.concat([query_feat, gallery_feat], 0)
    logger.info("Using GPU to compute original distance matrix")
    # use L2 distance
    if feature_normed:
        # unit-norm features: ||a - b||^2 = 2 - 2 * a.b
        original_dist = 2 - 2 * paddle.matmul(feat, feat, transpose_y=True)
    else:
        # ||a - b||^2 = ||a||^2 + ||b||^2 - 2 * a.b
        original_dist = paddle.pow(feat, 2).sum(axis=1, keepdim=True).expand([num_all, num_all]) + \
            paddle.pow(feat, 2).sum(axis=1, keepdim=True).expand([num_all, num_all]).t()
        # BUGFIX: paddle.addmm computes alpha * x @ y + beta * input, so the
        # -2 factor must be alpha. The previous positional call
        # addmm(feat, feat.t(), -2.0, 1.0) set beta=-2.0, alpha=1.0 and
        # produced feat@feat.T - 2*sq_norms instead of the squared L2 distance.
        original_dist = paddle.addmm(
            original_dist, feat, feat.t(), beta=1.0, alpha=-2.0)
    original_dist = original_dist.numpy()
    del feat

    # scale each column by its max, then transpose so rows are probes
    original_dist = np.transpose(original_dist / np.max(original_dist, axis=0))
    V = np.zeros_like(original_dist).astype(np.float16)
    # first k1+1 columns of each row are the sorted nearest neighbor indices
    # (the remaining columns are unsorted, which is all the algorithm needs)
    initial_rank = np.argpartition(original_dist, range(1, k1 + 1))
    logger.info("Start re-ranking...")

    for p in range(num_all):
        # compute R(p,k1)
        p_k_reciprocal_ind = k_reciprocal_neighbor(initial_rank, p, k1)

        # compute R*(p,k1)=R(p,k1)∪R(q,k1/2)
        # s.t. |R(p,k1)∩R(q,k1/2)|>=2/3|R(q,k1/2)|, ∀q∈R(p,k1)
        p_k_reciprocal_exp_ind = p_k_reciprocal_ind
        for _, q in enumerate(p_k_reciprocal_ind):
            q_k_reciprocal_ind = k_reciprocal_neighbor(initial_rank, q,
                                                       int(np.around(k1 / 2)))
            if len(
                    np.intersect1d(
                        p_k_reciprocal_ind,
                        q_k_reciprocal_ind,
                        assume_unique=True)) > 2 / 3 * len(q_k_reciprocal_ind):
                p_k_reciprocal_exp_ind = np.append(p_k_reciprocal_exp_ind,
                                                   q_k_reciprocal_ind)
        p_k_reciprocal_exp_ind = np.unique(p_k_reciprocal_exp_ind)
        # reweight distance using gaussian kernel
        weight = np.exp(-original_dist[p, p_k_reciprocal_exp_ind])
        V[p, p_k_reciprocal_exp_ind] = weight / np.sum(weight)

    # local query expansion: only the first num_query rows of the distance
    # matrix are needed from here on
    original_dist = original_dist[:num_query, ]
    if k2 > 1:
        try:
            # use sparse tensor to speed up query expansion
            # BUGFIX: a bare "import scipy" does not guarantee scipy.sparse
            # is loaded; import the submodule explicitly so the fast path
            # does not silently fall back to the for-loop.
            import scipy.sparse
            indices = (np.repeat(np.arange(num_all), k2),
                       initial_rank[:, :k2].reshape([-1, ]))
            values = np.array(
                [1 / k2 for _ in range(num_all * k2)], dtype="float16")
            V = scipy.sparse.coo_matrix(
                (values, indices), V.shape,
                dtype="float16") @ V.astype("float16")
        except Exception as e:
            logger.info(
                f"Failed to do local query expansion with sparse tensor for reason: \n{e}\n"
                f"now use for-loop instead")
            # use vanilla for-loop
            V_qe = np.zeros_like(V, dtype=np.float16)
            for i in range(num_all):
                V_qe[i, :] = np.mean(V[initial_rank[i, :k2], :], axis=0)
            V = V_qe
            del V_qe
    del initial_rank

    # cache k-reciprocal sets which contains gj
    invIndex = []
    for gj in range(num_all):
        invIndex.append(np.nonzero(V[:, gj])[0])

    # compute jaccard distance
    jaccard_dist = np.zeros_like(original_dist, dtype=np.float16)
    for p in range(num_query):
        sum_min = np.zeros(shape=[1, num_all], dtype=np.float16)
        gj_ind = np.nonzero(V[p, :])[0]
        gj_ind_inv = [invIndex[gj] for gj in gj_ind]
        for j, gj in enumerate(gj_ind):
            gi = gj_ind_inv[j]
            sum_min[0, gi] += np.minimum(V[p, gj], V[gi, gj])
        jaccard_dist[p] = 1 - sum_min / (2 - sum_min)

    # fuse jaccard distance with original distance
    final_dist = (1 - lamb) * jaccard_dist + lamb * original_dist
    # free the large intermediates before converting back to a tensor
    del original_dist
    del V
    del jaccard_dist
    # keep only query rows vs. gallery columns
    final_dist = final_dist[:num_query, num_query:]
    final_dist = paddle.to_tensor(final_dist)
    return final_dist