Unverified Commit 93c908c8 authored by: W wuyefeilin, committed by: GitHub

fix some (#418)

* update hrnet yaml

* add benchmark yaml

* update deeplabv3p.yml

* delete utf-8 declare

* update val.py

* update deeplabv3p.yml
Parent 140025b7
...@@ -28,6 +28,7 @@ model:
  backbone:
    type: ResNet50_vd
    output_stride: 8
+   multi_grid: [1, 2, 4]
  num_classes: 19
  backbone_indices: [0, 3]
  aspp_ratios: [1, 12, 24, 36]
......
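For context on the new `multi_grid` entry: in the DeepLabv3/v3+ convention, the multi-grid rates multiply the base dilation of the backbone's last stage, which is 4 when `output_stride` is 8 (and 2 when it is 16). A minimal sketch of that mapping, not taken from the PaddleSeg code and using a made-up helper name:

```python
# Hypothetical helper illustrating the usual DeepLabv3 multi_grid convention;
# the function name and the base-dilation table are assumptions, not PaddleSeg code.
def last_stage_dilations(output_stride, multi_grid):
    base = {8: 4, 16: 2}[output_stride]        # base dilation of the final stage
    return [base * rate for rate in multi_grid]

print(last_stage_dilations(8, [1, 2, 4]))      # [4, 8, 16]
```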
...@@ -19,7 +19,7 @@ import tqdm
import cv2
import paddle
import paddle.nn.functional as F
- from paddle import to_variable
+ from paddle import to_tensor
import paddleseg.utils.logger as logger
from paddleseg.utils import ConfusionMatrix
...@@ -47,7 +47,7 @@ def evaluate(model,
    timer.start()
    for iter, (im, im_info, label) in tqdm.tqdm(
            enumerate(eval_dataset), total=total_iters):
-       im = to_variable(im)
+       im = to_tensor(im)
        logits = model(im)
        pred = paddle.argmax(logits[0], axis=1)
        pred = pred.numpy().astype('float32')
......
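The `to_variable` to `to_tensor` change tracks the Paddle 2.x dygraph API, where `paddle.to_tensor` turns a NumPy array into a tensor the model can consume. A minimal, self-contained sketch of the pattern in the loop above, with random arrays standing in for the real image batch and model output:

```python
import numpy as np
import paddle

# Dummy NCHW batch standing in for the preprocessed image `im`.
im = np.random.rand(1, 3, 64, 64).astype('float32')

x = paddle.to_tensor(im)                # replaces the removed paddle.to_variable(im)
logits = paddle.rand([1, 19, 64, 64])   # placeholder for model(x)[0]
pred = paddle.argmax(logits, axis=1)    # per-pixel class ids, shape [1, 64, 64]
print(pred.numpy().astype('float32').shape)
```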
- # -*- encoding: utf-8 -*-
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
......
- # -*- encoding: utf-8 -*-
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
......
- # coding:utf-8
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
......
- # -*- encoding: utf-8 -*-
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
...@@ -15,4 +14,4 @@
from . import layer_libs
from . import activation
from . import pyramid_pool
\ No newline at end of file
- # -*- encoding: utf-8 -*-
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
...@@ -28,8 +27,8 @@ class Activation(nn.Layer):
        >>> print(sigmoid)
        <class 'paddle.nn.layer.activation.Sigmoid'>
        >>> not_exit_one = Activation("not_exit_one")
        KeyError: "not_exit_one does not exist in the current dict_keys(['elu', 'gelu', 'hardshrink',
        'tanh', 'hardtanh', 'prelu', 'relu', 'relu6', 'selu', 'leakyrelu', 'sigmoid', 'softmax',
        'softplus', 'softshrink', 'softsign', 'tanhshrink', 'logsigmoid', 'logsoftmax', 'hsigmoid'])"
    Args:
...@@ -57,4 +56,4 @@ class Activation(nn.Layer):
        if self._act is not None:
            return self.act_func(x)
        else:
            return x
\ No newline at end of file
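For readers following the docstring above, a short usage sketch of the `Activation` wrapper; the import path and the `act=None` default are assumptions and may differ between PaddleSeg versions:

```python
import paddle
# Import path assumed from the __init__.py hunk above (activation sits next to
# layer_libs and pyramid_pool); adjust to your PaddleSeg version if needed.
from paddleseg.models.common.activation import Activation

relu = Activation("relu")                         # wraps paddle.nn.ReLU
x = paddle.to_tensor([[-1.0, 0.5], [2.0, -3.0]])
print(relu(x).numpy())                            # negatives clamped to 0

identity = Activation(act=None)                   # no activation: forward returns x unchanged
print(identity(x).numpy())
```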
- # -*- encoding: utf-8 -*-
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
......
- # -*- encoding: utf-8 -*-
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
...@@ -87,7 +86,7 @@ class ASPPModule(nn.Layer):
        img_avg = self.global_avg_pool(x)
        img_avg = F.resize_bilinear(img_avg, out_shape=x.shape[2:])
        outputs.append(img_avg)
        x = paddle.concat(outputs, axis=1)
        x = self.conv_bn_relu(x)
        x = self.dropout(x)
......
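The lines above implement the image-level branch of ASPP: globally average-pool the feature map, resize it back to the input's spatial size, and concatenate it with the dilated-convolution branches. A standalone sketch of that flow using Paddle 2.x functional ops (`F.interpolate` is the 2.x counterpart of the older `F.resize_bilinear` shown in the diff; shapes and branch composition here are illustrative only):

```python
import paddle
import paddle.nn.functional as F

x = paddle.rand([1, 256, 32, 32])                  # backbone feature map (NCHW)

img_avg = F.adaptive_avg_pool2d(x, output_size=1)  # global average pooling
img_avg = F.interpolate(img_avg, size=x.shape[2:], mode='bilinear')
outputs = [x, img_avg]                             # dilated conv branches omitted

y = paddle.concat(outputs, axis=1)                 # channel-wise fusion, as in the diff
print(y.shape)                                     # [1, 512, 32, 32]
```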