PaddlePaddle / PaddleClas
Commit aa8f4c16
Authored May 12, 2022 by zhiboniu

    attr ma test ok

Parent: 0a3ecf60
Showing 9 changed files with 240 additions and 27 deletions (+240 / -27).
ppcls/arch/backbone/legendary_models/resnet.py           +17   -7
ppcls/arch/backbone/model_zoo/strongbaseline_attr.py      +2   -1
ppcls/configs/Attr/StrongBaselineAttr.yaml                +7   -5
ppcls/data/dataloader/__init__.py                         +1   -0
ppcls/data/dataloader/attr_dataset.py                    +82   -0
ppcls/data/dataloader/common_dataset.py                   +6   -6
ppcls/data/preprocess/__init__.py                         +1   -0
ppcls/data/preprocess/ops/operators.py                  +101   -2
ppcls/loss/multilabelloss.py                             +23   -6
ppcls/arch/backbone/legendary_models/resnet.py

@@ -20,9 +20,10 @@ import numpy as np
 import paddle
 from paddle import ParamAttr
 import paddle.nn as nn
-from paddle.nn import Conv2D, BatchNorm, Linear
+from paddle.nn import Conv2D, BatchNorm, Linear, BatchNorm2D
 from paddle.nn import AdaptiveAvgPool2D, MaxPool2D, AvgPool2D
 from paddle.nn.initializer import Uniform
+from paddle.regularizer import L2Decay
 import math

 from ppcls.arch.backbone.base.theseus_layer import TheseusLayer

@@ -113,6 +114,7 @@ class ConvBNLayer(TheseusLayer):
                  filter_size,
                  stride=1,
                  groups=1,
+                 norm_decay=0.0005,
                  is_vd_mode=False,
                  act=None,
                  lr_mult=1.0,

@@ -132,11 +134,18 @@ class ConvBNLayer(TheseusLayer):
             weight_attr=ParamAttr(learning_rate=lr_mult),
             bias_attr=False,
             data_format=data_format)
-        self.bn = BatchNorm(
-            num_filters,
-            param_attr=ParamAttr(learning_rate=lr_mult),
-            bias_attr=ParamAttr(learning_rate=lr_mult),
-            data_layout=data_format)
+        param_attr = ParamAttr(
+            learning_rate=lr_mult,
+            regularizer=L2Decay(norm_decay),
+            trainable=True)
+        bias_attr = ParamAttr(
+            learning_rate=lr_mult,
+            regularizer=L2Decay(norm_decay),
+            trainable=True)
+        self.bn = BatchNorm2D(
+            num_filters,
+            weight_attr=param_attr,
+            bias_attr=bias_attr)
         self.relu = nn.ReLU()

     def forward(self, x):

@@ -192,6 +201,7 @@ class BottleneckBlock(TheseusLayer):
             is_vd_mode=False if if_first else True,
             lr_mult=lr_mult,
             data_format=data_format)
         self.relu = nn.ReLU()
         self.shortcut = shortcut

@@ -312,7 +322,7 @@ class ResNet(TheseusLayer):
             [[input_image_channel, 32, 3, 2], [32, 32, 3, 1], [32, 64, 3, 1]]
         }

         self.stem = nn.Sequential(*[
             ConvBNLayer(
                 num_channels=in_c,
                 num_filters=out_c,
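For reference, a minimal standalone sketch of the batch-norm construction this file switches to: a ParamAttr carrying an L2Decay regularizer on both scale and bias, passed to nn.BatchNorm2D. The values mirror the new ConvBNLayer defaults; the input tensor is arbitrary.

import paddle
import paddle.nn as nn
from paddle import ParamAttr
from paddle.regularizer import L2Decay

num_filters, lr_mult, norm_decay = 64, 1.0, 0.0005   # mirrors the new defaults

param_attr = ParamAttr(
    learning_rate=lr_mult,
    regularizer=L2Decay(norm_decay),   # weight decay on the BN scale
    trainable=True)
bias_attr = ParamAttr(
    learning_rate=lr_mult,
    regularizer=L2Decay(norm_decay),   # and on the BN bias
    trainable=True)

bn = nn.BatchNorm2D(num_filters, weight_attr=param_attr, bias_attr=bias_attr)
x = paddle.randn([1, num_filters, 8, 8])
print(bn(x).shape)   # [1, 64, 8, 8]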
ppcls/arch/backbone/model_zoo/strongbaseline_attr.py

@@ -55,7 +55,7 @@ class StrongBaselinePAR(nn.Layer):
     def forward(self, x):
         fc_feat = self.backbone(x)
         output = F.sigmoid(fc_feat)
-        return output
+        return fc_feat


 def _load_pretrained(pretrained, model, model_url, use_ssld):

@@ -95,4 +95,5 @@ def load_pretrained(model, local_weight_path):
 def StrongBaselineAttr(pretrained=True, use_ssld=False, **kwargs):
     model = StrongBaselinePAR(**kwargs)
     _load_pretrained(MODEL_URLS["StrongBaselineAttr"], model, None, None)
+    # load_pretrained(model, MODEL_URLS["StrongBaselineAttr"])
     return model
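With this change StrongBaselinePAR.forward returns the raw logits (fc_feat) instead of sigmoid probabilities, which is what a logits-based loss expects. A small caller-side sketch with hypothetical tensors:

import paddle
import paddle.nn.functional as F

fc_feat = paddle.to_tensor([[2.0, -1.0, 0.5]])   # hypothetical logits from forward()
label = paddle.to_tensor([[1.0, 0.0, 1.0]])

# The loss consumes raw logits directly ...
loss = F.binary_cross_entropy_with_logits(logit=fc_feat, label=label)

# ... and per-attribute probabilities are recovered explicitly where needed.
probs = F.sigmoid(fc_feat)
print(loss.numpy(), probs.numpy())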
ppcls/configs/Attr/StrongBaselineAttr.yaml

@@ -55,12 +55,14 @@ DataLoader:
         - DecodeImage:
             to_rgb: True
             channel_first: False
-        # - ResizeImage:
-        #     size: [192, 256]
-        - RandCropImage:
+        - ResizeImage:
+            size: [192, 256]
+        - Pad:
+            size: [212, 276]
+            pad_mode: 1
+            fill_value: 0
+        - RandomCropImage:
             size: [192, 256]
-            scale: [0.9, 1.1]
-            ratio: [0.75, 0.75]
         - RandFlipImage:
             flip_code: 1
         - NormalizeImage:
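The new train-time chain resizes to 192x256, zero-pads to 212x276 (pad_mode 1 centers the image, adding a 10-pixel border on each side), and randomly crops back to 192x256, i.e. up to 20 pixels of translation jitter. A numpy-only sketch of the geometry (shapes are H x W x C; it mirrors the ops' logic rather than importing them):

import random
import numpy as np

img = np.zeros((256, 192, 3), dtype=np.float32)      # after ResizeImage size [192, 256] = (w, h)

canvas = np.zeros((276, 212, 3), dtype=np.float32)   # Pad size [212, 276], fill_value 0
oy, ox = (276 - 256) // 2, (212 - 192) // 2          # pad_mode 1: center, 10 px border
canvas[oy:oy + 256, ox:ox + 192, :] = img

i = random.randint(0, 276 - 256)                     # RandomCropImage size [192, 256]
j = random.randint(0, 212 - 192)
crop = canvas[i:i + 256, j:j + 192, :]
print(canvas.shape, crop.shape)                      # (276, 212, 3) (256, 192, 3)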
ppcls/data/dataloader/__init__.py

@@ -10,3 +10,4 @@ from ppcls.data.dataloader.mix_sampler import MixSampler
 from ppcls.data.dataloader.multi_scale_sampler import MultiScaleSampler
 from ppcls.data.dataloader.pk_sampler import PKSampler
 from ppcls.data.dataloader.person_dataset import Market1501, MSMT17
+from ppcls.data.dataloader.attr_dataset import AttrDataset
ppcls/data/dataloader/attr_dataset.py (new file, mode 0 → 100644)

# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import numpy as np
import os
import pickle

from .common_dataset import CommonDataset
from ppcls.data.preprocess import transform


class AttrDataset(CommonDataset):
    def _load_anno(self, seed=None, split='trainval'):
        assert os.path.exists(self._cls_path)
        assert os.path.exists(self._img_root)
        anno_path = self._cls_path
        image_dir = self._img_root
        self.images = []
        self.labels = []

        dataset_info = pickle.load(open(anno_path, 'rb+'))
        img_id = dataset_info.image_name

        attr_label = dataset_info.label
        attr_label[attr_label == 2] = 0
        attr_id = dataset_info.attr_name
        if 'label_idx' in dataset_info.keys():
            eval_attr_idx = dataset_info.label_idx.eval
            attr_label = attr_label[:, eval_attr_idx]
            attr_id = [attr_id[i] for i in eval_attr_idx]

        attr_num = len(attr_id)

        # mapping category name to class id
        # first_class:0, second_class:1, ...
        cname2cid = {attr_id[i]: i for i in range(attr_num)}

        assert split in dataset_info.partition.keys(
        ), f'split {split} is not exist'

        img_idx = dataset_info.partition[split]

        if isinstance(img_idx, list):
            img_idx = img_idx[0]  # default partition 0

        img_num = img_idx.shape[0]
        img_id = [img_id[i] for i in img_idx]
        label = attr_label[img_idx]  # [:, [0, 12]]
        self.label_ratio = label.mean(0)
        print("label_ratio:", self.label_ratio)
        for i, (img_i, label_i) in enumerate(zip(img_id, label)):
            imgname = os.path.join(image_dir, img_i)
            self.images.append(imgname)
            self.labels.append(np.int64(label_i))

    def __getitem__(self, idx):
        try:
            with open(self.images[idx], 'rb') as f:
                img = f.read()
            if self._transform_ops:
                img = transform(img, self._transform_ops)
            img = img.transpose((2, 0, 1))
            return (img, [self.labels[idx], self.label_ratio])
        except Exception as ex:
            logger.error("Exception occured when parse line: {} with msg: {}".
                         format(self.images[idx], ex))
            rnd_idx = np.random.randint(self.__len__())
            return self.__getitem__(rnd_idx)
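AttrDataset expects the annotation pickle to unpickle into an object exposing image_name, label, attr_name and partition (plus an optional label_idx) both as attributes and via .keys(). An easydict.EasyDict satisfies that interface; the tiny two-image, three-attribute annotation below is a hypothetical sketch of a compatible file, not an official format description.

import pickle
import numpy as np
from easydict import EasyDict   # assumption: an attribute-dict type with .keys()

anno = EasyDict()
anno.image_name = ['0001.jpg', '0002.jpg']
anno.label = np.array([[1, 0, 2],               # a raw value of 2 is remapped to 0 by _load_anno
                       [0, 1, 0]])
anno.attr_name = ['Female', 'Hat', 'Backpack']
anno.partition = EasyDict(trainval=np.array([0, 1]), test=np.array([1]))

with open('dummy_attr_anno.pkl', 'wb') as f:
    pickle.dump(anno, f)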
ppcls/data/dataloader/common_dataset.py

@@ -44,11 +44,11 @@ def create_operators(params):
 class CommonDataset(Dataset):
     def __init__(
             self,
             image_root,
             cls_label_path,
-            transform_ops=None, ):
+            transform_ops=None,
+            split='trainval'):
         self._img_root = image_root
         self._cls_path = cls_label_path
         if transform_ops:

@@ -56,7 +56,7 @@ class CommonDataset(Dataset):
         self.images = []
         self.labels = []
-        self._load_anno()
+        self._load_anno(split=split)

     def _load_anno(self):
         pass
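A minimal sketch of the new plumbing: the split chosen at construction time is forwarded to _load_anno(), so one annotation file can serve several partitions. The names mirror the diff; the subclass and paths are placeholders.

class TinyCommonDataset:
    def __init__(self, image_root, cls_label_path, transform_ops=None,
                 split='trainval'):
        self._img_root = image_root
        self._cls_path = cls_label_path
        self.images, self.labels = [], []
        self._load_anno(split=split)

    def _load_anno(self, split='trainval'):
        # a real subclass (e.g. AttrDataset) reads dataset_info.partition[split] here
        print("loading split:", split)


TinyCommonDataset('img_root/', 'anno.pkl', split='test')   # prints: loading split: test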
ppcls/data/preprocess/__init__.py

@@ -33,6 +33,7 @@ from ppcls.data.preprocess.ops.operators import AugMix
 from ppcls.data.preprocess.ops.operators import Pad
 from ppcls.data.preprocess.ops.operators import ToTensor
 from ppcls.data.preprocess.ops.operators import Normalize
+from ppcls.data.preprocess.ops.operators import RandomCropImage

 from ppcls.data.preprocess.batch_ops.batch_operators import MixupOperator, CutmixOperator, OpSampler, FmixOperator
ppcls/data/preprocess/ops/operators.py

@@ -190,6 +190,105 @@ class CropImage(object):
         return img[h_start:h_end, w_start:w_end, :]


+class Pad(object):
+    def __init__(self,
+                 size=None,
+                 size_divisor=32,
+                 pad_mode=0,
+                 offsets=None,
+                 fill_value=(127.5, 127.5, 127.5)):
+        """
+        Pad image to a specified size or multiple of size_divisor.
+        Args:
+            size (int, list): image target size, if None, pad to multiple of size_divisor, default None
+            size_divisor (int): size divisor, default 32
+            pad_mode (int): pad mode, currently only supports four modes [-1, 0, 1, 2]. if -1, use specified offsets
+                if 0, only pad to right and bottom. if 1, pad according to center. if 2, only pad left and top
+            offsets (list): [offset_x, offset_y], specify offset while padding, only supported pad_mode=-1
+            fill_value (bool): rgb value of pad area, default (127.5, 127.5, 127.5)
+        """
+        if not isinstance(size, (int, list)):
+            raise TypeError(
+                "Type of target_size is invalid when random_size is True. \
+                            Must be List, now is {}".format(type(size)))
+        if isinstance(size, int):
+            size = [size, size]
+        assert pad_mode in [
+            -1, 0, 1, 2
+        ], 'currently only supports four modes [-1, 0, 1, 2]'
+        if pad_mode == -1:
+            assert offsets, 'if pad_mode is -1, offsets should not be None'
+
+        self.size = size
+        self.size_divisor = size_divisor
+        self.pad_mode = pad_mode
+        self.fill_value = fill_value
+        self.offsets = offsets
+
+    def apply_image(self, image, offsets, im_size, size):
+        x, y = offsets
+        im_h, im_w = im_size
+        h, w = size
+        canvas = np.ones((h, w, 3), dtype=np.float32)
+        canvas *= np.array(self.fill_value, dtype=np.float32)
+        canvas[y:y + im_h, x:x + im_w, :] = image.astype(np.float32)
+        return canvas
+
+    def __call__(self, img):
+        im_h, im_w = img.shape[:2]
+        if self.size:
+            w, h = self.size
+            assert (
+                im_h <= h and im_w <= w
+            ), '(h, w) of target size should be greater than (im_h, im_w)'
+        else:
+            h = int(np.ceil(im_h / self.size_divisor) * self.size_divisor)
+            w = int(np.ceil(im_w / self.size_divisor) * self.size_divisor)
+        if h == im_h and w == im_w:
+            return img.astype(np.float32)
+        if self.pad_mode == -1:
+            offset_x, offset_y = self.offsets
+        elif self.pad_mode == 0:
+            offset_y, offset_x = 0, 0
+        elif self.pad_mode == 1:
+            offset_y, offset_x = (h - im_h) // 2, (w - im_w) // 2
+        else:
+            offset_y, offset_x = h - im_h, w - im_w
+        offsets, im_size, size = [offset_x, offset_y], [im_h, im_w], [h, w]
+        return self.apply_image(img, offsets, im_size, size)
+
+
+class RandomCropImage(object):
+    """Random crop image only
+    """
+
+    def __init__(self, size):
+        super(RandomCropImage, self).__init__()
+        if isinstance(size, int):
+            size = [size, size]
+        self.size = size
+
+    def __call__(self, img):
+        h, w = img.shape[:2]
+        tw, th = self.size
+        i = random.randint(0, h - th)
+        j = random.randint(0, w - tw)
+
+        img = img[i:i + th, j:j + tw, :]
+        if img.shape[0] != 256 or img.shape[1] != 192:
+            raise ValueError('sample: ', h, w, i, j, th, tw, img.shape)
+        return img
+
+
 class RandCropImage(object):
     """ random crop image """

@@ -463,8 +562,8 @@ class Pad(object):
         # Process fill color for affine transforms
         major_found, minor_found = (int(v)
                                     for v in PILLOW_VERSION.split('.')[:2])
         major_required, minor_required = (
             int(v) for v in
             min_pil_version.split('.')[:2])
         if major_found < major_required or (major_found == major_required and
                                             minor_found < minor_required):
             if fill is None:
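A numpy-only sketch of the offset rules in the new Pad op: with size=None the target is rounded up to the next multiple of size_divisor, and pad_mode decides where the original image lands on the canvas. The 250x190 input is an arbitrary assumption.

import numpy as np

im_h, im_w, size_divisor = 250, 190, 32
h = int(np.ceil(im_h / size_divisor) * size_divisor)     # 256
w = int(np.ceil(im_w / size_divisor) * size_divisor)     # 192

offsets = {
    0: (0, 0),                                  # pad right/bottom only
    1: ((h - im_h) // 2, (w - im_w) // 2),      # center: (3, 1)
    2: (h - im_h, w - im_w),                    # pad left/top only: (6, 2)
}
print(h, w, offsets)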
ppcls/loss/multilabelloss.py

@@ -3,16 +3,28 @@ import paddle.nn as nn
 import paddle.nn.functional as F


+def ratio2weight(targets, ratio):
+    pos_weights = targets * (1. - ratio)
+    neg_weights = (1. - targets) * ratio
+    weights = paddle.exp(neg_weights + pos_weights)
+
+    # for RAP dataloader, targets element may be 2, with or without smooth, some element must great than 1
+    weights = weights - weights * (targets > 1)
+
+    return weights
+
+
 class MultiLabelLoss(nn.Layer):
     """
     Multi-label loss
     """

-    def __init__(self, epsilon=None):
+    def __init__(self, epsilon=None, weight_ratio=None):
         super().__init__()
         if epsilon is not None and (epsilon <= 0 or epsilon >= 1):
             epsilon = None
         self.epsilon = epsilon
+        self.weight_ratio = weight_ratio

     def _labelsmoothing(self, target, class_num):
         if target.ndim == 1 or target.shape[-1] != class_num:

@@ -26,11 +38,16 @@ class MultiLabelLoss(nn.Layer):
     def _binary_crossentropy(self, input, target, class_num):
         if self.epsilon is not None:
             target = self._labelsmoothing(target, class_num)
             cost = F.binary_cross_entropy_with_logits(
                 logit=input, label=target)
-        else:
-            cost = F.binary_cross_entropy_with_logits(
-                logit=input, label=target)
+        if self.weight_ratio is not None:
+            targets_mask = paddle.cast(target > 0.5, 'float32')
+            weight = ratio2weight(targets_mask,
+                                  paddle.to_tensor(self.weight_ratio))
+            weight = weight * (target > -1)
+            cost = cost * weight
+            import pdb
+            pdb.set_trace()
         return cost
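The new ratio2weight helper turns per-attribute positive ratios (the label_ratio computed by AttrDataset) into per-element BCE weights: a positive on a rare attribute gets roughly exp(1 - ratio), a negative on a frequent attribute gets roughly exp(ratio). A minimal numeric sketch of that rule (the commit additionally zeroes the weight where the raw label is greater than 1):

import paddle

def weight_for(targets, ratio):
    pos_weights = targets * (1. - ratio)    # rare positives -> larger weight
    neg_weights = (1. - targets) * ratio    # frequent negatives -> larger weight
    return paddle.exp(neg_weights + pos_weights)

ratio = paddle.to_tensor([0.1, 0.5, 0.9])   # hypothetical per-attribute positive ratios
targets = paddle.to_tensor([[1., 0., 1.]])
print(weight_for(targets, ratio).numpy())   # approx [[2.46, 1.65, 1.11]]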