From 567e90e44e5e260a0042538b044ab3d69ddefd2b Mon Sep 17 00:00:00 2001
From: whs
Date: Mon, 18 May 2020 14:24:36 +0800
Subject: [PATCH] Rename fluid.layers.one_hot to fluid.one_hot (#292)

---
 demo/darts/train_imagenet.py     | 2 +-
 demo/models/slimfacenet.py       | 2 +-
 paddleslim/models/slimfacenet.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/demo/darts/train_imagenet.py b/demo/darts/train_imagenet.py
index 2bbf738c..09a90a96 100644
--- a/demo/darts/train_imagenet.py
+++ b/demo/darts/train_imagenet.py
@@ -68,7 +68,7 @@ add_arg('use_data_parallel', ast.literal_eval, False, "The flag indicating whet
 
 def cross_entropy_label_smooth(preds, targets, epsilon):
     preds = fluid.layers.softmax(preds)
-    targets_one_hot = fluid.layers.one_hot(input=targets, depth=args.class_num)
+    targets_one_hot = fluid.one_hot(input=targets, depth=args.class_num)
     targets_smooth = fluid.layers.label_smooth(
         targets_one_hot, epsilon=epsilon, dtype="float32")
     loss = fluid.layers.cross_entropy(
diff --git a/demo/models/slimfacenet.py b/demo/models/slimfacenet.py
index 6a1c0a7d..33a4deab 100644
--- a/demo/models/slimfacenet.py
+++ b/demo/models/slimfacenet.py
@@ -334,7 +334,7 @@ class SlimFaceNet():
         else:
             pass
 
-        one_hot = fluid.layers.one_hot(input=label, depth=out_dim)
+        one_hot = fluid.one_hot(input=label, depth=out_dim)
         output = fluid.layers.elementwise_mul(
             one_hot, phi) + fluid.layers.elementwise_mul(
                 (1.0 - one_hot), cosine)
diff --git a/paddleslim/models/slimfacenet.py b/paddleslim/models/slimfacenet.py
index 95b445fc..5276a515 100644
--- a/paddleslim/models/slimfacenet.py
+++ b/paddleslim/models/slimfacenet.py
@@ -334,7 +334,7 @@ class SlimFaceNet():
         else:
             pass
 
-        one_hot = fluid.layers.one_hot(input=label, depth=out_dim)
+        one_hot = fluid.one_hot(input=label, depth=out_dim)
         output = fluid.layers.elementwise_mul(
             one_hot, phi) + fluid.layers.elementwise_mul(
                 (1.0 - one_hot), cosine)
-- 
GitLab
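
Note: the patch only renames the call site; the input= and depth= keyword arguments stay the same. Below is a minimal usage sketch of the renamed API, assuming a Paddle 1.x static-graph program. The label placeholder, executor setup, and depth value are illustrative and not part of the patch.

    import numpy as np
    import paddle.fluid as fluid

    # Placeholder for integer class ids; name and shape are illustrative.
    label = fluid.data(name="label", shape=[None, 1], dtype="int64")
    # Renamed call: fluid.one_hot instead of fluid.layers.one_hot,
    # with the same input= and depth= keywords as in the patch.
    one_hot = fluid.one_hot(input=label, depth=10)

    exe = fluid.Executor(fluid.CPUPlace())
    exe.run(fluid.default_startup_program())
    out, = exe.run(feed={"label": np.array([[3], [7]], dtype="int64")},
                   fetch_list=[one_hot])
    print(out)  # each row has a single 1.0, at index 3 and 7 respectively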