diff --git a/02.recognize_digits/README.cn.md b/02.recognize_digits/README.cn.md
index 110ee1219903f867199ee4115583db2c7b7aca7d..ff6324b2f1ca2e1130c12e459e044843f69ed2e9 100644
--- a/02.recognize_digits/README.cn.md
+++ b/02.recognize_digits/README.cn.md
@@ -38,9 +38,9 @@ $$ y_i = \text{softmax}(\sum_j W_{i,j}x_j + b_i) $$
For a multi-class classification problem with $N$ classes, we specify $N$ output nodes; softmax normalizes the $N$-dimensional result vector to $N$ real values in the range [0,1], each representing the probability that the sample belongs to the corresponding class. Here, $y_i$ is the predicted probability that the image is the digit $i$.
-For classification problems, we generally use the cross-entropy loss function (cross entropy); the formula is as follows:
+For classification problems, we generally use the cross-entropy loss function (cross entropy loss); the formula is as follows:
-$$ \text{crossentropy}(label, y) = -\sum_i label_ilog(y_i) $$
+$$ L_{\text{cross-entropy}}(label, y) = -\sum_i label_i \log(y_i) $$
Fig. 2 shows the softmax regression network, with the weights drawn in blue and the bias in red; +1 indicates that the coefficient of the bias parameter is 1.
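To make the two formulas concrete, here is a minimal NumPy sketch (not part of the patched tutorial; the score values are made up for illustration) that computes softmax followed by the cross-entropy loss above:

```python
import numpy as np

def softmax(z):
    # Subtract the max before exponentiating for numerical stability;
    # the result is a probability vector that sums to 1.
    e = np.exp(z - np.max(z))
    return e / e.sum()

def cross_entropy(label, y):
    # L = -sum_i label_i * log(y_i), with label a one-hot vector.
    return -np.sum(label * np.log(y))

z = np.array([2.0, 1.0, 0.1])        # illustrative raw scores for 3 classes
label = np.array([1.0, 0.0, 0.0])    # one-hot ground truth
print(cross_entropy(label, softmax(z)))  # small when the true class gets high probability
```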
@@ -155,6 +155,7 @@ PaddlePaddle provides in its API automatic loading of the [MNIST](http://yann.lecun.com/exdb/mni
```python
+from __future__ import print_function
import paddle
import paddle.fluid as fluid
```
### Program Functions Configuration
@@ -294,8 +295,8 @@ def event_handler(event):
if event.step % 100 == 0:
- # event.metrics maps with train program return arguments.
- # event.metrics[0] will yeild avg_cost and event.metrics[1] will yeild acc in this example.
- print "Pass %d, Batch %d, Cost %f" % (
- event.step, event.epoch, event.metrics[0])
+ # event.metrics maps to the train program's return values.
+ # event.metrics[0] will yield avg_cost and event.metrics[1] will yield acc in this example.
+ print("Pass %d, Batch %d, Cost %f" % (
+ event.epoch, event.step, event.metrics[0]))
if isinstance(event, fluid.EndEpochEvent):
avg_cost, acc = trainer.test(
@@ -419,7 +420,7 @@ img = load_image(cur_dir + '/image/infer_3.png')
```python
results = inferencer.infer({'img': img})
-lab = np.argsort(results) # probs and lab are the results of one batch data
-print "Label of image/infer_3.png is: %d" % lab[0][0][-1]
+lab = np.argsort(results) # probs and lab are the results for one batch of data
+print("Inference result of image/infer_3.png is: %d" % lab[0][0][-1])
```
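A note on the indexing in the snippet above: `np.argsort` sorts along the last axis in ascending order, so `lab[0][0][-1]` picks the class with the highest probability for the first (and only) image in the batch. A self-contained sketch with a made-up probability vector shows the same trick:

```python
import numpy as np

# Fake inference output: a batch of 1 image with 10 digit-class probabilities.
probs = np.array([[[0.01, 0.02, 0.05, 0.80, 0.02,
                    0.03, 0.02, 0.02, 0.02, 0.01]]])
lab = np.argsort(probs)   # class indices from least to most probable
print(lab[0][0][-1])      # prints 3, the most probable digit
```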
## Summary
diff --git a/02.recognize_digits/README.md b/02.recognize_digits/README.md
index 412c7ffc12690439cf156e753b429ea30487485f..97e05cf627c644ae63cf6c0ff7e23fa804fbd68b 100644
--- a/02.recognize_digits/README.md
+++ b/02.recognize_digits/README.md
@@ -50,7 +50,7 @@ For an $N$-class classification problem with $N$ output nodes, Softmax normalize
In such a classification problem, we usually use the cross entropy loss function:
-$$ \text{crossentropy}(label, y) = -\sum_i label_ilog(y_i) $$
+$$ L_{\text{cross-entropy}}(label, y) = -\sum_i label_i \log(y_i) $$
Fig. 2 illustrates a softmax regression network, with the weights in blue, and the bias in red. `+1` indicates that the bias is $1$.
@@ -161,6 +161,7 @@ A PaddlePaddle program starts from importing the API package:
```python
+from __future__ import print_function
import paddle
import paddle.fluid as fluid
```
### Program Functions Configuration
@@ -300,8 +301,8 @@ def event_handler(event):
if event.step % 100 == 0:
- # event.metrics maps with train program return arguments.
- # event.metrics[0] will yeild avg_cost and event.metrics[1] will yeild acc in this example.
- print "Pass %d, Batch %d, Cost %f" % (
- event.step, event.epoch, event.metrics[0])
+ # event.metrics maps to the train program's return values.
+ # event.metrics[0] will yield avg_cost and event.metrics[1] will yield acc in this example.
+ print("Pass %d, Batch %d, Cost %f" % (
+ event.epoch, event.step, event.metrics[0]))
if isinstance(event, fluid.EndEpochEvent):
avg_cost, acc = trainer.test(
@@ -432,7 +433,7 @@ Now we are ready to do inference.
```python
results = inferencer.infer({'img': img})
-lab = np.argsort(results) # probs and lab are the results of one batch data
-print "Label of image/infer_3.png is: %d" % lab[0][0][-1]
+lab = np.argsort(results) # probs and lab are the results for one batch of data
+print("Inference result of image/infer_3.png is: %d" % lab[0][0][-1])
```
diff --git a/02.recognize_digits/index.cn.html b/02.recognize_digits/index.cn.html
index 6a7e018ce26d66cffe5676381cb37e551fe44adc..b2c6e6da715c6e908a9be70aa901bd3ca275d275 100644
--- a/02.recognize_digits/index.cn.html
+++ b/02.recognize_digits/index.cn.html
@@ -80,9 +80,9 @@ $$ y_i = \text{softmax}(\sum_j W_{i,j}x_j + b_i) $$
For a multi-class classification problem with $N$ classes, we specify $N$ output nodes; softmax normalizes the $N$-dimensional result vector to $N$ real values in the range [0,1], each representing the probability that the sample belongs to the corresponding class. Here, $y_i$ is the predicted probability that the image is the digit $i$.
-For classification problems, we generally use the cross-entropy loss function (cross entropy); the formula is as follows:
+For classification problems, we generally use the cross-entropy loss function (cross entropy loss); the formula is as follows:
-$$ \text{crossentropy}(label, y) = -\sum_i label_ilog(y_i) $$
+$$ L_{\text{cross-entropy}}(label, y) = -\sum_i label_i \log(y_i) $$
Fig. 2 shows the softmax regression network, with the weights drawn in blue and the bias in red; +1 indicates that the coefficient of the bias parameter is 1.
@@ -197,6 +197,7 @@ PaddlePaddle provides in its API automatic loading of the [MNIST](http://yann.lecun.com/exdb/mni
```python
+from __future__ import print_function
import paddle
import paddle.fluid as fluid
```
### Program Functions Configuration
@@ -336,8 +337,8 @@ def event_handler(event):
if event.step % 100 == 0:
- # event.metrics maps with train program return arguments.
- # event.metrics[0] will yeild avg_cost and event.metrics[1] will yeild acc in this example.
- print "Pass %d, Batch %d, Cost %f" % (
- event.step, event.epoch, event.metrics[0])
+ # event.metrics maps to the train program's return values.
+ # event.metrics[0] will yield avg_cost and event.metrics[1] will yield acc in this example.
+ print("Pass %d, Batch %d, Cost %f" % (
+ event.epoch, event.step, event.metrics[0]))
if isinstance(event, fluid.EndEpochEvent):
avg_cost, acc = trainer.test(
@@ -461,7 +462,7 @@ img = load_image(cur_dir + '/image/infer_3.png')
```python
results = inferencer.infer({'img': img})
-lab = np.argsort(results) # probs and lab are the results of one batch data
-print "Label of image/infer_3.png is: %d" % lab[0][0][-1]
+lab = np.argsort(results) # probs and lab are the results for one batch of data
+print("Inference result of image/infer_3.png is: %d" % lab[0][0][-1])
```
## Summary
diff --git a/02.recognize_digits/index.html b/02.recognize_digits/index.html
index 2102b759380116b3f3e487f01a91fc476ba75330..e508366805a5912ea80ecac435bba4e35e7e3941 100644
--- a/02.recognize_digits/index.html
+++ b/02.recognize_digits/index.html
@@ -92,7 +92,7 @@ For an $N$-class classification problem with $N$ output nodes, Softmax normalize
In such a classification problem, we usually use the cross entropy loss function:
-$$ \text{crossentropy}(label, y) = -\sum_i label_ilog(y_i) $$
+$$ L_{\text{cross-entropy}}(label, y) = -\sum_i label_i \log(y_i) $$
Fig. 2 illustrates a softmax regression network, with the weights in blue, and the bias in red. `+1` indicates that the bias is $1$.
@@ -203,6 +203,7 @@ A PaddlePaddle program starts from importing the API package:
```python
+from __future__ import print_function
import paddle
import paddle.fluid as fluid
```
### Program Functions Configuration
@@ -342,8 +343,8 @@ def event_handler(event):
if event.step % 100 == 0:
- # event.metrics maps with train program return arguments.
- # event.metrics[0] will yeild avg_cost and event.metrics[1] will yeild acc in this example.
- print "Pass %d, Batch %d, Cost %f" % (
- event.step, event.epoch, event.metrics[0])
+ # event.metrics maps to the train program's return values.
+ # event.metrics[0] will yield avg_cost and event.metrics[1] will yield acc in this example.
+ print("Pass %d, Batch %d, Cost %f" % (
+ event.epoch, event.step, event.metrics[0]))
if isinstance(event, fluid.EndEpochEvent):
avg_cost, acc = trainer.test(
@@ -474,7 +475,7 @@ Now we are ready to do inference.
```python
results = inferencer.infer({'img': img})
-lab = np.argsort(results) # probs and lab are the results of one batch data
-print "Label of image/infer_3.png is: %d" % lab[0][0][-1]
+lab = np.argsort(results) # probs and lab are the results for one batch of data
+print("Inference result of image/infer_3.png is: %d" % lab[0][0][-1])
```