Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
YiRan_17
Python专题
提交
1f49bc61
Python专题
项目概览
YiRan_17
/
Python专题
与 Fork 源项目一致
Fork自
GitCode官方 / Python专题
通知
2
Star
1
Fork
1
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
DevOps
流水线
流水线任务
计划
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
Python专题
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
DevOps
DevOps
流水线
流水线任务
计划
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
流水线任务
提交
Issue看板
体验新版 GitCode,发现更多精彩内容 >>
提交
1f49bc61
编写于
7月 20, 2021
作者:
M
MaoXianxin
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
tf2_quickstart_for_experts
上级
0e7b5e83
变更
1
显示空白变更内容
内联
并排
Showing
1 changed file
with
236 addition
and
0 deletion
+236
-0
tf2_quickstart_for_experts.ipynb
tf2_quickstart_for_experts.ipynb
+236
-0
未找到文件。
tf2_quickstart_for_experts.ipynb
0 → 100644
浏览文件 @
1f49bc61
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import tensorflow as tf\n",
"\n",
"from tensorflow.keras.layers import Dense, Flatten, Conv2D\n",
"from tensorflow.keras import Model"
]
},
{
"cell_type": "code",
"execution_count": 2,
"outputs": [],
"source": [
"mnist = tf.keras.datasets.mnist\n",
"\n",
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
"x_train, x_test = x_train / 255.0, x_test / 255.0\n",
"\n",
"x_train = x_train[..., tf.newaxis].astype('float32')\n",
"x_test = x_test[..., tf.newaxis].astype('float32')"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": 3,
"outputs": [],
"source": [
"train_ds = tf.data.Dataset.from_tensor_slices((x_train, y_train)).shuffle(10000).batch(32)\n",
"test_ds = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(32)"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": 4,
"outputs": [],
"source": [
"class MyModel(Model):\n",
" def __init__(self):\n",
" super(MyModel, self).__init__()\n",
" self.conv1 = Conv2D(32, 3, activation='relu')\n",
" self.flatten = Flatten()\n",
" self.d1 = Dense(128, activation='relu')\n",
" self.d2 = Dense(10)\n",
"\n",
" def call(self, x):\n",
" x = self.conv1(x)\n",
" x = self.flatten(x)\n",
" x = self.d1(x)\n",
" return self.d2(x)\n",
"\n",
"model = MyModel()"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": 5,
"outputs": [],
"source": [
"loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)\n",
"optimizer = tf.keras.optimizers.Adam()"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": 6,
"outputs": [],
"source": [
"train_loss = tf.keras.metrics.Mean(name='train_loss')\n",
"train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='train_accuracy')\n",
"\n",
"test_loss = tf.keras.metrics.Mean(name='test_loss')\n",
"test_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='test_accuracy')"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": 7,
"outputs": [],
"source": [
"@tf.function\n",
"def train_step(images, labels):\n",
" with tf.GradientTape() as tape:\n",
" predictions = model(images, training=True)\n",
" loss = loss_object(labels, predictions)\n",
" gradients = tape.gradient(loss, model.trainable_variables)\n",
" optimizer.apply_gradients(zip(gradients, model.trainable_variables))\n",
"\n",
" train_loss(loss)\n",
" train_accuracy(labels, predictions)"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": 8,
"outputs": [],
"source": [
"@tf.function\n",
"def test_step(images, labels):\n",
" predictions = model(images, training=False)\n",
" t_loss = loss_object(labels, predictions)\n",
"\n",
" test_loss(t_loss)\n",
" test_accuracy(labels, predictions)"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": 9,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1, Loss: 0.1377670019865036, Accuracy: 95.8133316040039, Test Loss: 0.06627238541841507, Test Accuracy: 97.80999755859375\n",
"Epoch 2, Loss: 0.04135768860578537, Accuracy: 98.72999572753906, Test Loss: 0.06060675159096718, Test Accuracy: 98.11000061035156\n",
"Epoch 3, Loss: 0.0216764397919178, Accuracy: 99.2733383178711, Test Loss: 0.05681402236223221, Test Accuracy: 98.36000061035156\n",
"Epoch 4, Loss: 0.013888753019273281, Accuracy: 99.5433349609375, Test Loss: 0.058001551777124405, Test Accuracy: 98.3499984741211\n",
"Epoch 5, Loss: 0.008770273067057133, Accuracy: 99.70999908447266, Test Loss: 0.05913984403014183, Test Accuracy: 98.38999938964844\n"
]
}
],
"source": [
"EPOCHS = 5\n",
"\n",
"for epoch in range(EPOCHS):\n",
" train_loss.reset_states()\n",
" train_accuracy.reset_states()\n",
" test_loss.reset_states()\n",
" test_accuracy.reset_states()\n",
"\n",
" for images, labels in train_ds:\n",
" train_step(images, labels)\n",
"\n",
" for test_images, test_labels in test_ds:\n",
" test_step(test_images, test_labels)\n",
"\n",
" print(\n",
" f'Epoch {epoch + 1}, '\n",
" f'Loss: {train_loss.result()}, '\n",
" f'Accuracy: {train_accuracy.result() * 100}, '\n",
" f'Test Loss: {test_loss.result()}, '\n",
" f'Test Accuracy: {test_accuracy.result() * 100}'\n",
" )"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": 9,
"outputs": [],
"source": [],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
\ No newline at end of file
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录