diff --git a/tf2_quickstart_for_experts.ipynb b/tf2_quickstart_for_experts.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..910d3405d28e375ddfe98eb3d98e4f1514caadac --- /dev/null +++ b/tf2_quickstart_for_experts.ipynb @@ -0,0 +1,236 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "import tensorflow as tf\n", + "\n", + "from tensorflow.keras.layers import Dense, Flatten, Conv2D\n", + "from tensorflow.keras import Model" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "outputs": [], + "source": [ + "mnist = tf.keras.datasets.mnist\n", + "\n", + "(x_train, y_train), (x_test, y_test) = mnist.load_data()\n", + "x_train, x_test = x_train / 255.0, x_test / 255.0\n", + "\n", + "x_train = x_train[..., tf.newaxis].astype('float32')\n", + "x_test = x_test[..., tf.newaxis].astype('float32')" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 3, + "outputs": [], + "source": [ + "train_ds = tf.data.Dataset.from_tensor_slices((x_train, y_train)).shuffle(10000).batch(32)\n", + "test_ds = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(32)" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 4, + "outputs": [], + "source": [ + "class MyModel(Model):\n", + " def __init__(self):\n", + " super(MyModel, self).__init__()\n", + " self.conv1 = Conv2D(32, 3, activation='relu')\n", + " self.flatten = Flatten()\n", + " self.d1 = Dense(128, activation='relu')\n", + " self.d2 = Dense(10)\n", + "\n", + " def call(self, x):\n", + " x = self.conv1(x)\n", + " x = self.flatten(x)\n", + " x = self.d1(x)\n", + " return self.d2(x)\n", + "\n", + "model = MyModel()" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + 
"execution_count": 5, + "outputs": [], + "source": [ + "loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)\n", + "optimizer = tf.keras.optimizers.Adam()" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 6, + "outputs": [], + "source": [ + "train_loss = tf.keras.metrics.Mean(name='train_loss')\n", + "train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='train_accuracy')\n", + "\n", + "test_loss = tf.keras.metrics.Mean(name='test_loss')\n", + "test_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='test_accuracy')" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 7, + "outputs": [], + "source": [ + "@tf.function\n", + "def train_step(images, labels):\n", + " with tf.GradientTape() as tape:\n", + " predictions = model(images, training=True)\n", + " loss = loss_object(labels, predictions)\n", + " gradients = tape.gradient(loss, model.trainable_variables)\n", + " optimizer.apply_gradients(zip(gradients, model.trainable_variables))\n", + "\n", + " train_loss(loss)\n", + " train_accuracy(labels, predictions)" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 8, + "outputs": [], + "source": [ + "@tf.function\n", + "def test_step(images, labels):\n", + " predictions = model(images, training=False)\n", + " t_loss = loss_object(labels, predictions)\n", + "\n", + " test_loss(t_loss)\n", + " test_accuracy(labels, predictions)" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 9, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1, Loss: 0.1377670019865036, Accuracy: 95.8133316040039, Test Loss: 0.06627238541841507, Test Accuracy: 97.80999755859375\n", 
+ "Epoch 2, Loss: 0.04135768860578537, Accuracy: 98.72999572753906, Test Loss: 0.06060675159096718, Test Accuracy: 98.11000061035156\n", + "Epoch 3, Loss: 0.0216764397919178, Accuracy: 99.2733383178711, Test Loss: 0.05681402236223221, Test Accuracy: 98.36000061035156\n", + "Epoch 4, Loss: 0.013888753019273281, Accuracy: 99.5433349609375, Test Loss: 0.058001551777124405, Test Accuracy: 98.3499984741211\n", + "Epoch 5, Loss: 0.008770273067057133, Accuracy: 99.70999908447266, Test Loss: 0.05913984403014183, Test Accuracy: 98.38999938964844\n" + ] + } + ], + "source": [ + "EPOCHS = 5\n", + "\n", + "for epoch in range(EPOCHS):\n", + " train_loss.reset_states()\n", + " train_accuracy.reset_states()\n", + " test_loss.reset_states()\n", + " test_accuracy.reset_states()\n", + "\n", + " for images, labels in train_ds:\n", + " train_step(images, labels)\n", + "\n", + " for test_images, test_labels in test_ds:\n", + " test_step(test_images, test_labels)\n", + "\n", + " print(\n", + " f'Epoch {epoch + 1}, '\n", + " f'Loss: {train_loss.result()}, '\n", + " f'Accuracy: {train_accuracy.result() * 100}, '\n", + " f'Test Loss: {test_loss.result()}, '\n", + " f'Test Accuracy: {test_accuracy.result() * 100}'\n", + " )" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": null, + "outputs": [], + "source": [], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file