1010 " <td align=\" center\" ><a target=\" _blank\" href=\" http://introtodeeplearning.com\" >\n " ,
1111 " <img src=\" https://i.ibb.co/Jr88sn2/mit.png\" style=\" padding-bottom:5px;\" />\n " ,
1212 " Visit MIT Deep Learning</a></td>\n " ,
13- " <td align=\" center\" ><a target=\" _blank\" href=\" https://colab.research.google.com/github/aamini/introtodeeplearning/blob/master/lab2/solutions/TF_Part1_MNIST_Solution .ipynb\" >\n " ,
13+ " <td align=\" center\" ><a target=\" _blank\" href=\" https://colab.research.google.com/github/aamini/introtodeeplearning/blob/master/lab2/TF_Part1_MNIST .ipynb\" >\n " ,
1414 " <img src=\" https://i.ibb.co/2P3SLwK/colab.png\" style=\" padding-bottom:5px;\" />Run in Google Colab</a></td>\n " ,
15- " <td align=\" center\" ><a target=\" _blank\" href=\" https://github.com/aamini/introtodeeplearning/blob/master/lab2/solutions/TF_Part1_MNIST_Solution .ipynb\" >\n " ,
15+ " <td align=\" center\" ><a target=\" _blank\" href=\" https://github.com/aamini/introtodeeplearning/blob/master/lab2/TF_Part1_MNIST .ipynb\" >\n " ,
1616 " <img src=\" https://i.ibb.co/xfJbPmL/github.png\" height=\" 70px\" style=\" padding-bottom:5px;\" />View Source on GitHub</a></td>\n " ,
1717 " </table>\n " ,
1818 " \n " ,
216216 " tf.keras.layers.Flatten(),\n " ,
217217 " \n " ,
218218 " # '''TODO: Define the activation function for the first fully connected (Dense) layer.'''\n " ,
219- " tf.keras.layers.Dense(128, activation=tf.nn.relu),\n " ,
220- " # tf.keras.layers.Dense(128, activation= '''TODO'''),\n " ,
219+ " tf.keras.layers.Dense(128, activation= '''TODO'''),\n " ,
221220 " \n " ,
222221 " # '''TODO: Define the second Dense layer to output the classification probabilities'''\n " ,
223- " tf.keras.layers.Dense(10, activation=tf.nn.softmax)\n " ,
224- " # [TODO Dense layer to output classification probabilities]\n " ,
222+ " '''[TODO Dense layer to output classification probabilities]'''\n " ,
225223 " \n " ,
226224 " ])\n " ,
227225 " return fc_model\n " ,
348346 "outputs" : [],
349347 "source" : [
350348 " '''TODO: Use the evaluate method to test the model!'''\n " ,
351- " test_loss, test_acc = model.evaluate(test_images, test_labels) # TODO\n " ,
352- " # test_loss, test_acc = # TODO\n " ,
349+ " test_loss, test_acc = # TODO\n " ,
353350 " \n " ,
354351 " print('Test accuracy:', test_acc)"
355352 ]
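As the removed solution line shows, evaluate runs the model over the held-out set and returns the loss followed by each metric requested at compile time, which is why two values are unpacked here. A minimal sketch, assuming the model was compiled with metrics=['accuracy'] and that model, test_images, and test_labels are defined earlier in the notebook:

    # Loss comes first, then the compiled metrics in order.
    test_loss, test_acc = model.evaluate(test_images, test_labels)
    print('Test accuracy:', test_acc)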
410407 " cnn_model = tf.keras.Sequential([\n " ,
411408 " \n " ,
412409 " # TODO: Define the first convolutional layer\n " ,
413- " tf.keras.layers.Conv2D(filters=24, kernel_size=(3,3), activation=tf.nn.relu),\n " ,
414- " # tf.keras.layers.Conv2D('''TODO''')\n " ,
410+ " tf.keras.layers.Conv2D('''TODO''')\n " ,
415411 " \n " ,
416412 " # TODO: Define the first max pooling layer\n " ,
417- " tf.keras.layers.MaxPool2D(pool_size=(2,2)),\n " ,
418- " # tf.keras.layers.MaxPool2D('''TODO''')\n " ,
413+ " tf.keras.layers.MaxPool2D('''TODO''')\n " ,
419414 " \n " ,
420415 " # TODO: Define the second convolutional layer\n " ,
421- " tf.keras.layers.Conv2D(filters=36, kernel_size=(3,3), activation=tf.nn.relu),\n " ,
422- " # tf.keras.layers.Conv2D('''TODO''')\n " ,
416+ " tf.keras.layers.Conv2D('''TODO''')\n " ,
423417 " \n " ,
424418 " # TODO: Define the second max pooling layer\n " ,
425- " tf.keras.layers.MaxPool2D(pool_size=(2,2)),\n " ,
426- " # tf.keras.layers.MaxPool2D('''TODO''')\n " ,
419+ " tf.keras.layers.MaxPool2D('''TODO''')\n " ,
427420 " \n " ,
428421 " tf.keras.layers.Flatten(),\n " ,
429422 " tf.keras.layers.Dense(128, activation=tf.nn.relu),\n " ,
430423 " \n " ,
431424 " # TODO: Define the last Dense layer to output the classification\n " ,
432425 " # probabilities. Pay attention to the activation needed a probability\n " ,
433426 " # output\n " ,
434- " tf.keras.layers.Dense(10, activation=tf.nn.softmax)\n " ,
435- " # [TODO Dense layer to output classification probabilities]\n " ,
427+ " '''[TODO Dense layer to output classification probabilities]'''\n " ,
436428 " ])\n " ,
437429 " \n " ,
438430 " return cnn_model\n " ,
467459 " comet_model_2 = comet_ml.Experiment()\n " ,
468460 " \n " ,
469461 " '''TODO: Define the compile operation with your optimizer and learning rate of choice'''\n " ,
470- " cnn_model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-3),\n " ,
471- " loss='sparse_categorical_crossentropy',\n " ,
472- " metrics=['accuracy'])\n " ,
473- " # cnn_model.compile(optimizer='''TODO''', loss='''TODO''', metrics=['accuracy']) # TODO"
462+ " cnn_model.compile(optimizer='''TODO''', loss='''TODO''', metrics=['accuracy']) # TODO"
474463 ]
475464 },
476465 {
491480 "outputs" : [],
492481 "source" : [
493482 " '''TODO: Use model.fit to train the CNN model, with the same batch_size and number of epochs previously used.'''\n " ,
494- " cnn_model.fit(train_images, train_labels, batch_size=BATCH_SIZE, epochs=EPOCHS)\n " ,
495- " # cnn_model.fit('''TODO''')\n " ,
483+ " cnn_model.fit('''TODO''')\n " ,
496484 " # comet_model_2.end()"
497485 ]
498486 },
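The two hunks above pair naturally: sparse categorical cross-entropy matches the integer digit labels used in this lab, and the removed solution lines fill both stubs as follows. A minimal sketch, assuming tensorflow is imported as tf and that cnn_model, train_images, train_labels, BATCH_SIZE, and EPOCHS are defined earlier in the notebook:

    # Adam with a 1e-3 learning rate, integer-label cross-entropy loss.
    cnn_model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-3),
                      loss='sparse_categorical_crossentropy',
                      metrics=['accuracy'])
    cnn_model.fit(train_images, train_labels, batch_size=BATCH_SIZE, epochs=EPOCHS)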
514502 "outputs" : [],
515503 "source" : [
516504 " '''TODO: Use the evaluate method to test the model!'''\n " ,
517- " test_loss, test_acc = cnn_model.evaluate(test_images, test_labels)\n " ,
518- " # test_loss, test_acc = # TODO\n " ,
505+ " test_loss, test_acc = # TODO\n " ,
519506 " \n " ,
520507 " print('Test accuracy:', test_acc)"
521508 ]
594581 "source" : [
595582 " '''TODO: identify the digit with the highest confidence prediction for the first\n " ,
596583 " image in the test dataset. '''\n " ,
597- " prediction = np.argmax(predictions[0])\n " ,
598- " # prediction = # TODO\n " ,
584+ " prediction = # TODO\n " ,
599585 " \n " ,
600586 " print(prediction)"
601587 ]
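Each row of predictions is a 10-way softmax distribution, so the highest-confidence digit is simply the index of its largest entry, which is what the removed solution computes with np.argmax. A minimal sketch, assuming predictions came from a model.predict call on the test set earlier in the notebook:

    import numpy as np

    prediction = np.argmax(predictions[0])             # most likely digit for the first test image
    all_predictions = np.argmax(predictions, axis=1)   # or decode every test image at once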
725711 " # GradientTape to record differentiation operations\n " ,
726712 " with tf.GradientTape() as tape:\n " ,
727713 " #'''TODO: feed the images into the model and obtain the predictions'''\n " ,
728- " logits = cnn_model(images)\n " ,
729- " # logits = # TODO\n " ,
714+ " logits = # TODO\n " ,
730715 " \n " ,
731716 " #'''TODO: compute the categorical cross entropy loss\n " ,
732- " loss_value = tf.keras.backend.sparse_categorical_crossentropy(labels, logits) \n " ,
717+ " loss_value = tf.keras.backend.sparse_categorical_crossentropy('''TODO''', '''TODO''') # TODO \n " ,
733718 " comet_model_3.log_metric(\" loss\" , loss_value.numpy().mean(), step=idx)\n " ,
734- " # loss_value = tf.keras.backend.sparse_categorical_crossentropy('''TODO''', '''TODO''') # TODO\n " ,
735719 " \n " ,
736720 " loss_history.append(loss_value.numpy().mean()) # append the loss to the loss_history record\n " ,
737721 " plotter.plot(loss_history.get())\n " ,
738722 " \n " ,
739723 " # Backpropagation\n " ,
740724 " '''TODO: Use the tape to compute the gradient against all parameters in the CNN model.\n " ,
741725 " Use cnn_model.trainable_variables to access these parameters.'''\n " ,
742- " grads = tape.gradient(loss_value, cnn_model.trainable_variables)\n " ,
743- " # grads = # TODO\n " ,
726+ " grads = # TODO\n " ,
744727 " optimizer.apply_gradients(zip(grads, cnn_model.trainable_variables))\n " ,
745728 " \n " ,
746729 " comet_model_3.log_figure(figure=plt)\n " ,
@@ -786,4 +769,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 0
-}
+}