Task Digits Numbers
In [1]: import numpy as np            # linear algebra
import pandas as pd                   # data processing, CSV file I/O (e.g. pd.read_csv)
import tensorflow as tf
import matplotlib.pyplot as plt

In [4]: digits = tf.keras.datasets.mnist.load_data()
digits

Out[4]: ((array([[[0, 0, 0, ..., 0, 0, 0],
                  ...,
                  [0, 0, 0, ..., 0, 0, 0]]], dtype=uint8),
          array([5, 0, 4, ..., 5, 6, 8], dtype=uint8)),
         (array([[[0, 0, 0, ..., 0, 0, 0],
                  ...,
                  [0, 0, 0, ..., 0, 0, 0]]], dtype=uint8),
          array([7, 2, 1, ..., 4, 5, 6], dtype=uint8)))

In [6]: print(len(digits[0][1]))

(x_train, y_train), (x_test, y_test) = digits

60000

In [7]: print(len(x_train),len(y_train))
print(len(x_test),len(y_test))

print(x_train[0].shape)
print(x_test[0].shape)

60000 60000
10000 10000
(28, 28)
(28, 28)
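
load_data() returns a pair of (images, labels) tuples: 60,000 training examples and 10,000 test examples, each image a 28x28 array of uint8 pixel intensities. The same information can be read directly from the array shapes (a minimal sketch, not part of the original run):

print(x_train.shape, y_train.shape)   # (60000, 28, 28) (60000,)
print(x_test.shape, y_test.shape)     # (10000, 28, 28) (10000,)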

In [8]: for i in range(4):
    plt.figure()
    plt.imshow(x_train[i])
    plt.colorbar()
    plt.grid(False)
    plt.show()

In [9]: x_train = x_train/255.0
x_test = x_test/255.0
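
Dividing by 255.0 rescales the 8-bit pixel values from the range [0, 255] to [0.0, 1.0], which keeps the inputs on a scale that gradient-based training handles well. A quick sanity check (a sketch added here, not in the original notebook):

print(x_train.min(), x_train.max())   # expected: 0.0 1.0
print(x_train.dtype)                  # float64 after the division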

In [10]: class_names = ['0','1', '2', '3', '4','5','6','7','8','9']

In [11]: plt.figure(figsize=(10,10))
for i in range(25):
    plt.subplot(5,5,i+1)
    plt.xticks([])
    plt.yticks([])
    plt.grid(False)
    plt.imshow(x_train[i], cmap=plt.cm.binary)
plt.show()

In [12]: model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10)
])
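
The Flatten layer turns each 28x28 image into a 784-element vector, the Dense(128) layer applies a ReLU non-linearity, and the final Dense(10) layer emits one raw logit per digit class. Parameter counts can be checked with model.summary() (a sketch; the numbers below are computed from the layer sizes, 784*128+128 and 128*10+10):

model.summary()
# Dense(128): 100,480 parameters; Dense(10): 1,290 parameters; total: 101,770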

In [13]: model.compile(optimizer='adam',
loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
metrics=['accuracy'])
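
SparseCategoricalCrossentropy is used with from_logits=True because the last layer has no activation, so the loss applies the softmax internally. An equivalent setup (a hedged sketch, not the cell above) puts the softmax inside the model and sets from_logits=False:

alt_model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10, activation='softmax')   # softmax inside the model
])
alt_model.compile(optimizer='adam',
                  loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
                  metrics=['accuracy'])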

In [15]: model.fit(x_train,y_train,epochs = 15)

Epoch 1/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0136 - accuracy: 0.9956
Epoch 2/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0116 - accuracy: 0.9965
Epoch 3/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0095 - accuracy: 0.9971
Epoch 4/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0097 - accuracy: 0.9969
Epoch 5/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0079 - accuracy: 0.9977
Epoch 6/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0067 - accuracy: 0.9979
Epoch 7/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0074 - accuracy: 0.9975
Epoch 8/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0058 - accuracy: 0.9982
Epoch 9/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0046 - accuracy: 0.9986
Epoch 10/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0070 - accuracy: 0.9975
Epoch 11/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0037 - accuracy: 0.9989
Epoch 12/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0061 - accuracy: 0.9980
Epoch 13/15
1875/1875 [==============================] - 3s 1ms/step - loss: 0.0039 - accuracy: 0.9988
Epoch 14/15
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0048 - accuracy: 0.9985
Epoch 15/15
1875/1875 [==============================] - 3s 1ms/step - loss: 0.0049 - accuracy: 0.9984
Out[15]: <keras.callbacks.History at 0x16be8530370>
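
Training accuracy reaches roughly 99.8% by the last epoch (the run above appears to continue from an earlier fit, since epoch 1 already starts near 99.6%). To watch generalisation during training, part of the training set can be held out; validation_split is a standard Keras argument, and the 0.1 value here is an assumption:

history = model.fit(x_train, y_train, epochs=15, validation_split=0.1)
# history.history['val_accuracy'] can then be compared against the training curve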

In [16]: test_loss, test_acc = model.evaluate(x_test, y_test, verbose=2)

print('\nTest accuracy:', test_acc)

313/313 - 0s - loss: 0.1110 - accuracy: 0.9802 - 467ms/epoch - 1ms/step

Test accuracy: 0.9801999926567078
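
Test accuracy (~98.0%) sits noticeably below the final training accuracy (~99.8%), a mild sign of overfitting in this small fully-connected network. A per-class breakdown can show which digits are confused (a minimal sketch using tf.math.confusion_matrix; not part of the original run):

test_logits = model.predict(x_test)
test_preds = np.argmax(test_logits, axis=1)
print(tf.math.confusion_matrix(y_test, test_preds, num_classes=10))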

In [18]: pred = tf.keras.Sequential([model,tf.keras.layers.Softmax()])

predict = pred.predict(x_test)

313/313 [==============================] - 0s 986us/step
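
Appending a Softmax layer turns the model's raw logits into probabilities that sum to 1 for each image, which is what the plotting helpers below expect. A quick check (a sketch, using the predict array from the cell above):

print(predict.shape)       # (10000, 10): one probability vector per test image
print(predict[0].sum())    # close to 1.0, since softmax normalises the logits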

In [19]: plt.matshow(x_test[0])

Out[19]: <matplotlib.image.AxesImage at 0x16be8533490>

In [20]: np.argmax(predict[0])

Out[20]: 7
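
The highest-probability class for the first test image is 7, which matches the ground-truth label stored in y_test (a one-line check, not in the original run):

print(np.argmax(predict[0]), y_test[0])   # predicted 7, true label 7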

In [21]: def plot_image(i, predictions_array, true_label, img):
    # Show test image i with its predicted class, confidence, and true class.
    true_label, img = true_label[i], img[i]
    plt.grid(False)
    plt.xticks([])
    plt.yticks([])

    plt.imshow(img, cmap=plt.cm.binary)

    predicted_label = np.argmax(predictions_array)
    if predicted_label == true_label:
        color = 'blue'    # correct prediction
    else:
        color = 'red'     # wrong prediction

    plt.xlabel("{} {:2.0f}% ({})".format(class_names[predicted_label],
                                         100*np.max(predictions_array),
                                         class_names[true_label]),
               color=color)

def plot_value_array(i, predictions_array, true_label):
    # Bar chart of the 10 class probabilities; predicted bar red, true bar blue.
    true_label = true_label[i]
    plt.grid(False)
    plt.xticks(range(10))
    plt.yticks([])
    thisplot = plt.bar(range(10), predictions_array, color="#777777")
    plt.ylim([0, 1])
    predicted_label = np.argmax(predictions_array)

    thisplot[predicted_label].set_color('red')
    thisplot[true_label].set_color('blue')

In [22]: i = 0
plt.figure(figsize=(6,3))
plt.subplot(1,2,1)
plot_image(i, predict[i], y_test, x_test)
plt.subplot(1,2,2)
plot_value_array(i, predict[i], y_test)
plt.show()

In [23]: i = 3
plt.figure(figsize=(6,3))
plt.subplot(1,2,1)
plot_image(i, predict[i], y_test, x_test)
plt.subplot(1,2,2)
plot_value_array(i, predict[i], y_test)
plt.show()

In [24]: i = 12
plt.figure(figsize=(6,3))
plt.subplot(1,2,1)
plot_image(i, predict[i], y_test, x_test)
plt.subplot(1,2,2)
plot_value_array(i, predict[i], y_test)
plt.show()

In [25]: num_rows = 5
num_cols = 3
num_images = num_rows*num_cols
plt.figure(figsize=(2*2*num_cols, 2*num_rows))
for i in range(num_images):
    plt.subplot(num_rows, 2*num_cols, 2*i+1)
    plot_image(i, predict[i], y_test, x_test)
    plt.subplot(num_rows, 2*num_cols, 2*i+2)
    plot_value_array(i, predict[i], y_test)
plt.tight_layout()
plt.show()

In [26]: model.save("Digits_Recognize.h5")
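
The model is saved in the legacy HDF5 format implied by the .h5 extension. It can be restored later without rebuilding or retraining (a minimal sketch):

reloaded = tf.keras.models.load_model("Digits_Recognize.h5")
reloaded.evaluate(x_test, y_test, verbose=2)   # should reproduce the test accuracy above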
