
[Python] TensorFlow Flatten

agingcurve 2022. 7. 12. 11:13

TensorFlow Flatten

  • Let's build an image-classification model that uses a Flatten layer.
  • Flatten reshapes an image into a one-dimensional vector (a short sketch follows this list).
  • Let's compare a model without Flatten to one that uses it.
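Manually reshaping the data and using a Flatten layer produce the same shape; the only difference is whether the flattening happens outside or inside the model. A minimal sketch of the two, using a dummy tensor of my own (not part of the original notebook):

import tensorflow as tf

images = tf.zeros([2, 28, 28])                    # a dummy batch of two 28x28 images
print(tf.reshape(images, [2, 784]).shape)         # (2, 784): reshape outside the model
print(tf.keras.layers.Flatten()(images).shape)    # (2, 784): Flatten layer inside the model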
In [30]:
import tensorflow as tf
import pandas as pd
In [31]:
# Reshape approach: flatten each 28x28 image into a 784-vector before training
(독립, 종속), _ = tf.keras.datasets.mnist.load_data()
독립 = 독립.reshape(60000, 784)   # (60000, 28, 28) -> (60000, 784)
종속 = pd.get_dummies(종속)       # one-hot encode the digit labels
print(독립.shape, 종속.shape)
(60000, 784) (60000, 10)
In [32]:
# Build the model
X = tf.keras.layers.Input(shape=[784])
H = tf.keras.layers.Dense(84, activation="swish")(X)
Y = tf.keras.layers.Dense(10, activation="softmax")(H)
model = tf.keras.models.Model(X, Y)
model.compile(loss="categorical_crossentropy", metrics="accuracy")
In [33]:
# Train the model
model.fit(독립, 종속, epochs=10)
Epoch 1/10
1875/1875 [==============================] - 8s 4ms/step - loss: 2.5143 - accuracy: 0.8558
Epoch 2/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.5202 - accuracy: 0.9189
Epoch 3/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.4487 - accuracy: 0.9320
Epoch 4/10
1875/1875 [==============================] - 5s 2ms/step - loss: 0.4227 - accuracy: 0.9373
Epoch 5/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.4010 - accuracy: 0.9423
Epoch 6/10
1875/1875 [==============================] - 5s 2ms/step - loss: 0.3709 - accuracy: 0.9460
Epoch 7/10
1875/1875 [==============================] - 5s 2ms/step - loss: 0.3642 - accuracy: 0.9484
Epoch 8/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.3633 - accuracy: 0.9505
Epoch 9/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.3304 - accuracy: 0.9526
Epoch 10/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.3420 - accuracy: 0.9535
Out[33]:
<keras.callbacks.History at 0x7f26a60f16d0>
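The Out[33] value is the keras.callbacks.History object that fit() returns. If you capture it in a variable, its history attribute holds the per-epoch metrics. A minimal sketch of my own (re-running fit purely to illustrate; it continues training the same model):

history = model.fit(독립, 종속, epochs=10)   # fit() returns a History object
print(history.history["loss"])               # list of 10 per-epoch loss values
print(history.history["accuracy"])           # list of 10 per-epoch accuracy values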
In [34]:
# Use the model: predict the first five images and compare with the labels
pred = model.predict(독립[0:5])
print(pd.DataFrame(pred).round(2))   # softmax probabilities per digit
print(종속[:5])                      # one-hot ground-truth labels
     0    1    2    3    4    5    6    7    8    9
0  0.0  0.0  0.0  0.0  0.0  1.0  0.0  0.0  0.0  0.0
1  1.0  0.0  0.0  0.0  0.0  0.0  0.0  0.0  0.0  0.0
2  0.0  0.0  0.0  0.0  1.0  0.0  0.0  0.0  0.0  0.0
3  0.0  1.0  0.0  0.0  0.0  0.0  0.0  0.0  0.0  0.0
4  0.0  0.0  0.0  0.0  0.0  0.0  0.0  0.0  0.0  1.0
   0  1  2  3  4  5  6  7  8  9
0  0  0  0  0  0  1  0  0  0  0
1  1  0  0  0  0  0  0  0  0  0
2  0  0  0  0  1  0  0  0  0  0
3  0  1  0  0  0  0  0  0  0  0
4  0  0  0  0  0  0  0  0  0  1
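For readability you can also turn both the softmax probabilities and the one-hot labels back into digit classes with argmax. A minimal sketch (assumes NumPy; not part of the original notebook):

import numpy as np

pred_digits = np.argmax(pred, axis=1)             # most probable digit per sample
true_digits = np.argmax(종속[:5].values, axis=1)  # 5, 0, 4, 1, 9 for the first five MNIST images
print(pred_digits, true_digits)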
In [35]:
# Flatten approach: keep the images as 28x28; no manual reshape
(독립, 종속), _ = tf.keras.datasets.mnist.load_data()
종속 = pd.get_dummies(종속)       # one-hot encode the digit labels
print(독립.shape, 종속.shape)
(60000, 28, 28) (60000, 10)
In [36]:
# Build the model: Flatten turns each (28, 28) input into a 784-vector inside the model
X = tf.keras.layers.Input(shape=[28, 28])
H = tf.keras.layers.Flatten()(X)
H = tf.keras.layers.Dense(84, activation="swish")(H)
Y = tf.keras.layers.Dense(10, activation="softmax")(H)
model = tf.keras.models.Model(X, Y)
model.compile(loss="categorical_crossentropy", metrics="accuracy")
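A Flatten layer has no trainable weights, so this model has exactly the same parameter count as the reshape version: 784·84 + 84 = 65,940 in the hidden Dense layer and 84·10 + 10 = 850 in the output layer. You can check this with model.summary() (not run in the original notebook):

model.summary()   # Flatten: 0 params; Dense layers: 65,940 and 850 params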
In [37]:
# Train the model
model.fit(독립, 종속, epochs=10)
Epoch 1/10
1875/1875 [==============================] - 5s 2ms/step - loss: 2.6496 - accuracy: 0.8538
Epoch 2/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.5788 - accuracy: 0.9189
Epoch 3/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.4780 - accuracy: 0.9324
Epoch 4/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.4360 - accuracy: 0.9397
Epoch 5/10
1875/1875 [==============================] - 5s 3ms/step - loss: 0.3810 - accuracy: 0.9445
Epoch 6/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.3898 - accuracy: 0.9492
Epoch 7/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.3637 - accuracy: 0.9498
Epoch 8/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.3617 - accuracy: 0.9518
Epoch 9/10
1875/1875 [==============================] - 5s 2ms/step - loss: 0.3431 - accuracy: 0.9544
Epoch 10/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.3424 - accuracy: 0.9546
Out[37]:
<keras.callbacks.History at 0x7f26a5792e90>
In [39]:
# Use the model: predict the first five images and compare with the labels
pred = model.predict(독립[0:5])
print(pd.DataFrame(pred).round(2))   # softmax probabilities per digit
print(종속[:5])                      # one-hot ground-truth labels
     0    1    2    3     4    5    6    7    8     9
0  0.0  0.0  0.0  0.0  0.00  1.0  0.0  0.0  0.0  0.00
1  1.0  0.0  0.0  0.0  0.00  0.0  0.0  0.0  0.0  0.00
2  0.0  0.0  0.0  0.0  1.00  0.0  0.0  0.0  0.0  0.00
3  0.0  1.0  0.0  0.0  0.00  0.0  0.0  0.0  0.0  0.00
4  0.0  0.0  0.0  0.0  0.99  0.0  0.0  0.0  0.0  0.01
   0  1  2  3  4  5  6  7  8  9
0  0  0  0  0  0  1  0  0  0  0
1  1  0  0  0  0  0  0  0  0  0
2  0  0  0  0  1  0  0  0  0  0
3  0  1  0  0  0  0  0  0  0  0
4  0  0  0  0  0  0  0  0  0  1
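Both models reach roughly 95% training accuracy, so the results are effectively the same; Flatten simply moves the reshape step inside the model, letting you feed raw (28, 28) images directly. For reference, the same Flatten model can also be written with the Sequential API. A sketch of my own, assuming tf.keras 2.x:

model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),   # flatten each image to 784 values
    tf.keras.layers.Dense(84, activation="swish"),
    tf.keras.layers.Dense(10, activation="softmax"),
])
model.compile(loss="categorical_crossentropy", metrics="accuracy")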