
import os
os.chdir('/content/drive/MyDrive/Colab Notebooks')
# import the modules
from tensorflow import keras
import matplotlib.pyplot as plt
import numpy as np
import sklearn
# load the dataset
from keras.datasets import mnist
(X_train, y_train), (X_test, y_test) = mnist.load_data()
from sklearn.model_selection import train_test_split
# merge into a single set
X = np.concatenate((X_train, X_test))
y = np.concatenate((y_train, y_test))
# re-split according to the assignment variant
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=10000, train_size=60000, random_state=15)
# print the shapes
print('Shape of X train:', X_train.shape)
print('Shape of y train:', y_train.shape)
Shape of X train: (60000, 28, 28)
Shape of y train: (60000,)
# Create a subplot with 4 images
fig, axes = plt.subplots(1, 4, figsize=(10, 3))

for i in range(4):
    axes[i].imshow(X_train[i], cmap=plt.get_cmap('gray'))
    axes[i].set_title(f'Label: {y_train[i]}')  # add the label as the title

plt.show()

# flatten each 28x28 image into a vector of 784 values
num_pixels = X_train.shape[1] * X_train.shape[2]
X_train = X_train.reshape(X_train.shape[0], num_pixels) / 255
X_test = X_test.reshape(X_test.shape[0], num_pixels) / 255
print('Shape of transformed X train:', X_train.shape)
Shape of transformed X train: (60000, 784)
# convert the labels to one-hot encoding
from keras.utils import to_categorical
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
print('Shape of transformed y train:', y_train.shape)
num_classes = y_train.shape[1]
Shape of transformed y train: (60000, 10)
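As a quick illustration of the one-hot encoding (a small sketch, not part of the original assignment): a label such as 5 becomes a 10-dimensional vector with a one at index 5.
from keras.utils import to_categorical
import numpy as np
# one-hot encode a single label: 5 -> [0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]
example = to_categorical(np.array([5]), num_classes=10)
print(example)
print('Recovered label:', np.argmax(example[0]))  # argmax inverts the encoding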
from keras.models import Sequential
from keras.layers import Dense
# 1. create the model as an object of the Sequential class
model = Sequential()
# 2. add the first hidden layer
model.add(Dense(units=300, input_dim=num_pixels, activation='sigmoid'))
# 3. add the second hidden layer
model.add(Dense(units=100, activation='sigmoid'))
# 4. add the output layer
model.add(Dense(units=num_classes, activation='softmax'))
# 5. compile the model
model.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
/usr/local/lib/python3.12/dist-packages/keras/src/layers/core/dense.py:93: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead.
  super().__init__(activity_regularizer=activity_regularizer, **kwargs)
# print information about the model architecture
print(model.summary())
Model: "sequential_7"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓
┃ Layer (type)                    ┃ Output Shape           ┃       Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩
│ dense_16 (Dense)                │ (None, 300)            │       235,500 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense_17 (Dense)                │ (None, 100)            │        30,100 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense_18 (Dense)                │ (None, 10)             │         1,010 │
└─────────────────────────────────┴────────────────────────┴───────────────┘
 Total params: 266,610 (1.02 MB)
 Trainable params: 266,610 (1.02 MB)
 Non-trainable params: 0 (0.00 B)
None
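The parameter counts in the summary follow from the Dense-layer formula inputs * units + units (one bias per unit); a small verification sketch using the layer sizes of this network:
# Dense layer parameters: inputs * units + units (biases)
def dense_params(n_in, n_out):
    return n_in * n_out + n_out

print(dense_params(784, 300))   # 235500 - first hidden layer
print(dense_params(300, 100))   # 30100  - second hidden layer
print(dense_params(100, 10))    # 1010   - output layer
print(dense_params(784, 300) + dense_params(300, 100) + dense_params(100, 10))  # 266610 in total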
# Train the model
H = model.fit(X_train, y_train, validation_split=0.1, epochs=15)
Epoch 1/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 7ms/step - accuracy: 0.2517 - loss: 2.2636 - val_accuracy: 0.5822 - val_loss: 1.9925
Epoch 2/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 19s 5ms/step - accuracy: 0.6028 - loss: 1.8278 - val_accuracy: 0.7020 - val_loss: 1.2828
Epoch 3/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.7341 - loss: 1.1469 - val_accuracy: 0.7930 - val_loss: 0.8675
Epoch 4/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.8049 - loss: 0.8009 - val_accuracy: 0.8337 - val_loss: 0.6765
Epoch 5/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 5ms/step - accuracy: 0.8362 - loss: 0.6381 - val_accuracy: 0.8537 - val_loss: 0.5671
Epoch 6/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.8607 - loss: 0.5364 - val_accuracy: 0.8643 - val_loss: 0.5022
Epoch 7/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.8715 - loss: 0.4791 - val_accuracy: 0.8753 - val_loss: 0.4607
Epoch 8/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.8826 - loss: 0.4328 - val_accuracy: 0.8827 - val_loss: 0.4276
Epoch 9/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.8877 - loss: 0.4082 - val_accuracy: 0.8858 - val_loss: 0.4072
Epoch 10/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 4ms/step - accuracy: 0.8937 - loss: 0.3827 - val_accuracy: 0.8915 - val_loss: 0.3894
Epoch 11/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 5ms/step - accuracy: 0.8955 - loss: 0.3692 - val_accuracy: 0.8928 - val_loss: 0.3753
Epoch 12/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9017 - loss: 0.3506 - val_accuracy: 0.8945 - val_loss: 0.3677
Epoch 13/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9022 - loss: 0.3425 - val_accuracy: 0.8982 - val_loss: 0.3540
Epoch 14/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9059 - loss: 0.3290 - val_accuracy: 0.8980 - val_loss: 0.3482
Epoch 15/15
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 4ms/step - accuracy: 0.9065 - loss: 0.3252 - val_accuracy: 0.9015 - val_loss: 0.3401
scores = model.evaluate(X_test, y_test)
print('Loss on test data:', scores[0])
print('Accuracy on test data:', scores[1])
313/313 ━━━━━━━━━━━━━━━━━━━━ 1s 3ms/step - accuracy: 0.9152 - loss: 0.3057
Loss on test data: 0.3149861991405487
Accuracy on test data: 0.913100004196167
# Point 6. Single-layer neural network
# 1. create the model as an object of the Sequential class
model_1 = Sequential()
model_1.add(Dense(units=num_classes, input_dim=num_pixels, activation='softmax'))
# 2. compile the model
model_1.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
print("Neural network architecture:")
model_1.summary()
Neural network architecture:
Model: "sequential_8"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓
┃ Layer (type)                    ┃ Output Shape           ┃       Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩
│ dense_19 (Dense)                │ (None, 10)             │         7,850 │
└─────────────────────────────────┴────────────────────────┴───────────────┘
 Total params: 7,850 (30.66 KB)
 Trainable params: 7,850 (30.66 KB)
 Non-trainable params: 0 (0.00 B)
# Train the model
history = model_1.fit(
    X_train, y_train,
    validation_split=0.1,
    epochs=50
)
Epoch 1/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.7106 - loss: 1.1677 - val_accuracy: 0.8667 - val_loss: 0.5285
Epoch 2/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 3s 2ms/step - accuracy: 0.8719 - loss: 0.4933 - val_accuracy: 0.8805 - val_loss: 0.4439
Epoch 3/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.8886 - loss: 0.4152 - val_accuracy: 0.8880 - val_loss: 0.4078
Epoch 4/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.8946 - loss: 0.3877 - val_accuracy: 0.8903 - val_loss: 0.3882
Epoch 5/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.8988 - loss: 0.3700 - val_accuracy: 0.8967 - val_loss: 0.3736
Epoch 6/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 3ms/step - accuracy: 0.8987 - loss: 0.3613 - val_accuracy: 0.8973 - val_loss: 0.3630
Epoch 7/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9038 - loss: 0.3461 - val_accuracy: 0.9002 - val_loss: 0.3560
Epoch 8/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 3s 2ms/step - accuracy: 0.9073 - loss: 0.3322 - val_accuracy: 0.9017 - val_loss: 0.3488
Epoch 9/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9081 - loss: 0.3267 - val_accuracy: 0.9022 - val_loss: 0.3438
Epoch 10/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.9077 - loss: 0.3220 - val_accuracy: 0.9047 - val_loss: 0.3394
Epoch 11/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9116 - loss: 0.3187 - val_accuracy: 0.9043 - val_loss: 0.3355
Epoch 12/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.9104 - loss: 0.3207 - val_accuracy: 0.9058 - val_loss: 0.3320
Epoch 13/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9113 - loss: 0.3162 - val_accuracy: 0.9060 - val_loss: 0.3292
Epoch 14/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.9151 - loss: 0.3056 - val_accuracy: 0.9075 - val_loss: 0.3268
Epoch 15/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9155 - loss: 0.3025 - val_accuracy: 0.9083 - val_loss: 0.3246
Epoch 16/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9154 - loss: 0.3005 - val_accuracy: 0.9097 - val_loss: 0.3225
Epoch 17/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.9164 - loss: 0.3049 - val_accuracy: 0.9095 - val_loss: 0.3203
Epoch 18/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9171 - loss: 0.2980 - val_accuracy: 0.9088 - val_loss: 0.3194
Epoch 19/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.9148 - loss: 0.3072 - val_accuracy: 0.9088 - val_loss: 0.3186
Epoch 20/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9152 - loss: 0.3040 - val_accuracy: 0.9113 - val_loss: 0.3152
Epoch 21/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.9167 - loss: 0.2958 - val_accuracy: 0.9118 - val_loss: 0.3143
Epoch 22/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9190 - loss: 0.2932 - val_accuracy: 0.9115 - val_loss: 0.3133
Epoch 23/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.9192 - loss: 0.2921 - val_accuracy: 0.9123 - val_loss: 0.3120
Epoch 24/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 3ms/step - accuracy: 0.9200 - loss: 0.2910 - val_accuracy: 0.9125 - val_loss: 0.3113
Epoch 25/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 3s 2ms/step - accuracy: 0.9202 - loss: 0.2908 - val_accuracy: 0.9120 - val_loss: 0.3103
Epoch 26/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.9197 - loss: 0.2890 - val_accuracy: 0.9135 - val_loss: 0.3087
Epoch 27/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.9188 - loss: 0.2865 - val_accuracy: 0.9145 - val_loss: 0.3081
Epoch 28/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9185 - loss: 0.2913 - val_accuracy: 0.9137 - val_loss: 0.3074
Epoch 29/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 3s 2ms/step - accuracy: 0.9179 - loss: 0.2910 - val_accuracy: 0.9138 - val_loss: 0.3065
Epoch 30/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.9219 - loss: 0.2845 - val_accuracy: 0.9147 - val_loss: 0.3058
Epoch 31/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9202 - loss: 0.2826 - val_accuracy: 0.9140 - val_loss: 0.3056
Epoch 32/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.9191 - loss: 0.2896 - val_accuracy: 0.9130 - val_loss: 0.3049
Epoch 33/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 2ms/step - accuracy: 0.9204 - loss: 0.2786 - val_accuracy: 0.9152 - val_loss: 0.3039
Epoch 34/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.9202 - loss: 0.2798 - val_accuracy: 0.9145 - val_loss: 0.3033
Epoch 35/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9232 - loss: 0.2744 - val_accuracy: 0.9152 - val_loss: 0.3043
Epoch 36/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.9186 - loss: 0.2892 - val_accuracy: 0.9145 - val_loss: 0.3027
Epoch 37/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 3s 2ms/step - accuracy: 0.9238 - loss: 0.2755 - val_accuracy: 0.9152 - val_loss: 0.3014
Epoch 38/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 2ms/step - accuracy: 0.9236 - loss: 0.2751 - val_accuracy: 0.9138 - val_loss: 0.3016
Epoch 39/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9219 - loss: 0.2796 - val_accuracy: 0.9133 - val_loss: 0.3012
Epoch 40/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 3s 2ms/step - accuracy: 0.9220 - loss: 0.2749 - val_accuracy: 0.9148 - val_loss: 0.3001
Epoch 41/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9234 - loss: 0.2729 - val_accuracy: 0.9150 - val_loss: 0.3007
Epoch 42/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 2ms/step - accuracy: 0.9235 - loss: 0.2731 - val_accuracy: 0.9142 - val_loss: 0.3001
Epoch 43/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 3s 2ms/step - accuracy: 0.9221 - loss: 0.2780 - val_accuracy: 0.9158 - val_loss: 0.2998
Epoch 44/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 3s 2ms/step - accuracy: 0.9239 - loss: 0.2741 - val_accuracy: 0.9147 - val_loss: 0.2992
Epoch 45/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 2ms/step - accuracy: 0.9217 - loss: 0.2805 - val_accuracy: 0.9155 - val_loss: 0.2987
Epoch 46/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 3s 2ms/step - accuracy: 0.9252 - loss: 0.2695 - val_accuracy: 0.9148 - val_loss: 0.2982
Epoch 47/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 2ms/step - accuracy: 0.9227 - loss: 0.2772 - val_accuracy: 0.9170 - val_loss: 0.2976
Epoch 48/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 2ms/step - accuracy: 0.9245 - loss: 0.2756 - val_accuracy: 0.9153 - val_loss: 0.2977
Epoch 49/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 4s 2ms/step - accuracy: 0.9249 - loss: 0.2716 - val_accuracy: 0.9167 - val_loss: 0.2974
Epoch 50/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.9248 - loss: 0.2711 - val_accuracy: 0.9152 - val_loss: 0.2983
# Plot the loss curves
plt.figure(figsize=(12, 5))

plt.subplot(1, 2, 1)
plt.plot(history.history['loss'], label='Training loss')
plt.plot(history.history['val_loss'], label='Validation loss')
plt.title('Loss per epoch')
plt.xlabel('Epoch')
plt.ylabel('Categorical Crossentropy')
plt.legend()
plt.grid(True)

scores = model_1.evaluate(X_test, y_test)
print('Loss on test data:', scores[0])
print('Accuracy on test data:', scores[1])
313/313 ━━━━━━━━━━━━━━━━━━━━ 1s 2ms/step - accuracy: 0.9316 - loss: 0.2666
Loss on test data: 0.2741525173187256
Accuracy on test data: 0.928600013256073
# Point 8: hidden layer with 100 neurons
model_2l_100 = Sequential()
model_2l_100.add(Dense(units=100, input_dim=num_pixels, activation='sigmoid'))
model_2l_100.add(Dense(units=num_classes, activation='softmax'))
# 2. compile the model
model_2l_100.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
print("Neural network architecture:")
model_2l_100.summary()
Neural network architecture:
Model: "sequential_9"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓
┃ Layer (type)                    ┃ Output Shape           ┃       Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩
│ dense_20 (Dense)                │ (None, 100)            │        78,500 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense_21 (Dense)                │ (None, 10)             │         1,010 │
└─────────────────────────────────┴────────────────────────┴───────────────┘
 Total params: 79,510 (310.59 KB)
 Trainable params: 79,510 (310.59 KB)
 Non-trainable params: 0 (0.00 B)
# Train the model
history_2l_100 = model_2l_100.fit(
    X_train, y_train,
    validation_split=0.1,
    epochs=50
)
Epoch 1/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.5431 - loss: 1.8730 - val_accuracy: 0.8193 - val_loss: 0.9612
Epoch 2/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.8325 - loss: 0.8374 - val_accuracy: 0.8562 - val_loss: 0.6289
Epoch 3/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.8661 - loss: 0.5818 - val_accuracy: 0.8730 - val_loss: 0.5130
Epoch 4/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.8795 - loss: 0.4818 - val_accuracy: 0.8825 - val_loss: 0.4548
Epoch 5/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.8853 - loss: 0.4311 - val_accuracy: 0.8900 - val_loss: 0.4174
Epoch 6/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.8964 - loss: 0.3925 - val_accuracy: 0.8943 - val_loss: 0.3931
Epoch 7/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.8989 - loss: 0.3714 - val_accuracy: 0.8983 - val_loss: 0.3744
Epoch 8/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.9005 - loss: 0.3600 - val_accuracy: 0.9008 - val_loss: 0.3600
Epoch 9/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 4ms/step - accuracy: 0.9024 - loss: 0.3443 - val_accuracy: 0.9010 - val_loss: 0.3484
Epoch 10/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9051 - loss: 0.3332 - val_accuracy: 0.9027 - val_loss: 0.3393
Epoch 11/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9101 - loss: 0.3199 - val_accuracy: 0.9047 - val_loss: 0.3316
Epoch 12/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9086 - loss: 0.3159 - val_accuracy: 0.9055 - val_loss: 0.3241
Epoch 13/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 4ms/step - accuracy: 0.9107 - loss: 0.3140 - val_accuracy: 0.9068 - val_loss: 0.3186
Epoch 14/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9164 - loss: 0.2948 - val_accuracy: 0.9093 - val_loss: 0.3120
Epoch 15/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9140 - loss: 0.3004 - val_accuracy: 0.9093 - val_loss: 0.3057
Epoch 16/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9169 - loss: 0.2937 - val_accuracy: 0.9120 - val_loss: 0.3015
Epoch 17/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.9185 - loss: 0.2836 - val_accuracy: 0.9133 - val_loss: 0.2969
Epoch 18/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.9198 - loss: 0.2789 - val_accuracy: 0.9132 - val_loss: 0.2924
Epoch 19/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9215 - loss: 0.2758 - val_accuracy: 0.9147 - val_loss: 0.2882
Epoch 20/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 4ms/step - accuracy: 0.9227 - loss: 0.2687 - val_accuracy: 0.9168 - val_loss: 0.2844
Epoch 21/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9246 - loss: 0.2651 - val_accuracy: 0.9183 - val_loss: 0.2807
Epoch 22/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9247 - loss: 0.2627 - val_accuracy: 0.9198 - val_loss: 0.2771
Epoch 23/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 3ms/step - accuracy: 0.9257 - loss: 0.2584 - val_accuracy: 0.9193 - val_loss: 0.2739
Epoch 24/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9281 - loss: 0.2531 - val_accuracy: 0.9212 - val_loss: 0.2704
Epoch 25/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9280 - loss: 0.2521 - val_accuracy: 0.9225 - val_loss: 0.2674
Epoch 26/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 4ms/step - accuracy: 0.9272 - loss: 0.2518 - val_accuracy: 0.9237 - val_loss: 0.2646
Epoch 27/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9289 - loss: 0.2488 - val_accuracy: 0.9243 - val_loss: 0.2610
Epoch 28/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9310 - loss: 0.2410 - val_accuracy: 0.9242 - val_loss: 0.2594
Epoch 29/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9317 - loss: 0.2382 - val_accuracy: 0.9260 - val_loss: 0.2554
Epoch 30/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9326 - loss: 0.2389 - val_accuracy: 0.9250 - val_loss: 0.2531
Epoch 31/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9333 - loss: 0.2279 - val_accuracy: 0.9278 - val_loss: 0.2508
Epoch 32/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9326 - loss: 0.2319 - val_accuracy: 0.9273 - val_loss: 0.2475
Epoch 33/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9336 - loss: 0.2272 - val_accuracy: 0.9282 - val_loss: 0.2448
Epoch 34/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9364 - loss: 0.2236 - val_accuracy: 0.9282 - val_loss: 0.2429
Epoch 35/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9346 - loss: 0.2283 - val_accuracy: 0.9302 - val_loss: 0.2400
Epoch 36/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9379 - loss: 0.2202 - val_accuracy: 0.9298 - val_loss: 0.2379
Epoch 37/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9375 - loss: 0.2177 - val_accuracy: 0.9312 - val_loss: 0.2353
Epoch 38/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9369 - loss: 0.2201 - val_accuracy: 0.9323 - val_loss: 0.2337
Epoch 39/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9398 - loss: 0.2111 - val_accuracy: 0.9337 - val_loss: 0.2307
Epoch 40/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9398 - loss: 0.2086 - val_accuracy: 0.9348 - val_loss: 0.2291
Epoch 41/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9392 - loss: 0.2096 - val_accuracy: 0.9350 - val_loss: 0.2269
Epoch 42/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9417 - loss: 0.2056 - val_accuracy: 0.9350 - val_loss: 0.2251
Epoch 43/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 4ms/step - accuracy: 0.9419 - loss: 0.2057 - val_accuracy: 0.9353 - val_loss: 0.2236
Epoch 44/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9426 - loss: 0.1992 - val_accuracy: 0.9362 - val_loss: 0.2217
Epoch 45/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9423 - loss: 0.2054 - val_accuracy: 0.9368 - val_loss: 0.2196
Epoch 46/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9451 - loss: 0.1942 - val_accuracy: 0.9373 - val_loss: 0.2172
Epoch 47/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9444 - loss: 0.1979 - val_accuracy: 0.9382 - val_loss: 0.2155
Epoch 48/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9459 - loss: 0.1897 - val_accuracy: 0.9388 - val_loss: 0.2139
Epoch 49/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9460 - loss: 0.1890 - val_accuracy: 0.9392 - val_loss: 0.2122
Epoch 50/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9474 - loss: 0.1889 - val_accuracy: 0.9400 - val_loss: 0.2104
# Plot the loss curves
plt.figure(figsize=(12, 5))

plt.subplot(1, 2, 1)
plt.plot(history_2l_100.history['loss'], label='Training loss')
plt.plot(history_2l_100.history['val_loss'], label='Validation loss')
plt.title('Loss per epoch')
plt.xlabel('Epoch')
plt.ylabel('Categorical Crossentropy')
plt.legend()
plt.grid(True)

scores_2l_100 = model_2l_100.evaluate(X_test, y_test)
print('Loss on test data:', scores_2l_100[0])
print('Accuracy on test data:', scores_2l_100[1])
313/313 ━━━━━━━━━━━━━━━━━━━━ 1s 2ms/step - accuracy: 0.9482 - loss: 0.1875
Loss on test data: 0.19283892214298248
Accuracy on test data: 0.9462000131607056
# Point 8: hidden layer with 300 neurons
model_2l_300 = Sequential()
model_2l_300.add(Dense(units=300, input_dim=num_pixels, activation='sigmoid'))
model_2l_300.add(Dense(units=num_classes, activation='softmax'))
# 2. compile the model
model_2l_300.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
print("Neural network architecture:")
model_2l_300.summary()
Neural network architecture:
Model: "sequential_10"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓
┃ Layer (type)                    ┃ Output Shape           ┃       Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩
│ dense_22 (Dense)                │ (None, 300)            │       235,500 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense_23 (Dense)                │ (None, 10)             │         3,010 │
└─────────────────────────────────┴────────────────────────┴───────────────┘
 Total params: 238,510 (931.68 KB)
 Trainable params: 238,510 (931.68 KB)
 Non-trainable params: 0 (0.00 B)
# Train the model
history_2l_300 = model_2l_300.fit(
    X_train, y_train,
    validation_split=0.1,
    epochs=50
)
Epoch 1/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.5804 - loss: 1.7583 - val_accuracy: 0.8300 - val_loss: 0.8481
Epoch 2/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.8406 - loss: 0.7464 - val_accuracy: 0.8615 - val_loss: 0.5755
Epoch 3/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.8697 - loss: 0.5313 - val_accuracy: 0.8772 - val_loss: 0.4808
Epoch 4/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 5ms/step - accuracy: 0.8800 - loss: 0.4584 - val_accuracy: 0.8845 - val_loss: 0.4344
Epoch 5/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.8880 - loss: 0.4133 - val_accuracy: 0.8873 - val_loss: 0.4070
Epoch 6/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.8926 - loss: 0.3830 - val_accuracy: 0.8932 - val_loss: 0.3855
Epoch 7/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.8962 - loss: 0.3680 - val_accuracy: 0.8960 - val_loss: 0.3718
Epoch 8/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.8993 - loss: 0.3526 - val_accuracy: 0.8972 - val_loss: 0.3617
Epoch 9/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9011 - loss: 0.3445 - val_accuracy: 0.8997 - val_loss: 0.3518
Epoch 10/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9036 - loss: 0.3365 - val_accuracy: 0.9017 - val_loss: 0.3438
Epoch 11/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9048 - loss: 0.3286 - val_accuracy: 0.9030 - val_loss: 0.3396
Epoch 12/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 5ms/step - accuracy: 0.9075 - loss: 0.3222 - val_accuracy: 0.9028 - val_loss: 0.3324
Epoch 13/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9108 - loss: 0.3119 - val_accuracy: 0.9050 - val_loss: 0.3270
Epoch 14/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 5ms/step - accuracy: 0.9118 - loss: 0.3063 - val_accuracy: 0.9065 - val_loss: 0.3235
Epoch 15/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9141 - loss: 0.3018 - val_accuracy: 0.9070 - val_loss: 0.3199
Epoch 16/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 4ms/step - accuracy: 0.9141 - loss: 0.3003 - val_accuracy: 0.9065 - val_loss: 0.3150
Epoch 17/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 5ms/step - accuracy: 0.9152 - loss: 0.2934 - val_accuracy: 0.9063 - val_loss: 0.3122
Epoch 18/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9147 - loss: 0.2955 - val_accuracy: 0.9085 - val_loss: 0.3087
Epoch 19/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9158 - loss: 0.2941 - val_accuracy: 0.9097 - val_loss: 0.3053
Epoch 20/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9163 - loss: 0.2893 - val_accuracy: 0.9092 - val_loss: 0.3031
Epoch 21/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 20s 6ms/step - accuracy: 0.9179 - loss: 0.2878 - val_accuracy: 0.9117 - val_loss: 0.2999
Epoch 22/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9199 - loss: 0.2765 - val_accuracy: 0.9128 - val_loss: 0.2982
Epoch 23/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9174 - loss: 0.2831 - val_accuracy: 0.9130 - val_loss: 0.2954
Epoch 24/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 6ms/step - accuracy: 0.9197 - loss: 0.2765 - val_accuracy: 0.9138 - val_loss: 0.2923
Epoch 25/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9198 - loss: 0.2786 - val_accuracy: 0.9150 - val_loss: 0.2908
Epoch 26/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9229 - loss: 0.2727 - val_accuracy: 0.9150 - val_loss: 0.2870
Epoch 27/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 6ms/step - accuracy: 0.9218 - loss: 0.2688 - val_accuracy: 0.9160 - val_loss: 0.2850
Epoch 28/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9235 - loss: 0.2645 - val_accuracy: 0.9183 - val_loss: 0.2832
Epoch 29/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9245 - loss: 0.2652 - val_accuracy: 0.9188 - val_loss: 0.2805
Epoch 30/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 6ms/step - accuracy: 0.9244 - loss: 0.2626 - val_accuracy: 0.9190 - val_loss: 0.2774
Epoch 31/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9242 - loss: 0.2614 - val_accuracy: 0.9188 - val_loss: 0.2759
Epoch 32/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 5ms/step - accuracy: 0.9251 - loss: 0.2596 - val_accuracy: 0.9193 - val_loss: 0.2752
Epoch 33/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9253 - loss: 0.2609 - val_accuracy: 0.9202 - val_loss: 0.2719
Epoch 34/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9291 - loss: 0.2497 - val_accuracy: 0.9192 - val_loss: 0.2698
Epoch 35/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9305 - loss: 0.2445 - val_accuracy: 0.9222 - val_loss: 0.2670
Epoch 36/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9304 - loss: 0.2436 - val_accuracy: 0.9225 - val_loss: 0.2650
Epoch 37/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9314 - loss: 0.2405 - val_accuracy: 0.9235 - val_loss: 0.2626
Epoch 38/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9300 - loss: 0.2407 - val_accuracy: 0.9243 - val_loss: 0.2600
Epoch 39/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9307 - loss: 0.2394 - val_accuracy: 0.9255 - val_loss: 0.2585
Epoch 40/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9331 - loss: 0.2361 - val_accuracy: 0.9265 - val_loss: 0.2565
Epoch 41/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9321 - loss: 0.2386 - val_accuracy: 0.9275 - val_loss: 0.2542
Epoch 42/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 4ms/step - accuracy: 0.9342 - loss: 0.2312 - val_accuracy: 0.9285 - val_loss: 0.2543
Epoch 43/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 4ms/step - accuracy: 0.9328 - loss: 0.2363 - val_accuracy: 0.9282 - val_loss: 0.2497
Epoch 44/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 5ms/step - accuracy: 0.9355 - loss: 0.2233 - val_accuracy: 0.9292 - val_loss: 0.2478
Epoch 45/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9371 - loss: 0.2166 - val_accuracy: 0.9287 - val_loss: 0.2461
Epoch 46/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 4ms/step - accuracy: 0.9355 - loss: 0.2252 - val_accuracy: 0.9297 - val_loss: 0.2434
Epoch 47/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9362 - loss: 0.2210 - val_accuracy: 0.9297 - val_loss: 0.2421
Epoch 48/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9374 - loss: 0.2172 - val_accuracy: 0.9315 - val_loss: 0.2404
Epoch 49/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9389 - loss: 0.2135 - val_accuracy: 0.9305 - val_loss: 0.2377
Epoch 50/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 5ms/step - accuracy: 0.9406 - loss: 0.2072 - val_accuracy: 0.9308 - val_loss: 0.2365
# Plot the loss curves
plt.figure(figsize=(12, 5))

plt.subplot(1, 2, 1)
plt.plot(history_2l_300.history['loss'], label='Training loss')
plt.plot(history_2l_300.history['val_loss'], label='Validation loss')
plt.title('Loss per epoch')
plt.xlabel('Epoch')
plt.ylabel('Categorical Crossentropy')
plt.legend()
plt.grid(True)

scores_2l_300 = model_2l_300.evaluate(X_test, y_test)
print('Loss on test data:', scores_2l_300[0])
print('Accuracy on test data:', scores_2l_300[1])
313/313 ━━━━━━━━━━━━━━━━━━━━ 1s 2ms/step - accuracy: 0.9437 - loss: 0.2113
Loss on test data: 0.2168053537607193
Accuracy on test data: 0.9412000179290771
# Point 8: hidden layer with 500 neurons
model_2l_500 = Sequential()
model_2l_500.add(Dense(units=500, input_dim=num_pixels, activation='sigmoid'))
model_2l_500.add(Dense(units=num_classes, activation='softmax'))
# 2. compile the model
model_2l_500.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
print("Neural network architecture:")
model_2l_500.summary()
Neural network architecture:
Model: "sequential_11"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓
┃ Layer (type)                    ┃ Output Shape           ┃       Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩
│ dense_24 (Dense)                │ (None, 500)            │       392,500 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense_25 (Dense)                │ (None, 10)             │         5,010 │
└─────────────────────────────────┴────────────────────────┴───────────────┘
 Total params: 397,510 (1.52 MB)
 Trainable params: 397,510 (1.52 MB)
 Non-trainable params: 0 (0.00 B)
# Train the model
history_2l_500 = model_2l_500.fit(
    X_train, y_train,
    validation_split=0.1,
    epochs=50
)
Epoch 1/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.5580 - loss: 1.7493 - val_accuracy: 0.8328 - val_loss: 0.8208
Epoch 2/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.8438 - loss: 0.7269 - val_accuracy: 0.8607 - val_loss: 0.5631
Epoch 3/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 6ms/step - accuracy: 0.8707 - loss: 0.5200 - val_accuracy: 0.8755 - val_loss: 0.4721
Epoch 4/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 6ms/step - accuracy: 0.8816 - loss: 0.4488 - val_accuracy: 0.8838 - val_loss: 0.4282
Epoch 5/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.8907 - loss: 0.4021 - val_accuracy: 0.8875 - val_loss: 0.4031
Epoch 6/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 5ms/step - accuracy: 0.8906 - loss: 0.3913 - val_accuracy: 0.8925 - val_loss: 0.3831
Epoch 7/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 6ms/step - accuracy: 0.8976 - loss: 0.3632 - val_accuracy: 0.8953 - val_loss: 0.3700
Epoch 8/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.8991 - loss: 0.3526 - val_accuracy: 0.8970 - val_loss: 0.3595
Epoch 9/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9014 - loss: 0.3450 - val_accuracy: 0.8980 - val_loss: 0.3531
Epoch 10/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 6ms/step - accuracy: 0.9042 - loss: 0.3312 - val_accuracy: 0.8995 - val_loss: 0.3439
Epoch 11/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9063 - loss: 0.3262 - val_accuracy: 0.9007 - val_loss: 0.3384
Epoch 12/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9087 - loss: 0.3212 - val_accuracy: 0.9023 - val_loss: 0.3355
Epoch 13/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 5ms/step - accuracy: 0.9063 - loss: 0.3191 - val_accuracy: 0.9037 - val_loss: 0.3305
Epoch 14/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 6ms/step - accuracy: 0.9081 - loss: 0.3162 - val_accuracy: 0.9040 - val_loss: 0.3258
Epoch 15/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9090 - loss: 0.3131 - val_accuracy: 0.9052 - val_loss: 0.3212
Epoch 16/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 5ms/step - accuracy: 0.9123 - loss: 0.3005 - val_accuracy: 0.9063 - val_loss: 0.3184
Epoch 17/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 6ms/step - accuracy: 0.9126 - loss: 0.3023 - val_accuracy: 0.9040 - val_loss: 0.3163
Epoch 18/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9136 - loss: 0.2982 - val_accuracy: 0.9078 - val_loss: 0.3149
Epoch 19/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9148 - loss: 0.2991 - val_accuracy: 0.9090 - val_loss: 0.3113
Epoch 20/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9143 - loss: 0.2930 - val_accuracy: 0.9087 - val_loss: 0.3090
Epoch 21/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9169 - loss: 0.2878 - val_accuracy: 0.9098 - val_loss: 0.3057
Epoch 22/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9171 - loss: 0.2843 - val_accuracy: 0.9100 - val_loss: 0.3047
Epoch 23/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 13s 7ms/step - accuracy: 0.9186 - loss: 0.2797 - val_accuracy: 0.9122 - val_loss: 0.3032
Epoch 24/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 16s 5ms/step - accuracy: 0.9185 - loss: 0.2827 - val_accuracy: 0.9130 - val_loss: 0.3002
Epoch 25/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9189 - loss: 0.2820 - val_accuracy: 0.9132 - val_loss: 0.2987
Epoch 26/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9197 - loss: 0.2784 - val_accuracy: 0.9140 - val_loss: 0.2965
Epoch 27/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9196 - loss: 0.2782 - val_accuracy: 0.9150 - val_loss: 0.2951
Epoch 28/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 5ms/step - accuracy: 0.9200 - loss: 0.2754 - val_accuracy: 0.9143 - val_loss: 0.2941
Epoch 29/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9196 - loss: 0.2761 - val_accuracy: 0.9162 - val_loss: 0.2913
Epoch 30/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9229 - loss: 0.2723 - val_accuracy: 0.9147 - val_loss: 0.2893
Epoch 31/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 6ms/step - accuracy: 0.9205 - loss: 0.2688 - val_accuracy: 0.9172 - val_loss: 0.2883
Epoch 32/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 6ms/step - accuracy: 0.9243 - loss: 0.2632 - val_accuracy: 0.9143 - val_loss: 0.2900
Epoch 33/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9235 - loss: 0.2613 - val_accuracy: 0.9177 - val_loss: 0.2845
Epoch 34/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9254 - loss: 0.2616 - val_accuracy: 0.9175 - val_loss: 0.2838
Epoch 35/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9255 - loss: 0.2613 - val_accuracy: 0.9185 - val_loss: 0.2812
Epoch 36/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9250 - loss: 0.2632 - val_accuracy: 0.9188 - val_loss: 0.2815
Epoch 37/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9251 - loss: 0.2612 - val_accuracy: 0.9202 - val_loss: 0.2787
Epoch 38/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9247 - loss: 0.2642 - val_accuracy: 0.9205 - val_loss: 0.2780
Epoch 39/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9257 - loss: 0.2592 - val_accuracy: 0.9212 - val_loss: 0.2750
Epoch 40/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 5ms/step - accuracy: 0.9261 - loss: 0.2550 - val_accuracy: 0.9190 - val_loss: 0.2748
Epoch 41/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9289 - loss: 0.2518 - val_accuracy: 0.9218 - val_loss: 0.2733
Epoch 42/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9301 - loss: 0.2454 - val_accuracy: 0.9252 - val_loss: 0.2696
Epoch 43/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 5ms/step - accuracy: 0.9282 - loss: 0.2498 - val_accuracy: 0.9230 - val_loss: 0.2679
Epoch 44/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 6ms/step - accuracy: 0.9306 - loss: 0.2417 - val_accuracy: 0.9238 - val_loss: 0.2668
Epoch 45/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 5ms/step - accuracy: 0.9309 - loss: 0.2398 - val_accuracy: 0.9263 - val_loss: 0.2657
Epoch 46/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 5ms/step - accuracy: 0.9308 - loss: 0.2461 - val_accuracy: 0.9243 - val_loss: 0.2639
Epoch 47/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 6ms/step - accuracy: 0.9330 - loss: 0.2383 - val_accuracy: 0.9257 - val_loss: 0.2620
Epoch 48/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 18s 5ms/step - accuracy: 0.9322 - loss: 0.2344 - val_accuracy: 0.9260 - val_loss: 0.2599
Epoch 49/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9318 - loss: 0.2371 - val_accuracy: 0.9258 - val_loss: 0.2588
Epoch 50/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 6ms/step - accuracy: 0.9326 - loss: 0.2363 - val_accuracy: 0.9277 - val_loss: 0.2564
# Plot the loss curves
plt.figure(figsize=(12, 5))

plt.subplot(1, 2, 1)
plt.plot(history_2l_500.history['loss'], label='Training loss')
plt.plot(history_2l_500.history['val_loss'], label='Validation loss')
plt.title('Loss per epoch')
plt.xlabel('Epoch')
plt.ylabel('Categorical Crossentropy')
plt.legend()
plt.grid(True)

scores_2l_500 = model_2l_500.evaluate(X_test, y_test)
print('Loss on test data:', scores_2l_500[0])
print('Accuracy on test data:', scores_2l_500[1])
313/313 ━━━━━━━━━━━━━━━━━━━━ 1s 3ms/step - accuracy: 0.9396 - loss: 0.2295
Loss on test data: 0.23596525192260742
Accuracy on test data: 0.9369999766349792

As we can see, the best test accuracy among these single-hidden-layer configurations, 0.9462000131607056, was obtained with 100 neurons in the hidden layer, so we use this architecture in the following points.
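The same comparison can also be done programmatically from the scores computed above; a minimal sketch using the score variables defined earlier in this notebook:
# pick the hidden-layer size with the highest test accuracy
results = {100: scores_2l_100[1], 300: scores_2l_300[1], 500: scores_2l_500[1]}
best_units = max(results, key=results.get)
print('Best hidden layer size:', best_units, 'with accuracy', results[best_units])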

# Point 9: two hidden layers, 100 and 50 neurons
model_3l_100_50 = Sequential()
model_3l_100_50.add(Dense(units=100, input_dim=num_pixels, activation='sigmoid'))
model_3l_100_50.add(Dense(units=50, activation='sigmoid'))
model_3l_100_50.add(Dense(units=num_classes, activation='softmax'))

model_3l_100_50.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
print("Архитектура нейронной сети:")
model_3l_100_50.summary()
Neural network architecture:
Model: "sequential_12"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓
┃ Layer (type)                    ┃ Output Shape           ┃       Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩
│ dense_26 (Dense)                │ (None, 100)            │        78,500 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense_27 (Dense)                │ (None, 50)             │         5,050 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense_28 (Dense)                │ (None, 10)             │           510 │
└─────────────────────────────────┴────────────────────────┴───────────────┘
 Total params: 84,060 (328.36 KB)
 Trainable params: 84,060 (328.36 KB)
 Non-trainable params: 0 (0.00 B)
# Train the model
history_3l_100_50 = model_3l_100_50.fit(
    X_train, y_train,
    validation_split=0.1,
    epochs=50
)
Epoch 1/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.2333 - loss: 2.2703 - val_accuracy: 0.5425 - val_loss: 2.1027
Epoch 2/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.6070 - loss: 1.9965 - val_accuracy: 0.6730 - val_loss: 1.5702
Epoch 3/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.6855 - loss: 1.4374 - val_accuracy: 0.7502 - val_loss: 1.0896
Epoch 4/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.7654 - loss: 1.0119 - val_accuracy: 0.8085 - val_loss: 0.8186
Epoch 5/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.8195 - loss: 0.7722 - val_accuracy: 0.8425 - val_loss: 0.6650
Epoch 6/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 4ms/step - accuracy: 0.8454 - loss: 0.6291 - val_accuracy: 0.8573 - val_loss: 0.5729
Epoch 7/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.8600 - loss: 0.5463 - val_accuracy: 0.8703 - val_loss: 0.5112
Epoch 8/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.8730 - loss: 0.4905 - val_accuracy: 0.8788 - val_loss: 0.4693
Epoch 9/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.8824 - loss: 0.4476 - val_accuracy: 0.8848 - val_loss: 0.4383
Epoch 10/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 4ms/step - accuracy: 0.8876 - loss: 0.4203 - val_accuracy: 0.8877 - val_loss: 0.4152
Epoch 11/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.8922 - loss: 0.3942 - val_accuracy: 0.8915 - val_loss: 0.3972
Epoch 12/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.8932 - loss: 0.3820 - val_accuracy: 0.8938 - val_loss: 0.3814
Epoch 13/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.8998 - loss: 0.3615 - val_accuracy: 0.8952 - val_loss: 0.3710
Epoch 14/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.9016 - loss: 0.3525 - val_accuracy: 0.8988 - val_loss: 0.3586
Epoch 15/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 4ms/step - accuracy: 0.9049 - loss: 0.3386 - val_accuracy: 0.9017 - val_loss: 0.3492
Epoch 16/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9065 - loss: 0.3283 - val_accuracy: 0.9028 - val_loss: 0.3410
Epoch 17/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9070 - loss: 0.3231 - val_accuracy: 0.9057 - val_loss: 0.3335
Epoch 18/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9085 - loss: 0.3163 - val_accuracy: 0.9075 - val_loss: 0.3271
Epoch 19/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9100 - loss: 0.3146 - val_accuracy: 0.9103 - val_loss: 0.3214
Epoch 20/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9112 - loss: 0.3063 - val_accuracy: 0.9107 - val_loss: 0.3144
Epoch 21/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9133 - loss: 0.2954 - val_accuracy: 0.9127 - val_loss: 0.3090
Epoch 22/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9175 - loss: 0.2852 - val_accuracy: 0.9137 - val_loss: 0.3036
Epoch 23/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9172 - loss: 0.2874 - val_accuracy: 0.9128 - val_loss: 0.2997
Epoch 24/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9197 - loss: 0.2789 - val_accuracy: 0.9152 - val_loss: 0.2937
Epoch 25/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9202 - loss: 0.2748 - val_accuracy: 0.9165 - val_loss: 0.2903
Epoch 26/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9199 - loss: 0.2729 - val_accuracy: 0.9168 - val_loss: 0.2850
Epoch 27/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9241 - loss: 0.2639 - val_accuracy: 0.9180 - val_loss: 0.2814
Epoch 28/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 4ms/step - accuracy: 0.9250 - loss: 0.2573 - val_accuracy: 0.9185 - val_loss: 0.2765
Epoch 29/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9251 - loss: 0.2609 - val_accuracy: 0.9195 - val_loss: 0.2726
Epoch 30/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9253 - loss: 0.2557 - val_accuracy: 0.9210 - val_loss: 0.2688
Epoch 31/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9250 - loss: 0.2529 - val_accuracy: 0.9232 - val_loss: 0.2655
Epoch 32/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 3ms/step - accuracy: 0.9292 - loss: 0.2456 - val_accuracy: 0.9225 - val_loss: 0.2619
Epoch 33/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 4ms/step - accuracy: 0.9291 - loss: 0.2462 - val_accuracy: 0.9233 - val_loss: 0.2598
Epoch 34/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9314 - loss: 0.2403 - val_accuracy: 0.9260 - val_loss: 0.2549
Epoch 35/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 4ms/step - accuracy: 0.9302 - loss: 0.2431 - val_accuracy: 0.9262 - val_loss: 0.2519
Epoch 36/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.9326 - loss: 0.2326 - val_accuracy: 0.9270 - val_loss: 0.2494
Epoch 37/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9327 - loss: 0.2316 - val_accuracy: 0.9285 - val_loss: 0.2458
Epoch 38/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9331 - loss: 0.2303 - val_accuracy: 0.9293 - val_loss: 0.2430
Epoch 39/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9333 - loss: 0.2298 - val_accuracy: 0.9305 - val_loss: 0.2404
Epoch 40/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9369 - loss: 0.2231 - val_accuracy: 0.9308 - val_loss: 0.2376
Epoch 41/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9374 - loss: 0.2158 - val_accuracy: 0.9318 - val_loss: 0.2346
Epoch 42/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 4ms/step - accuracy: 0.9367 - loss: 0.2183 - val_accuracy: 0.9323 - val_loss: 0.2321
Epoch 43/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9388 - loss: 0.2101 - val_accuracy: 0.9338 - val_loss: 0.2291
Epoch 44/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 3ms/step - accuracy: 0.9393 - loss: 0.2089 - val_accuracy: 0.9338 - val_loss: 0.2263
Epoch 45/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9397 - loss: 0.2092 - val_accuracy: 0.9347 - val_loss: 0.2235
Epoch 46/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9409 - loss: 0.2064 - val_accuracy: 0.9357 - val_loss: 0.2221
Epoch 47/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9416 - loss: 0.2024 - val_accuracy: 0.9370 - val_loss: 0.2199
Epoch 48/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9419 - loss: 0.2008 - val_accuracy: 0.9370 - val_loss: 0.2168
Epoch 49/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 4ms/step - accuracy: 0.9427 - loss: 0.2007 - val_accuracy: 0.9402 - val_loss: 0.2143
Epoch 50/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9458 - loss: 0.1921 - val_accuracy: 0.9387 - val_loss: 0.2130
# Plot the loss curves
plt.figure(figsize=(12, 5))

plt.subplot(1, 2, 1)
plt.plot(history_3l_100_50.history['loss'], label='Training loss')
plt.plot(history_3l_100_50.history['val_loss'], label='Validation loss')
plt.title('Loss per epoch')
plt.xlabel('Epoch')
plt.ylabel('Categorical Crossentropy')
plt.legend()
plt.grid(True)

scores_3l_100_50 = model_3l_100_50.evaluate(X_test, y_test)
print('Loss on test data:', scores_3l_100_50[0])
print('Accuracy on test data:', scores_3l_100_50[1])
313/313 ━━━━━━━━━━━━━━━━━━━━ 1s 2ms/step - accuracy: 0.9459 - loss: 0.1914
Loss on test data: 0.1960301399230957
Accuracy on test data: 0.9444000124931335
# Point 9: two hidden layers, 100 and 100 neurons
model_3l_100_100 = Sequential()
model_3l_100_100.add(Dense(units=100, input_dim=num_pixels, activation='sigmoid'))
model_3l_100_100.add(Dense(units=100, activation='sigmoid'))
model_3l_100_100.add(Dense(units=num_classes, activation='softmax'))

model_3l_100_100.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
print("Архитектура нейронной сети:")
model_3l_100_100.summary()
Neural network architecture:
Model: "sequential_13"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓
┃ Layer (type)                    ┃ Output Shape           ┃       Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩
│ dense_29 (Dense)                │ (None, 100)            │        78,500 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense_30 (Dense)                │ (None, 100)            │        10,100 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense_31 (Dense)                │ (None, 10)             │         1,010 │
└─────────────────────────────────┴────────────────────────┴───────────────┘
 Total params: 89,610 (350.04 KB)
 Trainable params: 89,610 (350.04 KB)
 Non-trainable params: 0 (0.00 B)
# Train the model
history_3l_100_100 = model_3l_100_100.fit(
    X_train, y_train,
    validation_split=0.1,
    epochs=50
)
Epoch 1/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.2194 - loss: 2.2793 - val_accuracy: 0.4952 - val_loss: 2.0919
Epoch 2/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 4ms/step - accuracy: 0.5646 - loss: 1.9686 - val_accuracy: 0.6503 - val_loss: 1.4959
Epoch 3/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 3ms/step - accuracy: 0.7034 - loss: 1.3398 - val_accuracy: 0.7640 - val_loss: 0.9908
Epoch 4/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.7881 - loss: 0.9110 - val_accuracy: 0.8203 - val_loss: 0.7452
Epoch 5/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 4ms/step - accuracy: 0.8294 - loss: 0.6966 - val_accuracy: 0.8447 - val_loss: 0.6150
Epoch 6/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.8519 - loss: 0.5810 - val_accuracy: 0.8595 - val_loss: 0.5386
Epoch 7/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.8672 - loss: 0.5061 - val_accuracy: 0.8737 - val_loss: 0.4873
Epoch 8/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.8786 - loss: 0.4580 - val_accuracy: 0.8768 - val_loss: 0.4526
Epoch 9/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 4ms/step - accuracy: 0.8847 - loss: 0.4247 - val_accuracy: 0.8867 - val_loss: 0.4250
Epoch 10/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 4ms/step - accuracy: 0.8911 - loss: 0.3978 - val_accuracy: 0.8887 - val_loss: 0.4065
Epoch 11/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.8940 - loss: 0.3847 - val_accuracy: 0.8902 - val_loss: 0.3894
Epoch 12/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.8972 - loss: 0.3695 - val_accuracy: 0.8945 - val_loss: 0.3755
Epoch 13/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.8999 - loss: 0.3563 - val_accuracy: 0.8972 - val_loss: 0.3645
Epoch 14/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9009 - loss: 0.3473 - val_accuracy: 0.8977 - val_loss: 0.3551
Epoch 15/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 4ms/step - accuracy: 0.9033 - loss: 0.3372 - val_accuracy: 0.9015 - val_loss: 0.3466
Epoch 16/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 4ms/step - accuracy: 0.9072 - loss: 0.3248 - val_accuracy: 0.9028 - val_loss: 0.3385
Epoch 17/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 3ms/step - accuracy: 0.9097 - loss: 0.3146 - val_accuracy: 0.9058 - val_loss: 0.3309
Epoch 18/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 12s 4ms/step - accuracy: 0.9118 - loss: 0.3103 - val_accuracy: 0.9067 - val_loss: 0.3239
Epoch 19/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9137 - loss: 0.2979 - val_accuracy: 0.9097 - val_loss: 0.3184
Epoch 20/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9144 - loss: 0.2994 - val_accuracy: 0.9103 - val_loss: 0.3116
Epoch 21/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9155 - loss: 0.2897 - val_accuracy: 0.9107 - val_loss: 0.3062
Epoch 22/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9176 - loss: 0.2852 - val_accuracy: 0.9125 - val_loss: 0.3012
Epoch 23/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 4ms/step - accuracy: 0.9179 - loss: 0.2803 - val_accuracy: 0.9143 - val_loss: 0.2961
Epoch 24/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9185 - loss: 0.2793 - val_accuracy: 0.9165 - val_loss: 0.2909
Epoch 25/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9212 - loss: 0.2723 - val_accuracy: 0.9168 - val_loss: 0.2865
Epoch 26/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9233 - loss: 0.2660 - val_accuracy: 0.9195 - val_loss: 0.2813
Epoch 27/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9243 - loss: 0.2643 - val_accuracy: 0.9185 - val_loss: 0.2766
Epoch 28/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 3ms/step - accuracy: 0.9259 - loss: 0.2574 - val_accuracy: 0.9195 - val_loss: 0.2731
Epoch 29/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9270 - loss: 0.2527 - val_accuracy: 0.9217 - val_loss: 0.2682
Epoch 30/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 4ms/step - accuracy: 0.9257 - loss: 0.2535 - val_accuracy: 0.9228 - val_loss: 0.2654
Epoch 31/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9283 - loss: 0.2459 - val_accuracy: 0.9242 - val_loss: 0.2603
Epoch 32/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9292 - loss: 0.2460 - val_accuracy: 0.9253 - val_loss: 0.2559
Epoch 33/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 6s 4ms/step - accuracy: 0.9304 - loss: 0.2371 - val_accuracy: 0.9253 - val_loss: 0.2533
Epoch 34/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9307 - loss: 0.2373 - val_accuracy: 0.9272 - val_loss: 0.2490
Epoch 35/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9342 - loss: 0.2265 - val_accuracy: 0.9290 - val_loss: 0.2451
Epoch 36/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 11s 4ms/step - accuracy: 0.9327 - loss: 0.2291 - val_accuracy: 0.9288 - val_loss: 0.2422
Epoch 37/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9345 - loss: 0.2284 - val_accuracy: 0.9322 - val_loss: 0.2379
Epoch 38/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9348 - loss: 0.2238 - val_accuracy: 0.9337 - val_loss: 0.2351
Epoch 39/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9379 - loss: 0.2124 - val_accuracy: 0.9325 - val_loss: 0.2322
Epoch 40/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 3ms/step - accuracy: 0.9385 - loss: 0.2143 - val_accuracy: 0.9343 - val_loss: 0.2285
Epoch 41/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9391 - loss: 0.2112 - val_accuracy: 0.9342 - val_loss: 0.2259
Epoch 42/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 9s 3ms/step - accuracy: 0.9389 - loss: 0.2117 - val_accuracy: 0.9353 - val_loss: 0.2228
Epoch 43/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9400 - loss: 0.2059 - val_accuracy: 0.9367 - val_loss: 0.2199
Epoch 44/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9402 - loss: 0.2074 - val_accuracy: 0.9372 - val_loss: 0.2178
Epoch 45/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 3ms/step - accuracy: 0.9409 - loss: 0.2012 - val_accuracy: 0.9377 - val_loss: 0.2148
Epoch 46/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 10s 3ms/step - accuracy: 0.9410 - loss: 0.2027 - val_accuracy: 0.9387 - val_loss: 0.2117
Epoch 47/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9441 - loss: 0.1951 - val_accuracy: 0.9388 - val_loss: 0.2101
Epoch 48/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 8s 3ms/step - accuracy: 0.9455 - loss: 0.1887 - val_accuracy: 0.9395 - val_loss: 0.2080
Epoch 49/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 7s 4ms/step - accuracy: 0.9455 - loss: 0.1879 - val_accuracy: 0.9400 - val_loss: 0.2049
Epoch 50/50
1688/1688 ━━━━━━━━━━━━━━━━━━━━ 5s 3ms/step - accuracy: 0.9458 - loss: 0.1879 - val_accuracy: 0.9412 - val_loss: 0.2024
# Plot the loss curves
plt.figure(figsize=(12, 5))

plt.subplot(1, 2, 1)
plt.plot(history_3l_100_100.history['loss'], label='Training loss')
plt.plot(history_3l_100_100.history['val_loss'], label='Validation loss')
plt.title('Loss per epoch')
plt.xlabel('Epoch')
plt.ylabel('Categorical Crossentropy')
plt.legend()
plt.grid(True)

scores_3l_100_100 = model_3l_100_100.evaluate(X_test, y_test)
print('Loss on test data:', scores_3l_100_100[0])
print('Accuracy on test data:', scores_3l_100_100[1])
313/313 ━━━━━━━━━━━━━━━━━━━━ 1s 2ms/step - accuracy: 0.9488 - loss: 0.1810
Loss on test data: 0.18787769973278046
Accuracy on test data: 0.9467999935150146
import pandas as pd

data = {
    'Hidden layers': [0, 1, 1, 1, 2, 2],
    'Neurons 1': ['-', 100, 300, 500, 100, 100],
    'Neurons 2': ['-', '-', '-', '-', 50, 100],
    'Test accuracy': [0.928600013256073, 0.9462000131607056, 0.9412000179290771, 0.9369999766349792, 0.9444000124931335, 0.9467999935150146]
}

df = pd.DataFrame(data)
df
  Hidden layers Neurons 1 Neurons 2  Test accuracy
0             0         -         -         0.9286
1             1       100         -         0.9462
2             1       300         -         0.9412
3             1       500         -         0.9370
4             2       100        50         0.9444
5             2       100       100         0.9468
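The best configuration can also be read from the table programmatically; a small sketch using the DataFrame and the column names as given above:
# row of the table with the highest test accuracy
best_row = df.loc[df['Test accuracy'].idxmax()]
print(best_row)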
# save the model to disk, here as the file best_model.keras
# in general, an arbitrary path can be specified
model_2l_100.save(filepath='best_model.keras')
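The saved .keras file can later be restored with keras.models.load_model; a brief sanity-check sketch, not executed in the original notebook:
from keras.models import load_model

# reload the saved model and confirm it reproduces the test accuracy
restored = load_model('best_model.keras')
restored_scores = restored.evaluate(X_test, y_test, verbose=0)
print('Restored model accuracy:', restored_scores[1])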
# display a test image and the recognition result
n = 333
result = model.predict(X_test[n:n+1])
print('NN output:', result)

plt.imshow(X_test[n].reshape(28,28), cmap=plt.get_cmap('gray'))
plt.show()
print('Real mark: ', str(np.argmax(y_test[n])))
print('NN answer: ', str(np.argmax(result)))
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 91ms/step
NN output: [[3.0055828e-02 1.7918642e-06 1.0183058e-05 1.3000262e-04 2.2273003e-05
  9.6671683e-01 3.1997326e-05 6.5717955e-05 2.9293287e-03 3.6015103e-05]]

Real mark:  5
NN answer:  5
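Besides the predicted class, the softmax output also provides a confidence value; a small sketch based on the result array above:
# predicted class together with its softmax probability
predicted = np.argmax(result)
confidence = np.max(result)
print(f'Predicted {predicted} with probability {confidence:.3f}')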
# display a test image and the recognition result
n = 555
result = model.predict(X_test[n:n+1])
print('NN output:', result)

plt.imshow(X_test[n].reshape(28,28), cmap=plt.get_cmap('gray'))
plt.show()
print('Real mark: ', str(np.argmax(y_test[n])))
print('NN answer: ', str(np.argmax(result)))
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 49ms/step
NN output: [[9.8050815e-01 5.7898621e-08 9.2301030e-05 8.2087971e-04 5.6250155e-06
  1.8371470e-02 9.3076023e-06 1.4318567e-04 2.3332947e-05 2.5768295e-05]]

Real mark:  0
NN answer:  0
# load a custom image
from PIL import Image
file_1_data = Image.open('1.png')
file_1_data = file_1_data.convert('L')  # convert to grayscale
test_1_img = np.array(file_1_data)
# display the custom image
plt.imshow(test_1_img, cmap=plt.get_cmap('gray'))
plt.show()

# preprocessing
test_1_img = test_1_img / 255
test_1_img = test_1_img.reshape(1, num_pixels)
# recognition
result_1 = model.predict(test_1_img)
print('I think it\'s', np.argmax(result_1))
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 41ms/step
I think it's 1
file_2_data = Image.open('2.png')
file_2_data = file_2_data.convert('L')  # convert to grayscale
test_2_img = np.array(file_2_data)

plt.imshow(test_2_img, cmap=plt.get_cmap('gray'))
plt.show()

test_2_img = test_2_img / 255
test_2_img = test_2_img.reshape(1, num_pixels)

result_2 = model.predict(test_2_img)
print('I think it\'s', np.argmax(result_2))
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 40ms/step
I think it's 2

The network made no mistakes and correctly recognized both digits in the images.
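Note that the code above assumes the PNG files are already 28x28 grayscale images with a light digit on a dark background, as in MNIST. A more defensive loader might look like the following sketch (the helper name and the inversion heuristic are assumptions, not part of the original code):
from PIL import Image
import numpy as np

def load_digit(path, num_pixels=784):
    # hypothetical helper: bring an arbitrary digit image to MNIST format
    img = Image.open(path).convert('L').resize((28, 28))  # grayscale, 28x28
    arr = np.array(img).astype('float32') / 255
    if arr.mean() > 0.5:  # light background -> invert to white-on-black like MNIST
        arr = 1.0 - arr
    return arr.reshape(1, num_pixels)

# usage (sketch): result = model.predict(load_digit('1.png'))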

file_1_90_data = Image.open('1_90.png')
file_1_90_data = file_1_90_data.convert('L')  # convert to grayscale
test_1_90_img = np.array(file_1_90_data)

plt.imshow(test_1_90_img, cmap=plt.get_cmap('gray'))
plt.show()

test_1_90_img = test_1_90_img / 255
test_1_90_img = test_1_90_img.reshape(1, num_pixels)

result_1_90 = model.predict(test_1_90_img)
print('I think it\'s', np.argmax(result_1_90))
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 41ms/step
I think it's 4
file_2_90_data = Image.open('2_90.png')
file_2_90_data = file_2_90_data.convert('L')  # convert to grayscale
test_2_90_img = np.array(file_2_90_data)

plt.imshow(test_2_90_img, cmap=plt.get_cmap('gray'))
plt.show()

test_2_90_img = test_2_90_img / 255
test_2_90_img = test_2_90_img.reshape(1, num_pixels)

result_2_90 = model.predict(test_2_90_img)
print('I think it\'s', np.argmax(result_2_90))
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 59ms/step
I think it's 5

When the images are rotated by 90 degrees, the network no longer recognizes the digits correctly, because it was never trained on rotated images.
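One common remedy (not used in this work) is to augment the training set with randomly rotated copies of the digits, so the network sees rotated examples during training; a minimal sketch, assuming scipy is available:
from scipy.ndimage import rotate
import numpy as np

# hypothetical augmentation step: append randomly rotated copies of the training digits
def augment_with_rotations(X, y, max_angle=90, seed=0):
    rng = np.random.default_rng(seed)
    angles = rng.uniform(-max_angle, max_angle, size=len(X))
    X_rot = np.stack([
        rotate(x.reshape(28, 28), angle, reshape=False).reshape(-1)
        for x, angle in zip(X, angles)
    ])
    return np.concatenate([X, X_rot]), np.concatenate([y, y])

# usage (sketch): X_aug, y_aug = augment_with_rotations(X_train, y_train)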