diff --git a/Challenge_2_results.png b/Challenge_2_results.png new file mode 100644 index 0000000..50c19ff Binary files /dev/null and b/Challenge_2_results.png differ diff --git a/your-code/challenge-1.ipynb b/your-code/challenge-1.ipynb index 2487c5f..2c16080 100644 --- a/your-code/challenge-1.ipynb +++ b/your-code/challenge-1.ipynb @@ -34,11 +34,98 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ - "# your code here" + "# Importing necessary libraries\n", + "import pandas as pd\n", + "import numpy as np\n", + "from sklearn.preprocessing import LabelEncoder\n", + "from tensorflow import keras\n", + "from keras.models import Sequential\n", + "from sklearn.model_selection import train_test_split\n", + "from keras.models import Sequential\n", + "from keras.layers import Dense, Dropout\n", + "from keras.optimizers import Adam, RMSprop, SGD\n", + "from keras.callbacks import EarlyStopping\n", + "from sklearn.metrics import accuracy_score\n", + "from keras.models import load_model\n", + "import random\n", + "from sklearn.model_selection import train_test_split, cross_val_score, StratifiedKFold\n", + "\n" ] }, { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(958, 10)\n", + " TL TM TR ML MM MR BL BM BR class\n", + "0 x x x x o o x o o True\n", + "1 x x x x o o o x o True\n", + "2 x x x x o o o o x True\n", + "3 x x x x o o o b b True\n", + "4 x x x x o o b o b True\n", + " TL TM TR ML MM MR BL BM BR class\n", + "count 958 958 958 958 958 958 958 958 958 958\n", + "unique 3 3 3 3 3 3 3 3 3 2\n", + "top x x x x x x x x x True\n", + "freq 418 378 418 378 458 378 418 378 418 626\n", + " TL TM TR ML MM MR BL BM BR class\n", + "0 2 2 2 2 1 1 2 1 1 1\n", + "1 2 2 2 2 1 1 1 2 1 1\n", + "2 2 2 2 2 1 1 1 1 2 1\n", + "3 2 2 2 2 1 1 1 0 0 1\n", + "4 2 2 2 2 1 1 0 1 0 1\n", + " TL TM TR ML MM MR BL BM BR\n", + "0 2 2 2 2 1 1 2 1 1\n", + "1 2 2 2 2 1 1 1 2 1\n", + "2 2 2 2 2 1 1 1 1 2\n", + "3 2 2 2 2 1 1 1 0 0\n", + "4 2 2 2 2 1 1 0 1 0\n", + "0 1\n", + "1 1\n", + "2 1\n", + "3 1\n", + "4 1\n", + "Name: class, dtype: int64\n" + ] + } + ], + "source": [ + "# Importing data into dataframe\n", + "data = pd.read_csv(\"tic-tac-toe.csv\")\n", + "\n", + "# Inspecting dataset\n", + "print(data.shape)\n", + "print(data.head())\n", + "print(data.describe())\n", + "\n", + "# Replace categorical data with numeric values through label encoding\n", + "data_transformed = data.copy()\n", + "label_encoders = {}\n", + "for column in data.columns:\n", + " le = LabelEncoder()\n", + " data_transformed[column] = le.fit_transform(data[column])\n", + " label_encoders[column] = le\n", + "print(data_transformed.head())\n", + "\n", + "# Separate input and output columns\n", + "data_columns = data_transformed.columns\n", + "\n", + "predictors = data_transformed[data_columns[data_columns != \"class\"]] # all columns except class\n", + "target = data_transformed[\"class\"] # class column\n", + "print(predictors.head())\n", + "print(target.head())\n", + "\n", + "# Normalising the input data is not necessary, as there are no large differences in scale\n", + "# Rather, we only have label-encoded values from 0 to 2, which do not need further scaling\n" ] }, { @@ -60,11 +147,246 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": 
"stream", + "text": [ + "Epoch 1/100\n", + "21/21 - 0s - loss: 0.6766 - accuracy: 0.6045 - 332ms/epoch - 16ms/step\n", + "Epoch 2/100\n", + "21/21 - 0s - loss: 0.6207 - accuracy: 0.6478 - 19ms/epoch - 907us/step\n", + "Epoch 3/100\n", + "21/21 - 0s - loss: 0.6066 - accuracy: 0.6761 - 19ms/epoch - 907us/step\n", + "Epoch 4/100\n", + "21/21 - 0s - loss: 0.5928 - accuracy: 0.6731 - 22ms/epoch - 1ms/step\n", + "Epoch 5/100\n", + "21/21 - 0s - loss: 0.5825 - accuracy: 0.7075 - 21ms/epoch - 979us/step\n", + "Epoch 6/100\n", + "21/21 - 0s - loss: 0.5634 - accuracy: 0.7284 - 22ms/epoch - 1ms/step\n", + "Epoch 7/100\n", + "21/21 - 0s - loss: 0.5508 - accuracy: 0.7328 - 21ms/epoch - 1ms/step\n", + "Epoch 8/100\n", + "21/21 - 0s - loss: 0.5339 - accuracy: 0.7493 - 22ms/epoch - 1ms/step\n", + "Epoch 9/100\n", + "21/21 - 0s - loss: 0.5212 - accuracy: 0.7597 - 20ms/epoch - 931us/step\n", + "Epoch 10/100\n", + "21/21 - 0s - loss: 0.5060 - accuracy: 0.7672 - 22ms/epoch - 1ms/step\n", + "Epoch 11/100\n", + "21/21 - 0s - loss: 0.4928 - accuracy: 0.7806 - 23ms/epoch - 1ms/step\n", + "Epoch 12/100\n", + "21/21 - 0s - loss: 0.4800 - accuracy: 0.7761 - 23ms/epoch - 1ms/step\n", + "Epoch 13/100\n", + "21/21 - 0s - loss: 0.4657 - accuracy: 0.7940 - 21ms/epoch - 1ms/step\n", + "Epoch 14/100\n", + "21/21 - 0s - loss: 0.4492 - accuracy: 0.8119 - 22ms/epoch - 1ms/step\n", + "Epoch 15/100\n", + "21/21 - 0s - loss: 0.4337 - accuracy: 0.8299 - 21ms/epoch - 1ms/step\n", + "Epoch 16/100\n", + "21/21 - 0s - loss: 0.4200 - accuracy: 0.8343 - 22ms/epoch - 1ms/step\n", + "Epoch 17/100\n", + "21/21 - 0s - loss: 0.4076 - accuracy: 0.8358 - 22ms/epoch - 1ms/step\n", + "Epoch 18/100\n", + "21/21 - 0s - loss: 0.3948 - accuracy: 0.8403 - 24ms/epoch - 1ms/step\n", + "Epoch 19/100\n", + "21/21 - 0s - loss: 0.3928 - accuracy: 0.8433 - 22ms/epoch - 1ms/step\n", + "Epoch 20/100\n", + "21/21 - 0s - loss: 0.3758 - accuracy: 0.8612 - 20ms/epoch - 955us/step\n", + "Epoch 21/100\n", + "21/21 - 0s - loss: 0.3657 - accuracy: 0.8567 - 21ms/epoch - 1ms/step\n", + "Epoch 22/100\n", + "21/21 - 0s - loss: 0.3597 - accuracy: 0.8612 - 22ms/epoch - 1ms/step\n", + "Epoch 23/100\n", + "21/21 - 0s - loss: 0.3518 - accuracy: 0.8552 - 23ms/epoch - 1ms/step\n", + "Epoch 24/100\n", + "21/21 - 0s - loss: 0.3521 - accuracy: 0.8597 - 23ms/epoch - 1ms/step\n", + "Epoch 25/100\n", + "21/21 - 0s - loss: 0.3382 - accuracy: 0.8746 - 21ms/epoch - 1ms/step\n", + "Epoch 26/100\n", + "21/21 - 0s - loss: 0.3232 - accuracy: 0.8806 - 21ms/epoch - 1ms/step\n", + "Epoch 27/100\n", + "21/21 - 0s - loss: 0.3190 - accuracy: 0.8701 - 23ms/epoch - 1ms/step\n", + "Epoch 28/100\n", + "21/21 - 0s - loss: 0.3181 - accuracy: 0.8806 - 20ms/epoch - 966us/step\n", + "Epoch 29/100\n", + "21/21 - 0s - loss: 0.3145 - accuracy: 0.8746 - 21ms/epoch - 1ms/step\n", + "Epoch 30/100\n", + "21/21 - 0s - loss: 0.3020 - accuracy: 0.8881 - 21ms/epoch - 980us/step\n", + "Epoch 31/100\n", + "21/21 - 0s - loss: 0.2947 - accuracy: 0.8910 - 21ms/epoch - 1ms/step\n", + "Epoch 32/100\n", + "21/21 - 0s - loss: 0.2883 - accuracy: 0.8940 - 21ms/epoch - 1ms/step\n", + "Epoch 33/100\n", + "21/21 - 0s - loss: 0.2837 - accuracy: 0.8896 - 22ms/epoch - 1ms/step\n", + "Epoch 34/100\n", + "21/21 - 0s - loss: 0.2792 - accuracy: 0.8955 - 23ms/epoch - 1ms/step\n", + "Epoch 35/100\n", + "21/21 - 0s - loss: 0.2722 - accuracy: 0.9015 - 22ms/epoch - 1ms/step\n", + "Epoch 36/100\n", + "21/21 - 0s - loss: 0.2666 - accuracy: 0.9015 - 21ms/epoch - 1ms/step\n", + "Epoch 37/100\n", + "21/21 - 0s - loss: 0.2614 - accuracy: 0.9030 - 
22ms/epoch - 1ms/step\n", + "Epoch 38/100\n", + "21/21 - 0s - loss: 0.2569 - accuracy: 0.9000 - 20ms/epoch - 955us/step\n", + "Epoch 39/100\n", + "21/21 - 0s - loss: 0.2513 - accuracy: 0.9060 - 22ms/epoch - 1ms/step\n", + "Epoch 40/100\n", + "21/21 - 0s - loss: 0.2511 - accuracy: 0.9015 - 41ms/epoch - 2ms/step\n", + "Epoch 41/100\n", + "21/21 - 0s - loss: 0.2469 - accuracy: 0.9090 - 22ms/epoch - 1ms/step\n", + "Epoch 42/100\n", + "21/21 - 0s - loss: 0.2402 - accuracy: 0.9045 - 21ms/epoch - 997us/step\n", + "Epoch 43/100\n", + "21/21 - 0s - loss: 0.2340 - accuracy: 0.9149 - 23ms/epoch - 1ms/step\n", + "Epoch 44/100\n", + "21/21 - 0s - loss: 0.2302 - accuracy: 0.9179 - 22ms/epoch - 1ms/step\n", + "Epoch 45/100\n", + "21/21 - 0s - loss: 0.2239 - accuracy: 0.9149 - 28ms/epoch - 1ms/step\n", + "Epoch 46/100\n", + "21/21 - 0s - loss: 0.2254 - accuracy: 0.9090 - 21ms/epoch - 988us/step\n", + "Epoch 47/100\n", + "21/21 - 0s - loss: 0.2199 - accuracy: 0.9164 - 21ms/epoch - 1ms/step\n", + "Epoch 48/100\n", + "21/21 - 0s - loss: 0.2204 - accuracy: 0.9090 - 22ms/epoch - 1ms/step\n", + "Epoch 49/100\n", + "21/21 - 0s - loss: 0.2161 - accuracy: 0.9179 - 21ms/epoch - 982us/step\n", + "Epoch 50/100\n", + "21/21 - 0s - loss: 0.2024 - accuracy: 0.9209 - 21ms/epoch - 980us/step\n", + "Epoch 51/100\n", + "21/21 - 0s - loss: 0.1993 - accuracy: 0.9254 - 22ms/epoch - 1ms/step\n", + "Epoch 52/100\n", + "21/21 - 0s - loss: 0.1987 - accuracy: 0.9239 - 23ms/epoch - 1ms/step\n", + "Epoch 53/100\n", + "21/21 - 0s - loss: 0.1946 - accuracy: 0.9343 - 21ms/epoch - 1ms/step\n", + "Epoch 54/100\n", + "21/21 - 0s - loss: 0.1880 - accuracy: 0.9343 - 22ms/epoch - 1ms/step\n", + "Epoch 55/100\n", + "21/21 - 0s - loss: 0.1835 - accuracy: 0.9313 - 20ms/epoch - 968us/step\n", + "Epoch 56/100\n", + "21/21 - 0s - loss: 0.1818 - accuracy: 0.9299 - 21ms/epoch - 1ms/step\n", + "Epoch 57/100\n", + "21/21 - 0s - loss: 0.1816 - accuracy: 0.9313 - 21ms/epoch - 1ms/step\n", + "Epoch 58/100\n", + "21/21 - 0s - loss: 0.1731 - accuracy: 0.9299 - 20ms/epoch - 955us/step\n", + "Epoch 59/100\n", + "21/21 - 0s - loss: 0.1707 - accuracy: 0.9418 - 21ms/epoch - 980us/step\n", + "Epoch 60/100\n", + "21/21 - 0s - loss: 0.1768 - accuracy: 0.9284 - 22ms/epoch - 1ms/step\n", + "Epoch 61/100\n", + "21/21 - 0s - loss: 0.1800 - accuracy: 0.9343 - 23ms/epoch - 1ms/step\n", + "Epoch 62/100\n", + "21/21 - 0s - loss: 0.1644 - accuracy: 0.9433 - 22ms/epoch - 1ms/step\n", + "Epoch 63/100\n", + "21/21 - 0s - loss: 0.1726 - accuracy: 0.9448 - 21ms/epoch - 979us/step\n", + "Epoch 64/100\n", + "21/21 - 0s - loss: 0.1562 - accuracy: 0.9418 - 20ms/epoch - 955us/step\n", + "Epoch 65/100\n", + "21/21 - 0s - loss: 0.1528 - accuracy: 0.9507 - 21ms/epoch - 1ms/step\n", + "Epoch 66/100\n", + "21/21 - 0s - loss: 0.1468 - accuracy: 0.9478 - 21ms/epoch - 1ms/step\n", + "Epoch 67/100\n", + "21/21 - 0s - loss: 0.1429 - accuracy: 0.9597 - 21ms/epoch - 993us/step\n", + "Epoch 68/100\n", + "21/21 - 0s - loss: 0.1386 - accuracy: 0.9657 - 22ms/epoch - 1ms/step\n", + "Epoch 69/100\n", + "21/21 - 0s - loss: 0.1359 - accuracy: 0.9612 - 21ms/epoch - 1ms/step\n", + "Epoch 70/100\n", + "21/21 - 0s - loss: 0.1384 - accuracy: 0.9597 - 22ms/epoch - 1ms/step\n", + "Epoch 71/100\n", + "21/21 - 0s - loss: 0.1361 - accuracy: 0.9567 - 21ms/epoch - 979us/step\n", + "Epoch 72/100\n", + "21/21 - 0s - loss: 0.1333 - accuracy: 0.9567 - 21ms/epoch - 1ms/step\n", + "Epoch 73/100\n", + "21/21 - 0s - loss: 0.1274 - accuracy: 0.9612 - 20ms/epoch - 955us/step\n", + "Epoch 74/100\n", + "21/21 - 0s - loss: 
0.1236 - accuracy: 0.9657 - 22ms/epoch - 1ms/step\n", + "Epoch 75/100\n", + "21/21 - 0s - loss: 0.1212 - accuracy: 0.9761 - 20ms/epoch - 955us/step\n", + "Epoch 76/100\n", + "21/21 - 0s - loss: 0.1161 - accuracy: 0.9657 - 23ms/epoch - 1ms/step\n", + "Epoch 77/100\n", + "21/21 - 0s - loss: 0.1125 - accuracy: 0.9776 - 20ms/epoch - 955us/step\n", + "Epoch 78/100\n", + "21/21 - 0s - loss: 0.1120 - accuracy: 0.9716 - 21ms/epoch - 1ms/step\n", + "Epoch 79/100\n", + "21/21 - 0s - loss: 0.1116 - accuracy: 0.9701 - 21ms/epoch - 1ms/step\n", + "Epoch 80/100\n", + "21/21 - 0s - loss: 0.1087 - accuracy: 0.9716 - 22ms/epoch - 1ms/step\n", + "Epoch 81/100\n", + "21/21 - 0s - loss: 0.1137 - accuracy: 0.9701 - 20ms/epoch - 955us/step\n", + "Epoch 82/100\n", + "21/21 - 0s - loss: 0.1032 - accuracy: 0.9776 - 20ms/epoch - 955us/step\n", + "Epoch 83/100\n", + "21/21 - 0s - loss: 0.1011 - accuracy: 0.9791 - 22ms/epoch - 1ms/step\n", + "Epoch 84/100\n", + "21/21 - 0s - loss: 0.1016 - accuracy: 0.9731 - 20ms/epoch - 955us/step\n", + "Epoch 85/100\n", + "21/21 - 0s - loss: 0.0929 - accuracy: 0.9851 - 20ms/epoch - 955us/step\n", + "Epoch 86/100\n", + "21/21 - 0s - loss: 0.0912 - accuracy: 0.9806 - 21ms/epoch - 1ms/step\n", + "Epoch 87/100\n", + "21/21 - 0s - loss: 0.0882 - accuracy: 0.9821 - 22ms/epoch - 1ms/step\n", + "Epoch 88/100\n", + "21/21 - 0s - loss: 0.0878 - accuracy: 0.9821 - 21ms/epoch - 1ms/step\n", + "Epoch 89/100\n", + "21/21 - 0s - loss: 0.0832 - accuracy: 0.9851 - 22ms/epoch - 1ms/step\n", + "Epoch 90/100\n", + "21/21 - 0s - loss: 0.0810 - accuracy: 0.9896 - 20ms/epoch - 955us/step\n", + "Epoch 91/100\n", + "21/21 - 0s - loss: 0.0846 - accuracy: 0.9821 - 22ms/epoch - 1ms/step\n", + "Epoch 92/100\n", + "21/21 - 0s - loss: 0.0762 - accuracy: 0.9940 - 21ms/epoch - 1ms/step\n", + "Epoch 93/100\n", + "21/21 - 0s - loss: 0.0752 - accuracy: 0.9910 - 27ms/epoch - 1ms/step\n", + "Epoch 94/100\n", + "21/21 - 0s - loss: 0.0759 - accuracy: 0.9925 - 20ms/epoch - 955us/step\n", + "Epoch 95/100\n", + "21/21 - 0s - loss: 0.0738 - accuracy: 0.9896 - 21ms/epoch - 1ms/step\n", + "Epoch 96/100\n", + "21/21 - 0s - loss: 0.0732 - accuracy: 0.9896 - 22ms/epoch - 1ms/step\n", + "Epoch 97/100\n", + "21/21 - 0s - loss: 0.0727 - accuracy: 0.9896 - 21ms/epoch - 979us/step\n", + "Epoch 98/100\n", + "21/21 - 0s - loss: 0.0686 - accuracy: 0.9910 - 20ms/epoch - 955us/step\n", + "Epoch 99/100\n", + "21/21 - 0s - loss: 0.0656 - accuracy: 0.9955 - 21ms/epoch - 1ms/step\n", + "Epoch 100/100\n", + "21/21 - 0s - loss: 0.0661 - accuracy: 0.9896 - 21ms/epoch - 1ms/step\n", + "9/9 - 0s - loss: 0.2615 - accuracy: 0.9028 - 162ms/epoch - 18ms/step\n", + "Test accuracy: 0.9027777910232544\n" + ] + } + ], "source": [ - "# your code here" + "# Split the dataset into training and test sets\n", + "X_train, X_test, y_train, y_test = train_test_split(predictors, target, test_size=0.3, random_state=42)\n", + "\n", + "# Define the classification model\n", + "def classification_model():\n", + " # Create a model with 2 hidden dense layers\n", + " model = Sequential()\n", + " model.add(Dense(50, activation=\"relu\", input_shape=(X_train.shape[1],)))\n", + " model.add(Dense(50, activation=\"relu\"))\n", + " model.add(Dense(2, activation=\"softmax\"))\n", + " \n", + " # Compile model\n", + " model.compile(optimizer=\"adam\", loss=\"sparse_categorical_crossentropy\", metrics=[\"accuracy\"])\n", + " return model\n", + "\n", + "# Build the model\n", + "model = classification_model()\n", + "\n", + "# Fit the model\n", + "model.fit(X_train, y_train, epochs=100, 
verbose=2)\n", + "\n", + "# Evaluate the model on the test set\n", + "accuracy = model.evaluate(X_test, y_test, verbose=2)[1]\n", + "print(f'Test accuracy: {accuracy}')\n", + "\n", + "# Save the entire model\n", + "model.save('tic_tac_toe.model.h5')" ] }, { @@ -78,11 +400,61 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "9/9 [==============================] - 0s 1ms/step\n", + "Sample Index: 2\n", + "Predicted: 0, Actual: 1\n", + "Incorrect\n", + "------------------------------\n", + "Sample Index: 226\n", + "Predicted: 0, Actual: 1\n", + "Incorrect\n", + "------------------------------\n", + "Sample Index: 227\n", + "Predicted: 1, Actual: 1\n", + "Correct\n", + "------------------------------\n", + "Sample Index: 64\n", + "Predicted: 0, Actual: 1\n", + "Incorrect\n", + "------------------------------\n", + "Sample Index: 186\n", + "Predicted: 0, Actual: 0\n", + "Correct\n", + "------------------------------\n", + "9/9 - 0s - loss: 0.2615 - accuracy: 0.9028 - 159ms/epoch - 18ms/step\n", + "Test accuracy (loaded model): 0.9027777910232544\n" + ] + } + ], "source": [ - "# your code here" + "# Load the model\n", + "loaded_model = load_model('tic_tac_toe.model.h5')\n", + "\n", + "# Make predictions on the test set with the loaded model\n", + "y_pred = loaded_model.predict(X_test)\n", + "predicted_classes = np.argmax(y_pred, axis=1) # pick the more probable of the two softmax outputs\n", + "\n", + "# Select random rows from the test set\n", + "num_samples = 5 # Number of random samples to check\n", + "random_indices = random.sample(range(X_test.shape[0]), num_samples)\n", + "\n", + "# Compare predictions with actual labels\n", + "for index in random_indices:\n", + " print(f\"Sample Index: {index}\")\n", + " print(f\"Predicted: {predicted_classes[index]}, Actual: {y_test.iloc[index]}\")\n", + " print(\"Correct\" if predicted_classes[index] == y_test.iloc[index] else \"Incorrect\")\n", + " print('-' * 30)\n", + "\n", + "# Evaluate the loaded model on the test set and get accuracy\n", + "accuracy = loaded_model.evaluate(X_test, y_test, verbose=2)[1]\n", + "print(f'Test accuracy (loaded model): {accuracy}')" ] }, { @@ -104,11 +476,547 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 58, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/250\n", + "14/14 - 1s - loss: 0.6891 - accuracy: 0.6224 - 703ms/epoch - 50ms/step\n", + "Epoch 2/250\n", + "14/14 - 0s - loss: 0.6688 - accuracy: 0.6463 - 30ms/epoch - 2ms/step\n", + "Epoch 3/250\n", + "14/14 - 0s - loss: 0.6505 - accuracy: 0.6463 - 26ms/epoch - 2ms/step\n", + "Epoch 4/250\n", + "14/14 - 0s - loss: 0.6400 - accuracy: 0.6463 - 24ms/epoch - 2ms/step\n", + "Epoch 5/250\n", + "14/14 - 0s - loss: 0.6373 - accuracy: 0.6463 - 22ms/epoch - 2ms/step\n", + "Epoch 6/250\n", + "14/14 - 0s - loss: 0.6307 - accuracy: 0.6463 - 20ms/epoch - 1ms/step\n", + "Epoch 7/250\n", + "14/14 - 0s - loss: 0.6266 - accuracy: 0.6463 - 21ms/epoch - 2ms/step\n", + "Epoch 8/250\n", + "14/14 - 0s - loss: 0.6250 - accuracy: 0.6463 - 20ms/epoch - 1ms/step\n", + "Epoch 9/250\n", + "14/14 - 0s - loss: 0.6147 - accuracy: 0.6463 - 21ms/epoch - 2ms/step\n", + "Epoch 10/250\n", + "14/14 - 0s - loss: 0.6172 - accuracy: 0.6552 - 21ms/epoch - 1ms/step\n", + "Epoch 11/250\n", + "14/14 - 0s - loss: 0.6132 - accuracy: 0.6597 - 21ms/epoch - 2ms/step\n", + "Epoch 12/250\n", + "14/14 - 0s - loss: 0.6011 - accuracy: 0.6955 - 
20ms/epoch - 1ms/step\n", + "Epoch 13/250\n", + "14/14 - 0s - loss: 0.6130 - accuracy: 0.6701 - 20ms/epoch - 1ms/step\n", + "Epoch 14/250\n", + "14/14 - 0s - loss: 0.5898 - accuracy: 0.6985 - 21ms/epoch - 1ms/step\n", + "Epoch 15/250\n", + "14/14 - 0s - loss: 0.5851 - accuracy: 0.7104 - 25ms/epoch - 2ms/step\n", + "Epoch 16/250\n", + "14/14 - 0s - loss: 0.5665 - accuracy: 0.7075 - 22ms/epoch - 2ms/step\n", + "Epoch 17/250\n", + "14/14 - 0s - loss: 0.5449 - accuracy: 0.7448 - 20ms/epoch - 1ms/step\n", + "Epoch 18/250\n", + "14/14 - 0s - loss: 0.5389 - accuracy: 0.7463 - 29ms/epoch - 2ms/step\n", + "Epoch 19/250\n", + "14/14 - 0s - loss: 0.5320 - accuracy: 0.7522 - 22ms/epoch - 2ms/step\n", + "Epoch 20/250\n", + "14/14 - 0s - loss: 0.5184 - accuracy: 0.7746 - 21ms/epoch - 2ms/step\n", + "Epoch 21/250\n", + "14/14 - 0s - loss: 0.4981 - accuracy: 0.7627 - 21ms/epoch - 2ms/step\n", + "Epoch 22/250\n", + "14/14 - 0s - loss: 0.4957 - accuracy: 0.7716 - 21ms/epoch - 1ms/step\n", + "Epoch 23/250\n", + "14/14 - 0s - loss: 0.4706 - accuracy: 0.7970 - 21ms/epoch - 2ms/step\n", + "Epoch 24/250\n", + "14/14 - 0s - loss: 0.4651 - accuracy: 0.7836 - 21ms/epoch - 2ms/step\n", + "Epoch 25/250\n", + "14/14 - 0s - loss: 0.4504 - accuracy: 0.7896 - 20ms/epoch - 1ms/step\n", + "Epoch 26/250\n", + "14/14 - 0s - loss: 0.4654 - accuracy: 0.8030 - 21ms/epoch - 2ms/step\n", + "Epoch 27/250\n", + "14/14 - 0s - loss: 0.4435 - accuracy: 0.8060 - 19ms/epoch - 1ms/step\n", + "Epoch 28/250\n", + "14/14 - 0s - loss: 0.4662 - accuracy: 0.7821 - 20ms/epoch - 1ms/step\n", + "Epoch 29/250\n", + "14/14 - 0s - loss: 0.4302 - accuracy: 0.8164 - 19ms/epoch - 1ms/step\n", + "Epoch 30/250\n", + "14/14 - 0s - loss: 0.4292 - accuracy: 0.8164 - 20ms/epoch - 1ms/step\n", + "Epoch 31/250\n", + "14/14 - 0s - loss: 0.4125 - accuracy: 0.8239 - 21ms/epoch - 1ms/step\n", + "Epoch 32/250\n", + "14/14 - 0s - loss: 0.4387 - accuracy: 0.8090 - 19ms/epoch - 1ms/step\n", + "Epoch 33/250\n", + "14/14 - 0s - loss: 0.4269 - accuracy: 0.8149 - 20ms/epoch - 1ms/step\n", + "Epoch 34/250\n", + "14/14 - 0s - loss: 0.4247 - accuracy: 0.8104 - 19ms/epoch - 1ms/step\n", + "Epoch 35/250\n", + "14/14 - 0s - loss: 0.3972 - accuracy: 0.8403 - 21ms/epoch - 2ms/step\n", + "Epoch 36/250\n", + "14/14 - 0s - loss: 0.4130 - accuracy: 0.8149 - 20ms/epoch - 1ms/step\n", + "Epoch 37/250\n", + "14/14 - 0s - loss: 0.3897 - accuracy: 0.8343 - 22ms/epoch - 2ms/step\n", + "Epoch 38/250\n", + "14/14 - 0s - loss: 0.4036 - accuracy: 0.8284 - 20ms/epoch - 1ms/step\n", + "Epoch 39/250\n", + "14/14 - 0s - loss: 0.3641 - accuracy: 0.8612 - 23ms/epoch - 2ms/step\n", + "Epoch 40/250\n", + "14/14 - 0s - loss: 0.3803 - accuracy: 0.8418 - 20ms/epoch - 1ms/step\n", + "Epoch 41/250\n", + "14/14 - 0s - loss: 0.3613 - accuracy: 0.8358 - 20ms/epoch - 1ms/step\n", + "Epoch 42/250\n", + "14/14 - 0s - loss: 0.3645 - accuracy: 0.8463 - 20ms/epoch - 1ms/step\n", + "Epoch 43/250\n", + "14/14 - 0s - loss: 0.3852 - accuracy: 0.8194 - 21ms/epoch - 2ms/step\n", + "Epoch 44/250\n", + "14/14 - 0s - loss: 0.3975 - accuracy: 0.8388 - 21ms/epoch - 1ms/step\n", + "Epoch 45/250\n", + "14/14 - 0s - loss: 0.3594 - accuracy: 0.8448 - 19ms/epoch - 1ms/step\n", + "Epoch 46/250\n", + "14/14 - 0s - loss: 0.3488 - accuracy: 0.8448 - 20ms/epoch - 1ms/step\n", + "Epoch 47/250\n", + "14/14 - 0s - loss: 0.3693 - accuracy: 0.8388 - 19ms/epoch - 1ms/step\n", + "Epoch 48/250\n", + "14/14 - 0s - loss: 0.3411 - accuracy: 0.8478 - 21ms/epoch - 2ms/step\n", + "Epoch 49/250\n", + "14/14 - 0s - loss: 0.3750 - accuracy: 0.8299 - 
21ms/epoch - 1ms/step\n", + "Epoch 50/250\n", + "14/14 - 0s - loss: 0.3999 - accuracy: 0.8254 - 19ms/epoch - 1ms/step\n", + "Epoch 51/250\n", + "14/14 - 0s - loss: 0.3808 - accuracy: 0.8388 - 20ms/epoch - 1ms/step\n", + "Epoch 52/250\n", + "14/14 - 0s - loss: 0.3773 - accuracy: 0.8328 - 20ms/epoch - 1ms/step\n", + "Epoch 53/250\n", + "14/14 - 0s - loss: 0.3805 - accuracy: 0.8343 - 22ms/epoch - 2ms/step\n", + "Epoch 54/250\n", + "14/14 - 0s - loss: 0.3442 - accuracy: 0.8522 - 20ms/epoch - 1ms/step\n", + "Epoch 55/250\n", + "14/14 - 0s - loss: 0.3816 - accuracy: 0.8284 - 20ms/epoch - 1ms/step\n", + "Epoch 56/250\n", + "14/14 - 0s - loss: 0.3588 - accuracy: 0.8448 - 20ms/epoch - 1ms/step\n", + "Epoch 57/250\n", + "14/14 - 0s - loss: 0.3705 - accuracy: 0.8388 - 20ms/epoch - 1ms/step\n", + "Epoch 58/250\n", + "14/14 - 0s - loss: 0.3454 - accuracy: 0.8552 - 22ms/epoch - 2ms/step\n", + "Epoch 59/250\n", + "14/14 - 0s - loss: 0.3631 - accuracy: 0.8403 - 20ms/epoch - 1ms/step\n", + "Epoch 60/250\n", + "14/14 - 0s - loss: 0.3734 - accuracy: 0.8328 - 24ms/epoch - 2ms/step\n", + "Epoch 61/250\n", + "14/14 - 0s - loss: 0.3407 - accuracy: 0.8597 - 22ms/epoch - 2ms/step\n", + "Epoch 62/250\n", + "14/14 - 0s - loss: 0.3469 - accuracy: 0.8433 - 22ms/epoch - 2ms/step\n", + "Epoch 63/250\n", + "14/14 - 0s - loss: 0.3354 - accuracy: 0.8478 - 19ms/epoch - 1ms/step\n", + "Epoch 64/250\n", + "14/14 - 0s - loss: 0.3266 - accuracy: 0.8597 - 20ms/epoch - 1ms/step\n", + "Epoch 65/250\n", + "14/14 - 0s - loss: 0.3254 - accuracy: 0.8493 - 19ms/epoch - 1ms/step\n", + "Epoch 66/250\n", + "14/14 - 0s - loss: 0.3268 - accuracy: 0.8627 - 20ms/epoch - 1ms/step\n", + "Epoch 67/250\n", + "14/14 - 0s - loss: 0.3342 - accuracy: 0.8567 - 19ms/epoch - 1ms/step\n", + "Epoch 68/250\n", + "14/14 - 0s - loss: 0.3483 - accuracy: 0.8493 - 20ms/epoch - 1ms/step\n", + "Epoch 69/250\n", + "14/14 - 0s - loss: 0.3133 - accuracy: 0.8552 - 21ms/epoch - 2ms/step\n", + "Epoch 70/250\n", + "14/14 - 0s - loss: 0.3337 - accuracy: 0.8522 - 19ms/epoch - 1ms/step\n", + "Epoch 71/250\n", + "14/14 - 0s - loss: 0.2964 - accuracy: 0.8701 - 22ms/epoch - 2ms/step\n", + "Epoch 72/250\n", + "14/14 - 0s - loss: 0.3099 - accuracy: 0.8657 - 19ms/epoch - 1ms/step\n", + "Epoch 73/250\n", + "14/14 - 0s - loss: 0.3419 - accuracy: 0.8507 - 23ms/epoch - 2ms/step\n", + "Epoch 74/250\n", + "14/14 - 0s - loss: 0.3807 - accuracy: 0.8313 - 20ms/epoch - 1ms/step\n", + "Epoch 75/250\n", + "14/14 - 0s - loss: 0.3279 - accuracy: 0.8597 - 22ms/epoch - 2ms/step\n", + "Epoch 76/250\n", + "14/14 - 0s - loss: 0.3305 - accuracy: 0.8478 - 20ms/epoch - 1ms/step\n", + "Epoch 77/250\n", + "14/14 - 0s - loss: 0.3339 - accuracy: 0.8522 - 20ms/epoch - 1ms/step\n", + "Epoch 78/250\n", + "14/14 - 0s - loss: 0.2924 - accuracy: 0.8716 - 20ms/epoch - 1ms/step\n", + "Epoch 79/250\n", + "14/14 - 0s - loss: 0.3199 - accuracy: 0.8701 - 19ms/epoch - 1ms/step\n", + "Epoch 80/250\n", + "14/14 - 0s - loss: 0.3354 - accuracy: 0.8597 - 21ms/epoch - 1ms/step\n", + "Epoch 81/250\n", + "14/14 - 0s - loss: 0.3263 - accuracy: 0.8552 - 20ms/epoch - 1ms/step\n", + "Epoch 82/250\n", + "14/14 - 0s - loss: 0.2993 - accuracy: 0.8746 - 22ms/epoch - 2ms/step\n", + "Epoch 83/250\n", + "14/14 - 0s - loss: 0.2932 - accuracy: 0.8567 - 22ms/epoch - 2ms/step\n", + "Epoch 84/250\n", + "14/14 - 0s - loss: 0.3141 - accuracy: 0.8642 - 20ms/epoch - 1ms/step\n", + "Epoch 85/250\n", + "14/14 - 0s - loss: 0.3302 - accuracy: 0.8731 - 22ms/epoch - 2ms/step\n", + "Epoch 86/250\n", + "14/14 - 0s - loss: 0.3079 - accuracy: 0.8537 - 
19ms/epoch - 1ms/step\n", + "Epoch 87/250\n", + "14/14 - 0s - loss: 0.3195 - accuracy: 0.8433 - 20ms/epoch - 1ms/step\n", + "Epoch 88/250\n", + "14/14 - 0s - loss: 0.2844 - accuracy: 0.8866 - 19ms/epoch - 1ms/step\n", + "Epoch 89/250\n", + "14/14 - 0s - loss: 0.3153 - accuracy: 0.8522 - 21ms/epoch - 2ms/step\n", + "Epoch 90/250\n", + "14/14 - 0s - loss: 0.3029 - accuracy: 0.8567 - 19ms/epoch - 1ms/step\n", + "Epoch 91/250\n", + "14/14 - 0s - loss: 0.3159 - accuracy: 0.8612 - 20ms/epoch - 1ms/step\n", + "Epoch 92/250\n", + "14/14 - 0s - loss: 0.3009 - accuracy: 0.8716 - 21ms/epoch - 2ms/step\n", + "Epoch 93/250\n", + "14/14 - 0s - loss: 0.2895 - accuracy: 0.8806 - 19ms/epoch - 1ms/step\n", + "Epoch 94/250\n", + "14/14 - 0s - loss: 0.2700 - accuracy: 0.8836 - 20ms/epoch - 1ms/step\n", + "Epoch 95/250\n", + "14/14 - 0s - loss: 0.2970 - accuracy: 0.8731 - 20ms/epoch - 1ms/step\n", + "Epoch 96/250\n", + "14/14 - 0s - loss: 0.3116 - accuracy: 0.8612 - 19ms/epoch - 1ms/step\n", + "Epoch 97/250\n", + "14/14 - 0s - loss: 0.3074 - accuracy: 0.8627 - 25ms/epoch - 2ms/step\n", + "Epoch 98/250\n", + "14/14 - 0s - loss: 0.3179 - accuracy: 0.8687 - 21ms/epoch - 2ms/step\n", + "Epoch 99/250\n", + "14/14 - 0s - loss: 0.3277 - accuracy: 0.8478 - 21ms/epoch - 2ms/step\n", + "Epoch 100/250\n", + "14/14 - 0s - loss: 0.2717 - accuracy: 0.8821 - 19ms/epoch - 1ms/step\n", + "Epoch 101/250\n", + "14/14 - 0s - loss: 0.2808 - accuracy: 0.8866 - 21ms/epoch - 2ms/step\n", + "Epoch 102/250\n", + "14/14 - 0s - loss: 0.2589 - accuracy: 0.8985 - 20ms/epoch - 1ms/step\n", + "Epoch 103/250\n", + "14/14 - 0s - loss: 0.2559 - accuracy: 0.8881 - 21ms/epoch - 2ms/step\n", + "Epoch 104/250\n", + "14/14 - 0s - loss: 0.2732 - accuracy: 0.8791 - 21ms/epoch - 2ms/step\n", + "Epoch 105/250\n", + "14/14 - 0s - loss: 0.2667 - accuracy: 0.8716 - 19ms/epoch - 1ms/step\n", + "Epoch 106/250\n", + "14/14 - 0s - loss: 0.2945 - accuracy: 0.8776 - 22ms/epoch - 2ms/step\n", + "Epoch 107/250\n", + "14/14 - 0s - loss: 0.3030 - accuracy: 0.8627 - 22ms/epoch - 2ms/step\n", + "Epoch 108/250\n", + "14/14 - 0s - loss: 0.3107 - accuracy: 0.8597 - 21ms/epoch - 2ms/step\n", + "Epoch 109/250\n", + "14/14 - 0s - loss: 0.3009 - accuracy: 0.8642 - 21ms/epoch - 2ms/step\n", + "Epoch 110/250\n", + "14/14 - 0s - loss: 0.2709 - accuracy: 0.8776 - 19ms/epoch - 1ms/step\n", + "Epoch 111/250\n", + "14/14 - 0s - loss: 0.2728 - accuracy: 0.8701 - 21ms/epoch - 1ms/step\n", + "Epoch 112/250\n", + "14/14 - 0s - loss: 0.2963 - accuracy: 0.8791 - 20ms/epoch - 1ms/step\n", + "Epoch 113/250\n", + "14/14 - 0s - loss: 0.2487 - accuracy: 0.8925 - 19ms/epoch - 1ms/step\n", + "Epoch 114/250\n", + "14/14 - 0s - loss: 0.2723 - accuracy: 0.8910 - 21ms/epoch - 2ms/step\n", + "Epoch 115/250\n", + "14/14 - 0s - loss: 0.2407 - accuracy: 0.8970 - 21ms/epoch - 1ms/step\n", + "Epoch 116/250\n", + "14/14 - 0s - loss: 0.2547 - accuracy: 0.8896 - 23ms/epoch - 2ms/step\n", + "Epoch 117/250\n", + "14/14 - 0s - loss: 0.2427 - accuracy: 0.8970 - 21ms/epoch - 2ms/step\n", + "Epoch 118/250\n", + "14/14 - 0s - loss: 0.2675 - accuracy: 0.8791 - 23ms/epoch - 2ms/step\n", + "Epoch 119/250\n", + "14/14 - 0s - loss: 0.2886 - accuracy: 0.8642 - 22ms/epoch - 2ms/step\n", + "Epoch 120/250\n", + "14/14 - 0s - loss: 0.2583 - accuracy: 0.8836 - 22ms/epoch - 2ms/step\n", + "Epoch 121/250\n", + "14/14 - 0s - loss: 0.2206 - accuracy: 0.9030 - 22ms/epoch - 2ms/step\n", + "Epoch 122/250\n", + "14/14 - 0s - loss: 0.2664 - accuracy: 0.8866 - 23ms/epoch - 2ms/step\n", + "Epoch 123/250\n", + "14/14 - 0s - loss: 0.2639 
- accuracy: 0.8940 - 21ms/epoch - 1ms/step\n", + "Epoch 124/250\n", + "14/14 - 0s - loss: 0.2741 - accuracy: 0.8761 - 22ms/epoch - 2ms/step\n", + "Epoch 125/250\n", + "14/14 - 0s - loss: 0.2592 - accuracy: 0.8612 - 24ms/epoch - 2ms/step\n", + "Epoch 126/250\n", + "14/14 - 0s - loss: 0.2520 - accuracy: 0.8910 - 27ms/epoch - 2ms/step\n", + "Epoch 127/250\n", + "14/14 - 0s - loss: 0.2562 - accuracy: 0.8925 - 22ms/epoch - 2ms/step\n", + "Epoch 128/250\n", + "14/14 - 0s - loss: 0.2374 - accuracy: 0.9000 - 21ms/epoch - 2ms/step\n", + "Epoch 129/250\n", + "14/14 - 0s - loss: 0.2262 - accuracy: 0.8925 - 23ms/epoch - 2ms/step\n", + "Epoch 130/250\n", + "14/14 - 0s - loss: 0.2316 - accuracy: 0.8940 - 20ms/epoch - 1ms/step\n", + "Epoch 131/250\n", + "14/14 - 0s - loss: 0.2599 - accuracy: 0.9000 - 22ms/epoch - 2ms/step\n", + "Epoch 132/250\n", + "14/14 - 0s - loss: 0.2716 - accuracy: 0.8761 - 21ms/epoch - 2ms/step\n", + "Epoch 133/250\n", + "14/14 - 0s - loss: 0.2366 - accuracy: 0.8970 - 22ms/epoch - 2ms/step\n", + "Epoch 134/250\n", + "14/14 - 0s - loss: 0.2430 - accuracy: 0.8955 - 21ms/epoch - 2ms/step\n", + "Epoch 135/250\n", + "14/14 - 0s - loss: 0.2595 - accuracy: 0.8851 - 23ms/epoch - 2ms/step\n", + "Epoch 136/250\n", + "14/14 - 0s - loss: 0.2550 - accuracy: 0.8866 - 20ms/epoch - 1ms/step\n", + "Epoch 137/250\n", + "14/14 - 0s - loss: 0.2173 - accuracy: 0.9060 - 21ms/epoch - 2ms/step\n", + "Epoch 138/250\n", + "14/14 - 0s - loss: 0.2541 - accuracy: 0.8970 - 20ms/epoch - 1ms/step\n", + "Epoch 139/250\n", + "14/14 - 0s - loss: 0.2166 - accuracy: 0.9075 - 20ms/epoch - 1ms/step\n", + "Epoch 140/250\n", + "14/14 - 0s - loss: 0.2228 - accuracy: 0.9119 - 21ms/epoch - 1ms/step\n", + "Epoch 141/250\n", + "14/14 - 0s - loss: 0.2398 - accuracy: 0.8925 - 21ms/epoch - 2ms/step\n", + "Epoch 142/250\n", + "14/14 - 0s - loss: 0.2243 - accuracy: 0.9119 - 20ms/epoch - 1ms/step\n", + "Epoch 143/250\n", + "14/14 - 0s - loss: 0.2299 - accuracy: 0.8985 - 41ms/epoch - 3ms/step\n", + "Epoch 144/250\n", + "14/14 - 0s - loss: 0.2193 - accuracy: 0.8985 - 21ms/epoch - 2ms/step\n", + "Epoch 145/250\n", + "14/14 - 0s - loss: 0.2267 - accuracy: 0.9015 - 21ms/epoch - 2ms/step\n", + "Epoch 146/250\n", + "14/14 - 0s - loss: 0.2395 - accuracy: 0.8836 - 22ms/epoch - 2ms/step\n", + "Epoch 147/250\n", + "14/14 - 0s - loss: 0.2475 - accuracy: 0.8925 - 19ms/epoch - 1ms/step\n", + "Epoch 148/250\n", + "14/14 - 0s - loss: 0.2380 - accuracy: 0.8910 - 22ms/epoch - 2ms/step\n", + "Epoch 149/250\n", + "14/14 - 0s - loss: 0.2099 - accuracy: 0.9134 - 20ms/epoch - 1ms/step\n", + "Epoch 150/250\n", + "14/14 - 0s - loss: 0.2107 - accuracy: 0.9179 - 24ms/epoch - 2ms/step\n", + "Epoch 151/250\n", + "14/14 - 0s - loss: 0.2291 - accuracy: 0.8970 - 22ms/epoch - 2ms/step\n", + "Epoch 152/250\n", + "14/14 - 0s - loss: 0.2278 - accuracy: 0.8970 - 24ms/epoch - 2ms/step\n", + "Epoch 153/250\n", + "14/14 - 0s - loss: 0.2192 - accuracy: 0.9104 - 19ms/epoch - 1ms/step\n", + "Epoch 154/250\n", + "14/14 - 0s - loss: 0.2436 - accuracy: 0.8821 - 21ms/epoch - 2ms/step\n", + "Epoch 155/250\n", + "14/14 - 0s - loss: 0.2332 - accuracy: 0.8985 - 20ms/epoch - 1ms/step\n", + "Epoch 156/250\n", + "14/14 - 0s - loss: 0.2147 - accuracy: 0.9045 - 23ms/epoch - 2ms/step\n", + "Epoch 157/250\n", + "14/14 - 0s - loss: 0.2227 - accuracy: 0.9134 - 19ms/epoch - 1ms/step\n", + "Epoch 158/250\n", + "14/14 - 0s - loss: 0.1983 - accuracy: 0.9075 - 21ms/epoch - 2ms/step\n", + "Epoch 159/250\n", + "14/14 - 0s - loss: 0.2154 - accuracy: 0.9104 - 21ms/epoch - 2ms/step\n", + "Epoch 
160/250\n", + "14/14 - 0s - loss: 0.2196 - accuracy: 0.9134 - 22ms/epoch - 2ms/step\n", + "Epoch 161/250\n", + "14/14 - 0s - loss: 0.2232 - accuracy: 0.9015 - 20ms/epoch - 1ms/step\n", + "Epoch 162/250\n", + "14/14 - 0s - loss: 0.2502 - accuracy: 0.9060 - 21ms/epoch - 2ms/step\n", + "Epoch 163/250\n", + "14/14 - 0s - loss: 0.1988 - accuracy: 0.9239 - 21ms/epoch - 2ms/step\n", + "Epoch 164/250\n", + "14/14 - 0s - loss: 0.1978 - accuracy: 0.9254 - 22ms/epoch - 2ms/step\n", + "Epoch 165/250\n", + "14/14 - 0s - loss: 0.2431 - accuracy: 0.8881 - 20ms/epoch - 1ms/step\n", + "Epoch 166/250\n", + "14/14 - 0s - loss: 0.2573 - accuracy: 0.8955 - 23ms/epoch - 2ms/step\n", + "Epoch 167/250\n", + "14/14 - 0s - loss: 0.2315 - accuracy: 0.8985 - 22ms/epoch - 2ms/step\n", + "Epoch 168/250\n", + "14/14 - 0s - loss: 0.1981 - accuracy: 0.9224 - 25ms/epoch - 2ms/step\n", + "Epoch 169/250\n", + "14/14 - 0s - loss: 0.1853 - accuracy: 0.9284 - 21ms/epoch - 1ms/step\n", + "Epoch 170/250\n", + "14/14 - 0s - loss: 0.1816 - accuracy: 0.9299 - 20ms/epoch - 1ms/step\n", + "Epoch 171/250\n", + "14/14 - 0s - loss: 0.2126 - accuracy: 0.8970 - 21ms/epoch - 2ms/step\n", + "Epoch 172/250\n", + "14/14 - 0s - loss: 0.2075 - accuracy: 0.9269 - 23ms/epoch - 2ms/step\n", + "Epoch 173/250\n", + "14/14 - 0s - loss: 0.2282 - accuracy: 0.9045 - 22ms/epoch - 2ms/step\n", + "Epoch 174/250\n", + "14/14 - 0s - loss: 0.2187 - accuracy: 0.9090 - 20ms/epoch - 1ms/step\n", + "Epoch 175/250\n", + "14/14 - 0s - loss: 0.1671 - accuracy: 0.9284 - 22ms/epoch - 2ms/step\n", + "Epoch 176/250\n", + "14/14 - 0s - loss: 0.1974 - accuracy: 0.9134 - 21ms/epoch - 2ms/step\n", + "Epoch 177/250\n", + "14/14 - 0s - loss: 0.2076 - accuracy: 0.9119 - 27ms/epoch - 2ms/step\n", + "Epoch 178/250\n", + "14/14 - 0s - loss: 0.2185 - accuracy: 0.9224 - 23ms/epoch - 2ms/step\n", + "Epoch 179/250\n", + "14/14 - 0s - loss: 0.2519 - accuracy: 0.8910 - 22ms/epoch - 2ms/step\n", + "Epoch 180/250\n", + "14/14 - 0s - loss: 0.1897 - accuracy: 0.9299 - 23ms/epoch - 2ms/step\n", + "Epoch 181/250\n", + "14/14 - 0s - loss: 0.1814 - accuracy: 0.9194 - 20ms/epoch - 1ms/step\n", + "Epoch 182/250\n", + "14/14 - 0s - loss: 0.1749 - accuracy: 0.9239 - 24ms/epoch - 2ms/step\n", + "Epoch 183/250\n", + "14/14 - 0s - loss: 0.1917 - accuracy: 0.9254 - 20ms/epoch - 1ms/step\n", + "Epoch 184/250\n", + "14/14 - 0s - loss: 0.1723 - accuracy: 0.9299 - 23ms/epoch - 2ms/step\n", + "Epoch 185/250\n", + "14/14 - 0s - loss: 0.1883 - accuracy: 0.9179 - 21ms/epoch - 1ms/step\n", + "Epoch 186/250\n", + "14/14 - 0s - loss: 0.1806 - accuracy: 0.9239 - 21ms/epoch - 2ms/step\n", + "Epoch 187/250\n", + "14/14 - 0s - loss: 0.1754 - accuracy: 0.9358 - 23ms/epoch - 2ms/step\n", + "Epoch 188/250\n", + "14/14 - 0s - loss: 0.2012 - accuracy: 0.9075 - 22ms/epoch - 2ms/step\n", + "Epoch 189/250\n", + "14/14 - 0s - loss: 0.1991 - accuracy: 0.9104 - 23ms/epoch - 2ms/step\n", + "Epoch 190/250\n", + "14/14 - 0s - loss: 0.2027 - accuracy: 0.9194 - 20ms/epoch - 1ms/step\n", + "Epoch 191/250\n", + "14/14 - 0s - loss: 0.1858 - accuracy: 0.9313 - 23ms/epoch - 2ms/step\n", + "Epoch 192/250\n", + "14/14 - 0s - loss: 0.1969 - accuracy: 0.9269 - 21ms/epoch - 2ms/step\n", + "Epoch 193/250\n", + "14/14 - 0s - loss: 0.1959 - accuracy: 0.9060 - 24ms/epoch - 2ms/step\n", + "Epoch 194/250\n", + "14/14 - 0s - loss: 0.1944 - accuracy: 0.9164 - 23ms/epoch - 2ms/step\n", + "Epoch 195/250\n", + "14/14 - 0s - loss: 0.1989 - accuracy: 0.9194 - 23ms/epoch - 2ms/step\n", + "Epoch 196/250\n", + "14/14 - 0s - loss: 0.1959 - accuracy: 0.9164 - 
22ms/epoch - 2ms/step\n", + "Epoch 197/250\n", + "14/14 - 0s - loss: 0.1723 - accuracy: 0.9284 - 22ms/epoch - 2ms/step\n", + "Epoch 198/250\n", + "14/14 - 0s - loss: 0.1846 - accuracy: 0.9284 - 22ms/epoch - 2ms/step\n", + "Epoch 199/250\n", + "14/14 - 0s - loss: 0.1663 - accuracy: 0.9328 - 21ms/epoch - 2ms/step\n", + "Epoch 200/250\n", + "14/14 - 0s - loss: 0.1854 - accuracy: 0.9179 - 20ms/epoch - 1ms/step\n", + "Epoch 201/250\n", + "14/14 - 0s - loss: 0.1975 - accuracy: 0.9164 - 23ms/epoch - 2ms/step\n", + "Epoch 202/250\n", + "14/14 - 0s - loss: 0.1686 - accuracy: 0.9299 - 21ms/epoch - 2ms/step\n", + "Epoch 203/250\n", + "14/14 - 0s - loss: 0.1624 - accuracy: 0.9418 - 22ms/epoch - 2ms/step\n", + "Epoch 204/250\n", + "14/14 - 0s - loss: 0.1783 - accuracy: 0.9299 - 21ms/epoch - 2ms/step\n", + "Epoch 205/250\n", + "14/14 - 0s - loss: 0.1961 - accuracy: 0.9254 - 22ms/epoch - 2ms/step\n", + "Epoch 206/250\n", + "14/14 - 0s - loss: 0.1822 - accuracy: 0.9209 - 21ms/epoch - 2ms/step\n", + "Epoch 207/250\n", + "14/14 - 0s - loss: 0.1645 - accuracy: 0.9373 - 24ms/epoch - 2ms/step\n", + "Epoch 208/250\n", + "14/14 - 0s - loss: 0.1777 - accuracy: 0.9254 - 22ms/epoch - 2ms/step\n", + "Epoch 209/250\n", + "14/14 - 0s - loss: 0.1726 - accuracy: 0.9284 - 23ms/epoch - 2ms/step\n", + "Epoch 210/250\n", + "14/14 - 0s - loss: 0.1719 - accuracy: 0.9418 - 21ms/epoch - 2ms/step\n", + "Epoch 211/250\n", + "14/14 - 0s - loss: 0.1633 - accuracy: 0.9269 - 21ms/epoch - 2ms/step\n", + "Epoch 212/250\n", + "14/14 - 0s - loss: 0.1627 - accuracy: 0.9358 - 20ms/epoch - 1ms/step\n", + "Epoch 213/250\n", + "14/14 - 0s - loss: 0.1562 - accuracy: 0.9328 - 21ms/epoch - 2ms/step\n", + "Epoch 214/250\n", + "14/14 - 0s - loss: 0.1275 - accuracy: 0.9537 - 20ms/epoch - 1ms/step\n", + "Epoch 215/250\n", + "14/14 - 0s - loss: 0.1543 - accuracy: 0.9463 - 21ms/epoch - 2ms/step\n", + "Epoch 216/250\n", + "14/14 - 0s - loss: 0.1697 - accuracy: 0.9343 - 20ms/epoch - 1ms/step\n", + "Epoch 217/250\n", + "14/14 - 0s - loss: 0.1765 - accuracy: 0.9164 - 21ms/epoch - 2ms/step\n", + "Epoch 218/250\n", + "14/14 - 0s - loss: 0.1681 - accuracy: 0.9209 - 20ms/epoch - 1ms/step\n", + "Epoch 219/250\n", + "14/14 - 0s - loss: 0.1681 - accuracy: 0.9328 - 21ms/epoch - 2ms/step\n", + "Epoch 220/250\n", + "14/14 - 0s - loss: 0.1628 - accuracy: 0.9328 - 20ms/epoch - 1ms/step\n", + "Epoch 221/250\n", + "14/14 - 0s - loss: 0.1882 - accuracy: 0.9194 - 22ms/epoch - 2ms/step\n", + "Epoch 222/250\n", + "14/14 - 0s - loss: 0.1642 - accuracy: 0.9418 - 21ms/epoch - 1ms/step\n", + "Epoch 223/250\n", + "14/14 - 0s - loss: 0.1663 - accuracy: 0.9269 - 22ms/epoch - 2ms/step\n", + "Epoch 224/250\n", + "14/14 - 0s - loss: 0.1573 - accuracy: 0.9448 - 21ms/epoch - 2ms/step\n", + "Epoch 225/250\n", + "14/14 - 0s - loss: 0.1600 - accuracy: 0.9269 - 22ms/epoch - 2ms/step\n", + "Epoch 226/250\n", + "14/14 - 0s - loss: 0.1411 - accuracy: 0.9507 - 21ms/epoch - 2ms/step\n", + "Epoch 227/250\n", + "14/14 - 0s - loss: 0.1574 - accuracy: 0.9418 - 23ms/epoch - 2ms/step\n", + "Epoch 228/250\n", + "14/14 - 0s - loss: 0.1611 - accuracy: 0.9343 - 20ms/epoch - 1ms/step\n", + "Epoch 229/250\n", + "14/14 - 0s - loss: 0.1524 - accuracy: 0.9448 - 24ms/epoch - 2ms/step\n", + "Epoch 230/250\n", + "14/14 - 0s - loss: 0.1526 - accuracy: 0.9463 - 20ms/epoch - 1ms/step\n", + "Epoch 231/250\n", + "14/14 - 0s - loss: 0.1485 - accuracy: 0.9448 - 23ms/epoch - 2ms/step\n", + "Epoch 232/250\n", + "14/14 - 0s - loss: 0.1450 - accuracy: 0.9328 - 21ms/epoch - 2ms/step\n", + "Epoch 233/250\n", + "14/14 - 0s - 
loss: 0.2029 - accuracy: 0.9179 - 21ms/epoch - 2ms/step\n", + "Epoch 234/250\n", + "14/14 - 0s - loss: 0.1536 - accuracy: 0.9358 - 20ms/epoch - 1ms/step\n", + "Epoch 235/250\n", + "14/14 - 0s - loss: 0.1602 - accuracy: 0.9343 - 22ms/epoch - 2ms/step\n", + "Epoch 236/250\n", + "14/14 - 0s - loss: 0.1748 - accuracy: 0.9313 - 21ms/epoch - 2ms/step\n", + "Epoch 237/250\n", + "14/14 - 0s - loss: 0.1307 - accuracy: 0.9567 - 21ms/epoch - 2ms/step\n", + "Epoch 238/250\n", + "14/14 - 0s - loss: 0.1372 - accuracy: 0.9507 - 22ms/epoch - 2ms/step\n", + "Epoch 239/250\n", + "14/14 - 0s - loss: 0.1442 - accuracy: 0.9522 - 28ms/epoch - 2ms/step\n", + "Epoch 240/250\n", + "14/14 - 0s - loss: 0.1649 - accuracy: 0.9239 - 22ms/epoch - 2ms/step\n", + "Epoch 241/250\n", + "14/14 - 0s - loss: 0.1413 - accuracy: 0.9433 - 22ms/epoch - 2ms/step\n", + "Epoch 242/250\n", + "14/14 - 0s - loss: 0.1590 - accuracy: 0.9373 - 23ms/epoch - 2ms/step\n", + "Epoch 243/250\n", + "14/14 - 0s - loss: 0.1301 - accuracy: 0.9463 - 22ms/epoch - 2ms/step\n", + "Epoch 244/250\n", + "14/14 - 0s - loss: 0.1301 - accuracy: 0.9463 - 24ms/epoch - 2ms/step\n", + "Epoch 245/250\n", + "14/14 - 0s - loss: 0.1387 - accuracy: 0.9552 - 22ms/epoch - 2ms/step\n", + "Epoch 246/250\n", + "14/14 - 0s - loss: 0.1114 - accuracy: 0.9567 - 21ms/epoch - 2ms/step\n", + "Epoch 247/250\n", + "14/14 - 0s - loss: 0.1484 - accuracy: 0.9299 - 24ms/epoch - 2ms/step\n", + "Epoch 248/250\n", + "14/14 - 0s - loss: 0.1608 - accuracy: 0.9418 - 22ms/epoch - 2ms/step\n", + "Epoch 249/250\n", + "14/14 - 0s - loss: 0.1392 - accuracy: 0.9522 - 32ms/epoch - 2ms/step\n", + "Epoch 250/250\n", + "14/14 - 0s - loss: 0.1199 - accuracy: 0.9418 - 24ms/epoch - 2ms/step\n", + "9/9 - 0s - loss: 0.1511 - accuracy: 0.9514 - 180ms/epoch - 20ms/step\n", + "Test accuracy: 0.9513888955116272\n" + ] + } + ], "source": [ - "# your code here" + "# Define the improved classification model\n", + "def classification_model(init='uniform', dropout_rate=0.2, learning_rate=0.001):\n", + " model = Sequential()\n", + " model.add(Dense(64, kernel_initializer=init, activation='relu', input_shape=(X_train.shape[1],)))\n", + " model.add(Dropout(dropout_rate))\n", + " model.add(Dense(50, kernel_initializer=init, activation='relu'))\n", + " model.add(Dense(64, kernel_initializer=init, activation='relu'))\n", + " model.add(Dropout(dropout_rate))\n", + " model.add(Dense(2, kernel_initializer=init, activation='softmax'))\n", + "\n", + " # Optimiser with a tunable learning rate\n", + " optimizer = Adam(learning_rate=learning_rate)\n", + "\n", + " model.compile(optimizer=optimizer, loss='sparse_categorical_crossentropy', metrics=['accuracy'])\n", + " return model\n", + "\n", + "# Build the model\n", + "model = classification_model()\n", + "\n", + "# Fit the model\n", + "model.fit(X_train, y_train, epochs=250, batch_size=50, verbose=2)\n", + "\n", + "# Evaluate the model on the test set\n", + "accuracy = model.evaluate(X_test, y_test, verbose=2)[1]\n", + "print(f'Test accuracy: {accuracy}')\n", + "\n", + "# Save the entire model\n", + "model.save('tic_tac_toe_improved.model.h5')\n" ] }, { @@ -119,12 +1027,15 @@ ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "# your answer here" + "- I noticed a clear increase in test accuracy (from about 0.90 to 0.95) after tuning the following parameters:\n", + " - Number of layers\n", + " - Dropout rate\n", + " - Number of neurons per layer\n", + " - Learning rate\n", + " - Batch size" ] } ], @@ -144,7 +1055,7 @@ "name": "python", "nbconvert_exporter": "python", 
"pygments_lexer": "ipython3", - "version": "3.7.3" + "version": "3.10.13" } }, "nbformat": 4, diff --git a/your-code/tic_tac_toe.model.h5 b/your-code/tic_tac_toe.model.h5 new file mode 100644 index 0000000..3fe049d Binary files /dev/null and b/your-code/tic_tac_toe.model.h5 differ diff --git a/your-code/tic_tac_toe_improved.model.h5 b/your-code/tic_tac_toe_improved.model.h5 new file mode 100644 index 0000000..c1e9fc0 Binary files /dev/null and b/your-code/tic_tac_toe_improved.model.h5 differ