@@ -240,23 +240,629 @@
    },
    {
     "cell_type": "code",
-    "execution_count": 27,
+    "execution_count": 28,
     "metadata": {},
     "outputs": [
      {
-      "ename": "ValueError",
-      "evalue": "Found array with dim 4. DecisionTreeClassifier expected <= 2.",
-      "output_type": "error",
-      "traceback": [
+      "name": "stdout",
+      "output_type": "stream",
+      "text": [
"[[[[0.16862745 0.18823529 0.19215686]\n",
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
" [0.16862745 0.18823529 0.19215686]\n",
"\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)",
" [0.17254902 0.19215686 0.20392157]\n",
"Cell \u001b[1;32mIn[27], line 10\u001b[0m\n\u001b[0;32m 7\u001b[0m clf \u001b[38;5;241m=\u001b[39m tree\u001b[38;5;241m.\u001b[39mDecisionTreeClassifier()\n\u001b[0;32m 9\u001b[0m \u001b[38;5;66;03m# Train the model\u001b[39;00m\n\u001b[1;32m---> 10\u001b[0m clf \u001b[38;5;241m=\u001b[39m \u001b[43mclf\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfit\u001b[49m\u001b[43m(\u001b[49m\u001b[43mX_train\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43my_train\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 12\u001b[0m \u001b[38;5;66;03m# Predict on the test set\u001b[39;00m\n\u001b[0;32m 13\u001b[0m predictions \u001b[38;5;241m=\u001b[39m clf\u001b[38;5;241m.\u001b[39mpredict(X_test)\n",
" ...\n",
"File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\sklearn\\base.py:1351\u001b[0m, in \u001b[0;36m_fit_context.<locals>.decorator.<locals>.wrapper\u001b[1;34m(estimator, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1344\u001b[0m estimator\u001b[38;5;241m.\u001b[39m_validate_params()\n\u001b[0;32m 1346\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m config_context(\n\u001b[0;32m 1347\u001b[0m skip_parameter_validation\u001b[38;5;241m=\u001b[39m(\n\u001b[0;32m 1348\u001b[0m prefer_skip_nested_validation \u001b[38;5;129;01mor\u001b[39;00m global_skip_validation\n\u001b[0;32m 1349\u001b[0m )\n\u001b[0;32m 1350\u001b[0m ):\n\u001b[1;32m-> 1351\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfit_method\u001b[49m\u001b[43m(\u001b[49m\u001b[43mestimator\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
" [0.49411765 0.60392157 0.67058824]\n",
"File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\sklearn\\tree\\_classes.py:1009\u001b[0m, in \u001b[0;36mDecisionTreeClassifier.fit\u001b[1;34m(self, X, y, sample_weight, check_input)\u001b[0m\n\u001b[0;32m 978\u001b[0m \u001b[38;5;129m@_fit_context\u001b[39m(prefer_skip_nested_validation\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[0;32m 979\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mfit\u001b[39m(\u001b[38;5;28mself\u001b[39m, X, y, sample_weight\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m, check_input\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m):\n\u001b[0;32m 980\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Build a decision tree classifier from the training set (X, y).\u001b[39;00m\n\u001b[0;32m 981\u001b[0m \n\u001b[0;32m 982\u001b[0m \u001b[38;5;124;03m Parameters\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1006\u001b[0m \u001b[38;5;124;03m Fitted estimator.\u001b[39;00m\n\u001b[0;32m 1007\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m-> 1009\u001b[0m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_fit\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 1010\u001b[0m \u001b[43m \u001b[49m\u001b[43mX\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1011\u001b[0m \u001b[43m \u001b[49m\u001b[43my\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1012\u001b[0m \u001b[43m \u001b[49m\u001b[43msample_weight\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43msample_weight\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1013\u001b[0m \u001b[43m \u001b[49m\u001b[43mcheck_input\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcheck_input\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1014\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1015\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\n",
" [0.49019608 0.6 0.66666667]\n",
"File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\sklearn\\tree\\_classes.py:252\u001b[0m, in \u001b[0;36mBaseDecisionTree._fit\u001b[1;34m(self, X, y, sample_weight, check_input, missing_values_in_feature_mask)\u001b[0m\n\u001b[0;32m 248\u001b[0m check_X_params \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mdict\u001b[39m(\n\u001b[0;32m 249\u001b[0m dtype\u001b[38;5;241m=\u001b[39mDTYPE, accept_sparse\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcsc\u001b[39m\u001b[38;5;124m\"\u001b[39m, force_all_finite\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m\n\u001b[0;32m 250\u001b[0m )\n\u001b[0;32m 251\u001b[0m check_y_params \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mdict\u001b[39m(ensure_2d\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m, dtype\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m)\n\u001b[1;32m--> 252\u001b[0m X, y \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_validate_data\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mX\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43my\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvalidate_separately\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mcheck_X_params\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcheck_y_params\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 256\u001b[0m missing_values_in_feature_mask \u001b[38;5;241m=\u001b[39m (\n\u001b[0;32m 257\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compute_missing_values_in_feature_mask(X)\n\u001b[0;32m 258\u001b[0m )\n\u001b[0;32m 259\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m issparse(X):\n",
" [0.49019608 0.6 0.66666667]]\n",
"File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\sklearn\\base.py:645\u001b[0m, in \u001b[0;36mBaseEstimator._validate_data\u001b[1;34m(self, X, y, reset, validate_separately, cast_to_ndarray, **check_params)\u001b[0m\n\u001b[0;32m 643\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mestimator\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;129;01min\u001b[39;00m check_X_params:\n\u001b[0;32m 644\u001b[0m check_X_params \u001b[38;5;241m=\u001b[39m {\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mdefault_check_params, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mcheck_X_params}\n\u001b[1;32m--> 645\u001b[0m X \u001b[38;5;241m=\u001b[39m \u001b[43mcheck_array\u001b[49m\u001b[43m(\u001b[49m\u001b[43mX\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minput_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mX\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mcheck_X_params\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 646\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mestimator\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;129;01min\u001b[39;00m check_y_params:\n\u001b[0;32m 647\u001b[0m check_y_params \u001b[38;5;241m=\u001b[39m {\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mdefault_check_params, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mcheck_y_params}\n",
"\n",
"File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\sklearn\\utils\\validation.py:997\u001b[0m, in \u001b[0;36mcheck_array\u001b[1;34m(array, accept_sparse, accept_large_sparse, dtype, order, copy, force_all_finite, ensure_2d, allow_nd, ensure_min_samples, ensure_min_features, estimator, input_name)\u001b[0m\n\u001b[0;32m 992\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[0;32m 993\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdtype=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mnumeric\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m is not compatible with arrays of bytes/strings.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 994\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mConvert your data to numeric values explicitly instead.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 995\u001b[0m )\n\u001b[0;32m 996\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m allow_nd \u001b[38;5;129;01mand\u001b[39;00m array\u001b[38;5;241m.\u001b[39mndim \u001b[38;5;241m>\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m3\u001b[39m:\n\u001b[1;32m--> 997\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[0;32m 998\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFound array with dim \u001b[39m\u001b[38;5;132;01m%d\u001b[39;00m\u001b[38;5;124m. \u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m expected <= 2.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 999\u001b[0m \u001b[38;5;241m%\u001b[39m (array\u001b[38;5;241m.\u001b[39mndim, estimator_name)\n\u001b[0;32m 1000\u001b[0m )\n\u001b[0;32m 1002\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m force_all_finite:\n\u001b[0;32m 1003\u001b[0m _assert_all_finite(\n\u001b[0;32m 1004\u001b[0m array,\n\u001b[0;32m 1005\u001b[0m input_name\u001b[38;5;241m=\u001b[39minput_name,\n\u001b[0;32m 1006\u001b[0m estimator_name\u001b[38;5;241m=\u001b[39mestimator_name,\n\u001b[0;32m 1007\u001b[0m allow_nan\u001b[38;5;241m=\u001b[39mforce_all_finite \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mallow-nan\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m 1008\u001b[0m )\n",
" [[0.16862745 0.18823529 0.19215686]\n",
"\u001b[1;31mValueError\u001b[0m: Found array with dim 4. DecisionTreeClassifier expected <= 2."
" [0.17254902 0.19215686 0.19607843]\n",
" [0.17647059 0.19607843 0.20784314]\n",
" ...\n",
" [0.49411765 0.60392157 0.67058824]\n",
" [0.49411765 0.60392157 0.67058824]\n",
" [0.49411765 0.60392157 0.67058824]]\n",
"\n",
" [[0.17254902 0.19215686 0.20392157]\n",
" [0.17254902 0.19215686 0.20392157]\n",
" [0.17647059 0.19607843 0.20784314]\n",
" ...\n",
" [0.50196078 0.60392157 0.67058824]\n",
" [0.49803922 0.6 0.66666667]\n",
" [0.49803922 0.6 0.66666667]]\n",
"\n",
" ...\n",
"\n",
" [[0.41568627 0.38823529 0.37647059]\n",
" [0.41568627 0.38823529 0.37647059]\n",
" [0.41568627 0.38823529 0.37647059]\n",
" ...\n",
" [0.58039216 0.63921569 0.65098039]\n",
" [0.61176471 0.6627451 0.69411765]\n",
" [0.6 0.6627451 0.69019608]]\n",
"\n",
" [[0.38431373 0.35686275 0.34509804]\n",
" [0.38039216 0.35294118 0.34117647]\n",
" [0.37647059 0.34901961 0.3372549 ]\n",
" ...\n",
" [0.58039216 0.63921569 0.65098039]\n",
" [0.60784314 0.65882353 0.69019608]\n",
" [0.6 0.65098039 0.68235294]]\n",
"\n",
" [[0.36078431 0.33333333 0.32156863]\n",
" [0.34117647 0.31372549 0.30196078]\n",
" [0.3254902 0.29803922 0.28627451]\n",
" ...\n",
" [0.59607843 0.64313725 0.65882353]\n",
" [0.58431373 0.63529412 0.66666667]\n",
" [0.58039216 0.63137255 0.6627451 ]]]\n",
"\n",
"\n",
" [[[0.85882353 0.7372549 0.60784314]\n",
" [0.8627451 0.74117647 0.61176471]\n",
" [0.8627451 0.74117647 0.61176471]\n",
" ...\n",
" [0.87058824 0.74509804 0.60784314]\n",
" [0.86666667 0.74117647 0.60392157]\n",
" [0.8627451 0.7372549 0.6 ]]\n",
"\n",
" [[0.85882353 0.7372549 0.60784314]\n",
" [0.86666667 0.74509804 0.61568627]\n",
" [0.86666667 0.74509804 0.61568627]\n",
" ...\n",
" [0.87058824 0.74509804 0.60784314]\n",
" [0.87058824 0.74509804 0.60784314]\n",
" [0.86666667 0.74117647 0.60392157]]\n",
"\n",
" [[0.85882353 0.7372549 0.60784314]\n",
" [0.86666667 0.74509804 0.61568627]\n",
" [0.86666667 0.74509804 0.61568627]\n",
" ...\n",
" [0.8627451 0.74117647 0.61176471]\n",
" [0.8627451 0.74117647 0.61176471]\n",
" [0.85882353 0.7372549 0.60784314]]\n",
"\n",
" ...\n",
"\n",
" [[0.8745098 0.74509804 0.61568627]\n",
" [0.87058824 0.74117647 0.61176471]\n",
" [0.88235294 0.74509804 0.61568627]\n",
" ...\n",
" [0.85882353 0.7372549 0.60784314]\n",
" [0.85882353 0.7372549 0.60784314]\n",
" [0.85882353 0.7372549 0.60784314]]\n",
"\n",
" [[0.88235294 0.74509804 0.61568627]\n",
" [0.87843137 0.74117647 0.61176471]\n",
" [0.89411765 0.74509804 0.61960784]\n",
" ...\n",
" [0.85490196 0.73333333 0.60392157]\n",
" [0.85490196 0.73333333 0.60392157]\n",
" [0.85490196 0.73333333 0.60392157]]\n",
"\n",
" [[0.8745098 0.7372549 0.60784314]\n",
" [0.8745098 0.7372549 0.60784314]\n",
" [0.89019608 0.74117647 0.61568627]\n",
" ...\n",
" [0.85490196 0.73333333 0.60392157]\n",
" [0.85490196 0.73333333 0.60392157]\n",
" [0.85490196 0.73333333 0.60392157]]]\n",
"\n",
"\n",
" [[[0.35686275 0.56862745 0.56862745]\n",
" [0.36078431 0.57254902 0.57254902]\n",
" [0.36862745 0.58039216 0.58039216]\n",
" ...\n",
" [0.11764706 0.33333333 0.2745098 ]\n",
" [0.11764706 0.32941176 0.27843137]\n",
" [0.12156863 0.33333333 0.28235294]]\n",
"\n",
" [[0.34509804 0.55686275 0.55686275]\n",
" [0.34901961 0.56078431 0.56078431]\n",
" [0.35686275 0.56862745 0.56862745]\n",
" ...\n",
" [0.11764706 0.33333333 0.2745098 ]\n",
" [0.12156863 0.33333333 0.28235294]\n",
" [0.12156863 0.33333333 0.28235294]]\n",
"\n",
" [[0.31764706 0.54117647 0.5372549 ]\n",
" [0.32156863 0.54509804 0.54117647]\n",
" [0.32941176 0.55294118 0.54901961]\n",
" ...\n",
" [0.11372549 0.3372549 0.27843137]\n",
" [0.11372549 0.33333333 0.28235294]\n",
" [0.11764706 0.3372549 0.28627451]]\n",
"\n",
" ...\n",
"\n",
" [[0.28627451 0.23921569 0.23921569]\n",
" [0.28627451 0.23921569 0.23921569]\n",
" [0.28627451 0.23921569 0.23921569]\n",
" ...\n",
" [0.31372549 0.39607843 0.52156863]\n",
" [0.31764706 0.4 0.5254902 ]\n",
" [0.31372549 0.39607843 0.52156863]]\n",
"\n",
" [[0.27843137 0.23921569 0.23921569]\n",
" [0.27843137 0.23921569 0.23921569]\n",
" [0.28235294 0.24313725 0.24313725]\n",
" ...\n",
" [0.22352941 0.30588235 0.43137255]\n",
" [0.23137255 0.31372549 0.43529412]\n",
" [0.23921569 0.32156863 0.44313725]]\n",
"\n",
" [[0.27843137 0.23921569 0.23921569]\n",
" [0.28235294 0.24313725 0.24313725]\n",
" [0.28627451 0.24705882 0.24705882]\n",
" ...\n",
" [0.23921569 0.32156863 0.44705882]\n",
" [0.2745098 0.35686275 0.47843137]\n",
" [0.31372549 0.39607843 0.51764706]]]\n",
"\n",
"\n",
" ...\n",
"\n",
"\n",
" [[[0.74509804 0.7372549 0.7372549 ]\n",
" [0.74901961 0.74117647 0.74117647]\n",
" [0.74509804 0.74509804 0.74509804]\n",
" ...\n",
" [0.69803922 0.69019608 0.69019608]\n",
" [0.69019608 0.68235294 0.68235294]\n",
" [0.68627451 0.67843137 0.67843137]]\n",
"\n",
" [[0.74901961 0.74117647 0.74117647]\n",
" [0.75294118 0.74509804 0.74509804]\n",
" [0.74901961 0.74901961 0.74901961]\n",
" ...\n",
" [0.69803922 0.69019608 0.69019608]\n",
" [0.69019608 0.68235294 0.68235294]\n",
" [0.68627451 0.67843137 0.67843137]]\n",
"\n",
" [[0.74901961 0.74117647 0.74117647]\n",
" [0.75294118 0.74509804 0.74509804]\n",
" [0.75686275 0.74901961 0.74901961]\n",
" ...\n",
" [0.69803922 0.69019608 0.69019608]\n",
" [0.69411765 0.68627451 0.68627451]\n",
" [0.69019608 0.68235294 0.68235294]]\n",
"\n",
" ...\n",
"\n",
" [[0.76078431 0.77254902 0.78823529]\n",
" [0.77647059 0.78823529 0.80392157]\n",
" [0.61568627 0.62745098 0.64313725]\n",
" ...\n",
" [0.70196078 0.68235294 0.67843137]\n",
" [0.69803922 0.67843137 0.6745098 ]\n",
" [0.69411765 0.6745098 0.67058824]]\n",
"\n",
" [[0.78039216 0.79607843 0.81568627]\n",
" [0.75294118 0.76862745 0.78823529]\n",
" [0.62352941 0.63921569 0.65882353]\n",
" ...\n",
" [0.70196078 0.68235294 0.67843137]\n",
" [0.69803922 0.67843137 0.6745098 ]\n",
" [0.69411765 0.6745098 0.67058824]]\n",
"\n",
" [[0.73333333 0.74901961 0.76862745]\n",
" [0.71764706 0.73333333 0.75294118]\n",
" [0.70980392 0.7254902 0.74509804]\n",
" ...\n",
" [0.70196078 0.68235294 0.67843137]\n",
" [0.69803922 0.67843137 0.6745098 ]\n",
" [0.69411765 0.6745098 0.67058824]]]\n",
"\n",
"\n",
" [[[0.00784314 0.2627451 0.2 ]\n",
" [0.00784314 0.2627451 0.2 ]\n",
" [0.00784314 0.2627451 0.20392157]\n",
" ...\n",
" [0.0745098 0.75294118 0.63529412]\n",
" [0.07843137 0.75686275 0.63921569]\n",
" [0.07843137 0.75686275 0.63921569]]\n",
"\n",
" [[0.00392157 0.25882353 0.19607843]\n",
" [0.00784314 0.2627451 0.2 ]\n",
" [0.01176471 0.26666667 0.20784314]\n",
" ...\n",
" [0.07843137 0.75686275 0.63921569]\n",
" [0.08235294 0.76078431 0.64313725]\n",
" [0.08627451 0.76470588 0.64705882]]\n",
"\n",
" [[0.00392157 0.25882353 0.19607843]\n",
" [0.00784314 0.2627451 0.2 ]\n",
" [0.01176471 0.26666667 0.20784314]\n",
" ...\n",
" [0.08235294 0.76470588 0.64705882]\n",
" [0.09019608 0.77254902 0.65490196]\n",
" [0.09411765 0.77647059 0.65882353]]\n",
"\n",
" ...\n",
"\n",
" [[0.1372549 0.30980392 0.19607843]\n",
" [0.1254902 0.29803922 0.18431373]\n",
" [0.11372549 0.29411765 0.18039216]\n",
" ...\n",
" [0.35686275 0.94509804 0.85098039]\n",
" [0.34509804 0.94509804 0.84705882]\n",
" [0.34509804 0.94509804 0.84705882]]\n",
"\n",
" [[0.15686275 0.31764706 0.21176471]\n",
" [0.1372549 0.29803922 0.19215686]\n",
" [0.1254902 0.29411765 0.18823529]\n",
" ...\n",
" [0.4 0.97254902 0.8745098 ]\n",
" [0.37254902 0.97254902 0.8745098 ]\n",
" [0.35294118 0.95686275 0.85882353]]\n",
"\n",
" [[0.16862745 0.32156863 0.21960784]\n",
" [0.14509804 0.30588235 0.2 ]\n",
" [0.1372549 0.29803922 0.19215686]\n",
" ...\n",
" [0.41960784 0.99215686 0.89411765]\n",
" [0.38431373 0.97254902 0.87843137]\n",
" [0.36078431 0.96470588 0.86666667]]]\n",
"\n",
"\n",
" [[[0.41176471 0.40784314 0.39215686]\n",
" [0.41176471 0.40784314 0.39215686]\n",
" [0.41960784 0.41568627 0.4 ]\n",
" ...\n",
" [0.5254902 0.52156863 0.50588235]\n",
" [0.52156863 0.51764706 0.50196078]\n",
" [0.51372549 0.50980392 0.49411765]]\n",
"\n",
" [[0.40784314 0.40392157 0.38823529]\n",
" [0.41176471 0.40784314 0.39215686]\n",
" [0.41960784 0.41568627 0.4 ]\n",
" ...\n",
" [0.52156863 0.51764706 0.50196078]\n",
" [0.51764706 0.51372549 0.49803922]\n",
" [0.50980392 0.50588235 0.49019608]]\n",
"\n",
" [[0.40784314 0.40784314 0.38431373]\n",
" [0.41176471 0.41176471 0.38823529]\n",
" [0.42352941 0.41960784 0.40392157]\n",
" ...\n",
" [0.50980392 0.50588235 0.49019608]\n",
" [0.50588235 0.50588235 0.48235294]\n",
" [0.50196078 0.50196078 0.47843137]]\n",
"\n",
" ...\n",
"\n",
" [[0.88235294 0.84313725 0.84313725]\n",
" [0.90588235 0.86666667 0.86666667]\n",
" [0.90980392 0.87058824 0.87058824]\n",
" ...\n",
" [0.39607843 0.38039216 0.37647059]\n",
" [0.39215686 0.37254902 0.36078431]\n",
" [0.38431373 0.36470588 0.35294118]]\n",
"\n",
" [[0.8745098 0.83137255 0.83921569]\n",
" [0.89803922 0.85490196 0.8627451 ]\n",
" [0.90196078 0.85882353 0.86666667]\n",
" ...\n",
" [0.40392157 0.38823529 0.38431373]\n",
" [0.39607843 0.37647059 0.36470588]\n",
" [0.38823529 0.36862745 0.35686275]]\n",
"\n",
" [[0.8745098 0.83137255 0.83921569]\n",
" [0.89803922 0.85490196 0.8627451 ]\n",
" [0.89411765 0.85098039 0.85882353]\n",
" ...\n",
" [0.41568627 0.4 0.39607843]\n",
" [0.40392157 0.38431373 0.37254902]\n",
" [0.39215686 0.37254902 0.36078431]]]]\n",
"[[[[0.36078431 0.27843137 0.27058824]\n",
" [0.36470588 0.28235294 0.2745098 ]\n",
" [0.36862745 0.29019608 0.27058824]\n",
" ...\n",
" [0.26666667 0.20784314 0.22352941]\n",
" [0.26666667 0.21176471 0.21960784]\n",
" [0.2627451 0.20784314 0.21568627]]\n",
"\n",
" [[0.35686275 0.2745098 0.26666667]\n",
" [0.36078431 0.27843137 0.27058824]\n",
" [0.36470588 0.28627451 0.26666667]\n",
" ...\n",
" [0.26666667 0.20784314 0.22352941]\n",
" [0.25882353 0.20392157 0.21176471]\n",
" [0.25098039 0.19607843 0.20392157]]\n",
"\n",
" [[0.35294118 0.27058824 0.2627451 ]\n",
" [0.36078431 0.27843137 0.27058824]\n",
" [0.36470588 0.28235294 0.2745098 ]\n",
" ...\n",
" [0.25882353 0.20392157 0.21176471]\n",
" [0.25098039 0.19607843 0.20392157]\n",
" [0.24705882 0.19215686 0.2 ]]\n",
"\n",
" ...\n",
"\n",
" [[0.98823529 0.99607843 0.99607843]\n",
" [0.98823529 0.99607843 0.99607843]\n",
" [0.99607843 0.99607843 0.99607843]\n",
" ...\n",
" [0.61176471 0.6627451 0.71764706]\n",
" [0.61960784 0.67058824 0.73333333]\n",
" [0.62745098 0.67843137 0.73333333]]\n",
"\n",
" [[0.99215686 0.99215686 0.99215686]\n",
" [0.99215686 0.99215686 0.99215686]\n",
" [0.99607843 0.99607843 0.99607843]\n",
" ...\n",
" [0.69411765 0.74117647 0.78823529]\n",
" [0.70196078 0.74509804 0.8 ]\n",
" [0.69411765 0.74117647 0.78823529]]\n",
"\n",
" [[1. 1. 1. ]\n",
" [1. 1. 1. ]\n",
" [1. 1. 1. ]\n",
" ...\n",
" [0.71764706 0.76470588 0.81176471]\n",
" [0.7254902 0.76078431 0.81176471]\n",
" [0.71764706 0.75294118 0.80392157]]]\n",
"\n",
"\n",
" [[[0.40392157 0.45882353 0.45490196]\n",
" [0.36862745 0.42352941 0.41960784]\n",
" [0.34901961 0.41176471 0.40784314]\n",
" ...\n",
" [0.40392157 0.38431373 0.38039216]\n",
" [0.39607843 0.37647059 0.37254902]\n",
" [0.39215686 0.37254902 0.36862745]]\n",
"\n",
" [[0.2627451 0.3254902 0.32156863]\n",
" [0.31372549 0.37647059 0.37254902]\n",
" [0.37254902 0.44313725 0.43921569]\n",
" ...\n",
" [0.31372549 0.29411765 0.29019608]\n",
" [0.32941176 0.30980392 0.30588235]\n",
" [0.34901961 0.32941176 0.3254902 ]]\n",
"\n",
" [[0.17254902 0.2627451 0.24705882]\n",
" [0.20392157 0.29411765 0.27843137]\n",
" [0.24705882 0.3372549 0.32156863]\n",
" ...\n",
" [0.2627451 0.23921569 0.24313725]\n",
" [0.25490196 0.23137255 0.23529412]\n",
" [0.25490196 0.23137255 0.23529412]]\n",
"\n",
" ...\n",
"\n",
" [[0.39215686 0.32941176 0.30196078]\n",
" [0.35686275 0.30588235 0.2745098 ]\n",
" [0.34901961 0.29803922 0.26666667]\n",
" ...\n",
" [0.16470588 0.27843137 0.26666667]\n",
" [0.17647059 0.29019608 0.27843137]\n",
" [0.16470588 0.28627451 0.2745098 ]]\n",
"\n",
" [[0.36078431 0.30588235 0.28235294]\n",
" [0.35294118 0.30588235 0.28235294]\n",
" [0.34117647 0.29411765 0.27058824]\n",
" ...\n",
" [0.21568627 0.34117647 0.32156863]\n",
" [0.23921569 0.36470588 0.34509804]\n",
" [0.23137255 0.36470588 0.34117647]]\n",
"\n",
" [[0.37647059 0.32941176 0.30588235]\n",
" [0.36470588 0.31764706 0.29411765]\n",
" [0.38823529 0.34117647 0.31764706]\n",
" ...\n",
" [0.30980392 0.43529412 0.41568627]\n",
" [0.37647059 0.50980392 0.48627451]\n",
" [0.38431373 0.51764706 0.49411765]]]\n",
"\n",
"\n",
" [[[0.25882353 0.25882353 0.25882353]\n",
" [0.25490196 0.25490196 0.25490196]\n",
" [0.25490196 0.25490196 0.25490196]\n",
" ...\n",
" [0.30588235 0.28235294 0.28627451]\n",
" [0.31764706 0.29411765 0.29803922]\n",
" [0.3254902 0.30196078 0.30588235]]\n",
"\n",
" [[0.25882353 0.25882353 0.25882353]\n",
" [0.25490196 0.25490196 0.25490196]\n",
" [0.25490196 0.25490196 0.25490196]\n",
" ...\n",
" [0.30196078 0.27843137 0.28235294]\n",
" [0.30588235 0.28235294 0.28627451]\n",
" [0.30980392 0.28627451 0.29019608]]\n",
"\n",
" [[0.25882353 0.25490196 0.2627451 ]\n",
" [0.25490196 0.25098039 0.25882353]\n",
" [0.25490196 0.25490196 0.25490196]\n",
" ...\n",
" [0.29803922 0.2745098 0.27843137]\n",
" [0.29803922 0.2745098 0.27843137]\n",
" [0.29803922 0.2745098 0.27843137]]\n",
"\n",
" ...\n",
"\n",
" [[0.25490196 0.25882353 0.2745098 ]\n",
" [0.25490196 0.25882353 0.2745098 ]\n",
" [0.25490196 0.25882353 0.2745098 ]\n",
" ...\n",
" [0.58431373 0.62745098 0.70588235]\n",
" [0.49411765 0.5372549 0.62352941]\n",
" [0.42352941 0.46666667 0.55294118]]\n",
"\n",
" [[0.25490196 0.25882353 0.2745098 ]\n",
" [0.25490196 0.25882353 0.2745098 ]\n",
" [0.25490196 0.25882353 0.2745098 ]\n",
" ...\n",
" [0.34117647 0.39607843 0.46666667]\n",
" [0.28235294 0.3372549 0.41176471]\n",
" [0.31764706 0.37254902 0.44705882]]\n",
"\n",
" [[0.25490196 0.25882353 0.2745098 ]\n",
" [0.25490196 0.25882353 0.2745098 ]\n",
" [0.25490196 0.25882353 0.2745098 ]\n",
" ...\n",
" [0.4745098 0.52941176 0.6 ]\n",
" [0.51372549 0.56862745 0.64313725]\n",
" [0.49019608 0.54509804 0.61960784]]]\n",
"\n",
"\n",
" ...\n",
"\n",
"\n",
" [[[0.70588235 0.52941176 0.39607843]\n",
" [0.70196078 0.5254902 0.39215686]\n",
" [0.69803922 0.52156863 0.38823529]\n",
" ...\n",
" [0.68235294 0.50588235 0.37254902]\n",
" [0.67843137 0.50196078 0.36862745]\n",
" [0.6745098 0.49803922 0.36470588]]\n",
"\n",
" [[0.70196078 0.5254902 0.39215686]\n",
" [0.70196078 0.5254902 0.39215686]\n",
" [0.69803922 0.52156863 0.38823529]\n",
" ...\n",
" [0.68235294 0.50588235 0.37254902]\n",
" [0.67843137 0.50196078 0.36862745]\n",
" [0.67843137 0.50196078 0.36862745]]\n",
"\n",
" [[0.70196078 0.5254902 0.39215686]\n",
" [0.70196078 0.5254902 0.39215686]\n",
" [0.70196078 0.5254902 0.39215686]\n",
" ...\n",
" [0.67843137 0.50196078 0.36862745]\n",
" [0.67843137 0.50196078 0.36862745]\n",
" [0.67843137 0.50196078 0.36862745]]\n",
"\n",
" ...\n",
"\n",
" [[0.68627451 0.5254902 0.39607843]\n",
" [0.68627451 0.52941176 0.39215686]\n",
" [0.69411765 0.52941176 0.38431373]\n",
" ...\n",
" [0.6745098 0.50588235 0.37647059]\n",
" [0.6745098 0.50588235 0.37647059]\n",
" [0.6745098 0.50588235 0.37647059]]\n",
"\n",
" [[0.6745098 0.5254902 0.39215686]\n",
" [0.68627451 0.52941176 0.39215686]\n",
" [0.69411765 0.52941176 0.38431373]\n",
" ...\n",
" [0.67843137 0.50980392 0.38039216]\n",
" [0.67843137 0.50980392 0.38039216]\n",
" [0.67843137 0.50980392 0.38039216]]\n",
"\n",
" [[0.6745098 0.5254902 0.39215686]\n",
" [0.68235294 0.5254902 0.38823529]\n",
" [0.69411765 0.52941176 0.38431373]\n",
" ...\n",
" [0.67843137 0.50980392 0.38039216]\n",
" [0.67843137 0.50980392 0.38039216]\n",
" [0.6745098 0.50588235 0.37647059]]]\n",
"\n",
"\n",
" [[[0.38431373 0.36862745 0.36470588]\n",
" [0.38431373 0.36862745 0.36470588]\n",
" [0.38823529 0.37254902 0.36862745]\n",
" ...\n",
" [0.35294118 0.34509804 0.34117647]\n",
" [0.34901961 0.34117647 0.3372549 ]\n",
" [0.35294118 0.34509804 0.34117647]]\n",
"\n",
" [[0.38039216 0.36470588 0.36078431]\n",
" [0.38431373 0.36862745 0.36470588]\n",
" [0.38431373 0.36862745 0.36470588]\n",
" ...\n",
" [0.35294118 0.34509804 0.34117647]\n",
" [0.35294118 0.34509804 0.34117647]\n",
" [0.35294118 0.34509804 0.34117647]]\n",
"\n",
" [[0.38039216 0.36470588 0.36078431]\n",
" [0.38039216 0.36470588 0.36078431]\n",
" [0.38431373 0.36862745 0.36470588]\n",
" ...\n",
" [0.36078431 0.34509804 0.34117647]\n",
" [0.35294118 0.34509804 0.34117647]\n",
" [0.35294118 0.34509804 0.34117647]]\n",
"\n",
" ...\n",
"\n",
" [[0.0627451 0.19607843 0.24705882]\n",
" [0.09411765 0.22745098 0.27843137]\n",
" [0.09803922 0.23137255 0.28235294]\n",
" ...\n",
" [0.29803922 0.28235294 0.27843137]\n",
" [0.30196078 0.28627451 0.28235294]\n",
" [0.30196078 0.28627451 0.28235294]]\n",
"\n",
" [[0.16862745 0.30588235 0.34509804]\n",
" [0.21176471 0.34901961 0.38823529]\n",
" [0.2 0.3372549 0.38823529]\n",
" ...\n",
" [0.29803922 0.28235294 0.27843137]\n",
" [0.30196078 0.28627451 0.28235294]\n",
" [0.30196078 0.28627451 0.28235294]]\n",
"\n",
" [[0.14117647 0.27843137 0.31764706]\n",
" [0.16470588 0.30196078 0.34117647]\n",
" [0.21568627 0.35294118 0.40392157]\n",
" ...\n",
" [0.29803922 0.28235294 0.27843137]\n",
" [0.30196078 0.28627451 0.28235294]\n",
" [0.29803922 0.28235294 0.27843137]]]\n",
"\n",
"\n",
" [[[0.29411765 0.30588235 0.32156863]\n",
" [0.30980392 0.32156863 0.3372549 ]\n",
" [0.32156863 0.34117647 0.35294118]\n",
" ...\n",
" [0.24705882 0.25882353 0.2745098 ]\n",
" [0.24313725 0.25490196 0.27058824]\n",
" [0.23921569 0.25098039 0.26666667]]\n",
"\n",
" [[0.32156863 0.33333333 0.34901961]\n",
" [0.33333333 0.34509804 0.36078431]\n",
" [0.34509804 0.36470588 0.37647059]\n",
" ...\n",
" [0.28627451 0.29803922 0.31372549]\n",
" [0.27843137 0.29019608 0.30588235]\n",
" [0.2745098 0.28627451 0.30196078]]\n",
"\n",
" [[0.33333333 0.34509804 0.36078431]\n",
" [0.34901961 0.36078431 0.37647059]\n",
" [0.36470588 0.37647059 0.39215686]\n",
" ...\n",
" [0.32941176 0.34509804 0.36470588]\n",
" [0.31764706 0.33333333 0.35294118]\n",
" [0.31372549 0.32941176 0.34901961]]\n",
"\n",
" ...\n",
"\n",
" [[0.25098039 0.25490196 0.27058824]\n",
" [0.25882353 0.27058824 0.28627451]\n",
" [0.27843137 0.29411765 0.29803922]\n",
" ...\n",
" [0.19215686 0.18039216 0.18823529]\n",
" [0.18823529 0.17647059 0.18431373]\n",
" [0.18039216 0.16862745 0.17647059]]\n",
"\n",
" [[0.23137255 0.22745098 0.24313725]\n",
" [0.23529412 0.23921569 0.25490196]\n",
" [0.24313725 0.24705882 0.2627451 ]\n",
" ...\n",
" [0.19215686 0.18039216 0.18823529]\n",
" [0.18823529 0.17647059 0.18431373]\n",
" [0.18039216 0.16862745 0.17647059]]\n",
"\n",
" [[0.22745098 0.22352941 0.23921569]\n",
" [0.22745098 0.22352941 0.23921569]\n",
" [0.21568627 0.21960784 0.23529412]\n",
" ...\n",
" [0.18823529 0.17647059 0.18431373]\n",
" [0.18039216 0.16862745 0.17647059]\n",
" [0.17254902 0.16078431 0.16862745]]]]\n",
"[2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 2, 2, 1, 1, 2, 1, 1, 2, 1, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 1, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2, 1, 1, 2, 1, 1, 2, 2, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 1, 1, 2, 1, 1, 2, 1, 1, 2, 2, 1, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 1, 1, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 1, 2, 1, 2, 1, 1, 2, 1]\n",
"[1, 1, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 1, 1, 2, 2, 1, 2, 1, 2, 2, 1, 1, 1, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 1, 1, 2, 1, 2, 1, 1, 2, 1, 1, 2]\n"
]
]
}
}
],
],
@@ -265,6 +871,10 @@
"from sklearn.model_selection import train_test_split\n",
"from sklearn.model_selection import train_test_split\n",
"\n",
"\n",
"X_train, X_test, y_train, y_test = train_test_split(processed_images, labels, test_size=0.2, random_state=42)\n",
"X_train, X_test, y_train, y_test = train_test_split(processed_images, labels, test_size=0.2, random_state=42)\n",
"print(X_train)\n",
"print(X_test)\n",
"print(y_train)\n",
"print(y_test)\n",
"\n",
"\n",
"# Initialize the model\n",
"# Initialize the model\n",
"#clf = tree.DecisionTreeClassifier()\n",
"#clf = tree.DecisionTreeClassifier()\n",