{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "94226cca",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "<base64 PNG data omitted (800x800 stylized-output figure)>",
      "text/plain": [
       "<Figure size 800x800 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "import torch\n",
    "import torchvision.transforms as transforms\n",
    "from PIL import Image\n",
    "import matplotlib.pyplot as plt\n",
    "from model import TransformerNet  # Must match the architecture saved in the checkpoint\n",
    "\n",
    "# Path to the trained model checkpoint\n",
    "model_path = \"style_transformer.pth\"\n",
    "\n",
    "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
    "model = TransformerNet().to(device)\n",
    "model.load_state_dict(torch.load(model_path, map_location=device))\n",
    "model.eval()\n",
    "\n",
    "# Input and output image paths\n",
    "input_image_path = \"Testing3.jpg\"\n",
    "output_image_path = \"stylized_output3.jpg\"\n",
    "\n",
    "# Preprocessing: fixed 512x512 center crop, scaled to [0, 255] to match training\n",
    "transform = transforms.Compose([\n",
    "    transforms.Resize(512),\n",
    "    transforms.CenterCrop(512),\n",
    "    transforms.ToTensor(),  # [0, 1]\n",
    "    transforms.Lambda(lambda x: x.mul(255))  # scale to [0, 255]\n",
    "])\n",
    "\n",
    "image = Image.open(input_image_path).convert('RGB')\n",
    "image_tensor = transform(image).unsqueeze(0).to(device)\n",
    "\n",
    "with torch.no_grad():\n",
    "    output = model(image_tensor).clamp(0, 255)\n",
    "\n",
    "# Convert the output tensor back to a PIL image\n",
    "output_data = output.cpu().squeeze(0)\n",
    "output_image = transforms.ToPILImage()(output_data / 255.0)\n",
    "output_image.save(output_image_path)\n",
    "\n",
    "plt.figure(figsize=(8, 8))\n",
    "plt.imshow(output_image)\n",
    "plt.title(\"Stylized Output\")\n",
    "plt.axis(\"off\")\n",
    "plt.show()\n"
   ]
  },
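  {
   "cell_type": "markdown",
   "id": "a1f3c9e0",
   "metadata": {},
   "source": [
    "The cells in this notebook import `TransformerNet` from a local `model.py` that is not included here. As a reference, the next cell sketches the Johnson et al. (2016) fast style transfer architecture that such a file typically contains. The class name, layer widths, and block count below are assumptions, not the notebook's actual model: the real `model.py` must match the checkpoints (`style_transformer.pth`, `final_style_model.pth`) exactly for `load_state_dict` to succeed."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7d2e4f1",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Reference-only sketch of a fast style transfer network (Johnson et al., 2016).\n",
    "# This is an assumption about what model.py contains; use the real model.py,\n",
    "# whose layers must match the saved checkpoint, for actual inference.\n",
    "import torch.nn as nn\n",
    "\n",
    "class ConvLayer(nn.Module):\n",
    "    # Reflection-padded convolution, which avoids border artifacts\n",
    "    def __init__(self, in_ch, out_ch, kernel_size, stride):\n",
    "        super().__init__()\n",
    "        self.pad = nn.ReflectionPad2d(kernel_size // 2)\n",
    "        self.conv = nn.Conv2d(in_ch, out_ch, kernel_size, stride)\n",
    "\n",
    "    def forward(self, x):\n",
    "        return self.conv(self.pad(x))\n",
    "\n",
    "class ResidualBlock(nn.Module):\n",
    "    # Shape-preserving residual block with instance normalization\n",
    "    def __init__(self, ch):\n",
    "        super().__init__()\n",
    "        self.conv1 = ConvLayer(ch, ch, 3, 1)\n",
    "        self.in1 = nn.InstanceNorm2d(ch, affine=True)\n",
    "        self.conv2 = ConvLayer(ch, ch, 3, 1)\n",
    "        self.in2 = nn.InstanceNorm2d(ch, affine=True)\n",
    "        self.relu = nn.ReLU()\n",
    "\n",
    "    def forward(self, x):\n",
    "        out = self.relu(self.in1(self.conv1(x)))\n",
    "        return self.in2(self.conv2(out)) + x\n",
    "\n",
    "class TransformerNetSketch(nn.Module):\n",
    "    # Downsample -> residual blocks -> upsample; output stays on the [0, 255] scale\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "        self.model = nn.Sequential(\n",
    "            ConvLayer(3, 32, 9, 1), nn.InstanceNorm2d(32, affine=True), nn.ReLU(),\n",
    "            ConvLayer(32, 64, 3, 2), nn.InstanceNorm2d(64, affine=True), nn.ReLU(),\n",
    "            ConvLayer(64, 128, 3, 2), nn.InstanceNorm2d(128, affine=True), nn.ReLU(),\n",
    "            *[ResidualBlock(128) for _ in range(5)],\n",
    "            nn.Upsample(scale_factor=2, mode=\"nearest\"),\n",
    "            ConvLayer(128, 64, 3, 1), nn.InstanceNorm2d(64, affine=True), nn.ReLU(),\n",
    "            nn.Upsample(scale_factor=2, mode=\"nearest\"),\n",
    "            ConvLayer(64, 32, 3, 1), nn.InstanceNorm2d(32, affine=True), nn.ReLU(),\n",
    "            ConvLayer(32, 3, 9, 1)\n",
    "        )\n",
    "\n",
    "    def forward(self, x):\n",
    "        return self.model(x)\n"
   ]
  },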
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "02ec0219",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Styled image saved to styled_output_old.jpg\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "from torchvision import transforms\n",
    "from PIL import Image\n",
    "import matplotlib.pyplot as plt\n",
    "from model import TransformerNet\n",
    "\n",
    "# Load and preprocess the input image\n",
    "def load_image(image_path, size=512):\n",
    "    image = Image.open(image_path).convert('RGB')\n",
    "    transform = transforms.Compose([\n",
    "        transforms.Resize(size),\n",
    "        transforms.ToTensor(),\n",
    "        transforms.Lambda(lambda x: x.mul(255))  # Multiply by 255 to match training scale\n",
    "    ])\n",
    "    return transform(image).unsqueeze(0)  # Add batch dimension\n",
    "\n",
    "# Postprocess and save the output image\n",
    "def save_image(tensor, output_path):\n",
    "    tensor = tensor.clone().detach().cpu().squeeze(0)\n",
    "    image = tensor.clamp(0, 255).div(255)\n",
    "    image = transforms.ToPILImage()(image)\n",
    "    image.save(output_path)\n",
    "\n",
    "# Load the trained model\n",
    "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
    "model = TransformerNet().to(device)\n",
    "model.load_state_dict(torch.load(\"final_style_model.pth\", map_location=device))\n",
    "model.eval()\n",
    "\n",
    "# Process a new image\n",
    "input_image_path = \"Testing3.jpg\"       # Replace with your image\n",
    "output_image_path = \"styled_output_old.jpg\"   # Output path\n",
    "\n",
    "input_tensor = load_image(input_image_path).to(device)\n",
    "with torch.no_grad():\n",
    "    output_tensor = model(input_tensor)\n",
    "\n",
    "save_image(output_tensor, output_image_path)\n",
    "print(f\"Styled image saved to {output_image_path}\")\n"
   ]
  },
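  {
   "cell_type": "markdown",
   "id": "c8e1f2a3",
   "metadata": {},
   "source": [
    "As a quick sanity check, the cell below shows the input photo and the stylized result side by side. It reuses `input_image_path` and `output_image_path` from the previous cell, so run that cell first."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d9f0a1b2",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Compare the original photo with the stylized output side by side\n",
    "import matplotlib.pyplot as plt\n",
    "from PIL import Image\n",
    "\n",
    "fig, axes = plt.subplots(1, 2, figsize=(12, 6))\n",
    "for ax, (path, title) in zip(axes, [(input_image_path, \"Input\"),\n",
    "                                    (output_image_path, \"Stylized\")]):\n",
    "    ax.imshow(Image.open(path))\n",
    "    ax.set_title(title)\n",
    "    ax.axis(\"off\")\n",
    "plt.tight_layout()\n",
    "plt.show()\n"
   ]
  }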
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "cudagpu",
   "language": "python",
   "name": "cuda"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}