import torch
import torchvision
import os
import cv2
import numpy as np
import xml.etree.ElementTree as ET
from torch.utils.data import Dataset, DataLoader
from torchvision.models.detection.faster_rcnn import FastRCNNPredictor
from torchvision.transforms import functional as F

# Train on GPU when one is available; everything is moved to this device later.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(f"Using device: {device}")

# Standard Pascal VOC directory layout: images in JPEGImages/, XML in Annotations/.
train_dir = "D:/New download/archive/VOC2012_train_val/VOC2012_train_val"
test_dir = "D:/New download/archive/VOC2012_test/VOC2012_test"
image_dir = os.path.join(train_dir, "JPEGImages")
annotation_dir = os.path.join(train_dir, "Annotations")
# Cap at 500 images to keep CPU training time manageable.
image_filenames = sorted(os.listdir(image_dir))[:500]


class VOCDataset(Dataset):
    """Pascal VOC 2012 detection dataset.

    Yields ``(image, target)`` pairs in the format expected by torchvision's
    detection models: ``image`` is a float CHW tensor in [0, 1]; ``target`` is
    a dict with ``boxes`` (float32, shape ``(n, 4)``, ``[xmin, ymin, xmax,
    ymax]``) and ``labels`` (int64, shape ``(n,)``).

    ``__getitem__`` returns ``None`` for samples that cannot be loaded
    (missing annotation or unreadable image); ``collate_fn`` filters these out.
    """

    def __init__(self, image_filenames):
        self.image_filenames = image_filenames
        # Index 0 is reserved for the background class, as required by
        # torchvision's Faster R-CNN head.
        self.classes = ['__background__', 'aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat',
                        'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant',
                        'sheep', 'sofa', 'train', 'tvmonitor']

    def __len__(self):
        return len(self.image_filenames)

    def __getitem__(self, idx):
        image_filename = self.image_filenames[idx]
        img_path = os.path.join(image_dir, image_filename)
        ann_path = os.path.join(annotation_dir, image_filename.replace(".jpg", ".xml"))

        # No annotation for this image: signal "skip" to collate_fn.
        if not os.path.exists(ann_path):
            return None

        img = cv2.imread(img_path)
        if img is None:
            # Unreadable/corrupt image file; skip instead of crashing in cvtColor.
            return None
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        img = F.to_tensor(img)

        tree = ET.parse(ann_path)
        root = tree.getroot()
        boxes, labels = [], []

        for obj in root.findall("object"):
            label = self.classes.index(obj.find("name").text)
            bbox = obj.find("bndbox")
            # Parse as float: some VOC annotations store fractional coordinates,
            # which int() would reject with a ValueError.
            xmin, ymin, xmax, ymax = [float(bbox.find(tag).text)
                                      for tag in ("xmin", "ymin", "xmax", "ymax")]
            boxes.append([xmin, ymin, xmax, ymax])
            labels.append(label)

        # An annotation with zero <object> entries must still produce a
        # (0, 4) box tensor; torch.tensor([]) would have shape (0,) and
        # crash the detection model's loss computation.
        if boxes:
            box_tensor = torch.tensor(boxes, dtype=torch.float32)
        else:
            box_tensor = torch.zeros((0, 4), dtype=torch.float32)

        target = {
            'boxes': box_tensor,
            'labels': torch.tensor(labels, dtype=torch.int64)
        }

        return img, target


def collate_fn(batch):
    """Drop failed (None) samples, then transpose into (images, targets)."""
    batch = [b for b in batch if b is not None]
    return tuple(zip(*batch))


dataset = VOCDataset(image_filenames)
# batch_size=1: detection images vary in size and CPU memory is limited.
dataloader = DataLoader(dataset, batch_size=1, shuffle=True, collate_fn=collate_fn)
`None` for 'weights' are deprecated since 0.13 and may be removed in the future. The current behavior is equivalent to passing `weights=FasterRCNN_ResNet50_FPN_Weights.COCO_V1`. You can also use `weights=FasterRCNN_ResNet50_FPN_Weights.DEFAULT` to get the most up-to-date weights.\n", + " warnings.warn(msg)\n" + ] + }, + { + "data": { + "text/plain": [ + "FasterRCNN(\n", + " (transform): GeneralizedRCNNTransform(\n", + " Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n", + " Resize(min_size=(800,), max_size=1333, mode='bilinear')\n", + " )\n", + " (backbone): BackboneWithFPN(\n", + " (body): IntermediateLayerGetter(\n", + " (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n", + " (bn1): FrozenBatchNorm2d(64, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n", + " (layer1): Sequential(\n", + " (0): Bottleneck(\n", + " (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(256, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " (downsample): Sequential(\n", + " (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (1): FrozenBatchNorm2d(256, eps=0.0)\n", + " )\n", + " )\n", + " (1): Bottleneck(\n", + " (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(256, eps=0.0)\n", + " 
(relu): ReLU(inplace=True)\n", + " )\n", + " (2): Bottleneck(\n", + " (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(256, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): Bottleneck(\n", + " (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(512, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " (downsample): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " (1): FrozenBatchNorm2d(512, eps=0.0)\n", + " )\n", + " )\n", + " (1): Bottleneck(\n", + " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(512, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (2): Bottleneck(\n", + " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), 
stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(512, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (3): Bottleneck(\n", + " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(512, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): Bottleneck(\n", + " (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " (downsample): Sequential(\n", + " (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " (1): FrozenBatchNorm2d(1024, eps=0.0)\n", + " )\n", + " )\n", + " (1): Bottleneck(\n", + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (2): Bottleneck(\n", + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + 
" (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (3): Bottleneck(\n", + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (4): Bottleneck(\n", + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (5): Bottleneck(\n", + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): Bottleneck(\n", + " (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv3): 
Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(2048, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " (downsample): Sequential(\n", + " (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " (1): FrozenBatchNorm2d(2048, eps=0.0)\n", + " )\n", + " )\n", + " (1): Bottleneck(\n", + " (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(2048, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (2): Bottleneck(\n", + " (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(2048, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " )\n", + " )\n", + " (fpn): FeaturePyramidNetwork(\n", + " (inner_blocks): ModuleList(\n", + " (0): Conv2dNormActivation(\n", + " (0): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " (1): Conv2dNormActivation(\n", + " (0): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " (2): Conv2dNormActivation(\n", + " (0): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " (3): Conv2dNormActivation(\n", + " (0): Conv2d(2048, 256, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " )\n", + " (layer_blocks): ModuleList(\n", + " (0-3): 4 x Conv2dNormActivation(\n", + " (0): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " )\n", + " 
# Load a Faster R-CNN with a ResNet-50-FPN backbone pre-trained on COCO and
# swap its box-predictor head so it classifies the 21 VOC classes
# (20 objects + background).
#
# `pretrained=True` is deprecated since torchvision 0.13 (it emitted the
# UserWarnings shown in this cell's stderr); `weights="COCO_V1"` is the
# documented exact equivalent.
model = torchvision.models.detection.fasterrcnn_resnet50_fpn(weights="COCO_V1")
num_classes = len(dataset.classes)
# Reuse the head's input width so the new predictor plugs into the same FC trunk.
in_features = model.roi_heads.box_predictor.cls_score.in_features
model.roi_heads.box_predictor = FastRCNNPredictor(in_features, num_classes)
model.to(device)
"Batch 11/500 - Loss: 0.3269\n", + "Batch 12/500 - Loss: 0.6221\n", + "Batch 13/500 - Loss: 2.1492\n", + "Batch 14/500 - Loss: 0.7095\n", + "Batch 15/500 - Loss: 1.5102\n", + "Batch 16/500 - Loss: 0.5892\n", + "Batch 17/500 - Loss: 0.6129\n", + "Batch 18/500 - Loss: 0.3469\n", + "Batch 19/500 - Loss: 0.9954\n", + "Batch 20/500 - Loss: 0.3992\n", + "Batch 21/500 - Loss: 0.3185\n", + "Batch 22/500 - Loss: 1.3941\n", + "Batch 23/500 - Loss: 0.4153\n", + "Batch 24/500 - Loss: 1.0522\n", + "Batch 25/500 - Loss: 0.5917\n", + "Batch 26/500 - Loss: 0.8474\n", + "Batch 27/500 - Loss: 0.3330\n", + "Batch 28/500 - Loss: 0.2826\n", + "Batch 29/500 - Loss: 0.2402\n", + "Batch 30/500 - Loss: 0.1679\n", + "Batch 31/500 - Loss: 0.5174\n", + "Batch 32/500 - Loss: 0.2082\n", + "Batch 33/500 - Loss: 0.4365\n", + "Batch 34/500 - Loss: 1.2866\n", + "Batch 35/500 - Loss: 0.8272\n", + "Batch 36/500 - Loss: 0.3018\n", + "Batch 37/500 - Loss: 0.5879\n", + "Batch 38/500 - Loss: 0.2441\n", + "Batch 39/500 - Loss: 0.4904\n", + "Batch 40/500 - Loss: 1.5337\n", + "Batch 41/500 - Loss: 0.8711\n", + "Batch 42/500 - Loss: 0.7538\n", + "Batch 43/500 - Loss: 0.5703\n", + "Batch 44/500 - Loss: 1.2006\n", + "Batch 45/500 - Loss: 1.0413\n", + "Batch 46/500 - Loss: 0.2920\n", + "Batch 47/500 - Loss: 0.6934\n", + "Batch 48/500 - Loss: 0.8857\n", + "Batch 49/500 - Loss: 0.4237\n", + "Batch 50/500 - Loss: 0.7677\n", + "Batch 51/500 - Loss: 0.1976\n", + "Batch 52/500 - Loss: 2.6390\n", + "Batch 53/500 - Loss: 0.6833\n", + "Batch 54/500 - Loss: 0.8689\n", + "Batch 55/500 - Loss: 0.8017\n", + "Batch 56/500 - Loss: 1.0395\n", + "Batch 57/500 - Loss: 0.3221\n", + "Batch 58/500 - Loss: 1.9084\n", + "Batch 59/500 - Loss: 0.3682\n", + "Batch 60/500 - Loss: 0.2807\n", + "Batch 61/500 - Loss: 0.3082\n", + "Batch 62/500 - Loss: 0.6070\n", + "Batch 63/500 - Loss: 0.2633\n", + "Batch 64/500 - Loss: 0.1550\n", + "Batch 65/500 - Loss: 0.2712\n", + "Batch 66/500 - Loss: 0.6891\n", + "Batch 67/500 - Loss: 0.9240\n", + 
"Batch 68/500 - Loss: 0.4534\n", + "Batch 69/500 - Loss: 0.6587\n", + "Batch 70/500 - Loss: 0.5797\n", + "Batch 71/500 - Loss: 0.9501\n", + "Batch 72/500 - Loss: 0.8499\n", + "Batch 73/500 - Loss: 0.1701\n", + "Batch 74/500 - Loss: 1.0978\n", + "Batch 75/500 - Loss: 0.6253\n", + "Batch 76/500 - Loss: 1.6765\n", + "Batch 77/500 - Loss: 0.8877\n", + "Batch 78/500 - Loss: 0.6830\n", + "Batch 79/500 - Loss: 0.7526\n", + "Batch 80/500 - Loss: 0.2644\n", + "Batch 81/500 - Loss: 0.3370\n", + "Batch 82/500 - Loss: 0.1948\n", + "Batch 83/500 - Loss: 0.2668\n", + "Batch 84/500 - Loss: 0.4193\n", + "Batch 85/500 - Loss: 1.3693\n", + "Batch 86/500 - Loss: 0.9207\n", + "Batch 87/500 - Loss: 0.4815\n", + "Batch 88/500 - Loss: 0.8637\n", + "Batch 89/500 - Loss: 0.9334\n", + "Batch 90/500 - Loss: 0.5400\n", + "Batch 91/500 - Loss: 0.6314\n", + "Batch 92/500 - Loss: 0.3984\n", + "Batch 93/500 - Loss: 0.6234\n", + "Batch 94/500 - Loss: 0.6751\n", + "Batch 95/500 - Loss: 0.7935\n", + "Batch 96/500 - Loss: 0.6191\n", + "Batch 97/500 - Loss: 0.4556\n", + "Batch 98/500 - Loss: 0.5977\n", + "Batch 99/500 - Loss: 0.5764\n", + "Batch 100/500 - Loss: 0.5509\n", + "Batch 101/500 - Loss: 0.2516\n", + "Batch 102/500 - Loss: 0.6729\n", + "Batch 103/500 - Loss: 0.1696\n", + "Batch 104/500 - Loss: 0.2384\n", + "Batch 105/500 - Loss: 0.3681\n", + "Batch 106/500 - Loss: 0.3141\n", + "Batch 107/500 - Loss: 0.6312\n", + "Batch 108/500 - Loss: 0.7516\n", + "Batch 109/500 - Loss: 0.8349\n", + "Batch 110/500 - Loss: 1.0557\n", + "Batch 111/500 - Loss: 0.9165\n", + "Batch 112/500 - Loss: 0.8278\n", + "Batch 113/500 - Loss: 0.3002\n", + "Batch 114/500 - Loss: 1.5806\n", + "Batch 115/500 - Loss: 0.7962\n", + "Batch 116/500 - Loss: 0.5657\n", + "Batch 117/500 - Loss: 1.7456\n", + "Batch 118/500 - Loss: 0.3074\n", + "Batch 119/500 - Loss: 0.2578\n", + "Batch 120/500 - Loss: 0.2387\n", + "Batch 121/500 - Loss: 0.3301\n", + "Batch 122/500 - Loss: 0.1720\n", + "Batch 123/500 - Loss: 1.2363\n", + "Batch 124/500 
- Loss: 0.3718\n", + "Batch 125/500 - Loss: 0.4984\n", + "Batch 126/500 - Loss: 0.4462\n", + "Batch 127/500 - Loss: 0.7160\n", + "Batch 128/500 - Loss: 0.3125\n", + "Batch 129/500 - Loss: 0.6423\n", + "Batch 130/500 - Loss: 0.6174\n", + "Batch 131/500 - Loss: 0.2465\n", + "Batch 132/500 - Loss: 0.4121\n", + "Batch 133/500 - Loss: 1.9752\n", + "Batch 134/500 - Loss: 0.3381\n", + "Batch 135/500 - Loss: 0.2036\n", + "Batch 136/500 - Loss: 0.2489\n", + "Batch 137/500 - Loss: 0.8738\n", + "Batch 138/500 - Loss: 0.2782\n", + "Batch 139/500 - Loss: 0.3066\n", + "Batch 140/500 - Loss: 0.7049\n", + "Batch 141/500 - Loss: 0.4803\n", + "Batch 142/500 - Loss: 0.3442\n", + "Batch 143/500 - Loss: 0.6142\n", + "Batch 144/500 - Loss: 0.6567\n", + "Batch 145/500 - Loss: 0.5247\n", + "Batch 146/500 - Loss: 0.2941\n", + "Batch 147/500 - Loss: 0.8641\n", + "Batch 148/500 - Loss: 0.4485\n", + "Batch 149/500 - Loss: 0.6086\n", + "Batch 150/500 - Loss: 0.4703\n", + "Batch 151/500 - Loss: 0.1780\n", + "Batch 152/500 - Loss: 0.9045\n", + "Batch 153/500 - Loss: 0.1167\n", + "Batch 154/500 - Loss: 0.9005\n", + "Batch 155/500 - Loss: 0.6240\n", + "Batch 156/500 - Loss: 0.1916\n", + "Batch 157/500 - Loss: 2.7273\n", + "Batch 158/500 - Loss: 0.1715\n", + "Batch 159/500 - Loss: 1.6193\n", + "Batch 160/500 - Loss: 0.2703\n", + "Batch 161/500 - Loss: 2.0520\n", + "Batch 162/500 - Loss: 0.5995\n", + "Batch 163/500 - Loss: 0.8738\n", + "Batch 164/500 - Loss: 0.6902\n", + "Batch 165/500 - Loss: 0.4275\n", + "Batch 166/500 - Loss: 0.5270\n", + "Batch 167/500 - Loss: 0.3911\n", + "Batch 168/500 - Loss: 1.8643\n", + "Batch 169/500 - Loss: 0.6867\n", + "Batch 170/500 - Loss: 0.1595\n", + "Batch 171/500 - Loss: 0.1068\n", + "Batch 172/500 - Loss: 1.6507\n", + "Batch 173/500 - Loss: 0.4307\n", + "Batch 174/500 - Loss: 0.3898\n", + "Batch 175/500 - Loss: 0.9914\n", + "Batch 176/500 - Loss: 0.9302\n", + "Batch 177/500 - Loss: 2.2621\n", + "Batch 178/500 - Loss: 0.1541\n", + "Batch 179/500 - Loss: 0.3138\n", 
+ "Batch 180/500 - Loss: 0.2854\n", + "Batch 181/500 - Loss: 0.0897\n", + "Batch 182/500 - Loss: 1.2044\n", + "Batch 183/500 - Loss: 0.1391\n", + "Batch 184/500 - Loss: 0.4137\n", + "Batch 185/500 - Loss: 0.6351\n", + "Batch 186/500 - Loss: 0.4183\n", + "Batch 187/500 - Loss: 0.2448\n", + "Batch 188/500 - Loss: 0.3175\n", + "Batch 189/500 - Loss: 0.3840\n", + "Batch 190/500 - Loss: 0.2342\n", + "Batch 191/500 - Loss: 0.1513\n", + "Batch 192/500 - Loss: 0.1828\n", + "Batch 193/500 - Loss: 1.8057\n", + "Batch 194/500 - Loss: 0.3202\n", + "Batch 195/500 - Loss: 0.1284\n", + "Batch 196/500 - Loss: 0.3384\n", + "Batch 197/500 - Loss: 0.7914\n", + "Batch 198/500 - Loss: 0.9407\n", + "Batch 199/500 - Loss: 0.2528\n", + "Batch 200/500 - Loss: 0.5091\n", + "Batch 201/500 - Loss: 0.6580\n", + "Batch 202/500 - Loss: 0.2623\n", + "Batch 203/500 - Loss: 0.5384\n", + "Batch 204/500 - Loss: 0.7834\n", + "Batch 205/500 - Loss: 0.6068\n", + "Batch 206/500 - Loss: 0.8598\n", + "Batch 207/500 - Loss: 0.5871\n", + "Batch 208/500 - Loss: 0.2359\n", + "Batch 209/500 - Loss: 0.1900\n", + "Batch 210/500 - Loss: 0.7869\n", + "Batch 211/500 - Loss: 0.2649\n", + "Batch 212/500 - Loss: 0.5921\n", + "Batch 213/500 - Loss: 0.7403\n", + "Batch 214/500 - Loss: 0.5119\n", + "Batch 215/500 - Loss: 1.8799\n", + "Batch 216/500 - Loss: 1.1682\n", + "Batch 217/500 - Loss: 1.8595\n", + "Batch 218/500 - Loss: 0.2855\n", + "Batch 219/500 - Loss: 0.4953\n", + "Batch 220/500 - Loss: 0.3601\n", + "Batch 221/500 - Loss: 0.5627\n", + "Batch 222/500 - Loss: 0.4185\n", + "Batch 223/500 - Loss: 0.5284\n", + "Batch 224/500 - Loss: 0.4938\n", + "Batch 225/500 - Loss: 0.4968\n", + "Batch 226/500 - Loss: 0.2968\n", + "Batch 227/500 - Loss: 0.2941\n", + "Batch 228/500 - Loss: 0.2529\n", + "Batch 229/500 - Loss: 0.2360\n", + "Batch 230/500 - Loss: 0.2240\n", + "Batch 231/500 - Loss: 0.6112\n", + "Batch 232/500 - Loss: 0.5552\n", + "Batch 233/500 - Loss: 0.2331\n", + "Batch 234/500 - Loss: 2.1481\n", + "Batch 235/500 - 
Loss: 0.6190\n", + "Batch 236/500 - Loss: 0.3855\n", + "Batch 237/500 - Loss: 0.5856\n", + "Batch 238/500 - Loss: 0.3190\n", + "Batch 239/500 - Loss: 0.5297\n", + "Batch 240/500 - Loss: 0.5273\n", + "Batch 241/500 - Loss: 0.1791\n", + "Batch 242/500 - Loss: 0.3907\n", + "Batch 243/500 - Loss: 0.4155\n", + "Batch 244/500 - Loss: 0.1832\n", + "Batch 245/500 - Loss: 0.1831\n", + "Batch 246/500 - Loss: 1.3184\n", + "Batch 247/500 - Loss: 0.1947\n", + "Batch 248/500 - Loss: 0.7222\n", + "Batch 249/500 - Loss: 0.6549\n", + "Batch 250/500 - Loss: 0.2365\n", + "Batch 251/500 - Loss: 0.5596\n", + "Batch 252/500 - Loss: 0.5402\n", + "Batch 253/500 - Loss: 0.2187\n", + "Batch 254/500 - Loss: 0.1813\n", + "Batch 255/500 - Loss: 0.6110\n", + "Batch 256/500 - Loss: 0.2345\n", + "Batch 257/500 - Loss: 1.6255\n", + "Batch 258/500 - Loss: 0.7595\n", + "Batch 259/500 - Loss: 2.4876\n", + "Batch 260/500 - Loss: 0.2579\n", + "Batch 261/500 - Loss: 0.1558\n", + "Batch 262/500 - Loss: 0.3492\n", + "Batch 263/500 - Loss: 1.2011\n", + "Batch 264/500 - Loss: 0.3748\n", + "Batch 265/500 - Loss: 0.2467\n", + "Batch 266/500 - Loss: 0.7715\n", + "Batch 267/500 - Loss: 0.9416\n", + "Batch 268/500 - Loss: 0.2123\n", + "Batch 269/500 - Loss: 0.2730\n", + "Batch 270/500 - Loss: 1.3177\n", + "Batch 271/500 - Loss: 0.0847\n", + "Batch 272/500 - Loss: 1.8053\n", + "Batch 273/500 - Loss: 1.3845\n", + "Batch 274/500 - Loss: 0.5267\n", + "Batch 275/500 - Loss: 0.6248\n", + "Batch 276/500 - Loss: 0.3188\n", + "Batch 277/500 - Loss: 0.1963\n", + "Batch 278/500 - Loss: 0.4583\n", + "Batch 279/500 - Loss: 0.5953\n", + "Batch 280/500 - Loss: 0.7744\n", + "Batch 281/500 - Loss: 0.4058\n", + "Batch 282/500 - Loss: 0.5041\n", + "Batch 283/500 - Loss: 1.0200\n", + "Batch 284/500 - Loss: 1.9679\n", + "Batch 285/500 - Loss: 0.4445\n", + "Batch 286/500 - Loss: 0.2542\n", + "Batch 287/500 - Loss: 0.1870\n", + "Batch 288/500 - Loss: 1.5175\n", + "Batch 289/500 - Loss: 0.7345\n", + "Batch 290/500 - Loss: 0.3826\n", + 
"Batch 291/500 - Loss: 0.1648\n", + "Batch 292/500 - Loss: 0.3963\n", + "Batch 293/500 - Loss: 0.3635\n", + "Batch 294/500 - Loss: 0.5942\n", + "Batch 295/500 - Loss: 0.2166\n", + "Batch 296/500 - Loss: 0.9418\n", + "Batch 297/500 - Loss: 0.8144\n", + "Batch 298/500 - Loss: 0.5375\n", + "Batch 299/500 - Loss: 0.6001\n", + "Batch 300/500 - Loss: 0.1982\n", + "Batch 301/500 - Loss: 0.2087\n", + "Batch 302/500 - Loss: 0.1675\n", + "Batch 303/500 - Loss: 0.3808\n", + "Batch 304/500 - Loss: 0.2409\n", + "Batch 305/500 - Loss: 0.2085\n", + "Batch 306/500 - Loss: 0.7725\n", + "Batch 307/500 - Loss: 0.5090\n", + "Batch 308/500 - Loss: 0.1957\n", + "Batch 309/500 - Loss: 0.6968\n", + "Batch 310/500 - Loss: 1.2435\n", + "Batch 311/500 - Loss: 1.6647\n", + "Batch 312/500 - Loss: 0.2109\n", + "Batch 313/500 - Loss: 0.2919\n", + "Batch 314/500 - Loss: 0.1148\n", + "Batch 315/500 - Loss: 0.9620\n", + "Batch 316/500 - Loss: 0.5265\n", + "Batch 317/500 - Loss: 0.5539\n", + "Batch 318/500 - Loss: 2.4056\n", + "Batch 319/500 - Loss: 0.5253\n", + "Batch 320/500 - Loss: 0.6183\n", + "Batch 321/500 - Loss: 0.4152\n", + "Batch 322/500 - Loss: 0.2766\n", + "Batch 323/500 - Loss: 0.3290\n", + "Batch 324/500 - Loss: 0.4823\n", + "Batch 325/500 - Loss: 0.2259\n", + "Batch 326/500 - Loss: 0.9582\n", + "Batch 327/500 - Loss: 0.2612\n", + "Batch 328/500 - Loss: 0.4281\n", + "Batch 329/500 - Loss: 0.5298\n", + "Batch 330/500 - Loss: 0.3300\n", + "Batch 331/500 - Loss: 0.6668\n", + "Batch 332/500 - Loss: 0.3056\n", + "Batch 333/500 - Loss: 0.6846\n", + "Batch 334/500 - Loss: 0.9227\n", + "Batch 335/500 - Loss: 0.5817\n", + "Batch 336/500 - Loss: 0.4935\n", + "Batch 337/500 - Loss: 0.1254\n", + "Batch 338/500 - Loss: 0.2050\n", + "Batch 339/500 - Loss: 0.1077\n", + "Batch 340/500 - Loss: 0.1154\n", + "Batch 341/500 - Loss: 0.1983\n", + "Batch 342/500 - Loss: 0.1049\n", + "Batch 343/500 - Loss: 0.3334\n", + "Batch 344/500 - Loss: 0.3210\n", + "Batch 345/500 - Loss: 0.8136\n", + "Batch 346/500 - 
Loss: 0.4866\n", + "Batch 347/500 - Loss: 1.3154\n", + "Batch 348/500 - Loss: 0.2133\n", + "Batch 349/500 - Loss: 0.2372\n", + "Batch 350/500 - Loss: 0.4076\n", + "Batch 351/500 - Loss: 0.3110\n", + "Batch 352/500 - Loss: 0.5916\n", + "Batch 353/500 - Loss: 1.5421\n", + "Batch 354/500 - Loss: 0.9286\n", + "Batch 355/500 - Loss: 0.2944\n", + "Batch 356/500 - Loss: 0.2369\n", + "Batch 357/500 - Loss: 0.6404\n", + "Batch 358/500 - Loss: 0.3203\n", + "Batch 359/500 - Loss: 0.8721\n", + "Batch 360/500 - Loss: 0.3015\n", + "Batch 361/500 - Loss: 0.7397\n", + "Batch 362/500 - Loss: 1.3261\n", + "Batch 363/500 - Loss: 0.2004\n", + "Batch 364/500 - Loss: 0.4049\n", + "Batch 365/500 - Loss: 0.5928\n", + "Batch 366/500 - Loss: 0.6276\n", + "Batch 367/500 - Loss: 0.5116\n", + "Batch 368/500 - Loss: 0.0862\n", + "Batch 369/500 - Loss: 1.5380\n", + "Batch 370/500 - Loss: 0.5197\n", + "Batch 371/500 - Loss: 0.1965\n", + "Batch 372/500 - Loss: 0.7762\n", + "Batch 373/500 - Loss: 1.0113\n", + "Batch 374/500 - Loss: 0.6063\n", + "Batch 375/500 - Loss: 0.6052\n", + "Batch 376/500 - Loss: 0.4741\n", + "Batch 377/500 - Loss: 1.1790\n", + "Batch 378/500 - Loss: 0.5342\n", + "Batch 379/500 - Loss: 1.1831\n", + "Batch 380/500 - Loss: 0.3250\n", + "Batch 381/500 - Loss: 0.6995\n", + "Batch 382/500 - Loss: 1.1463\n", + "Batch 383/500 - Loss: 0.2642\n", + "Batch 384/500 - Loss: 0.5267\n", + "Batch 385/500 - Loss: 1.5219\n", + "Batch 386/500 - Loss: 0.5224\n", + "Batch 387/500 - Loss: 1.1842\n", + "Batch 388/500 - Loss: 0.3608\n", + "Batch 389/500 - Loss: 0.8222\n", + "Batch 390/500 - Loss: 1.0832\n", + "Batch 391/500 - Loss: 0.4555\n", + "Batch 392/500 - Loss: 0.3809\n", + "Batch 393/500 - Loss: 1.5257\n", + "Batch 394/500 - Loss: 0.6969\n", + "Batch 395/500 - Loss: 1.1191\n", + "Batch 396/500 - Loss: 0.4091\n", + "Batch 397/500 - Loss: 1.3394\n", + "Batch 398/500 - Loss: 0.2630\n", + "Batch 399/500 - Loss: 0.2576\n", + "Batch 400/500 - Loss: 1.3914\n", + "Batch 401/500 - Loss: 0.1923\n", + 
"Batch 402/500 - Loss: 1.2705\n", + "Batch 403/500 - Loss: 0.8919\n", + "Batch 404/500 - Loss: 0.2056\n", + "Batch 405/500 - Loss: 0.2972\n", + "Batch 406/500 - Loss: 0.5182\n", + "Batch 407/500 - Loss: 0.2819\n", + "Batch 408/500 - Loss: 0.3828\n", + "Batch 409/500 - Loss: 0.8579\n", + "Batch 410/500 - Loss: 0.9494\n", + "Batch 411/500 - Loss: 0.1666\n", + "Batch 412/500 - Loss: 0.1899\n", + "Batch 413/500 - Loss: 0.2394\n", + "Batch 414/500 - Loss: 0.2098\n", + "Batch 415/500 - Loss: 0.4275\n", + "Batch 416/500 - Loss: 0.2192\n", + "Batch 417/500 - Loss: 1.2712\n", + "Batch 418/500 - Loss: 1.1556\n", + "Batch 419/500 - Loss: 0.6924\n", + "Batch 420/500 - Loss: 0.7337\n", + "Batch 421/500 - Loss: 0.2464\n", + "Batch 422/500 - Loss: 0.2104\n", + "Batch 423/500 - Loss: 1.0642\n", + "Batch 424/500 - Loss: 0.4655\n", + "Batch 425/500 - Loss: 0.2538\n", + "Batch 426/500 - Loss: 0.6754\n", + "Batch 427/500 - Loss: 0.7212\n", + "Batch 428/500 - Loss: 0.1722\n", + "Batch 429/500 - Loss: 0.3381\n", + "Batch 430/500 - Loss: 0.1992\n", + "Batch 431/500 - Loss: 0.7085\n", + "Batch 432/500 - Loss: 0.6762\n", + "Batch 433/500 - Loss: 0.2398\n", + "Batch 434/500 - Loss: 0.3704\n", + "Batch 435/500 - Loss: 0.1911\n", + "Batch 436/500 - Loss: 0.9771\n", + "Batch 437/500 - Loss: 0.1693\n", + "Batch 438/500 - Loss: 0.2286\n", + "Batch 439/500 - Loss: 0.2753\n", + "Batch 440/500 - Loss: 0.1574\n", + "Batch 441/500 - Loss: 0.3429\n", + "Batch 442/500 - Loss: 0.5899\n", + "Batch 443/500 - Loss: 0.5808\n", + "Batch 444/500 - Loss: 0.4383\n", + "Batch 445/500 - Loss: 0.3323\n", + "Batch 446/500 - Loss: 1.6701\n", + "Batch 447/500 - Loss: 0.0947\n", + "Batch 448/500 - Loss: 0.1441\n", + "Batch 449/500 - Loss: 0.2274\n", + "Batch 450/500 - Loss: 0.2813\n", + "Batch 451/500 - Loss: 0.2409\n", + "Batch 452/500 - Loss: 0.5025\n", + "Batch 453/500 - Loss: 0.2388\n", + "Batch 454/500 - Loss: 1.1612\n", + "Batch 455/500 - Loss: 0.2509\n", + "Batch 456/500 - Loss: 0.2495\n", + "Batch 457/500 - 
Loss: 0.1826\n", + "Batch 458/500 - Loss: 0.2085\n", + "Batch 459/500 - Loss: 0.7202\n", + "Batch 460/500 - Loss: 0.6447\n", + "Batch 461/500 - Loss: 0.5162\n", + "Batch 462/500 - Loss: 0.1169\n", + "Batch 463/500 - Loss: 1.5438\n", + "Batch 464/500 - Loss: 1.1735\n", + "Batch 465/500 - Loss: 0.5937\n", + "Batch 466/500 - Loss: 0.1435\n", + "Batch 467/500 - Loss: 0.3014\n", + "Batch 468/500 - Loss: 0.2753\n", + "Batch 469/500 - Loss: 1.0527\n", + "Batch 470/500 - Loss: 0.4341\n", + "Batch 471/500 - Loss: 0.7446\n", + "Batch 472/500 - Loss: 0.2833\n", + "Batch 473/500 - Loss: 0.3389\n", + "Batch 474/500 - Loss: 0.2785\n", + "Batch 475/500 - Loss: 0.4745\n", + "Batch 476/500 - Loss: 0.4434\n", + "Batch 477/500 - Loss: 0.2298\n", + "Batch 478/500 - Loss: 0.2377\n", + "Batch 479/500 - Loss: 0.1883\n", + "Batch 480/500 - Loss: 0.1907\n", + "Batch 481/500 - Loss: 0.9062\n", + "Batch 482/500 - Loss: 0.1334\n", + "Batch 483/500 - Loss: 0.6892\n", + "Batch 484/500 - Loss: 0.7903\n", + "Batch 485/500 - Loss: 0.3610\n", + "Batch 486/500 - Loss: 0.2908\n", + "Batch 487/500 - Loss: 0.0921\n", + "Batch 488/500 - Loss: 0.0861\n", + "Batch 489/500 - Loss: 0.4026\n", + "Batch 490/500 - Loss: 0.1016\n", + "Batch 491/500 - Loss: 0.3960\n", + "Batch 492/500 - Loss: 0.6094\n", + "Batch 493/500 - Loss: 0.7407\n", + "Batch 494/500 - Loss: 0.5979\n", + "Batch 495/500 - Loss: 0.1516\n", + "Batch 496/500 - Loss: 0.2606\n", + "Batch 497/500 - Loss: 0.6461\n", + "Batch 498/500 - Loss: 0.6364\n", + "Batch 499/500 - Loss: 0.7658\n", + "Batch 500/500 - Loss: 0.2939\n", + "Epoch 1 - Total Loss: 306.2372\n", + "Batch 1/500 - Loss: 0.2295\n", + "Batch 2/500 - Loss: 0.2793\n", + "Batch 3/500 - Loss: 0.2609\n", + "Batch 4/500 - Loss: 0.3009\n", + "Batch 5/500 - Loss: 0.1511\n", + "Batch 6/500 - Loss: 0.6770\n", + "Batch 7/500 - Loss: 1.0193\n", + "Batch 8/500 - Loss: 0.1508\n", + "Batch 9/500 - Loss: 0.8697\n", + "Batch 10/500 - Loss: 0.5195\n", + "Batch 11/500 - Loss: 0.2454\n", + "Batch 12/500 - 
Loss: 0.1103\n", + "Batch 13/500 - Loss: 0.3584\n", + "Batch 14/500 - Loss: 0.1931\n", + "Batch 15/500 - Loss: 0.2093\n", + "Batch 16/500 - Loss: 0.1174\n", + "Batch 17/500 - Loss: 0.4241\n", + "Batch 18/500 - Loss: 0.0823\n", + "Batch 19/500 - Loss: 0.2368\n", + "Batch 20/500 - Loss: 0.9994\n", + "Batch 21/500 - Loss: 0.0705\n", + "Batch 22/500 - Loss: 0.1551\n", + "Batch 23/500 - Loss: 0.2830\n", + "Batch 24/500 - Loss: 0.4255\n", + "Batch 25/500 - Loss: 0.4872\n", + "Batch 26/500 - Loss: 1.1917\n", + "Batch 27/500 - Loss: 0.6791\n", + "Batch 28/500 - Loss: 0.1974\n", + "Batch 29/500 - Loss: 1.4334\n", + "Batch 30/500 - Loss: 0.3943\n", + "Batch 31/500 - Loss: 0.1387\n", + "Batch 32/500 - Loss: 0.9019\n", + "Batch 33/500 - Loss: 0.3208\n", + "Batch 34/500 - Loss: 0.5698\n", + "Batch 35/500 - Loss: 0.9284\n", + "Batch 36/500 - Loss: 0.4273\n", + "Batch 37/500 - Loss: 0.3215\n", + "Batch 38/500 - Loss: 0.5430\n", + "Batch 39/500 - Loss: 0.7199\n", + "Batch 40/500 - Loss: 0.2904\n", + "Batch 41/500 - Loss: 0.3478\n", + "Batch 42/500 - Loss: 0.2410\n", + "Batch 43/500 - Loss: 0.1771\n", + "Batch 44/500 - Loss: 0.1407\n", + "Batch 45/500 - Loss: 1.7443\n", + "Batch 46/500 - Loss: 0.1583\n", + "Batch 47/500 - Loss: 0.4406\n", + "Batch 48/500 - Loss: 0.3726\n", + "Batch 49/500 - Loss: 0.4099\n", + "Batch 50/500 - Loss: 0.3842\n", + "Batch 51/500 - Loss: 0.2209\n", + "Batch 52/500 - Loss: 0.6019\n", + "Batch 53/500 - Loss: 0.1758\n", + "Batch 54/500 - Loss: 2.0507\n", + "Batch 55/500 - Loss: 0.9363\n", + "Batch 56/500 - Loss: 0.4224\n", + "Batch 57/500 - Loss: 0.2150\n", + "Batch 58/500 - Loss: 0.2679\n", + "Batch 59/500 - Loss: 0.1624\n", + "Batch 60/500 - Loss: 0.2152\n", + "Batch 61/500 - Loss: 0.1471\n", + "Batch 62/500 - Loss: 0.6409\n", + "Batch 63/500 - Loss: 0.3178\n", + "Batch 64/500 - Loss: 0.7325\n", + "Batch 65/500 - Loss: 0.1371\n", + "Batch 66/500 - Loss: 0.1802\n", + "Batch 67/500 - Loss: 0.2421\n", + "Batch 68/500 - Loss: 0.2421\n", + "Batch 69/500 - 
Loss: 1.4959\n", + "Batch 70/500 - Loss: 0.6616\n", + "Batch 71/500 - Loss: 0.0690\n", + "Batch 72/500 - Loss: 0.2096\n", + "Batch 73/500 - Loss: 0.2926\n", + "Batch 74/500 - Loss: 1.0509\n", + "Batch 75/500 - Loss: 1.1748\n", + "Batch 76/500 - Loss: 0.4732\n", + "Batch 77/500 - Loss: 1.5413\n", + "Batch 78/500 - Loss: 0.5382\n", + "Batch 79/500 - Loss: 1.1866\n", + "Batch 80/500 - Loss: 0.1764\n", + "Batch 81/500 - Loss: 0.1468\n", + "Batch 82/500 - Loss: 0.7941\n", + "Batch 83/500 - Loss: 0.2411\n", + "Batch 84/500 - Loss: 0.2396\n", + "Batch 85/500 - Loss: 0.1803\n", + "Batch 86/500 - Loss: 0.5541\n", + "Batch 87/500 - Loss: 0.4871\n", + "Batch 88/500 - Loss: 0.5365\n", + "Batch 89/500 - Loss: 0.2502\n", + "Batch 90/500 - Loss: 1.1757\n", + "Batch 91/500 - Loss: 0.1324\n", + "Batch 92/500 - Loss: 0.0756\n", + "Batch 93/500 - Loss: 0.3389\n", + "Batch 94/500 - Loss: 0.5581\n", + "Batch 95/500 - Loss: 0.5713\n", + "Batch 96/500 - Loss: 1.0307\n", + "Batch 97/500 - Loss: 0.4721\n", + "Batch 98/500 - Loss: 0.3804\n", + "Batch 99/500 - Loss: 0.2547\n", + "Batch 100/500 - Loss: 0.8273\n", + "Batch 101/500 - Loss: 0.7522\n", + "Batch 102/500 - Loss: 0.3575\n", + "Batch 103/500 - Loss: 0.5059\n", + "Batch 104/500 - Loss: 0.7597\n", + "Batch 105/500 - Loss: 0.1621\n", + "Batch 106/500 - Loss: 0.4254\n", + "Batch 107/500 - Loss: 0.4869\n", + "Batch 108/500 - Loss: 0.8246\n", + "Batch 109/500 - Loss: 0.4288\n", + "Batch 110/500 - Loss: 0.1964\n", + "Batch 111/500 - Loss: 0.4767\n", + "Batch 112/500 - Loss: 0.0865\n", + "Batch 113/500 - Loss: 0.2191\n", + "Batch 114/500 - Loss: 0.5478\n", + "Batch 115/500 - Loss: 0.4818\n", + "Batch 116/500 - Loss: 0.4580\n", + "Batch 117/500 - Loss: 0.2181\n", + "Batch 118/500 - Loss: 0.1380\n", + "Batch 119/500 - Loss: 0.3489\n", + "Batch 120/500 - Loss: 0.1315\n", + "Batch 121/500 - Loss: 0.1723\n", + "Batch 122/500 - Loss: 0.7459\n", + "Batch 123/500 - Loss: 0.6333\n", + "Batch 124/500 - Loss: 0.2204\n", + "Batch 125/500 - Loss: 
0.9417\n", + "Batch 126/500 - Loss: 0.1813\n", + "Batch 127/500 - Loss: 0.2528\n", + "Batch 128/500 - Loss: 0.2621\n", + "Batch 129/500 - Loss: 0.0971\n", + "Batch 130/500 - Loss: 0.3381\n", + "Batch 131/500 - Loss: 0.1909\n", + "Batch 132/500 - Loss: 0.3275\n", + "Batch 133/500 - Loss: 0.5778\n", + "Batch 134/500 - Loss: 0.7111\n", + "Batch 135/500 - Loss: 0.2160\n", + "Batch 136/500 - Loss: 0.7779\n", + "Batch 137/500 - Loss: 0.1570\n", + "Batch 138/500 - Loss: 0.4203\n", + "Batch 139/500 - Loss: 0.5339\n", + "Batch 140/500 - Loss: 0.1458\n", + "Batch 141/500 - Loss: 0.1874\n", + "Batch 142/500 - Loss: 0.5726\n", + "Batch 143/500 - Loss: 0.4742\n", + "Batch 144/500 - Loss: 0.8628\n", + "Batch 145/500 - Loss: 0.3590\n", + "Batch 146/500 - Loss: 0.4011\n", + "Batch 147/500 - Loss: 0.0936\n", + "Batch 148/500 - Loss: 0.1947\n", + "Batch 149/500 - Loss: 1.1895\n", + "Batch 150/500 - Loss: 0.2714\n", + "Batch 151/500 - Loss: 0.2862\n", + "Batch 152/500 - Loss: 0.2536\n", + "Batch 153/500 - Loss: 0.7226\n", + "Batch 154/500 - Loss: 0.8150\n", + "Batch 155/500 - Loss: 1.0328\n", + "Batch 156/500 - Loss: 0.5101\n", + "Batch 157/500 - Loss: 0.1894\n", + "Batch 158/500 - Loss: 0.7888\n", + "Batch 159/500 - Loss: 0.3145\n", + "Batch 160/500 - Loss: 0.0864\n", + "Batch 161/500 - Loss: 0.4108\n", + "Batch 162/500 - Loss: 0.1021\n", + "Batch 163/500 - Loss: 0.2203\n", + "Batch 164/500 - Loss: 0.5848\n", + "Batch 165/500 - Loss: 0.4642\n", + "Batch 166/500 - Loss: 0.3898\n", + "Batch 167/500 - Loss: 0.2956\n", + "Batch 168/500 - Loss: 0.1696\n", + "Batch 169/500 - Loss: 1.2263\n", + "Batch 170/500 - Loss: 0.2976\n", + "Batch 171/500 - Loss: 0.5962\n", + "Batch 172/500 - Loss: 1.1770\n", + "Batch 173/500 - Loss: 0.1294\n", + "Batch 174/500 - Loss: 0.3098\n", + "Batch 175/500 - Loss: 0.7521\n", + "Batch 176/500 - Loss: 0.5368\n", + "Batch 177/500 - Loss: 0.2948\n", + "Batch 178/500 - Loss: 0.4376\n", + "Batch 179/500 - Loss: 0.4714\n", + "Batch 180/500 - Loss: 0.1571\n", + "Batch 
181/500 - Loss: 0.6520\n", + "Batch 182/500 - Loss: 0.2558\n", + "Batch 183/500 - Loss: 0.1295\n", + "Batch 184/500 - Loss: 0.6103\n", + "Batch 185/500 - Loss: 0.6628\n", + "Batch 186/500 - Loss: 0.1209\n", + "Batch 187/500 - Loss: 0.4911\n", + "Batch 188/500 - Loss: 0.5665\n", + "Batch 189/500 - Loss: 0.2122\n", + "Batch 190/500 - Loss: 0.2403\n", + "Batch 191/500 - Loss: 0.2592\n", + "Batch 192/500 - Loss: 0.1855\n", + "Batch 193/500 - Loss: 0.1450\n", + "Batch 194/500 - Loss: 0.3966\n", + "Batch 195/500 - Loss: 0.3077\n", + "Batch 196/500 - Loss: 0.3637\n", + "Batch 197/500 - Loss: 0.9795\n", + "Batch 198/500 - Loss: 0.2147\n", + "Batch 199/500 - Loss: 0.2667\n", + "Batch 200/500 - Loss: 0.3364\n", + "Batch 201/500 - Loss: 0.9267\n", + "Batch 202/500 - Loss: 0.1710\n", + "Batch 203/500 - Loss: 0.6598\n", + "Batch 204/500 - Loss: 0.1480\n", + "Batch 205/500 - Loss: 2.6784\n", + "Batch 206/500 - Loss: 0.1964\n", + "Batch 207/500 - Loss: 0.2319\n", + "Batch 208/500 - Loss: 0.6233\n", + "Batch 209/500 - Loss: 0.6186\n", + "Batch 210/500 - Loss: 0.5357\n", + "Batch 211/500 - Loss: 0.1932\n", + "Batch 212/500 - Loss: 0.5361\n", + "Batch 213/500 - Loss: 0.4499\n", + "Batch 214/500 - Loss: 0.1086\n", + "Batch 215/500 - Loss: 0.2089\n", + "Batch 216/500 - Loss: 0.5943\n", + "Batch 217/500 - Loss: 0.7905\n", + "Batch 218/500 - Loss: 0.3430\n", + "Batch 219/500 - Loss: 0.2295\n", + "Batch 220/500 - Loss: 0.1522\n", + "Batch 221/500 - Loss: 0.0969\n", + "Batch 222/500 - Loss: 0.9098\n", + "Batch 223/500 - Loss: 0.3544\n", + "Batch 224/500 - Loss: 0.3615\n", + "Batch 225/500 - Loss: 0.9080\n", + "Batch 226/500 - Loss: 0.3039\n", + "Batch 227/500 - Loss: 1.5228\n", + "Batch 228/500 - Loss: 0.2453\n", + "Batch 229/500 - Loss: 0.5371\n", + "Batch 230/500 - Loss: 0.3949\n", + "Batch 231/500 - Loss: 0.5189\n", + "Batch 232/500 - Loss: 0.2500\n", + "Batch 233/500 - Loss: 0.3392\n", + "Batch 234/500 - Loss: 0.2078\n", + "Batch 235/500 - Loss: 0.2386\n", + "Batch 236/500 - Loss: 
1.0537\n", + "Batch 237/500 - Loss: 0.7498\n", + "Batch 238/500 - Loss: 0.5030\n", + "Batch 239/500 - Loss: 0.6290\n", + "Batch 240/500 - Loss: 0.4805\n", + "Batch 241/500 - Loss: 0.1811\n", + "Batch 242/500 - Loss: 0.5702\n", + "Batch 243/500 - Loss: 0.5537\n", + "Batch 244/500 - Loss: 0.1669\n", + "Batch 245/500 - Loss: 0.2440\n", + "Batch 246/500 - Loss: 0.1175\n", + "Batch 247/500 - Loss: 0.9290\n", + "Batch 248/500 - Loss: 1.6092\n", + "Batch 249/500 - Loss: 0.3320\n", + "Batch 250/500 - Loss: 0.5151\n", + "Batch 251/500 - Loss: 0.1643\n", + "Batch 252/500 - Loss: 0.8786\n", + "Batch 253/500 - Loss: 0.6802\n", + "Batch 254/500 - Loss: 0.7572\n", + "Batch 255/500 - Loss: 0.6046\n", + "Batch 256/500 - Loss: 0.4150\n", + "Batch 257/500 - Loss: 0.2932\n", + "Batch 258/500 - Loss: 0.3019\n", + "Batch 259/500 - Loss: 0.1651\n", + "Batch 260/500 - Loss: 0.1248\n", + "Batch 261/500 - Loss: 0.3445\n", + "Batch 262/500 - Loss: 0.4489\n", + "Batch 263/500 - Loss: 0.2562\n", + "Batch 264/500 - Loss: 0.2889\n", + "Batch 265/500 - Loss: 1.0244\n", + "Batch 266/500 - Loss: 0.2376\n", + "Batch 267/500 - Loss: 0.9933\n", + "Batch 268/500 - Loss: 0.5913\n", + "Batch 269/500 - Loss: 0.1156\n", + "Batch 270/500 - Loss: 0.1600\n", + "Batch 271/500 - Loss: 1.1833\n", + "Batch 272/500 - Loss: 0.4904\n", + "Batch 273/500 - Loss: 0.2964\n", + "Batch 274/500 - Loss: 0.5104\n", + "Batch 275/500 - Loss: 0.2832\n", + "Batch 276/500 - Loss: 0.1818\n", + "Batch 277/500 - Loss: 0.2970\n", + "Batch 278/500 - Loss: 0.1585\n", + "Batch 279/500 - Loss: 0.1618\n", + "Batch 280/500 - Loss: 0.5185\n", + "Batch 281/500 - Loss: 0.2314\n", + "Batch 282/500 - Loss: 0.1558\n", + "Batch 283/500 - Loss: 1.1014\n", + "Batch 284/500 - Loss: 1.2626\n", + "Batch 285/500 - Loss: 1.0815\n", + "Batch 286/500 - Loss: 0.1185\n", + "Batch 287/500 - Loss: 1.3231\n", + "Batch 288/500 - Loss: 1.1520\n", + "Batch 289/500 - Loss: 0.4746\n", + "Batch 290/500 - Loss: 0.0761\n", + "Batch 291/500 - Loss: 0.2458\n", + "Batch 
292/500 - Loss: 0.2775\n", + "Batch 293/500 - Loss: 0.5371\n", + "Batch 294/500 - Loss: 0.3214\n", + "Batch 295/500 - Loss: 0.4239\n", + "Batch 296/500 - Loss: 0.2453\n", + "Batch 297/500 - Loss: 0.0950\n", + "Batch 298/500 - Loss: 0.7630\n", + "Batch 299/500 - Loss: 0.1983\n", + "Batch 300/500 - Loss: 0.2928\n", + "Batch 301/500 - Loss: 0.1623\n", + "Batch 302/500 - Loss: 0.5263\n", + "Batch 303/500 - Loss: 1.1219\n", + "Batch 304/500 - Loss: 0.3282\n", + "Batch 305/500 - Loss: 2.1542\n", + "Batch 306/500 - Loss: 0.2354\n", + "Batch 307/500 - Loss: 0.5782\n", + "Batch 308/500 - Loss: 0.3108\n", + "Batch 309/500 - Loss: 0.2882\n", + "Batch 310/500 - Loss: 0.3443\n", + "Batch 311/500 - Loss: 0.3423\n", + "Batch 312/500 - Loss: 0.2715\n", + "Batch 313/500 - Loss: 0.5705\n", + "Batch 314/500 - Loss: 0.3132\n", + "Batch 315/500 - Loss: 0.7737\n", + "Batch 316/500 - Loss: 0.4593\n", + "Batch 317/500 - Loss: 0.1696\n", + "Batch 318/500 - Loss: 0.4142\n", + "Batch 319/500 - Loss: 0.5427\n", + "Batch 320/500 - Loss: 0.3636\n", + "Batch 321/500 - Loss: 0.4879\n", + "Batch 322/500 - Loss: 0.5107\n", + "Batch 323/500 - Loss: 0.0815\n", + "Batch 324/500 - Loss: 0.4473\n", + "Batch 325/500 - Loss: 0.5438\n", + "Batch 326/500 - Loss: 0.5522\n", + "Batch 327/500 - Loss: 0.8341\n", + "Batch 328/500 - Loss: 0.2355\n", + "Batch 329/500 - Loss: 0.4715\n", + "Batch 330/500 - Loss: 0.4899\n", + "Batch 331/500 - Loss: 0.3399\n", + "Batch 332/500 - Loss: 1.8443\n", + "Batch 333/500 - Loss: 1.5541\n", + "Batch 334/500 - Loss: 0.4246\n", + "Batch 335/500 - Loss: 0.1510\n", + "Batch 336/500 - Loss: 0.2631\n", + "Batch 337/500 - Loss: 0.3442\n", + "Batch 338/500 - Loss: 0.2552\n", + "Batch 339/500 - Loss: 0.2306\n", + "Batch 340/500 - Loss: 0.1917\n", + "Batch 341/500 - Loss: 0.1885\n", + "Batch 342/500 - Loss: 1.4538\n", + "Batch 343/500 - Loss: 0.1745\n", + "Batch 344/500 - Loss: 1.6092\n", + "Batch 345/500 - Loss: 0.1116\n", + "Batch 346/500 - Loss: 0.1476\n", + "Batch 347/500 - Loss: 
0.7254\n", + "Batch 348/500 - Loss: 0.4982\n", + "Batch 349/500 - Loss: 0.6372\n", + "Batch 350/500 - Loss: 0.2280\n", + "Batch 351/500 - Loss: 0.8090\n", + "Batch 352/500 - Loss: 0.5270\n", + "Batch 353/500 - Loss: 0.2437\n", + "Batch 354/500 - Loss: 0.5044\n", + "Batch 355/500 - Loss: 0.2406\n", + "Batch 356/500 - Loss: 0.4518\n", + "Batch 357/500 - Loss: 0.3306\n", + "Batch 358/500 - Loss: 0.7378\n", + "Batch 359/500 - Loss: 0.4526\n", + "Batch 360/500 - Loss: 0.6369\n", + "Batch 361/500 - Loss: 0.5627\n", + "Batch 362/500 - Loss: 0.3533\n", + "Batch 363/500 - Loss: 1.4243\n", + "Batch 364/500 - Loss: 0.2804\n", + "Batch 365/500 - Loss: 1.4755\n", + "Batch 366/500 - Loss: 0.1839\n", + "Batch 367/500 - Loss: 0.8822\n", + "Batch 368/500 - Loss: 0.1899\n", + "Batch 369/500 - Loss: 0.3181\n", + "Batch 370/500 - Loss: 0.5979\n", + "Batch 371/500 - Loss: 0.8044\n", + "Batch 372/500 - Loss: 0.6908\n", + "Batch 373/500 - Loss: 0.1354\n", + "Batch 374/500 - Loss: 1.1710\n", + "Batch 375/500 - Loss: 0.3945\n", + "Batch 376/500 - Loss: 0.6068\n", + "Batch 377/500 - Loss: 0.5182\n", + "Batch 378/500 - Loss: 0.2400\n", + "Batch 379/500 - Loss: 0.0962\n", + "Batch 380/500 - Loss: 1.2604\n", + "Batch 381/500 - Loss: 0.1331\n", + "Batch 382/500 - Loss: 0.2606\n", + "Batch 383/500 - Loss: 0.5115\n", + "Batch 384/500 - Loss: 0.7471\n", + "Batch 385/500 - Loss: 0.3406\n", + "Batch 386/500 - Loss: 0.7837\n", + "Batch 387/500 - Loss: 0.1908\n", + "Batch 388/500 - Loss: 0.2006\n", + "Batch 389/500 - Loss: 0.4613\n", + "Batch 390/500 - Loss: 0.1407\n", + "Batch 391/500 - Loss: 0.0619\n", + "Batch 392/500 - Loss: 0.1540\n", + "Batch 393/500 - Loss: 0.1261\n", + "Batch 394/500 - Loss: 0.6795\n", + "Batch 395/500 - Loss: 0.3158\n", + "Batch 396/500 - Loss: 0.6239\n", + "Batch 397/500 - Loss: 0.2598\n", + "Batch 398/500 - Loss: 0.5142\n", + "Batch 399/500 - Loss: 0.2059\n", + "Batch 400/500 - Loss: 0.1060\n", + "Batch 401/500 - Loss: 0.0939\n", + "Batch 402/500 - Loss: 0.1725\n", + "Batch 
403/500 - Loss: 0.1213\n", + "Batch 404/500 - Loss: 0.2209\n", + "Batch 405/500 - Loss: 0.7507\n", + "Batch 406/500 - Loss: 0.1712\n", + "Batch 407/500 - Loss: 0.2198\n", + "Batch 408/500 - Loss: 0.1619\n", + "Batch 409/500 - Loss: 0.5045\n", + "Batch 410/500 - Loss: 0.4886\n", + "Batch 411/500 - Loss: 0.6406\n", + "Batch 412/500 - Loss: 0.4940\n", + "Batch 413/500 - Loss: 0.1330\n", + "Batch 414/500 - Loss: 0.3646\n", + "Batch 415/500 - Loss: 0.4285\n", + "Batch 416/500 - Loss: 0.3695\n", + "Batch 417/500 - Loss: 0.4894\n", + "Batch 418/500 - Loss: 0.3718\n", + "Batch 419/500 - Loss: 0.3966\n", + "Batch 420/500 - Loss: 0.4355\n", + "Batch 421/500 - Loss: 0.1273\n", + "Batch 422/500 - Loss: 1.0138\n", + "Batch 423/500 - Loss: 0.3633\n", + "Batch 424/500 - Loss: 0.2309\n", + "Batch 425/500 - Loss: 0.2216\n", + "Batch 426/500 - Loss: 0.1712\n", + "Batch 427/500 - Loss: 0.2231\n", + "Batch 428/500 - Loss: 0.2001\n", + "Batch 429/500 - Loss: 0.6152\n", + "Batch 430/500 - Loss: 0.2411\n", + "Batch 431/500 - Loss: 0.5981\n", + "Batch 432/500 - Loss: 0.6951\n", + "Batch 433/500 - Loss: 0.3876\n", + "Batch 434/500 - Loss: 0.5217\n", + "Batch 435/500 - Loss: 0.0710\n", + "Batch 436/500 - Loss: 0.2666\n", + "Batch 437/500 - Loss: 0.7726\n", + "Batch 438/500 - Loss: 1.0525\n", + "Batch 439/500 - Loss: 0.5252\n", + "Batch 440/500 - Loss: 1.4532\n", + "Batch 441/500 - Loss: 0.1222\n", + "Batch 442/500 - Loss: 0.3765\n", + "Batch 443/500 - Loss: 1.2930\n", + "Batch 444/500 - Loss: 0.2354\n", + "Batch 445/500 - Loss: 0.2833\n", + "Batch 446/500 - Loss: 0.2111\n", + "Batch 447/500 - Loss: 1.2899\n", + "Batch 448/500 - Loss: 1.1139\n", + "Batch 449/500 - Loss: 0.1876\n", + "Batch 450/500 - Loss: 0.1562\n", + "Batch 451/500 - Loss: 0.4247\n", + "Batch 452/500 - Loss: 0.2453\n", + "Batch 453/500 - Loss: 0.1149\n", + "Batch 454/500 - Loss: 0.5044\n", + "Batch 455/500 - Loss: 0.2382\n", + "Batch 456/500 - Loss: 0.1734\n", + "Batch 457/500 - Loss: 0.3817\n", + "Batch 458/500 - Loss: 
0.5705\n", + "Batch 459/500 - Loss: 0.1494\n", + "Batch 460/500 - Loss: 0.5500\n", + "Batch 461/500 - Loss: 0.8679\n", + "Batch 462/500 - Loss: 0.0929\n", + "Batch 463/500 - Loss: 0.9183\n", + "Batch 464/500 - Loss: 0.3390\n", + "Batch 465/500 - Loss: 0.2704\n", + "Batch 466/500 - Loss: 0.3182\n", + "Batch 467/500 - Loss: 0.5042\n", + "Batch 468/500 - Loss: 0.4248\n", + "Batch 469/500 - Loss: 0.6958\n", + "Batch 470/500 - Loss: 0.2340\n", + "Batch 471/500 - Loss: 0.5951\n", + "Batch 472/500 - Loss: 0.4495\n", + "Batch 473/500 - Loss: 0.3190\n", + "Batch 474/500 - Loss: 0.7035\n", + "Batch 475/500 - Loss: 0.5358\n", + "Batch 476/500 - Loss: 0.3466\n", + "Batch 477/500 - Loss: 0.2078\n", + "Batch 478/500 - Loss: 0.4744\n", + "Batch 479/500 - Loss: 0.7295\n", + "Batch 480/500 - Loss: 0.1058\n", + "Batch 481/500 - Loss: 0.8527\n", + "Batch 482/500 - Loss: 0.6562\n", + "Batch 483/500 - Loss: 0.2465\n", + "Batch 484/500 - Loss: 0.2522\n", + "Batch 485/500 - Loss: 0.5806\n", + "Batch 486/500 - Loss: 0.1412\n", + "Batch 487/500 - Loss: 0.1010\n", + "Batch 488/500 - Loss: 0.1525\n", + "Batch 489/500 - Loss: 0.2156\n", + "Batch 490/500 - Loss: 0.1350\n", + "Batch 491/500 - Loss: 0.1604\n", + "Batch 492/500 - Loss: 0.4268\n", + "Batch 493/500 - Loss: 0.2103\n", + "Batch 494/500 - Loss: 0.9540\n", + "Batch 495/500 - Loss: 0.1543\n", + "Batch 496/500 - Loss: 0.5233\n", + "Batch 497/500 - Loss: 0.8858\n", + "Batch 498/500 - Loss: 0.4644\n", + "Batch 499/500 - Loss: 0.2616\n", + "Batch 500/500 - Loss: 0.1543\n", + "Epoch 2 - Total Loss: 232.1112\n", + "Model checkpoint saved at epoch 2\n", + "Batch 1/500 - Loss: 0.4265\n", + "Batch 2/500 - Loss: 0.1516\n", + "Batch 3/500 - Loss: 0.3710\n", + "Batch 4/500 - Loss: 0.1771\n", + "Batch 5/500 - Loss: 0.5207\n", + "Batch 6/500 - Loss: 0.7386\n", + "Batch 7/500 - Loss: 0.2882\n", + "Batch 8/500 - Loss: 0.6216\n", + "Batch 9/500 - Loss: 0.1913\n", + "Batch 10/500 - Loss: 0.5649\n", + "Batch 11/500 - Loss: 0.4610\n", + "Batch 12/500 - 
Loss: 0.7923\n", + "Batch 13/500 - Loss: 0.3053\n", + "Batch 14/500 - Loss: 0.1838\n", + "Batch 15/500 - Loss: 1.2187\n", + "Batch 16/500 - Loss: 0.4019\n", + "Batch 17/500 - Loss: 0.3574\n", + "Batch 18/500 - Loss: 0.1663\n", + "Batch 19/500 - Loss: 0.3876\n", + "Batch 20/500 - Loss: 0.8889\n", + "Batch 21/500 - Loss: 1.4647\n", + "Batch 22/500 - Loss: 0.4062\n", + "Batch 23/500 - Loss: 1.0073\n", + "Batch 24/500 - Loss: 0.2815\n", + "Batch 25/500 - Loss: 0.3383\n", + "Batch 26/500 - Loss: 0.3189\n", + "Batch 27/500 - Loss: 1.1413\n", + "Batch 28/500 - Loss: 0.3857\n", + "Batch 29/500 - Loss: 0.2109\n", + "Batch 30/500 - Loss: 0.0980\n", + "Batch 31/500 - Loss: 0.3805\n", + "Batch 32/500 - Loss: 0.4371\n", + "Batch 33/500 - Loss: 0.4038\n", + "Batch 34/500 - Loss: 0.1275\n", + "Batch 35/500 - Loss: 0.5829\n", + "Batch 36/500 - Loss: 0.1709\n", + "Batch 37/500 - Loss: 0.2262\n", + "Batch 38/500 - Loss: 0.2170\n", + "Batch 39/500 - Loss: 0.0927\n", + "Batch 40/500 - Loss: 0.8578\n", + "Batch 41/500 - Loss: 0.5190\n", + "Batch 42/500 - Loss: 0.6384\n", + "Batch 43/500 - Loss: 0.1724\n", + "Batch 44/500 - Loss: 0.5722\n", + "Batch 45/500 - Loss: 0.2013\n", + "Batch 46/500 - Loss: 0.2589\n", + "Batch 47/500 - Loss: 0.2515\n", + "Batch 48/500 - Loss: 0.1308\n", + "Batch 49/500 - Loss: 0.1369\n", + "Batch 50/500 - Loss: 0.1502\n", + "Batch 51/500 - Loss: 0.3156\n", + "Batch 52/500 - Loss: 0.2319\n", + "Batch 53/500 - Loss: 0.1793\n", + "Batch 54/500 - Loss: 0.2811\n", + "Batch 55/500 - Loss: 0.2027\n", + "Batch 56/500 - Loss: 0.0651\n", + "Batch 57/500 - Loss: 0.3969\n", + "Batch 58/500 - Loss: 0.0592\n", + "Batch 59/500 - Loss: 0.2572\n", + "Batch 60/500 - Loss: 0.1295\n", + "Batch 61/500 - Loss: 0.2594\n", + "Batch 62/500 - Loss: 0.3133\n", + "Batch 63/500 - Loss: 0.3547\n", + "Batch 64/500 - Loss: 0.0942\n", + "Batch 65/500 - Loss: 0.4642\n", + "Batch 66/500 - Loss: 0.1395\n", + "Batch 67/500 - Loss: 0.1982\n", + "Batch 68/500 - Loss: 0.3066\n", + "Batch 69/500 - 
Loss: 0.3199\n", + "Batch 70/500 - Loss: 0.2936\n", + "Batch 71/500 - Loss: 0.7208\n", + "Batch 72/500 - Loss: 0.2232\n", + "Batch 73/500 - Loss: 0.3394\n", + "Batch 74/500 - Loss: 0.1001\n", + "Batch 75/500 - Loss: 0.1976\n", + "Batch 76/500 - Loss: 0.2844\n", + "Batch 77/500 - Loss: 0.1053\n", + "Batch 78/500 - Loss: 0.1615\n", + "Batch 79/500 - Loss: 0.0785\n", + "Batch 80/500 - Loss: 0.1724\n", + "Batch 81/500 - Loss: 0.4493\n", + "Batch 82/500 - Loss: 0.1905\n", + "Batch 83/500 - Loss: 0.7766\n", + "Batch 84/500 - Loss: 0.2061\n", + "Batch 85/500 - Loss: 0.1787\n", + "Batch 86/500 - Loss: 0.2576\n", + "Batch 87/500 - Loss: 0.2042\n", + "Batch 88/500 - Loss: 0.4044\n", + "Batch 89/500 - Loss: 0.2151\n", + "Batch 90/500 - Loss: 0.1316\n", + "Batch 91/500 - Loss: 0.1267\n", + "Batch 92/500 - Loss: 0.6327\n", + "Batch 93/500 - Loss: 0.1970\n", + "Batch 94/500 - Loss: 1.1099\n", + "Batch 95/500 - Loss: 0.1484\n", + "Batch 96/500 - Loss: 0.3027\n", + "Batch 97/500 - Loss: 0.2926\n", + "Batch 98/500 - Loss: 0.1443\n", + "Batch 99/500 - Loss: 0.1753\n", + "Batch 100/500 - Loss: 0.2773\n", + "Batch 101/500 - Loss: 0.1426\n", + "Batch 102/500 - Loss: 0.4887\n", + "Batch 103/500 - Loss: 0.5614\n", + "Batch 104/500 - Loss: 0.7222\n", + "Batch 105/500 - Loss: 0.1926\n", + "Batch 106/500 - Loss: 1.0210\n", + "Batch 107/500 - Loss: 0.1872\n", + "Batch 108/500 - Loss: 0.2228\n", + "Batch 109/500 - Loss: 1.0993\n", + "Batch 110/500 - Loss: 0.3475\n", + "Batch 111/500 - Loss: 0.4241\n", + "Batch 112/500 - Loss: 0.1317\n", + "Batch 113/500 - Loss: 0.2326\n", + "Batch 114/500 - Loss: 0.1966\n", + "Batch 115/500 - Loss: 0.0769\n", + "Batch 116/500 - Loss: 0.1274\n", + "Batch 117/500 - Loss: 0.5406\n", + "Batch 118/500 - Loss: 0.3529\n", + "Batch 119/500 - Loss: 0.5496\n", + "Batch 120/500 - Loss: 0.0716\n", + "Batch 121/500 - Loss: 0.7687\n", + "Batch 122/500 - Loss: 0.7728\n", + "Batch 123/500 - Loss: 0.0982\n", + "Batch 124/500 - Loss: 0.5667\n", + "Batch 125/500 - Loss: 
1.5220\n", + "Batch 126/500 - Loss: 0.1868\n", + "Batch 127/500 - Loss: 0.2503\n", + "Batch 128/500 - Loss: 0.3442\n", + "Batch 129/500 - Loss: 0.1878\n", + "Batch 130/500 - Loss: 1.2278\n", + "Batch 131/500 - Loss: 0.1944\n", + "Batch 132/500 - Loss: 0.2119\n", + "Batch 133/500 - Loss: 0.2064\n", + "Batch 134/500 - Loss: 0.7441\n", + "Batch 135/500 - Loss: 0.8646\n", + "Batch 136/500 - Loss: 1.8104\n", + "Batch 137/500 - Loss: 0.2693\n", + "Batch 138/500 - Loss: 0.2546\n", + "Batch 139/500 - Loss: 0.3306\n", + "Batch 140/500 - Loss: 0.6087\n", + "Batch 141/500 - Loss: 0.0738\n", + "Batch 142/500 - Loss: 0.1991\n", + "Batch 143/500 - Loss: 0.4814\n", + "Batch 144/500 - Loss: 0.1121\n", + "Batch 145/500 - Loss: 0.4014\n", + "Batch 146/500 - Loss: 0.2321\n", + "Batch 147/500 - Loss: 0.4920\n", + "Batch 148/500 - Loss: 0.4239\n", + "Batch 149/500 - Loss: 0.7213\n", + "Batch 150/500 - Loss: 1.0704\n", + "Batch 151/500 - Loss: 0.2091\n", + "Batch 152/500 - Loss: 0.1586\n", + "Batch 153/500 - Loss: 0.2761\n", + "Batch 154/500 - Loss: 0.6834\n", + "Batch 155/500 - Loss: 0.5372\n", + "Batch 156/500 - Loss: 0.2107\n", + "Batch 157/500 - Loss: 0.4190\n", + "Batch 158/500 - Loss: 0.6793\n", + "Batch 159/500 - Loss: 0.4874\n", + "Batch 160/500 - Loss: 0.0932\n", + "Batch 161/500 - Loss: 0.1850\n", + "Batch 162/500 - Loss: 0.3131\n", + "Batch 163/500 - Loss: 0.1322\n", + "Batch 164/500 - Loss: 0.3503\n", + "Batch 165/500 - Loss: 0.1275\n", + "Batch 166/500 - Loss: 0.1640\n", + "Batch 167/500 - Loss: 0.3723\n", + "Batch 168/500 - Loss: 0.4698\n", + "Batch 169/500 - Loss: 0.0527\n", + "Batch 170/500 - Loss: 0.0962\n", + "Batch 171/500 - Loss: 0.1593\n", + "Batch 172/500 - Loss: 0.1403\n", + "Batch 173/500 - Loss: 0.1591\n", + "Batch 174/500 - Loss: 0.2857\n", + "Batch 175/500 - Loss: 0.2337\n", + "Batch 176/500 - Loss: 0.7899\n", + "Batch 177/500 - Loss: 0.6179\n", + "Batch 178/500 - Loss: 0.1462\n", + "Batch 179/500 - Loss: 0.2349\n", + "Batch 180/500 - Loss: 0.1601\n", + "Batch 
181/500 - Loss: 0.1053\n", + "Batch 182/500 - Loss: 0.4238\n", + "Batch 183/500 - Loss: 0.4180\n", + "Batch 184/500 - Loss: 0.1635\n", + "Batch 185/500 - Loss: 0.5910\n", + "Batch 186/500 - Loss: 0.2591\n", + "Batch 187/500 - Loss: 0.2673\n", + "Batch 188/500 - Loss: 0.1571\n", + "Batch 189/500 - Loss: 0.1630\n", + "Batch 190/500 - Loss: 0.1009\n", + "Batch 191/500 - Loss: 0.3283\n", + "Batch 192/500 - Loss: 0.3085\n", + "Batch 193/500 - Loss: 0.4672\n", + "Batch 194/500 - Loss: 0.2652\n", + "Batch 195/500 - Loss: 0.5707\n", + "Batch 196/500 - Loss: 0.0950\n", + "Batch 197/500 - Loss: 0.2167\n", + "Batch 198/500 - Loss: 1.2317\n", + "Batch 199/500 - Loss: 0.5120\n", + "Batch 200/500 - Loss: 0.2466\n", + "Batch 201/500 - Loss: 0.4672\n", + "Batch 202/500 - Loss: 0.1583\n", + "Batch 203/500 - Loss: 0.2399\n", + "Batch 204/500 - Loss: 1.0044\n", + "Batch 205/500 - Loss: 1.2016\n", + "Batch 206/500 - Loss: 0.1051\n", + "Batch 207/500 - Loss: 0.1185\n", + "Batch 208/500 - Loss: 0.5560\n", + "Batch 209/500 - Loss: 0.5760\n", + "Batch 210/500 - Loss: 0.4039\n", + "Batch 211/500 - Loss: 0.1782\n", + "Batch 212/500 - Loss: 0.2168\n", + "Batch 213/500 - Loss: 0.3112\n", + "Batch 214/500 - Loss: 0.4984\n", + "Batch 215/500 - Loss: 0.1675\n", + "Batch 216/500 - Loss: 0.5460\n", + "Batch 217/500 - Loss: 0.1992\n", + "Batch 218/500 - Loss: 0.1153\n", + "Batch 219/500 - Loss: 0.1158\n", + "Batch 220/500 - Loss: 0.3617\n", + "Batch 221/500 - Loss: 0.2202\n", + "Batch 222/500 - Loss: 0.6362\n", + "Batch 223/500 - Loss: 0.6621\n", + "Batch 224/500 - Loss: 0.1303\n", + "Batch 225/500 - Loss: 0.1646\n", + "Batch 226/500 - Loss: 0.1936\n", + "Batch 227/500 - Loss: 0.2182\n", + "Batch 228/500 - Loss: 0.0858\n", + "Batch 229/500 - Loss: 0.2096\n", + "Batch 230/500 - Loss: 0.9541\n", + "Batch 231/500 - Loss: 1.7526\n", + "Batch 232/500 - Loss: 0.7076\n", + "Batch 233/500 - Loss: 0.5317\n", + "Batch 234/500 - Loss: 0.1288\n", + "Batch 235/500 - Loss: 0.5438\n", + "Batch 236/500 - Loss: 
0.6362\n", + "Batch 237/500 - Loss: 0.1578\n", + "Batch 238/500 - Loss: 1.3832\n", + "Batch 239/500 - Loss: 0.2970\n", + "Batch 240/500 - Loss: 0.1806\n", + "Batch 241/500 - Loss: 0.1866\n", + "Batch 242/500 - Loss: 0.8609\n", + "Batch 243/500 - Loss: 0.1082\n", + "Batch 244/500 - Loss: 0.1123\n", + "Batch 245/500 - Loss: 0.7041\n", + "Batch 246/500 - Loss: 0.1284\n", + "Batch 247/500 - Loss: 0.4010\n", + "Batch 248/500 - Loss: 0.1011\n", + "Batch 249/500 - Loss: 0.4068\n", + "Batch 250/500 - Loss: 0.1995\n", + "Batch 251/500 - Loss: 0.4700\n", + "Batch 252/500 - Loss: 0.5376\n", + "Batch 253/500 - Loss: 0.4257\n", + "Batch 254/500 - Loss: 0.2276\n", + "Batch 255/500 - Loss: 0.6708\n", + "Batch 256/500 - Loss: 0.0708\n", + "Batch 257/500 - Loss: 0.3869\n", + "Batch 258/500 - Loss: 0.7975\n", + "Batch 259/500 - Loss: 0.1781\n", + "Batch 260/500 - Loss: 0.0286\n", + "Batch 261/500 - Loss: 0.1425\n", + "Batch 262/500 - Loss: 0.2990\n", + "Batch 263/500 - Loss: 0.0557\n", + "Batch 264/500 - Loss: 0.1835\n", + "Batch 265/500 - Loss: 0.5325\n", + "Batch 266/500 - Loss: 0.1774\n", + "Batch 267/500 - Loss: 0.1384\n", + "Batch 268/500 - Loss: 0.2980\n", + "Batch 269/500 - Loss: 0.3421\n", + "Batch 270/500 - Loss: 0.3366\n", + "Batch 271/500 - Loss: 0.3167\n", + "Batch 272/500 - Loss: 0.1538\n", + "Batch 273/500 - Loss: 0.1713\n", + "Batch 274/500 - Loss: 0.1559\n", + "Batch 275/500 - Loss: 0.1729\n", + "Batch 276/500 - Loss: 0.4858\n", + "Batch 277/500 - Loss: 0.0719\n", + "Batch 278/500 - Loss: 0.0941\n", + "Batch 279/500 - Loss: 0.4956\n", + "Batch 280/500 - Loss: 0.1535\n", + "Batch 281/500 - Loss: 0.3814\n", + "Batch 282/500 - Loss: 0.4777\n", + "Batch 283/500 - Loss: 0.2360\n", + "Batch 284/500 - Loss: 0.0929\n", + "Batch 285/500 - Loss: 0.0678\n", + "Batch 286/500 - Loss: 1.0031\n", + "Batch 287/500 - Loss: 0.2044\n", + "Batch 288/500 - Loss: 0.9223\n", + "Batch 289/500 - Loss: 0.3554\n", + "Batch 290/500 - Loss: 0.4911\n", + "Batch 291/500 - Loss: 0.2102\n", + "Batch 
292/500 - Loss: 0.9273\n", + "Batch 293/500 - Loss: 0.2926\n", + "Batch 294/500 - Loss: 0.6840\n", + "Batch 295/500 - Loss: 0.4835\n", + "Batch 296/500 - Loss: 0.1498\n", + "Batch 297/500 - Loss: 0.7786\n", + "Batch 298/500 - Loss: 0.1918\n", + "Batch 299/500 - Loss: 0.1162\n", + "Batch 300/500 - Loss: 0.1619\n", + "Batch 301/500 - Loss: 0.1798\n", + "Batch 302/500 - Loss: 0.2149\n", + "Batch 303/500 - Loss: 0.1404\n", + "Batch 304/500 - Loss: 0.2569\n", + "Batch 305/500 - Loss: 0.4648\n", + "Batch 306/500 - Loss: 0.3518\n", + "Batch 307/500 - Loss: 0.2218\n", + "Batch 308/500 - Loss: 0.1296\n", + "Batch 309/500 - Loss: 1.4485\n", + "Batch 310/500 - Loss: 0.3062\n", + "Batch 311/500 - Loss: 0.1620\n", + "Batch 312/500 - Loss: 0.1654\n", + "Batch 313/500 - Loss: 0.2407\n", + "Batch 314/500 - Loss: 0.6782\n", + "Batch 315/500 - Loss: 0.1364\n", + "Batch 316/500 - Loss: 0.2585\n", + "Batch 317/500 - Loss: 0.3378\n", + "Batch 318/500 - Loss: 0.4318\n", + "Batch 319/500 - Loss: 0.2568\n", + "Batch 320/500 - Loss: 0.5133\n", + "Batch 321/500 - Loss: 0.8458\n", + "Batch 322/500 - Loss: 0.0565\n", + "Batch 323/500 - Loss: 0.2073\n", + "Batch 324/500 - Loss: 0.1284\n", + "Batch 325/500 - Loss: 0.2025\n", + "Batch 326/500 - Loss: 0.4331\n", + "Batch 327/500 - Loss: 0.3794\n", + "Batch 328/500 - Loss: 0.1510\n", + "Batch 329/500 - Loss: 0.0931\n", + "Batch 330/500 - Loss: 1.0033\n", + "Batch 331/500 - Loss: 0.5928\n", + "Batch 332/500 - Loss: 0.1040\n", + "Batch 333/500 - Loss: 0.3689\n", + "Batch 334/500 - Loss: 0.6746\n", + "Batch 335/500 - Loss: 0.5340\n", + "Batch 336/500 - Loss: 0.3137\n", + "Batch 337/500 - Loss: 0.1575\n", + "Batch 338/500 - Loss: 0.1554\n", + "Batch 339/500 - Loss: 0.4193\n", + "Batch 340/500 - Loss: 0.2825\n", + "Batch 341/500 - Loss: 0.0676\n", + "Batch 342/500 - Loss: 0.2661\n", + "Batch 343/500 - Loss: 0.4641\n", + "Batch 344/500 - Loss: 0.4225\n", + "Batch 345/500 - Loss: 0.3502\n", + "Batch 346/500 - Loss: 0.6988\n", + "Batch 347/500 - Loss: 
0.5938\n", + "Batch 348/500 - Loss: 0.4359\n", + "Batch 349/500 - Loss: 0.1864\n", + "Batch 350/500 - Loss: 0.5937\n", + "Batch 351/500 - Loss: 0.1344\n", + "Batch 352/500 - Loss: 0.6208\n", + "Batch 353/500 - Loss: 0.4012\n", + "Batch 354/500 - Loss: 0.3957\n", + "Batch 355/500 - Loss: 0.2852\n", + "Batch 356/500 - Loss: 0.4713\n", + "Batch 357/500 - Loss: 0.5100\n", + "Batch 358/500 - Loss: 0.7117\n", + "Batch 359/500 - Loss: 0.4196\n", + "Batch 360/500 - Loss: 0.8527\n", + "Batch 361/500 - Loss: 0.5089\n", + "Batch 362/500 - Loss: 0.1196\n", + "Batch 363/500 - Loss: 0.8389\n", + "Batch 364/500 - Loss: 0.6834\n", + "Batch 365/500 - Loss: 0.1990\n", + "Batch 366/500 - Loss: 0.2043\n", + "Batch 367/500 - Loss: 0.0708\n", + "Batch 368/500 - Loss: 0.3348\n", + "Batch 369/500 - Loss: 0.5550\n", + "Batch 370/500 - Loss: 0.7988\n", + "Batch 371/500 - Loss: 0.7459\n", + "Batch 372/500 - Loss: 0.1402\n", + "Batch 373/500 - Loss: 0.2241\n", + "Batch 374/500 - Loss: 0.1613\n", + "Batch 375/500 - Loss: 0.3693\n", + "Batch 376/500 - Loss: 0.2016\n", + "Batch 377/500 - Loss: 0.7111\n", + "Batch 378/500 - Loss: 0.4070\n", + "Batch 379/500 - Loss: 0.2480\n", + "Batch 380/500 - Loss: 0.2101\n", + "Batch 381/500 - Loss: 0.2514\n", + "Batch 382/500 - Loss: 0.6249\n", + "Batch 383/500 - Loss: 0.1511\n", + "Batch 384/500 - Loss: 0.3827\n", + "Batch 385/500 - Loss: 0.7084\n", + "Batch 386/500 - Loss: 0.1928\n", + "Batch 387/500 - Loss: 0.1004\n", + "Batch 388/500 - Loss: 0.3518\n", + "Batch 389/500 - Loss: 0.0902\n", + "Batch 390/500 - Loss: 0.0619\n", + "Batch 391/500 - Loss: 0.1144\n", + "Batch 392/500 - Loss: 0.3837\n", + "Batch 393/500 - Loss: 0.6071\n", + "Batch 394/500 - Loss: 0.1150\n", + "Batch 395/500 - Loss: 0.2476\n", + "Batch 396/500 - Loss: 0.2068\n", + "Batch 397/500 - Loss: 0.2585\n", + "Batch 398/500 - Loss: 0.6127\n", + "Batch 399/500 - Loss: 0.0956\n", + "Batch 400/500 - Loss: 0.5082\n", + "Batch 401/500 - Loss: 0.3081\n", + "Batch 402/500 - Loss: 0.2664\n", + "Batch 
403/500 - Loss: 0.1400\n", + "Batch 404/500 - Loss: 0.4316\n", + "Batch 405/500 - Loss: 0.7494\n", + "Batch 406/500 - Loss: 0.1082\n", + "Batch 407/500 - Loss: 0.2292\n", + "Batch 408/500 - Loss: 0.2081\n", + "Batch 409/500 - Loss: 0.5092\n", + "Batch 410/500 - Loss: 0.3356\n", + "Batch 411/500 - Loss: 1.1211\n", + "Batch 412/500 - Loss: 0.1752\n", + "Batch 413/500 - Loss: 0.3744\n", + "Batch 414/500 - Loss: 0.2257\n", + "Batch 415/500 - Loss: 0.2345\n", + "Batch 416/500 - Loss: 0.8167\n", + "Batch 417/500 - Loss: 0.2596\n", + "Batch 418/500 - Loss: 0.2161\n", + "Batch 419/500 - Loss: 0.2109\n", + "Batch 420/500 - Loss: 0.2939\n", + "Batch 421/500 - Loss: 0.2047\n", + "Batch 422/500 - Loss: 0.1280\n", + "Batch 423/500 - Loss: 0.1261\n", + "Batch 424/500 - Loss: 0.1266\n", + "Batch 425/500 - Loss: 0.5587\n", + "Batch 426/500 - Loss: 0.1307\n", + "Batch 427/500 - Loss: 0.0651\n", + "Batch 428/500 - Loss: 0.8875\n", + "Batch 429/500 - Loss: 0.7849\n", + "Batch 430/500 - Loss: 0.2580\n", + "Batch 431/500 - Loss: 0.9530\n", + "Batch 432/500 - Loss: 0.9561\n", + "Batch 433/500 - Loss: 0.5544\n", + "Batch 434/500 - Loss: 0.6486\n", + "Batch 435/500 - Loss: 0.4875\n", + "Batch 436/500 - Loss: 0.2569\n", + "Batch 437/500 - Loss: 0.1951\n", + "Batch 438/500 - Loss: 0.4742\n", + "Batch 439/500 - Loss: 0.3610\n", + "Batch 440/500 - Loss: 0.1631\n", + "Batch 441/500 - Loss: 0.1382\n", + "Batch 442/500 - Loss: 0.3233\n", + "Batch 443/500 - Loss: 0.1807\n", + "Batch 444/500 - Loss: 0.1211\n", + "Batch 445/500 - Loss: 0.7083\n", + "Batch 446/500 - Loss: 0.1518\n", + "Batch 447/500 - Loss: 0.4809\n", + "Batch 448/500 - Loss: 0.4021\n", + "Batch 449/500 - Loss: 0.5771\n", + "Batch 450/500 - Loss: 0.0943\n", + "Batch 451/500 - Loss: 0.2730\n", + "Batch 452/500 - Loss: 0.3871\n", + "Batch 453/500 - Loss: 0.1345\n", + "Batch 454/500 - Loss: 0.2011\n", + "Batch 455/500 - Loss: 1.0172\n", + "Batch 456/500 - Loss: 0.3922\n", + "Batch 457/500 - Loss: 0.4044\n", + "Batch 458/500 - Loss: 
0.4750\n", + "Batch 459/500 - Loss: 0.1782\n", + "Batch 460/500 - Loss: 0.8116\n", + "Batch 461/500 - Loss: 0.2428\n", + "Batch 462/500 - Loss: 0.2792\n", + "Batch 463/500 - Loss: 0.3606\n", + "Batch 464/500 - Loss: 0.1792\n", + "Batch 465/500 - Loss: 0.8138\n", + "Batch 466/500 - Loss: 0.5684\n", + "Batch 467/500 - Loss: 0.1633\n", + "Batch 468/500 - Loss: 0.3389\n", + "Batch 469/500 - Loss: 0.5570\n", + "Batch 470/500 - Loss: 0.3266\n", + "Batch 471/500 - Loss: 0.5747\n", + "Batch 472/500 - Loss: 0.3532\n", + "Batch 473/500 - Loss: 0.9148\n", + "Batch 474/500 - Loss: 0.1770\n", + "Batch 475/500 - Loss: 0.1477\n", + "Batch 476/500 - Loss: 0.1160\n", + "Batch 477/500 - Loss: 0.1474\n", + "Batch 478/500 - Loss: 0.9700\n", + "Batch 479/500 - Loss: 0.4635\n", + "Batch 480/500 - Loss: 0.1634\n", + "Batch 481/500 - Loss: 0.2562\n", + "Batch 482/500 - Loss: 0.6406\n", + "Batch 483/500 - Loss: 0.1444\n", + "Batch 484/500 - Loss: 0.4170\n", + "Batch 485/500 - Loss: 0.0913\n", + "Batch 486/500 - Loss: 0.2939\n", + "Batch 487/500 - Loss: 0.1216\n", + "Batch 488/500 - Loss: 0.2500\n", + "Batch 489/500 - Loss: 0.1552\n", + "Batch 490/500 - Loss: 0.3786\n", + "Batch 491/500 - Loss: 0.1049\n", + "Batch 492/500 - Loss: 0.7841\n", + "Batch 493/500 - Loss: 0.7897\n", + "Batch 494/500 - Loss: 0.2101\n", + "Batch 495/500 - Loss: 0.1689\n", + "Batch 496/500 - Loss: 0.3691\n", + "Batch 497/500 - Loss: 0.2300\n", + "Batch 498/500 - Loss: 0.9526\n", + "Batch 499/500 - Loss: 0.2657\n", + "Batch 500/500 - Loss: 0.8996\n", + "Epoch 3 - Total Loss: 186.6012\n", + "Batch 1/500 - Loss: 0.9686\n", + "Batch 2/500 - Loss: 0.4208\n", + "Batch 3/500 - Loss: 0.4431\n", + "Batch 4/500 - Loss: 0.0772\n", + "Batch 5/500 - Loss: 0.2819\n", + "Batch 6/500 - Loss: 0.1116\n", + "Batch 7/500 - Loss: 0.1554\n", + "Batch 8/500 - Loss: 0.2441\n", + "Batch 9/500 - Loss: 0.4399\n", + "Batch 10/500 - Loss: 0.3543\n", + "Batch 11/500 - Loss: 0.3874\n", + "Batch 12/500 - Loss: 0.2306\n", + "Batch 13/500 - Loss: 
0.4776\n", + "Batch 14/500 - Loss: 0.2439\n", + "Batch 15/500 - Loss: 0.5563\n", + "Batch 16/500 - Loss: 0.6773\n", + "Batch 17/500 - Loss: 0.2013\n", + "Batch 18/500 - Loss: 0.2149\n", + "Batch 19/500 - Loss: 0.6190\n", + "Batch 20/500 - Loss: 0.7080\n", + "Batch 21/500 - Loss: 0.1715\n", + "Batch 22/500 - Loss: 0.5550\n", + "Batch 23/500 - Loss: 0.1683\n", + "Batch 24/500 - Loss: 0.1660\n", + "Batch 25/500 - Loss: 0.1236\n", + "Batch 26/500 - Loss: 0.1569\n", + "Batch 27/500 - Loss: 0.1302\n", + "Batch 28/500 - Loss: 0.9561\n", + "Batch 29/500 - Loss: 0.6471\n", + "Batch 30/500 - Loss: 0.3369\n", + "Batch 31/500 - Loss: 0.1116\n", + "Batch 32/500 - Loss: 0.1047\n", + "Batch 33/500 - Loss: 0.2410\n", + "Batch 34/500 - Loss: 0.2448\n", + "Batch 35/500 - Loss: 0.1243\n", + "Batch 36/500 - Loss: 0.4815\n", + "Batch 37/500 - Loss: 0.0534\n", + "Batch 38/500 - Loss: 0.0964\n", + "Batch 39/500 - Loss: 0.3188\n", + "Batch 40/500 - Loss: 0.8740\n", + "Batch 41/500 - Loss: 0.0750\n", + "Batch 42/500 - Loss: 0.2755\n", + "Batch 43/500 - Loss: 0.4445\n", + "Batch 44/500 - Loss: 0.3610\n", + "Batch 45/500 - Loss: 0.0671\n", + "Batch 46/500 - Loss: 0.5846\n", + "Batch 47/500 - Loss: 0.4588\n", + "Batch 48/500 - Loss: 0.0958\n", + "Batch 49/500 - Loss: 0.1828\n", + "Batch 50/500 - Loss: 0.2535\n", + "Batch 51/500 - Loss: 0.1837\n", + "Batch 52/500 - Loss: 0.2557\n", + "Batch 53/500 - Loss: 0.2226\n", + "Batch 54/500 - Loss: 0.0698\n", + "Batch 55/500 - Loss: 0.0976\n", + "Batch 56/500 - Loss: 0.6091\n", + "Batch 57/500 - Loss: 0.3196\n", + "Batch 58/500 - Loss: 0.2123\n", + "Batch 59/500 - Loss: 0.1297\n", + "Batch 60/500 - Loss: 0.1145\n", + "Batch 61/500 - Loss: 0.5405\n", + "Batch 62/500 - Loss: 0.1140\n", + "Batch 63/500 - Loss: 0.6110\n", + "Batch 64/500 - Loss: 0.3597\n", + "Batch 65/500 - Loss: 0.9178\n", + "Batch 66/500 - Loss: 0.3630\n", + "Batch 67/500 - Loss: 0.3449\n", + "Batch 68/500 - Loss: 0.9306\n", + "Batch 69/500 - Loss: 0.6784\n", + "Batch 70/500 - Loss: 
0.5375\n", + "Batch 71/500 - Loss: 0.2489\n", + "Batch 72/500 - Loss: 0.0761\n", + "Batch 73/500 - Loss: 0.4840\n", + "Batch 74/500 - Loss: 0.1121\n", + "Batch 75/500 - Loss: 0.0812\n", + "Batch 76/500 - Loss: 0.2936\n", + "Batch 77/500 - Loss: 0.1044\n", + "Batch 78/500 - Loss: 0.1229\n", + "Batch 79/500 - Loss: 0.2682\n", + "Batch 80/500 - Loss: 0.1290\n", + "Batch 81/500 - Loss: 0.4488\n", + "Batch 82/500 - Loss: 0.5017\n", + "Batch 83/500 - Loss: 0.0984\n", + "Batch 84/500 - Loss: 0.1892\n", + "Batch 85/500 - Loss: 0.3934\n", + "Batch 86/500 - Loss: 0.0626\n", + "Batch 87/500 - Loss: 0.2360\n", + "Batch 88/500 - Loss: 0.1291\n", + "Batch 89/500 - Loss: 0.4068\n", + "Batch 90/500 - Loss: 0.3794\n", + "Batch 91/500 - Loss: 0.3642\n", + "Batch 92/500 - Loss: 0.8149\n", + "Batch 93/500 - Loss: 0.5150\n", + "Batch 94/500 - Loss: 0.2466\n", + "Batch 95/500 - Loss: 0.1064\n", + "Batch 96/500 - Loss: 0.1598\n", + "Batch 97/500 - Loss: 0.1883\n", + "Batch 98/500 - Loss: 0.0846\n", + "Batch 99/500 - Loss: 0.5780\n", + "Batch 100/500 - Loss: 0.0679\n", + "Batch 101/500 - Loss: 0.3355\n", + "Batch 102/500 - Loss: 0.5566\n", + "Batch 103/500 - Loss: 0.3689\n", + "Batch 104/500 - Loss: 0.0571\n", + "Batch 105/500 - Loss: 0.1499\n", + "Batch 106/500 - Loss: 0.5140\n", + "Batch 107/500 - Loss: 0.1155\n", + "Batch 108/500 - Loss: 0.3211\n", + "Batch 109/500 - Loss: 0.5738\n", + "Batch 110/500 - Loss: 0.5095\n", + "Batch 111/500 - Loss: 0.1991\n", + "Batch 112/500 - Loss: 0.2035\n", + "Batch 113/500 - Loss: 0.2287\n", + "Batch 114/500 - Loss: 0.1242\n", + "Batch 115/500 - Loss: 0.1093\n", + "Batch 116/500 - Loss: 0.1844\n", + "Batch 117/500 - Loss: 0.1454\n", + "Batch 118/500 - Loss: 0.1830\n", + "Batch 119/500 - Loss: 0.2893\n", + "Batch 120/500 - Loss: 0.1232\n", + "Batch 121/500 - Loss: 0.7088\n", + "Batch 122/500 - Loss: 0.1175\n", + "Batch 123/500 - Loss: 0.4654\n", + "Batch 124/500 - Loss: 0.1813\n", + "Batch 125/500 - Loss: 0.0933\n", + "Batch 126/500 - Loss: 0.0948\n", + 
"Batch 127/500 - Loss: 0.4848\n", + "Batch 128/500 - Loss: 0.1210\n", + "Batch 129/500 - Loss: 1.2120\n", + "Batch 130/500 - Loss: 0.2556\n", + "Batch 131/500 - Loss: 0.9122\n", + "Batch 132/500 - Loss: 0.2055\n", + "Batch 133/500 - Loss: 0.4270\n", + "Batch 134/500 - Loss: 0.0841\n", + "Batch 135/500 - Loss: 0.2280\n", + "Batch 136/500 - Loss: 0.4577\n", + "Batch 137/500 - Loss: 0.1193\n", + "Batch 138/500 - Loss: 0.6492\n", + "Batch 139/500 - Loss: 0.1087\n", + "Batch 140/500 - Loss: 0.0985\n", + "Batch 141/500 - Loss: 0.8649\n", + "Batch 142/500 - Loss: 0.1817\n", + "Batch 143/500 - Loss: 0.0925\n", + "Batch 144/500 - Loss: 0.1023\n", + "Batch 145/500 - Loss: 0.6524\n", + "Batch 146/500 - Loss: 0.3391\n", + "Batch 147/500 - Loss: 0.1778\n", + "Batch 148/500 - Loss: 0.1909\n", + "Batch 149/500 - Loss: 0.1243\n", + "Batch 150/500 - Loss: 0.7875\n", + "Batch 151/500 - Loss: 0.1508\n", + "Batch 152/500 - Loss: 0.3199\n", + "Batch 153/500 - Loss: 0.3203\n", + "Batch 154/500 - Loss: 0.4347\n", + "Batch 155/500 - Loss: 1.4849\n", + "Batch 156/500 - Loss: 0.5213\n", + "Batch 157/500 - Loss: 0.5842\n", + "Batch 158/500 - Loss: 0.1436\n", + "Batch 159/500 - Loss: 0.1312\n", + "Batch 160/500 - Loss: 0.1952\n", + "Batch 161/500 - Loss: 0.5211\n", + "Batch 162/500 - Loss: 0.7260\n", + "Batch 163/500 - Loss: 0.1239\n", + "Batch 164/500 - Loss: 0.0643\n", + "Batch 165/500 - Loss: 0.1530\n", + "Batch 166/500 - Loss: 0.3556\n", + "Batch 167/500 - Loss: 0.0845\n", + "Batch 168/500 - Loss: 0.2560\n", + "Batch 169/500 - Loss: 0.1297\n", + "Batch 170/500 - Loss: 0.7669\n", + "Batch 171/500 - Loss: 0.3308\n", + "Batch 172/500 - Loss: 0.0731\n", + "Batch 173/500 - Loss: 1.2811\n", + "Batch 174/500 - Loss: 0.4443\n", + "Batch 175/500 - Loss: 0.7396\n", + "Batch 176/500 - Loss: 0.1051\n", + "Batch 177/500 - Loss: 0.2669\n", + "Batch 178/500 - Loss: 0.1290\n", + "Batch 179/500 - Loss: 0.1545\n", + "Batch 180/500 - Loss: 0.0859\n", + "Batch 181/500 - Loss: 0.1726\n", + "Batch 182/500 - 
Loss: 1.0445\n", + "Batch 183/500 - Loss: 0.2782\n", + "Batch 184/500 - Loss: 0.2733\n", + "Batch 185/500 - Loss: 0.2049\n", + "Batch 186/500 - Loss: 0.1848\n", + "Batch 187/500 - Loss: 0.0887\n", + "Batch 188/500 - Loss: 0.1204\n", + "Batch 189/500 - Loss: 0.7292\n", + "Batch 190/500 - Loss: 0.1322\n", + "Batch 191/500 - Loss: 0.0703\n", + "Batch 192/500 - Loss: 0.3628\n", + "Batch 193/500 - Loss: 0.0340\n", + "Batch 194/500 - Loss: 0.1020\n", + "Batch 195/500 - Loss: 0.2571\n", + "Batch 196/500 - Loss: 0.2348\n", + "Batch 197/500 - Loss: 0.1348\n", + "Batch 198/500 - Loss: 0.1758\n", + "Batch 199/500 - Loss: 0.2808\n", + "Batch 200/500 - Loss: 0.1785\n", + "Batch 201/500 - Loss: 0.4143\n", + "Batch 202/500 - Loss: 0.4999\n", + "Batch 203/500 - Loss: 0.1457\n", + "Batch 204/500 - Loss: 0.0870\n", + "Batch 205/500 - Loss: 0.6826\n", + "Batch 206/500 - Loss: 0.2388\n", + "Batch 207/500 - Loss: 0.5494\n", + "Batch 208/500 - Loss: 0.6295\n", + "Batch 209/500 - Loss: 0.8118\n", + "Batch 210/500 - Loss: 0.5292\n", + "Batch 211/500 - Loss: 0.5129\n", + "Batch 212/500 - Loss: 0.1893\n", + "Batch 213/500 - Loss: 0.2999\n", + "Batch 214/500 - Loss: 0.2113\n", + "Batch 215/500 - Loss: 0.7190\n", + "Batch 216/500 - Loss: 0.1876\n", + "Batch 217/500 - Loss: 0.0788\n", + "Batch 218/500 - Loss: 0.1685\n", + "Batch 219/500 - Loss: 0.2177\n", + "Batch 220/500 - Loss: 0.3225\n", + "Batch 221/500 - Loss: 0.4692\n", + "Batch 222/500 - Loss: 0.1629\n", + "Batch 223/500 - Loss: 0.2234\n", + "Batch 224/500 - Loss: 0.0655\n", + "Batch 225/500 - Loss: 0.0704\n", + "Batch 226/500 - Loss: 1.0523\n", + "Batch 227/500 - Loss: 0.9610\n", + "Batch 228/500 - Loss: 0.2981\n", + "Batch 229/500 - Loss: 0.1106\n", + "Batch 230/500 - Loss: 0.2768\n", + "Batch 231/500 - Loss: 0.3445\n", + "Batch 232/500 - Loss: 0.1623\n", + "Batch 233/500 - Loss: 0.2262\n", + "Batch 234/500 - Loss: 0.3755\n", + "Batch 235/500 - Loss: 0.5030\n", + "Batch 236/500 - Loss: 0.1968\n", + "Batch 237/500 - Loss: 0.1277\n", + 
"Batch 238/500 - Loss: 0.0899\n", + "Batch 239/500 - Loss: 0.1381\n", + "Batch 240/500 - Loss: 0.8177\n", + "Batch 241/500 - Loss: 0.1912\n", + "Batch 242/500 - Loss: 0.5903\n", + "Batch 243/500 - Loss: 0.2232\n", + "Batch 244/500 - Loss: 0.7192\n", + "Batch 245/500 - Loss: 0.2394\n", + "Batch 246/500 - Loss: 0.4753\n", + "Batch 247/500 - Loss: 0.1605\n", + "Batch 248/500 - Loss: 0.2669\n", + "Batch 249/500 - Loss: 0.1471\n", + "Batch 250/500 - Loss: 1.1897\n", + "Batch 251/500 - Loss: 0.1625\n", + "Batch 252/500 - Loss: 0.2244\n", + "Batch 253/500 - Loss: 0.1260\n", + "Batch 254/500 - Loss: 0.1505\n", + "Batch 255/500 - Loss: 0.4957\n", + "Batch 256/500 - Loss: 0.1244\n", + "Batch 257/500 - Loss: 0.4758\n", + "Batch 258/500 - Loss: 0.4976\n", + "Batch 259/500 - Loss: 0.0784\n", + "Batch 260/500 - Loss: 0.2894\n", + "Batch 261/500 - Loss: 1.4303\n", + "Batch 262/500 - Loss: 0.2420\n", + "Batch 263/500 - Loss: 0.6280\n", + "Batch 264/500 - Loss: 0.8151\n", + "Batch 265/500 - Loss: 0.3511\n", + "Batch 266/500 - Loss: 0.1202\n", + "Batch 267/500 - Loss: 0.1306\n", + "Batch 268/500 - Loss: 0.3101\n", + "Batch 269/500 - Loss: 0.0773\n", + "Batch 270/500 - Loss: 0.3500\n", + "Batch 271/500 - Loss: 0.1331\n", + "Batch 272/500 - Loss: 0.1692\n", + "Batch 273/500 - Loss: 0.1392\n", + "Batch 274/500 - Loss: 1.1867\n", + "Batch 275/500 - Loss: 0.0528\n", + "Batch 276/500 - Loss: 0.1758\n", + "Batch 277/500 - Loss: 0.3017\n", + "Batch 278/500 - Loss: 0.0954\n", + "Batch 279/500 - Loss: 0.3325\n", + "Batch 280/500 - Loss: 0.1279\n", + "Batch 281/500 - Loss: 0.5042\n", + "Batch 282/500 - Loss: 0.3386\n", + "Batch 283/500 - Loss: 0.1551\n", + "Batch 284/500 - Loss: 0.0765\n", + "Batch 285/500 - Loss: 0.3893\n", + "Batch 286/500 - Loss: 0.2783\n", + "Batch 287/500 - Loss: 0.8932\n", + "Batch 288/500 - Loss: 0.1219\n", + "Batch 289/500 - Loss: 0.0907\n", + "Batch 290/500 - Loss: 0.4809\n", + "Batch 291/500 - Loss: 0.0499\n", + "Batch 292/500 - Loss: 0.2595\n", + "Batch 293/500 - 
Loss: 0.3697\n", + "Batch 294/500 - Loss: 0.1565\n", + "Batch 295/500 - Loss: 0.2431\n", + "Batch 296/500 - Loss: 0.4853\n", + "Batch 297/500 - Loss: 0.7122\n", + "Batch 298/500 - Loss: 0.1947\n", + "Batch 299/500 - Loss: 0.3387\n", + "Batch 300/500 - Loss: 0.2384\n", + "Batch 301/500 - Loss: 0.2788\n", + "Batch 302/500 - Loss: 0.5063\n", + "Batch 303/500 - Loss: 0.1341\n", + "Batch 304/500 - Loss: 0.1478\n", + "Batch 305/500 - Loss: 0.7875\n", + "Batch 306/500 - Loss: 0.7529\n", + "Batch 307/500 - Loss: 0.1535\n", + "Batch 308/500 - Loss: 0.0897\n", + "Batch 309/500 - Loss: 0.0622\n", + "Batch 310/500 - Loss: 0.1314\n", + "Batch 311/500 - Loss: 0.1661\n", + "Batch 312/500 - Loss: 0.1077\n", + "Batch 313/500 - Loss: 0.2242\n", + "Batch 314/500 - Loss: 0.6852\n", + "Batch 315/500 - Loss: 0.2503\n", + "Batch 316/500 - Loss: 0.4618\n", + "Batch 317/500 - Loss: 0.2938\n", + "Batch 318/500 - Loss: 0.1201\n", + "Batch 319/500 - Loss: 0.3379\n", + "Batch 320/500 - Loss: 0.3693\n", + "Batch 321/500 - Loss: 0.2206\n", + "Batch 322/500 - Loss: 0.6656\n", + "Batch 323/500 - Loss: 0.1628\n", + "Batch 324/500 - Loss: 0.2400\n", + "Batch 325/500 - Loss: 0.2923\n", + "Batch 326/500 - Loss: 0.6235\n", + "Batch 327/500 - Loss: 0.8450\n", + "Batch 328/500 - Loss: 0.0603\n", + "Batch 329/500 - Loss: 0.1367\n", + "Batch 330/500 - Loss: 0.6863\n", + "Batch 331/500 - Loss: 0.5162\n", + "Batch 332/500 - Loss: 0.5464\n", + "Batch 333/500 - Loss: 0.2750\n", + "Batch 334/500 - Loss: 0.1253\n", + "Batch 335/500 - Loss: 0.3755\n", + "Batch 336/500 - Loss: 0.3969\n", + "Batch 337/500 - Loss: 1.2888\n", + "Batch 338/500 - Loss: 0.4268\n", + "Batch 339/500 - Loss: 0.1328\n", + "Batch 340/500 - Loss: 0.3707\n", + "Batch 341/500 - Loss: 0.2298\n", + "Batch 342/500 - Loss: 0.3454\n", + "Batch 343/500 - Loss: 0.4881\n", + "Batch 344/500 - Loss: 0.7313\n", + "Batch 345/500 - Loss: 0.2807\n", + "Batch 346/500 - Loss: 0.1280\n", + "Batch 347/500 - Loss: 0.1577\n", + "Batch 348/500 - Loss: 0.2003\n", + 
"Batch 349/500 - Loss: 0.5809\n", + "Batch 350/500 - Loss: 0.3474\n", + "Batch 351/500 - Loss: 0.2254\n", + "Batch 352/500 - Loss: 0.1716\n", + "Batch 353/500 - Loss: 0.2574\n", + "Batch 354/500 - Loss: 0.8427\n", + "Batch 355/500 - Loss: 0.0904\n", + "Batch 356/500 - Loss: 0.2395\n", + "Batch 357/500 - Loss: 1.2008\n", + "Batch 358/500 - Loss: 0.7084\n", + "Batch 359/500 - Loss: 0.3188\n", + "Batch 360/500 - Loss: 0.1841\n", + "Batch 361/500 - Loss: 0.8622\n", + "Batch 362/500 - Loss: 0.1315\n", + "Batch 363/500 - Loss: 0.3804\n", + "Batch 364/500 - Loss: 0.2421\n", + "Batch 365/500 - Loss: 0.4438\n", + "Batch 366/500 - Loss: 0.1898\n", + "Batch 367/500 - Loss: 0.1529\n", + "Batch 368/500 - Loss: 0.4213\n", + "Batch 369/500 - Loss: 0.1404\n", + "Batch 370/500 - Loss: 0.5048\n", + "Batch 371/500 - Loss: 0.2865\n", + "Batch 372/500 - Loss: 0.1256\n", + "Batch 373/500 - Loss: 0.2924\n", + "Batch 374/500 - Loss: 0.4869\n", + "Batch 375/500 - Loss: 0.0772\n", + "Batch 376/500 - Loss: 0.2545\n", + "Batch 377/500 - Loss: 0.1624\n", + "Batch 378/500 - Loss: 0.3642\n", + "Batch 379/500 - Loss: 0.4850\n", + "Batch 380/500 - Loss: 0.3367\n", + "Batch 381/500 - Loss: 0.9095\n", + "Batch 382/500 - Loss: 0.1389\n", + "Batch 383/500 - Loss: 1.0748\n", + "Batch 384/500 - Loss: 0.2935\n", + "Batch 385/500 - Loss: 0.3051\n", + "Batch 386/500 - Loss: 0.2635\n", + "Batch 387/500 - Loss: 0.3780\n", + "Batch 388/500 - Loss: 0.2261\n", + "Batch 389/500 - Loss: 0.1010\n", + "Batch 390/500 - Loss: 0.3650\n", + "Batch 391/500 - Loss: 0.3075\n", + "Batch 392/500 - Loss: 0.0834\n", + "Batch 393/500 - Loss: 0.2991\n", + "Batch 394/500 - Loss: 0.0767\n", + "Batch 395/500 - Loss: 0.4056\n", + "Batch 396/500 - Loss: 0.1122\n", + "Batch 397/500 - Loss: 0.8094\n", + "Batch 398/500 - Loss: 0.0626\n", + "Batch 399/500 - Loss: 0.3582\n", + "Batch 400/500 - Loss: 0.1165\n", + "Batch 401/500 - Loss: 0.3878\n", + "Batch 402/500 - Loss: 0.0989\n", + "Batch 403/500 - Loss: 0.2203\n", + "Batch 404/500 - 
Loss: 0.1313\n", + "Batch 405/500 - Loss: 0.3783\n", + "Batch 406/500 - Loss: 0.6316\n", + "Batch 407/500 - Loss: 0.1158\n", + "Batch 408/500 - Loss: 0.0413\n", + "Batch 409/500 - Loss: 0.3419\n", + "Batch 410/500 - Loss: 0.5152\n", + "Batch 411/500 - Loss: 0.2303\n", + "Batch 412/500 - Loss: 0.1443\n", + "Batch 413/500 - Loss: 0.7070\n", + "Batch 414/500 - Loss: 0.3266\n", + "Batch 415/500 - Loss: 0.9444\n", + "Batch 416/500 - Loss: 0.0714\n", + "Batch 417/500 - Loss: 0.1251\n", + "Batch 418/500 - Loss: 0.2898\n", + "Batch 419/500 - Loss: 0.1085\n", + "Batch 420/500 - Loss: 0.4756\n", + "Batch 421/500 - Loss: 0.2250\n", + "Batch 422/500 - Loss: 0.5773\n", + "Batch 423/500 - Loss: 0.6136\n", + "Batch 424/500 - Loss: 0.3124\n", + "Batch 425/500 - Loss: 0.5033\n", + "Batch 426/500 - Loss: 0.3412\n", + "Batch 427/500 - Loss: 0.5507\n", + "Batch 428/500 - Loss: 0.3463\n", + "Batch 429/500 - Loss: 0.3675\n", + "Batch 430/500 - Loss: 0.2752\n", + "Batch 431/500 - Loss: 0.6201\n", + "Batch 432/500 - Loss: 0.1652\n", + "Batch 433/500 - Loss: 0.7164\n", + "Batch 434/500 - Loss: 0.2901\n", + "Batch 435/500 - Loss: 0.6070\n", + "Batch 436/500 - Loss: 0.2194\n", + "Batch 437/500 - Loss: 0.3123\n", + "Batch 438/500 - Loss: 0.2233\n", + "Batch 439/500 - Loss: 0.3441\n", + "Batch 440/500 - Loss: 0.3249\n", + "Batch 441/500 - Loss: 0.2403\n", + "Batch 442/500 - Loss: 0.4628\n", + "Batch 443/500 - Loss: 0.2265\n", + "Batch 444/500 - Loss: 0.1510\n", + "Batch 445/500 - Loss: 0.2511\n", + "Batch 446/500 - Loss: 0.0615\n", + "Batch 447/500 - Loss: 0.6864\n", + "Batch 448/500 - Loss: 0.1568\n", + "Batch 449/500 - Loss: 0.2983\n", + "Batch 450/500 - Loss: 0.2521\n", + "Batch 451/500 - Loss: 0.1160\n", + "Batch 452/500 - Loss: 0.6169\n", + "Batch 453/500 - Loss: 0.0979\n", + "Batch 454/500 - Loss: 0.2853\n", + "Batch 455/500 - Loss: 0.0824\n", + "Batch 456/500 - Loss: 0.1820\n", + "Batch 457/500 - Loss: 0.2836\n", + "Batch 458/500 - Loss: 0.1754\n", + "Batch 459/500 - Loss: 0.0943\n", + 
"Batch 460/500 - Loss: 0.1902\n", + "Batch 461/500 - Loss: 0.3863\n", + "Batch 462/500 - Loss: 0.4342\n", + "Batch 463/500 - Loss: 0.2240\n", + "Batch 464/500 - Loss: 0.2201\n", + "Batch 465/500 - Loss: 0.5995\n", + "Batch 466/500 - Loss: 0.0349\n", + "Batch 467/500 - Loss: 0.3580\n", + "Batch 468/500 - Loss: 0.0737\n", + "Batch 469/500 - Loss: 0.1992\n", + "Batch 470/500 - Loss: 0.1651\n", + "Batch 471/500 - Loss: 0.2961\n", + "Batch 472/500 - Loss: 0.2289\n", + "Batch 473/500 - Loss: 0.3617\n", + "Batch 474/500 - Loss: 0.4944\n", + "Batch 475/500 - Loss: 0.3407\n", + "Batch 476/500 - Loss: 0.2885\n", + "Batch 477/500 - Loss: 0.2081\n", + "Batch 478/500 - Loss: 0.2380\n", + "Batch 479/500 - Loss: 0.7236\n", + "Batch 480/500 - Loss: 0.4623\n", + "Batch 481/500 - Loss: 0.1877\n", + "Batch 482/500 - Loss: 0.3608\n", + "Batch 483/500 - Loss: 0.3214\n", + "Batch 484/500 - Loss: 0.6271\n", + "Batch 485/500 - Loss: 0.3240\n", + "Batch 486/500 - Loss: 0.0858\n", + "Batch 487/500 - Loss: 0.1321\n", + "Batch 488/500 - Loss: 0.6706\n", + "Batch 489/500 - Loss: 0.2685\n", + "Batch 490/500 - Loss: 0.0940\n", + "Batch 491/500 - Loss: 0.3303\n", + "Batch 492/500 - Loss: 0.1390\n", + "Batch 493/500 - Loss: 0.3408\n", + "Batch 494/500 - Loss: 0.1312\n", + "Batch 495/500 - Loss: 0.3021\n", + "Batch 496/500 - Loss: 0.3355\n", + "Batch 497/500 - Loss: 0.7932\n", + "Batch 498/500 - Loss: 1.0957\n", + "Batch 499/500 - Loss: 0.2225\n", + "Batch 500/500 - Loss: 0.4685\n", + "Epoch 4 - Total Loss: 166.5891\n", + "Model checkpoint saved at epoch 4\n", + "Batch 1/500 - Loss: 0.2667\n", + "Batch 2/500 - Loss: 0.1030\n", + "Batch 3/500 - Loss: 0.1118\n", + "Batch 4/500 - Loss: 0.3377\n", + "Batch 5/500 - Loss: 0.4263\n", + "Batch 6/500 - Loss: 0.3537\n", + "Batch 7/500 - Loss: 0.3697\n", + "Batch 8/500 - Loss: 0.9047\n", + "Batch 9/500 - Loss: 0.6515\n", + "Batch 10/500 - Loss: 0.1086\n", + "Batch 11/500 - Loss: 0.5107\n", + "Batch 12/500 - Loss: 0.3157\n", + "Batch 13/500 - Loss: 0.3176\n", 
+ "Batch 14/500 - Loss: 0.4237\n", + "Batch 15/500 - Loss: 0.6649\n", + "Batch 16/500 - Loss: 0.2732\n", + "Batch 17/500 - Loss: 0.3500\n", + "Batch 18/500 - Loss: 0.2679\n", + "Batch 19/500 - Loss: 0.0943\n", + "Batch 20/500 - Loss: 0.0838\n", + "Batch 21/500 - Loss: 0.2432\n", + "Batch 22/500 - Loss: 0.1431\n", + "Batch 23/500 - Loss: 0.1016\n", + "Batch 24/500 - Loss: 0.3194\n", + "Batch 25/500 - Loss: 0.2237\n", + "Batch 26/500 - Loss: 0.1930\n", + "Batch 27/500 - Loss: 0.6681\n", + "Batch 28/500 - Loss: 0.0781\n", + "Batch 29/500 - Loss: 0.2189\n", + "Batch 30/500 - Loss: 0.3693\n", + "Batch 31/500 - Loss: 0.0578\n", + "Batch 32/500 - Loss: 0.4836\n", + "Batch 33/500 - Loss: 0.3186\n", + "Batch 34/500 - Loss: 0.4412\n", + "Batch 35/500 - Loss: 0.1583\n", + "Batch 36/500 - Loss: 0.1085\n", + "Batch 37/500 - Loss: 0.1714\n", + "Batch 38/500 - Loss: 0.1297\n", + "Batch 39/500 - Loss: 0.3188\n", + "Batch 40/500 - Loss: 0.1455\n", + "Batch 41/500 - Loss: 0.1925\n", + "Batch 42/500 - Loss: 0.6325\n", + "Batch 43/500 - Loss: 0.2917\n", + "Batch 44/500 - Loss: 0.2164\n", + "Batch 45/500 - Loss: 0.3667\n", + "Batch 46/500 - Loss: 0.1239\n", + "Batch 47/500 - Loss: 0.2030\n", + "Batch 48/500 - Loss: 0.1035\n", + "Batch 49/500 - Loss: 0.2342\n", + "Batch 50/500 - Loss: 0.1595\n", + "Batch 51/500 - Loss: 0.2549\n", + "Batch 52/500 - Loss: 0.1245\n", + "Batch 53/500 - Loss: 1.5810\n", + "Batch 54/500 - Loss: 0.1108\n", + "Batch 55/500 - Loss: 0.3554\n", + "Batch 56/500 - Loss: 0.5443\n", + "Batch 57/500 - Loss: 0.2084\n", + "Batch 58/500 - Loss: 0.1145\n", + "Batch 59/500 - Loss: 0.4171\n", + "Batch 60/500 - Loss: 0.2386\n", + "Batch 61/500 - Loss: 0.1164\n", + "Batch 62/500 - Loss: 0.1137\n", + "Batch 63/500 - Loss: 0.3646\n", + "Batch 64/500 - Loss: 0.0798\n", + "Batch 65/500 - Loss: 0.1858\n", + "Batch 66/500 - Loss: 0.2386\n", + "Batch 67/500 - Loss: 0.1260\n", + "Batch 68/500 - Loss: 0.1324\n", + "Batch 69/500 - Loss: 0.1234\n", + "Batch 70/500 - Loss: 0.1232\n", + 
"Batch 71/500 - Loss: 0.3451\n", + "Batch 72/500 - Loss: 0.0889\n", + "Batch 73/500 - Loss: 0.0747\n", + "Batch 74/500 - Loss: 0.1560\n", + "Batch 75/500 - Loss: 0.1571\n", + "Batch 76/500 - Loss: 0.1839\n", + "Batch 77/500 - Loss: 0.6183\n", + "Batch 78/500 - Loss: 0.3578\n", + "Batch 79/500 - Loss: 0.0231\n", + "Batch 80/500 - Loss: 0.0543\n", + "Batch 81/500 - Loss: 0.1777\n", + "Batch 82/500 - Loss: 0.4858\n", + "Batch 83/500 - Loss: 0.0323\n", + "Batch 84/500 - Loss: 0.0559\n", + "Batch 85/500 - Loss: 0.1068\n", + "Batch 86/500 - Loss: 0.0975\n", + "Batch 87/500 - Loss: 0.2919\n", + "Batch 88/500 - Loss: 0.2963\n", + "Batch 89/500 - Loss: 0.4075\n", + "Batch 90/500 - Loss: 0.6353\n", + "Batch 91/500 - Loss: 0.5386\n", + "Batch 92/500 - Loss: 0.1202\n", + "Batch 93/500 - Loss: 0.1073\n", + "Batch 94/500 - Loss: 0.7504\n", + "Batch 95/500 - Loss: 0.2204\n", + "Batch 96/500 - Loss: 0.1904\n", + "Batch 97/500 - Loss: 0.1065\n", + "Batch 98/500 - Loss: 0.2679\n", + "Batch 99/500 - Loss: 0.2248\n", + "Batch 100/500 - Loss: 0.3470\n", + "Batch 101/500 - Loss: 0.1356\n", + "Batch 102/500 - Loss: 0.8742\n", + "Batch 103/500 - Loss: 0.2630\n", + "Batch 104/500 - Loss: 0.0894\n", + "Batch 105/500 - Loss: 0.4056\n", + "Batch 106/500 - Loss: 0.0777\n", + "Batch 107/500 - Loss: 0.2800\n", + "Batch 108/500 - Loss: 0.3670\n", + "Batch 109/500 - Loss: 0.1605\n", + "Batch 110/500 - Loss: 0.1799\n", + "Batch 111/500 - Loss: 0.3157\n", + "Batch 112/500 - Loss: 0.3002\n", + "Batch 113/500 - Loss: 0.1650\n", + "Batch 114/500 - Loss: 0.1875\n", + "Batch 115/500 - Loss: 0.0692\n", + "Batch 116/500 - Loss: 0.3355\n", + "Batch 117/500 - Loss: 0.1725\n", + "Batch 118/500 - Loss: 0.2174\n", + "Batch 119/500 - Loss: 0.1364\n", + "Batch 120/500 - Loss: 0.1350\n", + "Batch 121/500 - Loss: 0.0859\n", + "Batch 122/500 - Loss: 0.1024\n", + "Batch 123/500 - Loss: 0.5533\n", + "Batch 124/500 - Loss: 0.0624\n", + "Batch 125/500 - Loss: 0.2538\n", + "Batch 126/500 - Loss: 0.3044\n", + "Batch 
127/500 - Loss: 0.1629\n", + "Batch 128/500 - Loss: 0.4045\n", + "Batch 129/500 - Loss: 0.2156\n", + "Batch 130/500 - Loss: 0.2771\n", + "Batch 131/500 - Loss: 0.1434\n", + "Batch 132/500 - Loss: 0.4467\n", + "Batch 133/500 - Loss: 0.3029\n", + "Batch 134/500 - Loss: 0.4181\n", + "Batch 135/500 - Loss: 0.1224\n", + "Batch 136/500 - Loss: 0.4201\n", + "Batch 137/500 - Loss: 0.0873\n", + "Batch 138/500 - Loss: 0.4336\n", + "Batch 139/500 - Loss: 0.4708\n", + "Batch 140/500 - Loss: 0.3749\n", + "Batch 141/500 - Loss: 0.2298\n", + "Batch 142/500 - Loss: 0.2591\n", + "Batch 143/500 - Loss: 0.0805\n", + "Batch 144/500 - Loss: 0.1316\n", + "Batch 145/500 - Loss: 0.1899\n", + "Batch 146/500 - Loss: 0.5707\n", + "Batch 147/500 - Loss: 1.0653\n", + "Batch 148/500 - Loss: 0.3165\n", + "Batch 149/500 - Loss: 0.0688\n", + "Batch 150/500 - Loss: 0.1563\n", + "Batch 151/500 - Loss: 0.1442\n", + "Batch 152/500 - Loss: 0.4617\n", + "Batch 153/500 - Loss: 0.0903\n", + "Batch 154/500 - Loss: 0.0536\n", + "Batch 155/500 - Loss: 0.1007\n", + "Batch 156/500 - Loss: 0.5016\n", + "Batch 157/500 - Loss: 0.4316\n", + "Batch 158/500 - Loss: 0.1832\n", + "Batch 159/500 - Loss: 0.0836\n", + "Batch 160/500 - Loss: 0.2950\n", + "Batch 161/500 - Loss: 0.2136\n", + "Batch 162/500 - Loss: 0.0923\n", + "Batch 163/500 - Loss: 0.3303\n", + "Batch 164/500 - Loss: 0.2405\n", + "Batch 165/500 - Loss: 0.1498\n", + "Batch 166/500 - Loss: 0.0975\n", + "Batch 167/500 - Loss: 0.2674\n", + "Batch 168/500 - Loss: 0.3135\n", + "Batch 169/500 - Loss: 0.0510\n", + "Batch 170/500 - Loss: 0.7703\n", + "Batch 171/500 - Loss: 0.0777\n", + "Batch 172/500 - Loss: 0.0898\n", + "Batch 173/500 - Loss: 0.5471\n", + "Batch 174/500 - Loss: 0.0852\n", + "Batch 175/500 - Loss: 0.4420\n", + "Batch 176/500 - Loss: 0.1340\n", + "Batch 177/500 - Loss: 0.2574\n", + "Batch 178/500 - Loss: 0.0826\n", + "Batch 179/500 - Loss: 0.0892\n", + "Batch 180/500 - Loss: 0.4561\n", + "Batch 181/500 - Loss: 0.1753\n", + "Batch 182/500 - Loss: 
0.3187\n", + "Batch 183/500 - Loss: 0.6967\n", + "Batch 184/500 - Loss: 0.3846\n", + "Batch 185/500 - Loss: 0.4081\n", + "Batch 186/500 - Loss: 0.2092\n", + "Batch 187/500 - Loss: 0.2139\n", + "Batch 188/500 - Loss: 0.2708\n", + "Batch 189/500 - Loss: 0.2065\n", + "Batch 190/500 - Loss: 0.6639\n", + "Batch 191/500 - Loss: 0.1353\n", + "Batch 192/500 - Loss: 0.2446\n", + "Batch 193/500 - Loss: 0.0515\n", + "Batch 194/500 - Loss: 0.1859\n", + "Batch 195/500 - Loss: 0.9128\n", + "Batch 196/500 - Loss: 0.0642\n", + "Batch 197/500 - Loss: 0.3347\n", + "Batch 198/500 - Loss: 0.0955\n", + "Batch 199/500 - Loss: 0.0888\n", + "Batch 200/500 - Loss: 0.4394\n", + "Batch 201/500 - Loss: 0.1143\n", + "Batch 202/500 - Loss: 0.2335\n", + "Batch 203/500 - Loss: 0.0964\n", + "Batch 204/500 - Loss: 0.0919\n", + "Batch 205/500 - Loss: 0.0883\n", + "Batch 206/500 - Loss: 0.2062\n", + "Batch 207/500 - Loss: 0.4056\n", + "Batch 208/500 - Loss: 0.3706\n", + "Batch 209/500 - Loss: 0.4281\n", + "Batch 210/500 - Loss: 0.0869\n", + "Batch 211/500 - Loss: 0.4314\n", + "Batch 212/500 - Loss: 0.8261\n", + "Batch 213/500 - Loss: 0.3807\n", + "Batch 214/500 - Loss: 0.2437\n", + "Batch 215/500 - Loss: 0.1245\n", + "Batch 216/500 - Loss: 0.3607\n", + "Batch 217/500 - Loss: 0.1158\n", + "Batch 218/500 - Loss: 0.1729\n", + "Batch 219/500 - Loss: 0.5422\n", + "Batch 220/500 - Loss: 0.1126\n", + "Batch 221/500 - Loss: 0.3218\n", + "Batch 222/500 - Loss: 0.0926\n", + "Batch 223/500 - Loss: 0.2074\n", + "Batch 224/500 - Loss: 0.6327\n", + "Batch 225/500 - Loss: 0.5612\n", + "Batch 226/500 - Loss: 0.4512\n", + "Batch 227/500 - Loss: 0.2036\n", + "Batch 228/500 - Loss: 0.7187\n", + "Batch 229/500 - Loss: 0.0790\n", + "Batch 230/500 - Loss: 0.4769\n", + "Batch 231/500 - Loss: 0.1082\n", + "Batch 232/500 - Loss: 0.2224\n", + "Batch 233/500 - Loss: 0.5222\n", + "Batch 234/500 - Loss: 0.1800\n", + "Batch 235/500 - Loss: 0.1973\n", + "Batch 236/500 - Loss: 0.4183\n", + "Batch 237/500 - Loss: 0.2725\n", + "Batch 
238/500 - Loss: 0.6699\n", + "Batch 239/500 - Loss: 0.1804\n", + "Batch 240/500 - Loss: 0.1815\n", + "Batch 241/500 - Loss: 0.6202\n", + "Batch 242/500 - Loss: 0.1914\n", + "Batch 243/500 - Loss: 0.1747\n", + "Batch 244/500 - Loss: 0.2567\n", + "Batch 245/500 - Loss: 0.4905\n", + "Batch 246/500 - Loss: 0.1785\n", + "Batch 247/500 - Loss: 0.1420\n", + "Batch 248/500 - Loss: 0.2206\n", + "Batch 249/500 - Loss: 0.1748\n", + "Batch 250/500 - Loss: 0.8775\n", + "Batch 251/500 - Loss: 0.0930\n", + "Batch 252/500 - Loss: 0.1806\n", + "Batch 253/500 - Loss: 0.0946\n", + "Batch 254/500 - Loss: 0.1616\n", + "Batch 255/500 - Loss: 0.1547\n", + "Batch 256/500 - Loss: 0.1179\n", + "Batch 257/500 - Loss: 0.2485\n", + "Batch 258/500 - Loss: 0.3028\n", + "Batch 259/500 - Loss: 0.2374\n", + "Batch 260/500 - Loss: 0.7493\n", + "Batch 261/500 - Loss: 0.2191\n", + "Batch 262/500 - Loss: 0.0843\n", + "Batch 263/500 - Loss: 0.1097\n", + "Batch 264/500 - Loss: 0.4451\n", + "Batch 265/500 - Loss: 0.3272\n", + "Batch 266/500 - Loss: 0.3332\n", + "Batch 267/500 - Loss: 0.2832\n", + "Batch 268/500 - Loss: 0.1830\n", + "Batch 269/500 - Loss: 0.1227\n", + "Batch 270/500 - Loss: 0.2330\n", + "Batch 271/500 - Loss: 0.1196\n", + "Batch 272/500 - Loss: 0.0685\n", + "Batch 273/500 - Loss: 0.4278\n", + "Batch 274/500 - Loss: 0.0603\n", + "Batch 275/500 - Loss: 0.1824\n", + "Batch 276/500 - Loss: 0.6970\n", + "Batch 277/500 - Loss: 0.0686\n", + "Batch 278/500 - Loss: 0.2634\n", + "Batch 279/500 - Loss: 0.8443\n", + "Batch 280/500 - Loss: 0.1655\n", + "Batch 281/500 - Loss: 0.0825\n", + "Batch 282/500 - Loss: 0.0970\n", + "Batch 283/500 - Loss: 0.4392\n", + "Batch 284/500 - Loss: 0.1687\n", + "Batch 285/500 - Loss: 0.1804\n", + "Batch 286/500 - Loss: 0.1848\n", + "Batch 287/500 - Loss: 0.0788\n", + "Batch 288/500 - Loss: 0.0905\n", + "Batch 289/500 - Loss: 0.0863\n", + "Batch 290/500 - Loss: 0.0736\n", + "Batch 291/500 - Loss: 0.0656\n", + "Batch 292/500 - Loss: 0.1736\n", + "Batch 293/500 - Loss: 
0.3347\n", + "Batch 294/500 - Loss: 0.1513\n", + "Batch 295/500 - Loss: 0.0553\n", + "Batch 296/500 - Loss: 0.2568\n", + "Batch 297/500 - Loss: 0.3282\n", + "Batch 298/500 - Loss: 0.3295\n", + "Batch 299/500 - Loss: 0.1826\n", + "Batch 300/500 - Loss: 0.1710\n", + "Batch 301/500 - Loss: 0.1044\n", + "Batch 302/500 - Loss: 0.0665\n", + "Batch 303/500 - Loss: 0.6141\n", + "Batch 304/500 - Loss: 0.1281\n", + "Batch 305/500 - Loss: 0.0903\n", + "Batch 306/500 - Loss: 0.4516\n", + "Batch 307/500 - Loss: 0.2570\n", + "Batch 308/500 - Loss: 0.0471\n", + "Batch 309/500 - Loss: 0.1200\n", + "Batch 310/500 - Loss: 0.5449\n", + "Batch 311/500 - Loss: 0.1037\n", + "Batch 312/500 - Loss: 0.1981\n", + "Batch 313/500 - Loss: 0.2555\n", + "Batch 314/500 - Loss: 0.0535\n", + "Batch 315/500 - Loss: 0.1479\n", + "Batch 316/500 - Loss: 1.1511\n", + "Batch 317/500 - Loss: 0.5917\n", + "Batch 318/500 - Loss: 0.3239\n", + "Batch 319/500 - Loss: 0.1648\n", + "Batch 320/500 - Loss: 0.2547\n", + "Batch 321/500 - Loss: 0.2010\n", + "Batch 322/500 - Loss: 0.2966\n", + "Batch 323/500 - Loss: 0.6262\n", + "Batch 324/500 - Loss: 0.3061\n", + "Batch 325/500 - Loss: 0.3433\n", + "Batch 326/500 - Loss: 0.2296\n", + "Batch 327/500 - Loss: 0.0771\n", + "Batch 328/500 - Loss: 0.4961\n", + "Batch 329/500 - Loss: 0.1803\n", + "Batch 330/500 - Loss: 0.7212\n", + "Batch 331/500 - Loss: 0.1268\n", + "Batch 332/500 - Loss: 0.1691\n", + "Batch 333/500 - Loss: 0.1178\n", + "Batch 334/500 - Loss: 0.1025\n", + "Batch 335/500 - Loss: 0.1398\n", + "Batch 336/500 - Loss: 0.5700\n", + "Batch 337/500 - Loss: 0.2652\n", + "Batch 338/500 - Loss: 0.2517\n", + "Batch 339/500 - Loss: 0.1272\n", + "Batch 340/500 - Loss: 0.1753\n", + "Batch 341/500 - Loss: 0.2158\n", + "Batch 342/500 - Loss: 0.1614\n", + "Batch 343/500 - Loss: 0.3417\n", + "Batch 344/500 - Loss: 0.0438\n", + "Batch 345/500 - Loss: 0.4962\n", + "Batch 346/500 - Loss: 0.1854\n", + "Batch 347/500 - Loss: 0.1030\n", + "Batch 348/500 - Loss: 0.1104\n", + "Batch 
349/500 - Loss: 0.0751\n", + "Batch 350/500 - Loss: 0.7502\n", + "Batch 351/500 - Loss: 0.3599\n", + "Batch 352/500 - Loss: 0.5234\n", + "Batch 353/500 - Loss: 0.7168\n", + "Batch 354/500 - Loss: 0.1618\n", + "Batch 355/500 - Loss: 0.4682\n", + "Batch 356/500 - Loss: 0.3514\n", + "Batch 357/500 - Loss: 0.6153\n", + "Batch 358/500 - Loss: 0.3473\n", + "Batch 359/500 - Loss: 0.3019\n", + "Batch 360/500 - Loss: 0.5604\n", + "Batch 361/500 - Loss: 0.1096\n", + "Batch 362/500 - Loss: 0.1163\n", + "Batch 363/500 - Loss: 0.1962\n", + "Batch 364/500 - Loss: 0.1219\n", + "Batch 365/500 - Loss: 0.0899\n", + "Batch 366/500 - Loss: 0.5629\n", + "Batch 367/500 - Loss: 0.5149\n", + "Batch 368/500 - Loss: 0.3062\n", + "Batch 369/500 - Loss: 0.1156\n", + "Batch 370/500 - Loss: 0.4846\n", + "Batch 371/500 - Loss: 0.6553\n", + "Batch 372/500 - Loss: 0.1051\n", + "Batch 373/500 - Loss: 0.4290\n", + "Batch 374/500 - Loss: 0.2221\n", + "Batch 375/500 - Loss: 0.2579\n", + "Batch 376/500 - Loss: 0.8101\n", + "Batch 377/500 - Loss: 0.2643\n", + "Batch 378/500 - Loss: 0.1263\n", + "Batch 379/500 - Loss: 0.5005\n", + "Batch 380/500 - Loss: 0.2090\n", + "Batch 381/500 - Loss: 0.2087\n", + "Batch 382/500 - Loss: 0.1714\n", + "Batch 383/500 - Loss: 0.1469\n", + "Batch 384/500 - Loss: 0.2147\n", + "Batch 385/500 - Loss: 0.1152\n", + "Batch 386/500 - Loss: 0.3439\n", + "Batch 387/500 - Loss: 0.1594\n", + "Batch 388/500 - Loss: 0.2598\n", + "Batch 389/500 - Loss: 0.7644\n", + "Batch 390/500 - Loss: 0.1863\n", + "Batch 391/500 - Loss: 0.3792\n", + "Batch 392/500 - Loss: 1.0234\n", + "Batch 393/500 - Loss: 0.1994\n", + "Batch 394/500 - Loss: 0.3494\n", + "Batch 395/500 - Loss: 0.2038\n", + "Batch 396/500 - Loss: 0.0400\n", + "Batch 397/500 - Loss: 0.6992\n", + "Batch 398/500 - Loss: 0.1349\n", + "Batch 399/500 - Loss: 0.0673\n", + "Batch 400/500 - Loss: 0.1293\n", + "Batch 401/500 - Loss: 0.4349\n", + "Batch 402/500 - Loss: 1.0445\n", + "Batch 403/500 - Loss: 0.0894\n", + "Batch 404/500 - Loss: 
0.1056\n", + "Batch 405/500 - Loss: 0.4698\n", + "Batch 406/500 - Loss: 0.1249\n", + "Batch 407/500 - Loss: 0.3848\n", + "Batch 408/500 - Loss: 0.2504\n", + "Batch 409/500 - Loss: 0.2250\n", + "Batch 410/500 - Loss: 0.3758\n", + "Batch 411/500 - Loss: 0.1071\n", + "Batch 412/500 - Loss: 0.1106\n", + "Batch 413/500 - Loss: 0.0902\n", + "Batch 414/500 - Loss: 0.1802\n", + "Batch 415/500 - Loss: 0.1430\n", + "Batch 416/500 - Loss: 0.2953\n", + "Batch 417/500 - Loss: 0.5009\n", + "Batch 418/500 - Loss: 0.2318\n", + "Batch 419/500 - Loss: 0.3382\n", + "Batch 420/500 - Loss: 0.6467\n", + "Batch 421/500 - Loss: 0.1065\n", + "Batch 422/500 - Loss: 0.1799\n", + "Batch 423/500 - Loss: 0.3623\n", + "Batch 424/500 - Loss: 0.1021\n", + "Batch 425/500 - Loss: 0.1344\n", + "Batch 426/500 - Loss: 0.4057\n", + "Batch 427/500 - Loss: 0.1862\n", + "Batch 428/500 - Loss: 0.3567\n", + "Batch 429/500 - Loss: 0.4425\n", + "Batch 430/500 - Loss: 0.2412\n", + "Batch 431/500 - Loss: 0.4378\n", + "Batch 432/500 - Loss: 0.5105\n", + "Batch 433/500 - Loss: 0.3715\n", + "Batch 434/500 - Loss: 0.5242\n", + "Batch 435/500 - Loss: 0.0956\n", + "Batch 436/500 - Loss: 0.0779\n", + "Batch 437/500 - Loss: 0.0845\n", + "Batch 438/500 - Loss: 0.8495\n", + "Batch 439/500 - Loss: 0.2766\n", + "Batch 440/500 - Loss: 0.4188\n", + "Batch 441/500 - Loss: 0.9636\n", + "Batch 442/500 - Loss: 0.0620\n", + "Batch 443/500 - Loss: 0.4478\n", + "Batch 444/500 - Loss: 0.3372\n", + "Batch 445/500 - Loss: 0.1423\n", + "Batch 446/500 - Loss: 0.6848\n", + "Batch 447/500 - Loss: 0.0532\n", + "Batch 448/500 - Loss: 0.5396\n", + "Batch 449/500 - Loss: 0.0837\n", + "Batch 450/500 - Loss: 0.1182\n", + "Batch 451/500 - Loss: 0.0530\n", + "Batch 452/500 - Loss: 0.1481\n", + "Batch 453/500 - Loss: 0.4325\n", + "Batch 454/500 - Loss: 0.0854\n", + "Batch 455/500 - Loss: 0.1357\n", + "Batch 456/500 - Loss: 0.2220\n", + "Batch 457/500 - Loss: 0.4093\n", + "Batch 458/500 - Loss: 0.1276\n", + "Batch 459/500 - Loss: 0.0549\n", + "Batch 
460/500 - Loss: 0.2486\n", + "Batch 461/500 - Loss: 0.6931\n", + "Batch 462/500 - Loss: 0.4147\n", + "Batch 463/500 - Loss: 0.5868\n", + "Batch 464/500 - Loss: 0.2095\n", + "Batch 465/500 - Loss: 0.3165\n", + "Batch 466/500 - Loss: 0.5854\n", + "Batch 467/500 - Loss: 0.1197\n", + "Batch 468/500 - Loss: 0.1313\n", + "Batch 469/500 - Loss: 0.0897\n", + "Batch 470/500 - Loss: 0.5632\n", + "Batch 471/500 - Loss: 0.1157\n", + "Batch 472/500 - Loss: 0.0982\n", + "Batch 473/500 - Loss: 0.4069\n", + "Batch 474/500 - Loss: 0.2576\n", + "Batch 475/500 - Loss: 0.2190\n", + "Batch 476/500 - Loss: 0.5988\n", + "Batch 477/500 - Loss: 0.9996\n", + "Batch 478/500 - Loss: 0.4847\n", + "Batch 479/500 - Loss: 0.1734\n", + "Batch 480/500 - Loss: 0.2216\n", + "Batch 481/500 - Loss: 0.5654\n", + "Batch 482/500 - Loss: 0.2234\n", + "Batch 483/500 - Loss: 0.5161\n", + "Batch 484/500 - Loss: 0.1021\n", + "Batch 485/500 - Loss: 0.3001\n", + "Batch 486/500 - Loss: 0.1323\n", + "Batch 487/500 - Loss: 0.3587\n", + "Batch 488/500 - Loss: 0.2552\n", + "Batch 489/500 - Loss: 0.1585\n", + "Batch 490/500 - Loss: 0.4709\n", + "Batch 491/500 - Loss: 0.1039\n", + "Batch 492/500 - Loss: 0.2904\n", + "Batch 493/500 - Loss: 0.0884\n", + "Batch 494/500 - Loss: 0.3643\n", + "Batch 495/500 - Loss: 0.6350\n", + "Batch 496/500 - Loss: 0.2698\n", + "Batch 497/500 - Loss: 0.2205\n", + "Batch 498/500 - Loss: 0.3022\n", + "Batch 499/500 - Loss: 0.2336\n", + "Batch 500/500 - Loss: 0.3639\n", + "Epoch 5 - Total Loss: 141.7802\n", + "🎉 Training Complete! 
# Fine-tune the detection model with plain SGD, resuming from a checkpoint
# when one exists and checkpointing periodically during training.
#
# Assumes `model`, `dataloader`, and `device` were defined in earlier cells
# (model is a torchvision Faster R-CNN already moved to `device`; dataloader
# yields (images, targets) batches via the custom collate_fn).
optimizer = torch.optim.SGD(model.parameters(), lr=0.005, momentum=0.9, weight_decay=0.0005)
num_epochs = 5
checkpoint_path = "model_checkpoint.pth"

if os.path.exists(checkpoint_path):
    # map_location keeps resume working when the checkpoint was saved on a
    # different device (e.g. trained on GPU, resumed on this CPU-only box).
    model.load_state_dict(torch.load(checkpoint_path, map_location=device))
    print("Loaded checkpoint")

for epoch in range(num_epochs):
    model.train()
    total_loss = 0.0

    for batch_idx, (images, targets) in enumerate(dataloader):
        images = [img.to(device) for img in images]
        targets = [{k: v.to(device) for k, v in t.items()} for t in targets]

        optimizer.zero_grad()
        # In train mode, torchvision detection models return a dict of the
        # individual loss components; the total loss is their sum.
        loss_dict = model(images, targets)
        loss = sum(component for component in loss_dict.values())
        loss.backward()
        optimizer.step()

        total_loss += loss.item()
        print(f"Batch {batch_idx+1}/{len(dataloader)} - Loss: {loss.item():.4f}")

    print(f"Epoch {epoch+1} - Total Loss: {total_loss:.4f}")

    # Periodic checkpoint every 2nd epoch. NOTE: with num_epochs=5 the last
    # epoch is odd and is only captured by the final save below.
    if (epoch + 1) % 2 == 0:
        torch.save(model.state_dict(), checkpoint_path)
        print(f"Model checkpoint saved at epoch {epoch+1}")

torch.save(model.state_dict(), "objectdetection_model.pth")
print("🎉 Training Complete! Model saved as 'objectdetection_model.pth'")