{ "cells": [ { "cell_type": "markdown", "metadata": { "id": "Cc9RN1YOcK3w" }, "source": [ "# Chapter 10 - Blip3o SFT Script" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "f_stBSUjZyfX", "outputId": "96d99c0e-1fb5-46ff-ccac-5250e4a3cb51" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "/usr/bin/python3 -m pip uninstall -y gcsfs transformers diffusers accelerate peft datasets fsspec\n", "/usr/bin/python3 -m pip install -q -U pip setuptools wheel\n", "/usr/bin/python3 -m pip install -q tokenizers sentencepiece shortuuid transformers==4.51.3 accelerate==0.34.2 peft==0.15.2 diffusers==0.32.2 bitsandbytes pydantic markdown2[all] numpy scikit-learn requests uvicorn fastapi einops==0.8.1 einops-exts==0.0.4 timm>=0.6.13 ftfy datasets==2.16.1 fsspec==2023.10.0 tabulate ninja qwen_vl_utils huggingface_hub wandb torchvision pillow\n", "\n", "Install complete.\n", "RESTART the runtime now, then continue with the next cell.\n" ] } ], "source": [ "# Install a BLIP3o-compatible stack.\n", "# Run this cell ONCE in a fresh runtime, then RESTART the runtime before continuing.\n", "\n", "import sys\n", "import subprocess\n", "\n", "\n", "def pip(*args):\n", " \"\"\"Run a pip command with this kernel's interpreter; warn on failure instead of raising.\"\"\"\n", " cmd = [sys.executable, \"-m\", \"pip\", *args]\n", " print(\" \".join(cmd))\n", " result = subprocess.run(cmd, check=False)\n", " # Surface failures without aborting the cell: remaining pip calls may still succeed.\n", " if result.returncode != 0:\n", " print(f\"WARNING: exit code {result.returncode} from: {' '.join(cmd)}\", file=sys.stderr)\n", " return result\n", "\n", "\n", "# Remove conflicting preinstalls / prior runs\n", "pip(\"uninstall\", \"-y\", \"gcsfs\", \"transformers\", \"diffusers\", \"accelerate\", \"peft\", \"datasets\", \"fsspec\")\n", "\n", "# Packaging tools\n", "pip(\"install\", \"-q\", \"-U\", \"pip\", \"setuptools\", \"wheel\")\n", "\n", "# Stable stack for this notebook\n", "pip(\n", " \"install\", \"-q\",\n", " \"tokenizers\",\n", " \"sentencepiece\",\n", " \"shortuuid\",\n", " \"transformers==4.51.3\",\n", " \"accelerate==0.34.2\",\n", " \"peft==0.15.2\",\n", " \"diffusers==0.32.2\",\n", " \"bitsandbytes\",\n", " \"pydantic\",\n", " \"markdown2[all]\",\n", " \"numpy\",\n", " \"scikit-learn\",\n", " \"requests\",\n", " \"uvicorn\",\n", " \"fastapi\",\n", " \"einops==0.8.1\",\n", " \"einops-exts==0.0.4\",\n", " \"timm>=0.6.13\",\n", " \"ftfy\",\n", " \"datasets==2.16.1\",\n", " \"fsspec==2023.10.0\",\n", " \"tabulate\",\n", " \"ninja\",\n", " \"qwen_vl_utils\",\n", " \"huggingface_hub\",\n", " \"wandb\",\n", " \"torchvision\",\n", " \"pillow\",\n", ")\n", "\n", "print(\"\\nInstall complete.\")\n", "print(\"RESTART the runtime now, then continue with the next cell.\")" ] }, { "cell_type": "markdown", "metadata": { "id": "4r2reN4Qb9Vk" }, "source": [ "Here we import the core libraries and define our training configuration" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "YWUiBg4wZ4VP", "outputId": "64c081d8-cbe1-42ea-f86a-366735640dd6" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Python : 3.12.12\n", "transformers: 4.51.3 /usr/local/lib/python3.12/dist-packages/transformers/__init__.py\n", "diffusers : 0.32.2 /usr/local/lib/python3.12/dist-packages/diffusers/__init__.py\n", "accelerate : 0.34.2 /usr/local/lib/python3.12/dist-packages/accelerate/__init__.py\n", "peft : 0.15.2 /usr/local/lib/python3.12/dist-packages/peft/__init__.py\n", "NOTE: in this transformers version, use eval_strategy=... 
(not evaluation_strategy=...).\n", "CUDA available: True\n", "GPU: NVIDIA RTX PRO 6000 Blackwell Server Edition\n" ] } ], "source": [ "import os\n", "import re\n", "import gc\n", "import json\n", "import math\n", "import sys\n", "import copy\n", "import torch\n", "\n", "import transformers\n", "import accelerate\n", "import diffusers\n", "import peft\n", "\n", "from datasets import load_dataset\n", "from diffusers import AutoencoderKL\n", "from huggingface_hub import snapshot_download\n", "from peft import LoraConfig, get_peft_model, PeftModel\n", "from PIL import Image\n", "from safetensors.torch import load_file\n", "from torch.nn.utils.rnn import pad_sequence\n", "from torch.utils.data import Dataset\n", "from torchvision import transforms\n", "from torchvision.transforms import InterpolationMode\n", "from transformers import (\n", " AutoConfig,\n", " AutoModelForCausalLM,\n", " AutoProcessor,\n", " Trainer,\n", " TrainingArguments,\n", ")\n", "\n", "print(\"Python :\", sys.version.split()[0])\n", "print(\"transformers:\", transformers.__version__, transformers.__file__)\n", "print(\"diffusers :\", diffusers.__version__, diffusers.__file__)\n", "print(\"accelerate :\", accelerate.__version__, accelerate.__file__)\n", "print(\"peft :\", peft.__version__, peft.__file__)\n", "\n", "MODEL_ID = \"orrzohar/BLIP3o-4B-v3-TEST\"\n", "DIFFUSION_ID = \"orrzohar/BLIP3o-4B-Diffusion-Decoder\"\n", "PROCESSOR_ID = \"Qwen/Qwen2.5-VL-3B-Instruct\"\n", "DATASET_ID = \"orrzohar/BLIP3o-Visual-Reasoning\"\n", "OUTPUT_DIR = \"./outputs_visual_jigsaw\"\n", "\n", "IGNORE_INDEX = -100\n", "IMAGE_TOKEN_IDX = 151667\n", "IMAGE_SIZE = 448\n", "LATENT_QUERIES = 64\n", "MAX_LENGTH = 4096\n", "\n", "print(\"CUDA available:\", torch.cuda.is_available())\n", "if torch.cuda.is_available():\n", " print(\"GPU:\", torch.cuda.get_device_name(0))" ] }, { "cell_type": "markdown", "metadata": { "id": "4f9RYVQlZymI" }, "source": [ "## Dataset and Data Loading\n", "\n", "The `VisualReasoningDataset` 
class prepares our training data by:\n", "\n", "1. Processing the input: Takes a puzzle image and question, tokenizes them using the Qwen processor\n", "2. Parsing the reasoning chain: The model's response contains interleaved text and `` tokens. We split these apart and:\n", " - Text segments are tokenized normally\n", " - `` tags are replaced with 64 latent query tokens (placeholders for generated images)\n", "3. Building training labels: We mask the input prompt (the model shouldn't be trained to predict the question) and only supervise on the assistant's reasoning response\n", "\n", "\n", "The `collate_fn` handles batching by padding sequences to equal length, this is necessary because different samples have different numbers of reasoning steps and images." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "1q5WckeBZyvQ" }, "outputs": [], "source": [ "class VisualReasoningDataset(Dataset):\n", " def __init__(self, hf_dataset, processor):\n", " self.data = hf_dataset\n", " self.processor = processor\n", " self.tokenizer = getattr(processor, \"tokenizer\", processor)\n", " self.pad_token_id = self.tokenizer.pad_token_id or 0\n", "\n", " self.gen_transform = transforms.Compose([\n", " transforms.Resize((IMAGE_SIZE, IMAGE_SIZE), interpolation=InterpolationMode.BICUBIC),\n", " transforms.ToTensor(),\n", " transforms.Normalize(\n", " mean=[0.48145466, 0.4578275, 0.40821073],\n", " std=[0.26862954, 0.26130258, 0.27577711],\n", " ),\n", " ])\n", " self.blank_image = self.gen_transform(Image.new(\"RGB\", (IMAGE_SIZE, IMAGE_SIZE)))\n", "\n", " def __len__(self):\n", " return len(self.data)\n", "\n", " def __getitem__(self, idx):\n", " sample = self.data[idx]\n", " problem_image = sample[\"problem_image\"]\n", "\n", " reasoning_images = [\n", " self.gen_transform(sample[k])\n", " for k in [\"reasoning_image_1\", \"reasoning_image_2\", \"reasoning_image_3\", \"reasoning_image_4\"]\n", " if sample.get(k) is not None\n", " ]\n", "\n", " processed = 
self.processor(\n", " text=sample[\"prefix_text\"],\n", " images=[problem_image],\n", " padding=False,\n", " return_tensors=\"pt\",\n", " )\n", "\n", " prefix_ids = processed[\"input_ids\"][0]\n", "\n", " assistant_text = sample[\"assistant_text\"]\n", " segments = re.split(r\"()\", assistant_text)\n", "\n", " assistant_ids = []\n", " gen_images = []\n", " labels_parts = []\n", "\n", " latent_block = torch.full((LATENT_QUERIES,), IMAGE_TOKEN_IDX, dtype=torch.long)\n", " reasoning_queue = list(reasoning_images)\n", "\n", " for seg in segments:\n", " if not seg:\n", " continue\n", " if seg == \"\" and reasoning_queue:\n", " assistant_ids.append(latent_block)\n", " gen_images.append(reasoning_queue.pop(0))\n", " labels_parts.append((\"latent\", LATENT_QUERIES))\n", " elif seg != \"\" and seg.strip():\n", " ids = self.tokenizer(seg.strip(), add_special_tokens=False, return_tensors=\"pt\")[\"input_ids\"][0]\n", " assistant_ids.append(ids)\n", " labels_parts.append((\"text\", ids))\n", "\n", " eos = torch.tensor([self.tokenizer.eos_token_id], dtype=torch.long)\n", " assistant_ids.append(eos)\n", "\n", " assistant_ids = torch.cat(assistant_ids) if assistant_ids else torch.tensor([], dtype=torch.long)\n", " input_ids = torch.cat([prefix_ids, assistant_ids])\n", " attention_mask = torch.ones_like(input_ids)\n", "\n", " labels = [torch.full_like(prefix_ids, IGNORE_INDEX)]\n", " for kind, val in labels_parts:\n", " if kind == \"text\":\n", " labels.append(val.clone())\n", " else:\n", " labels.append(torch.full((val,), IMAGE_TOKEN_IDX, dtype=torch.long))\n", " labels.append(eos.clone())\n", " labels = torch.cat(labels)\n", "\n", " if len(input_ids) > MAX_LENGTH:\n", " input_ids = input_ids[:MAX_LENGTH]\n", " labels = labels[:MAX_LENGTH]\n", " attention_mask = attention_mask[:MAX_LENGTH]\n", "\n", " gen_images = torch.stack(gen_images) if gen_images else self.blank_image.unsqueeze(0)\n", "\n", " result = {\n", " \"input_ids\": input_ids,\n", " \"attention_mask\": 
attention_mask,\n", " \"labels\": labels,\n", " \"i_s_pos\": torch.tensor(len(prefix_ids), dtype=torch.long),\n", " \"gen_images\": gen_images,\n", " \"num_reasoning_images\": torch.tensor(len(reasoning_images), dtype=torch.long),\n", " \"pad_token_id\": torch.tensor(self.pad_token_id, dtype=torch.long),\n", " }\n", " if processed.get(\"pixel_values\") is not None:\n", " result[\"pixel_values\"] = processed[\"pixel_values\"]\n", " if processed.get(\"image_grid_thw\") is not None:\n", " result[\"image_grid_thw\"] = processed[\"image_grid_thw\"]\n", " return result\n", "\n", "\n", "def collate_fn(features):\n", " pad_id = int(features[0][\"pad_token_id\"])\n", " batch = {\n", " \"input_ids\": pad_sequence([f[\"input_ids\"] for f in features], batch_first=True, padding_value=pad_id),\n", " \"attention_mask\": pad_sequence([f[\"attention_mask\"] for f in features], batch_first=True, padding_value=0),\n", " \"labels\": pad_sequence([f[\"labels\"] for f in features], batch_first=True, padding_value=IGNORE_INDEX),\n", " \"i_s_pos\": torch.stack([f[\"i_s_pos\"] for f in features]),\n", " \"num_reasoning_images\": torch.stack([f[\"num_reasoning_images\"] for f in features]),\n", " }\n", "\n", " pv_list = [f[\"pixel_values\"] for f in features if \"pixel_values\" in f]\n", " if pv_list:\n", " batch[\"pixel_values\"] = torch.cat(pv_list, dim=0)\n", "\n", " grid_list = [f[\"image_grid_thw\"] for f in features if \"image_grid_thw\" in f]\n", " if grid_list:\n", " batch[\"image_grid_thw\"] = torch.cat(grid_list, dim=0)\n", "\n", " gen_list = [f[\"gen_images\"] for f in features]\n", " max_n = max(t.shape[0] for t in gen_list)\n", " padded = []\n", " for t in gen_list:\n", " if t.shape[0] < max_n:\n", " pad = torch.zeros((max_n - t.shape[0],) + t.shape[1:], dtype=t.dtype)\n", " t = torch.cat([t, pad], dim=0)\n", " padded.append(t)\n", "\n", " # IMPORTANT: model.forward expects gen_images (plural), not gen_image.\n", " batch[\"gen_images\"] = torch.stack(padded)\n", " return 
batch" ] }, { "cell_type": "markdown", "metadata": { "id": "KdYrzfn9Zy7j" }, "source": [ "## Inference Helper and troubleshooting functions" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "axuTFElZZzCY" }, "outputs": [], "source": [ "from torch.amp import autocast\n", "\n", "\n", "def move_batch_to_cuda(batch):\n", " return {k: (v.to(\"cuda\") if torch.is_tensor(v) else v) for k, v in batch.items()}\n", "\n", "\n", "def show_sample(tag, s):\n", " print(\"=\" * 70)\n", " print(tag)\n", " print(\"=\" * 70)\n", " print(f\"Prompt:\\n{s['prefix_text'][:500]}...\")\n", " print(f\"\\nGround truth:\\n{s['assistant_text'][:500]}...\")\n", "\n", "\n", "@torch.no_grad()\n", "def run_inference_bf16(model, processor, sample, max_new_tokens=64):\n", " # Text generation is only a side-check for this notebook.\n", " # The primary evaluation is the custom forward-path loss comparison in the final section.\n", " model.eval()\n", " device = next(model.parameters()).device\n", "\n", " try:\n", " inputs = processor(\n", " text=sample[\"prefix_text\"],\n", " images=[sample[\"problem_image\"]],\n", " return_tensors=\"pt\",\n", " )\n", " except Exception:\n", " inputs = processor(\n", " text=sample[\"prefix_text\"],\n", " images=sample[\"problem_image\"],\n", " return_tensors=\"pt\",\n", " )\n", "\n", " inputs = {k: (v.to(device) if torch.is_tensor(v) else v) for k, v in inputs.items()}\n", "\n", " with autocast(device_type=\"cuda\", dtype=torch.bfloat16):\n", " out = model.generate(\n", " **inputs,\n", " max_new_tokens=max_new_tokens,\n", " do_sample=False,\n", " pad_token_id=processor.tokenizer.pad_token_id,\n", " )\n", "\n", " prompt_len = inputs[\"input_ids\"].shape[1]\n", " return processor.tokenizer.decode(out[0][prompt_len:], skip_special_tokens=True).strip()\n", "\n", "\n", "@torch.no_grad()\n", "def eval_losses(model, batch):\n", " model.eval()\n", " with autocast(device_type=\"cuda\", dtype=torch.bfloat16):\n", " out = model(**batch)\n", " return {\n", 
" \"loss\": float(out.loss) if getattr(out, \"loss\", None) is not None else None,\n", " \"text_loss\": float(out.text_loss) if getattr(out, \"text_loss\", None) is not None else None,\n", " \"img_loss\": float(out.img_loss) if getattr(out, \"img_loss\", None) is not None else None,\n", " }\n", "\n", "\n", "def print_triplet(title, vals):\n", " print(\"=\" * 70)\n", " print(title)\n", " print(\"=\" * 70)\n", " for k, v in vals.items():\n", " print(f\"{k:>10}: {v:.4f}\" if v is not None else f\"{k:>10}: None\")\n", "\n", "\n", "def get_trainable_state(model):\n", " return {\n", " n: p.detach().cpu().clone()\n", " for n, p in model.named_parameters()\n", " if p.requires_grad\n", " }\n", "\n", "\n", "def load_trainable_state(model, state_dict):\n", " name_to_param = dict(model.named_parameters())\n", " with torch.no_grad():\n", " for n, tensor in state_dict.items():\n", " if n in name_to_param:\n", " name_to_param[n].copy_(tensor.to(device=name_to_param[n].device, dtype=name_to_param[n].dtype))\n", "\n", "\n", "def save_initial_trainable_state(model, output_dir):\n", " os.makedirs(output_dir, exist_ok=True)\n", " path = os.path.join(output_dir, \"initial_trainable_state.pt\")\n", " torch.save(get_trainable_state(model), path)\n", " print(f\"Saved initial trainable state to {path}\")" ] }, { "cell_type": "markdown", "metadata": { "id": "cqGIdwNGZzIv" }, "source": [ "## Load Dataset and Model\n", "\n", "Now we load everything from HuggingFace:\n", "\n", "1. Processor: Handles tokenization and image preprocessing (from Qwen2.5-VL)\n", "2. Dataset: Visual reasoning puzzles with step-by-step solutions\n", "3. 
Model: BLIP3o-4B with its diffusion components:\n", " - Base LLM (Qwen-based)\n", " - VAE decoder (for image generation)\n", " - Generation vision tower (EVA-CLIP encoder)\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 1000, "referenced_widgets": [ "18a4f17280284bc49dcdef8878b20373", "0fca1ffecc984acd8bd371405ca92b41", "5f27ff3dcd044403b072c78bd11513ea", "223eeaa5039e4158b2331279f5be7190", "d19baf3bdb314781878fa0df7ad9c75d", "bc6d1c451a62450fbb44f44fec736a35", "701eaecf4e724a858603a0f4d6f64ca8", "dcf9ae378ada47eba1aa47af6b940ff1", "abe33063ff55474f98a3d2ff7e08411e", "09e46c1197ef490788b88c7de6c981ce", "a4b8bfc81dd84701a540f4b5996f10d9", "f94f1a1920ea4396ba9387e3dd5db578", "9503ea774e19475992ea174d03bde177", "8ab4bb5316614be9b220a4db489edcad", "7204754064a444eca3f304a8f5e711c1", "cfb9235d9f1e489fb7401667bcf036b6", "c9275a9fbab448c9b1e6a5a3d8ab8019", "049c05d38dbb4d9ba53b38fb24fe54f6", "a3b083f522df448e97d4321b6612f938", "9721bb35eae9437280fc16d3fb1388e5", "3d30eccc348d4137bb63b88176b01e96", "ce8adc902ae84cd997095db83a0de2c9", "172c288845e1487a9f51732065784cc5", "7325c6104041472288f68c0977830c05", "dc42a1f245d74294979ccda4965e5e61", "b476f571f0014884b92080d99a813e56", "21ab7c075fd64c39ac5c22647305af71", "678dbd5264874ee9ab3d468c31f5d44b", "35843112744045edb918995840f0b2ed", "ed45b3e387094316bb5cf1f555f9db32", "f1c8c1bf30034bbabb615a03794fd990", "8bb1bbfd7afd4656a00d0f4ee8e4c7e4", "3da53683e6734a1086a2051ff98400d8", "0deb4aa0445b4a0bbdaf4b2d4a6db59e", "973ad50df4a7401cabbe2c0a210ae60a", "ca590049b2fe4d849ca2a20d45bd9ec1", "60a366e8c8af48409b5c9df8c6f693f6", "a9dc6049c41646319f00ad01b522d164", "2c0ed098bf42482582b6b2ba6c6e5941", "689457f0727b41aa80e207db62e3db68", "82b9ba880e924ee59e570afe9eb1b027", "0d5e0ced573c46aeb32e0bd190c28b81", "44379226404641b3880295a69ed1f184", "883e550e4c7c4e53a122cb2701b8d1bf", "6c9aa12448d340fca80239eb50efad98", "2e6417ba4fb64a4a9a14c088510cde5d", 
"94132d1ddbc04a9e83544a2641762b34", "6d788c9ad0c74600b2145772cffdccd5", "d53ff4b1c7614540a495f3399d8d0530", "0e99798266ac4758b24f8160df12c320", "0059e7791d3f449b8d94c4904ccad710", "bc43a6b16e8a40d482f29f24bdb343ee", "ae82bba6f6a2484bae3662f04a786741", "11564ef079454e67bd858531fba544f2", "b859cd6e50d643959f32a66d5393b1fb", "3263c9730c694815892c4fecfab0699f", "b04aeabcedd04a0fa04f7e7d74b66c43", "7c71b7effeb449e39449c24fa2ea9f2e", "e0296472b3184bc6aea0411a56865d87", "95b49c6b3c5542a4baf48ee248d9e3d2", "3a49a1ccd28d49738152d6d0b8299301", "3d36c7105ba444d8af96dcf57f55f679", "6377a89f3fad4e28b6adf0900f097935", "569e8452361a45b68d3bc01a3bb8886f", "58fd833447c64cbea72844f03cf12ef6", "a036b1fd887a492db0e5727cdee4254b", "440ff611cd14499e8b8d0d89440d01db", "f3ecce48caa240ceafbd42a7281409e5", "b6d03ed8d997468eb556db356ee7033c", "189981e8159a4e2abcc8b0ceb006e697", "b0e58d221dda46aea9689a80cd84e57e", "110a2a6a49324c6ebc48061771da2d52", "7a81a438835343dc97cf1a65616bfef0", "8ad43c99333f49a68e72c921cf5c87fe", "bdcb0da1b713484985dd35564770dd2e", "871f9ecca80b4110953b4fde5bc28d53", "e8169cf8c07c4653afa8ff98f5b243da", "85a6575b42874c9794e393e6a32bf614", "8675295fb842428191f4044ccd42d26d", "8abf1ce4c3c44c19bb8546571537bfc8", "faf80014f04040ada8fed595df3605ed", "21d11d8bf6474a95ba15c095925fe192", "db9c985503a9459db609f7dca9cbc360", "4184b9c3e2d8466c8971563ac974cce5", "cb967c2c9f674397932c179da5f37a22", "6cb03c2d96624941a49be79f558fd9e0", "7808aceada1048778bc2c55eac4da4d1", "973327122f1041e7b3b6ac0e4a2624a9", "ca4fdc88f8144ab68e7b9da5c74362be", "0c2f041fe0164e9487664162219c83e8", "c6a963200a854cd488342499533b3154", "433fa590f4ff422199871a72c3e46b5e", "e5a7b947171c49e692b688fefa804157", "cce9c494664b4a729df324fd5f463cde", "d75329be21f64d89b45c25d1d0f9a981", "ec483866ae3c452f85972f3cc16341e6", "bef918b0cc5d4ca385b59f869f4b219e", "8997a56875274f4d8ac6c4a76e0679e7", "8c4fb4b730c04a039a3628cdcff4292b", "901dfb7bc2a344a1ba8c205774f77f8c", "36c41e428bfb41f2887c45130bec7cdb", 
"acfc318376564165b1b15775d6efef61", "0afa97b1d4ef44febb58afa61679c44e", "37787ee6e4bd4f32a7f031f3eecf7447", "424362a0fc554f0ba10b32ce32f96d5e", "ff77d34c072c4439a2707e4c1a21120d", "cb9c41197d40451b9e53fe68e6c88163", "0f55e8bc64e246da940c82b85a0d485e", "369537129ffc48c7b9c740de2585262a", "7fd71459ba654111a53cdba048e72b4f", "d62e8ce2fd624fa596dd01ec0ac429f3", "b6e7b6585172407c9310c387f211942e", "3fa37abae25d4bb98f6627a86026de22", "07502542160e424e82aaf3eccb16c44f", "5b51fe5b0db24210840f2386b650ceac", "edf83b5eec2b41cca325abc11e8c968a", "c4898949c0b44443a6b6bb26e662d1e2", "61c9a3e2bd5d4993b8d42430747b2e81", "d7f94da5785f4f7ca0a3876e565196bb", "9027ba056b9a431c972e094e5bc0aad1", "6c88a10689024f509ce0f50feabfc025", "8dec80b101d8460a8f6ee18ed91684a2", "61bc3a5f3d5643b2888b4d387960d735", "8cb3b367783245c78f21fe3eeb3bb5cd", "24d7c039ab284343b27b73927380f2db", "0d6f35d1b4c642b88a1a022d99bfa4b0", "c3802a75718b41c58d4486f3b1771a04", "8d969182b58e4de19b4bf806e9c1df9a", "cfd91b2500e44b8aa1f6033caf8436df", "b265e0dee24342dc9e75d449ced7be25", "814427a74a5345cca6f2e3ea25e63bd7", "8be158c412614b38ae7c4f1e37a2aa6d", "e6151af5d0ce45f9ae7510a003e44fa5", "c7c31a44b3a445b6aabd831b2a0d3a60", "1e76928b85e14e39b62055a7d37c8a4a", "a96a08d0aa6a43729913e127f4dc56ec", "8257494abc9c492e9fd7ecfd56b3c87d", "da11e2c7480146879975959942e171c5", "5727c9dda7d342f4a4db778c3ca83736", "32f8468c9b4a4ce5875529a8e5c15022", "899801081d8c491f91324fb66e566973", "a8efb9198a514248a74c88e7e0ca9637", "47bd1009fdb449398931c28658e88f88", "1f749aace57f4a58bec6aa6288e6a51e", "1fae1f3027c349f4bc7d7608582f7807", "49a817981817434989287ea5fa81557a", "32e4bcae05c747bfb02950eb34b6f2bf", "74a75efe13094cef95196c81508d9544", "d2e9fec0c07e441991dba2bb3a4755b9", "5c8d7bfe9e6548558d5ad4a796a2ca8c", "f88b9e6f36114375afe2c9f5ee3af5a0", "427b147cec2a450fa7d2ce451a8b7ba7", "eedf7c25bd294ad3b0dafb59b1b9f605", "693bb5ca99fa4dc38dd5b60f279d652d", "3c61dda0a4054cec92afddf6f9ecc4ec", "d8f053e081d649d7b3173ec37caaef43", 
"2e7f231b64db4c96b7b7ba0c109438b4", "1b0b62ef61624b90a0adfeb305bc485d", "8ddc9dba1351426db769b1bef1c52050", "c0372d660fa944ed98594d342e98df4e", "a32cc8dd749f4f30b88fc2b378cba25a", "5c440330316f45d8b73fb5439e527d85", "e81519b82dd24daaa462b81856911864", "2b0d546876a648a9a9f21f8e55d042c3", "4f67d65c08864fc58a9bc0c79aa2008f", "2ea3cebabe00412b80cb59ead5e0bed5", "e61fb55b72074c68978110e469e6f2be", "1abdeaca280c4e9182762a04fb6bd5e7", "40d3cd91b0ca4efdb8faf2cfe36363df", "9828831fb0de4138bafa027999c5ceaa", "a035d77c933d46bc8e08a65f3f8ce92b", "86aecb71da1d42888dfea647b0d65341", "6192015aadb54c27bbc2fe454eeb2c1d", "0b8e37eaf15142bcb237ca3c9dfc9d21", "043a602d4fb44bf2ad95bd8b2edf7154", "db984d5a0e974cbbb7253f4e720d2ffa", "ae552225ba274f31abefd808388415a9", "d0308613048c4ba9900f39a3a93e7315", "c7102db192a84372b181ce1cf51e2ea0", "8e13de6cc19741eb940155eaf18b670e", "3f26b0eb0ac3466cb3754abfe8caa588", "8a7056526413430cab570e936ad277b8", "9c40d7139394426aa73116782a5e8fc3", "41dad354b5b34611b55d9253babaf54e", "4b4bd1e5bce340fc97e649eb8ba1c439", "e7097693e16b4fd5897111fdf1957de0", "3d441dee60a34daab3052dae83d4f11f", "4402ff61d5f44429bbf1e30a9067938e", "3686086951cc4a7da1dc97642cb38cc9", "ac15e75fc4ea4c9e9a946a5a69353acc", "7f7ad4f8779645f5aabb412e38f60c3c", "9b344ff132d047d8a7dbf74b33f36bb0", "265793bb754446f392524a47490c68cd", "e1771769f809497791a3179047f77f71", "e977f6a811204bc7bf1c0e20912d81d6", "b4052a29df5d4968b0e4bf003d279355", "8a228ee2ed8d4873b64148dd2e7f2b08", "6f64de010f3c43f88320f0b3f332b196", "5edf2db2061e4204ba078b5fbcd75430", "abeff971a86d4261837420e0ba7cb4cf", "2f19664b7d3a4522bf3c4ca627b4a466", "8f70f3eec8c74f58b39c825791f13bd6", "db8ccbe2d92c4ec28a62cb10aa68af14", "7edb7af0fc944dde8ece610b6880ee55", "91f7ca850d9e40b09a2190d0b5710235", "cb7612db76ff428b978b4bc3b129479f", "4b75a79e73ea4a568d7c069607f026ee", "2bacac88a5344ed1b428c27ee413302b", "7b5e5638a7c948c5bc20bb3daeff31bc", "cfa5bbd5094849a592559c8edfc6fbd1", "77da3a84f4c641f7bd4ab82b84b08708", 
"73f0390f13e54dacace50c27c8036d02", "2a023c2961874f13b492164f01b80a0a", "4aabdcafc45746f7b8ad6647cbd902aa", "2ade2bca9a6a4492816bde91019d9f78", "f3400b75a50447848f477ab12b7c9505", "d1cc15c9cd994eafa40387a7305047a3", "6157b71f48144caf83882535fb986d88", "3659c23d1c73432f92ab51c4d01c75c4", "5363aba1cba54b27a7c5c2a5665efc87", "55cb0fae3d2b46cab33da7312ec487b0", "77325947b92941c5a4a8456c53282c38", "ddb57767a9234106a5aa7885cb9033f1", "6f8698d4cf2148e8b16741abaf1be0f5", "5170e5874b32410c9d7eb626b9a34e69", "d07ddd13ebdd4f1c9c17023ffab30486", "3b210b7bcdd24c0996e7a35fb8bb23a2", "9febc7528d474789b338f6941c53d203", "216ff6a93e924a03bc4b0c6b13bc7319", "5547a3584c0e4c88adbe5dc1022c2cfd", "ff62c5cba01349a3ab7f21fb69d9132b", "2ac11fbde4574292be2d0a0ff584205b", "6d9260132cc74166887f3bba8004bbbe", "45a2b3041dec43939054399c183229c1", "218006e8a5fd4748a2f2061c1b1f8a18", "a2fe99e3474e47a683bca492f2637d34", "f09a6c24513a430e8d873c6602632581", "a35328e1f38241b8861f4ea033900459", "e164ed2aaa944a52b69886984047ddc6", "41c8c2b79d184f50afd7c2f3426beab7", "f45c1302b3674a82abb23824dfcbd4ce", "0daeeb520309450da33de6dc940b9154", "d83b6c29a8214cd2b57fd5c9c3366f42", "1b7fe495ffec462191538b33901f49e0", "a0bcc2d49b7e414a86416966f6c5a165", "7a81958fbbcd422ebe83f477ed375688", "975f2286918245018c4809a3f4518148", "d8c300f181e14c59b97fed3c6e0766d1", "571a19a16b7f468aade79f7f3a783551", "3c3524bee71a47e7ae1619bae92f48f6", "5ae8777efb2747b2b84c5ed70b1c5452", "53f8147cbf1249bfbc7926110eb27215", "04005f76639f4780aee48f963e7f8bec", "b3ec38a752a34cef97dee35636aeddd5", "acce8cebf0eb422e8f723b6b7303aa1b", "e1b6d438a6fe4b1fa4369c56b320601a", "ed3b4f220e05467b8c18351f1188f59a", "30883ed19db44642b492dafbdb4ea8e7", "004a7c2251dc4c48a1f75f506e1dfab8", "a6fa0259a6474486a8e5f4bb0441f9c0", "978c4417f368454c8ae2bdb4fad54502", "b1921dd1cb0e472383a25a44feb73b8c", "d549f2319d0247caa2d17bd74b9a3839", "8cc2bd46b3af428ab866a2988b1c4dd0", "6e6df1fad5e249dfb462ec060aeefa86", "5fc781752e4341fc800260d0d42bbbbc", 
"084c28667d024318ab5200cfde971393", "97fb269294ab4e6bbc749682bb09f8c1", "6230037cb2ba448a9b7c067f1304c3c4", "926c370e40bc4782bfbcb40108e5d79d", "b8445259b7c14d2485aced9f4c94ce1b", "6b5a1858f6004729b6060a2d2defc008", "13169b3d935a4b89af72ae1f264f6a65", "b6c7cfccbac747e7879d7afea5e69a26", "c5a6fb0c417a4616bf67607f1ed4943e", "a3ce3ad0ba5646b1a0a0dd9b1adb19d5", "6f64dbf80c9245d3abfec2d2f39ec7cf", "ef12f3f4a1d34075834bf0311aada85b", "95794f3219a64236bb76869161de80e4", "960e86004abf4aadb7b6e7c8073775f5", "59b64771a86b45ffaa8ba350de69f710", "b546c275643741479922dcc452e40af5", "ecc57c1c09744b57bf3dc900f36730d4", "4d1d870dc0ab455db615d0f973d774da", "cfc3ff1defb14a989ffd571583e02105", "5f0243b03b8b4284b1e3f8b4d01b06db", "d99b4afabb8847089c6a78e1ae560a38", "1a62a566b0a746e6a9c2dc5dc245785a", "0c1283c9e5fe469380afed2cd1a58ede", "844d577be7334385afacff619b576042", "d959620de5764d02a8334beeef8d1adc", "6cc1bc37a8d24dc884bb1913e265a1f8", "a7bebfa747c04be892547a619d7ccae7", "a1118b5f5a1748e098ac6a3311716317", "738bb087e9974044b83ec21cdbf6720a", "d2cf1df73aea4349a398c95c1de0f6e7", "a43ddd4103a248d68a321bbc2b93ae79", "55893115b4964271b4b89eca2460ea7c", "5dbc364bd4b243c9babe351114b02a11", "719774655a9a47b18abfe0a40ebe3a9c", "b8033cce8f9c44318374e6b489c2ad56", "e33f6e428b07426c8526e5cc172f6344", "1a7073b27ff24d72877f1a937268286d", "5ccd25143aa140a19a75305c809cfe63", "4550c0f407d144c0838b198501740946", "3dd26871d5cb46468ec587ebe1b791ae", "9379dc071d074d7aaa4a47a247039db5", "e0180a694b63438bb721e228816f60bf", "14c356e421e24ca6817371c9cec7b17a", "965d3585eb874ac6a052f73021503f4c", "f4216e23c36847fc91f6b894d2317097", "870e30288acf4150b784c89626cef857", "e1c7814803bd427e9af99f0de38fa9f6", "e2d48e21d9c547c58775efd6c30bfb83", "4e65173476fe45a0a9e7749207d50e3e", "a422b7d6a3da49d48f94e7b5041842c4", "09b9e48aaf0848f8b3343e5e00108c42", "3dd506b4ae4544d381ad2eda5c7c7716", "2e2a527f8dee470b920709b1600add00", "a91a1649307e476091496cbf18121223", "3b8035bc32f34c1399a40d712a6c6869", 
"2121356323414cd2b92b9ae47f111884", "aeef63af164c437bb1ea4b32f5d74bb4", "e2b86b1718634b03b3a01a7bf7bf946b", "252525709860463e9584956071131263", "c5cfe2a10ac84147b5a3bd70cf00098f", "9d96bffbf28747eb8b5da86d62065788", "f37ad077528d438687c1468d040326e6", "021058c89a004dfb9ea64748c3df5587", "3cd41670e99f4c88b27ecb9363cb6f3d", "84b7ccce6cba4d74a5f0ea49811fdf6a", "0aeb4fe962074b908624f6b40ebaf058", "57ce1715c36e4e0ca5bf9e64e929bb89", "7a3d430c43bd47ce849f5b40eab2305e", "d22f5eaf26c74b6caee68077e08cb875", "25b7d1b8b8194c8da13f75f4088c2819", "0d22309bb39647f8becc31f3d2cb8d98", "6ac161362ffe46539b1fb0607e646e94", "7622858dd67e45c9b50f401207e3b2f8", "050be32a739246fb84702f5fc489d36a", "1f522a9ceb0e45fba00b76af7da542a1", "5273ad9468bc4b5a964a8899dce37ea7", "f506ba031e1c45ffba87a7d4e1bcedf3", "72e46621bcca42f9910bc921534f5cd2", "ddbea41bafac47bdbdd838e1296dac1d", "b99e204e7145409e904e0154e1134c96", "6f1c6d488fb24dd283a589d1412636cd", "26a9601a8dd84baca798ebaa6185ab85", "711f4b5e510948d893c18e8dd71363f8", "568bc2080b0844f9a2f1be931488763b", "f18c88f9e67a42a48609266e3e329ccf", "249bd43ef3f443bea6dc50bad34fde3b", "35bae4fd331246e78b3bc42c2a65f141", "b015622050c143149803ada075753ef4", "23eeba72c5414bf4b484380d435b5a25", "563aa0d6c3ff4501895483ad7c966c92", "0b2ff07e85f44b48aeb391879542a285", "7b6f785d27364f079afb9ed80c157267", "8876a9dcae9948cb9bc3557ed0929643", "643dc1c2e5fb4bebb346dc5eb3dda240", "8b979f7532094df9a7c64225291875b6", "45b31409a6f843f0ad21e78d9636aa96", "f9b13b47b65b42b088b1dbada6668d88", "dabb4da075874e6caa557d6cde87fe7a", "c5e24384796e49d8b06b4c1b4e4df542", "57a5cd3dd1a04862838bb349121636d7", "41e6809f34334afeb1149327588acccc", "dd31d9cf837d436fa61dc9ac44eabce0", "4e96ca8f080544b18eb01609381c8b66", "de8aa2f11bb9466e8af25538f28afedf", "7d704099791c41a18f5838d8419b2932", "6073fddb9d534e77bcaabe78ac978227", "82ca4599c4e04f708f245f9d8b927e60", "92f558606a8145599681e6f6eeaf79b1", "ac1b5ae4d599492a8b14198a0ec033cc", "f47a543eb9eb40f08f40df695b223cd9", 
"0e51033da33d4c78994f2785cd060dfc", "e24afb5bbbad4056ad8716df2eea7894", "55df4cd9eea64b1395170447b9be399a", "31386241ce94441586bf103cae1bfe94", "6e0a8fb39e474103944a7a0566ef413c", "7e2c93b702814b308bc7897773af4987", "303e754326444610a86fe29abc7d2a47", "237924c9fe094ce89bbd2f3045c010fc", "0830d3889d654ba98be88528e7dcd55d", "9fbcb6607f254330b78ed3e56376b2a2", "92c5af5513174b8994ef4ae4eff7c088", "d952f1f5af444df48a82b4eef3801d16", "cb4cf5954a9d4034976ce00e9ac1b4a9", "53166e25a41f4a56836285fabf7b5d1a", "56814902fb154e5e8ebc142ece12e0c0", "4221581308164723927366b6d46dac1e", "fd7566ec41e74e6db2ab39edea6e0eca", "d2386018a6e4489b852892b03fa81944", "55f1b836af1b4cd88d8b6f7a3ea6db13", "ea280424ecfe4cec96799022ef1d2848", "f6f0da3866554e8e80b8df714d0be090", "1b9432a846464bf784bc8d519cbbde65", "e04a38c5bff54c35b29300eddf3f24c0", "43c69f348c914a94aaedae78db7865f1", "5f7d3823545644a58814bbf9e1cd6660", "cad730e827e3479eb6bf0bdba5c7056e", "77320a2b665643d4adb1e99969453552", "df949124c60c43928ab5fa7b962ba890", "95ae4f06c9034aa0b25d37423548c5bd", "7bcfb052efae42259fc2e8e1b945b383", "5bb22f1111d44a72b9719ecd3c453aed", "5c6ab1409f1d46aab829acb4ddcf7d07", "4155e0d752fc4a28882c3e3407c7b3e0", "85884128ed40408eb5f3ca64a43beb22", "3b96f872e65141a19641b030207f9428", "24be34f4ffe440c688f43340277fad3b", "3b2795edce33443a94f707d79197c765", "ddddcd2f97414f3584b43f696fba5a73", "1b39650dbdad429ca335e3998dbdd49e", "23d450ee35ed4cf380f430e19aa55f6a", "20cceeb0877b4515b99dec60c6738f60", "d68b8804e5de4894a0dc3ec816ef8542", "4e8bdddaf4a0431a9937858aca46a16c", "9c3689448a5b42c09fdd8dadd4157d88", "ac1a367ef9ef46e2a2078657ea70cf74", "3be276fac1944c69a248d423204401e0", "001a41cbb31a45a2a4aa14274bc01705", "55dc619a5f644249afca678ea746a121", "c1b362519a474f71b3dd36e3747b9738", "1210d9a6b7cd4dd88e2592e028697d2e", "6e05a5f6df6b4b4696b9fceee0410587", "1404ac966de5453d87e7c778308be22a", "102580c3f7c144229b0116d4f846c17a", "a986e68ddefd424cb7ec65ffb60a72d4", "2ff5ccc0c8624366a8f1e4d597ab24e6", 
"669750bcc63a4456b404cac30efd1803", "119fe20b8d2b48b188fd9f7aceef8e29", "15b80f5b9e6f477ea9c1d2a612d61fc3", "f72110294fe445eea27ad69e3851c287", "226173838c9c4a988b171f8dee1ab7db", "ebcccdc3279b44429ce1bd10440f52f3", "b94613db636f4ce7b78e1205bce4d805", "f0a4506a4bd2444cbadc76613a65c457", "9eed75604bc8441d8eea28e85e36ae0b", "ba51e6b558b84326aba6fb8ed32a63ab", "ab030f4869e74b2b85d8bff4b32a711b", "f13d91d33a9c4bf09ca271a5320d26ad", "0bc490e9932c420494d8b0ff2c5a2b29", "c9854716b660415e80f94f682eaa1f24", "08b0607e1df6465ba0de2ef1cee13fb8", "9a2d4e76ead7479d8fdfa018a84ab443", "26cf36488503490c8d123083ccb0a4b9", "99d790779b8f40e69170068b1a8152ea", "23c89daa5ea84b0282d1c7feadcb8d31", "b6a24af8167048748358d6bfcd5edb5f", "8a9e7e884a734164bc7665f992c3ba3a", "fde3c1d092a44880ae346c1308f64312", "2a8219dfe88e4501820f841e324495dc", "bf8e68c6db9646adb289aa55ad61a62c", "92b27a0bc2064341add398886c9707ba", "a5a71a2b9b0a4f8fbaa11000c8db03ab", "b933f2e9acb84111addd97b68c4c322a", "b22c0f51461c43de8ce6d5a95433e5af", "293df73bf1774b5484fba3227d93acaf", "8f01a425ab33459a8fd07943addd05f2", "a1a5a5f7ec1544a3bc2813d2890e8ad3", "9f0b2887f478412898c52001e2758b41", "9659a6f9273c4b9a9386b611b77c0a6e", "e8d2a06bf4fb4192b6ddd0f6d0c34644", "c39a723354e34d47bddb7c3d0eaac46c", "1613fba6bea24cd6835ce675ad8ed6b6", "d3cd2e2a4fd647eb90bcb2508cc6dc9d", "a4b0dc9099e34ce282fc99e2b39167d7", "d5c24a23ff344f03a1e9209919057bcd", "5270c7b00b2a469d81dd723ce2332032", "6b66deaed53f43f0b387b667be51fb91", "f586f00177104caaa4a9202147765274", "49db80faad2949cd8261dfda5c74a24f", "373edeca7b3a43698cea46b7225e6f4c", "ef3b3ab918864db39ffd4f913ca8665c", "7858fe77df6c459186cdb813df0c376b", "20b8359be2e448429976e717806e0952", "4d150699418244339d5f0557ebf571ba", "996d7c569619400ebf5654155a4aba47", "94a33e0807c9444a8ef559f563e4d93f", "eab766cf788346168e4e32dc209b0f11", "39b1a1250fdf41249b0aa5c56b15a5bd", "624625d73bed4811b3b1d36efc96aeb2", "951ca520b85f48bbbcc44e74dcec861d", "18ce35af583c43a1b887c3ad2311bd5a", 
"5c82f7245ebb41b9a714236c77d43e61", "1b556f116c7c44988f0e447a95a5218e", "7b9ec44c76364367906f1d6bc6fce0e5", "9ee25f94137e4b71a22327a2a42c786d", "61486d48a70b4cf9baeeaed5afa2f0e4", "a9731573f68a48cb9f34ffd36c7eb3f1", "349d979ea64f4fdbbc08a2f955d1acdf", "b614d5b512104a83a0fa2e175cff8770", "f38fef79275242e0b1469f381936bf37", "52c3899eb97a4c6985b8c1b842e0895e", "8ff258d4485b4154a6e25d152a715ae8", "880c275302124d0b98e5e894a75309d8", "1f58f2126c3b4127b3c55b61bb3ca645", "278d4e2add2142b39b4ace665ba11f05", "ec4e4bbed85f4dd49dea14fe58c8e5af", "f913b9d5764e420caf8f50bf654e23f5", "641b9273199c4f08b1001bdbc925fc7f", "0d115db280e34e1b87faaca56d72472f", "c1bf808e9ecb4c399f71cbcab566f8e5", "8502476f98ba4ab995005d3c4e51466c", "f1920096f41c4ccc832f4b8a85b61955", "72c7fd00697a43758a84d64adf9eff9a", "29a6bf3c6e1c44d8a71ddd023958ff24", "98aaf7d8dffa480e820bc0e4fefa45a7", "d3797a8a544f477aa7728a8fedf7e075", "9d70e867ba174d5283c78776f6619daa", "753693917b264cd6bcd22902eab56cd7", "9ff41f0e6b65424e8137501aacac4aab", "a277a0f090204b788c906af63a29f1b7", "6e9cc4969204425889da3c2809d0a5d8", "0917d9a7e95944c28688ad23090a9d8a", "bc797ce993a14ba6ad89e62239979298", "89c91395ab704883b6344fca7dd1d5b0", "ebeadfcc2ef14b2b96107abd240da5d4", "afa454e4508d491eaecb87cb44d33093", "bd2b28761d08415586df26ad7450a8a2", "8af532cbe4b2457ea406cdbf5e40558b", "10297598eb1c4968901cacbcfb5c800d", "52bf1e03c34143ab958c88226d9d239b", "48a9ee94de6b474cb16a5a9e5bc5ef52", "30e6b4d5cb3c45ae832dcebae64a10d3", "27d0b6578fe84ca881185e52cc4a4216", "9a6765b97af64dd996749a5a641609a8", "4610943ad3304929b4e8dc077ec781d0", "05e740c62b1447e2a7f946eac8368267", "e23eb00d952d4daab62e179b0c853f87", "d037069091f345968916f858d35b0cb9", "10b1ef4f69e84a1e8fa0402ee3d79987", "db75caedc5f643c09874cb78c44cfd4d", "cf83c6a105354434ae7f3a142299363c", "1aa7120c041648798f85a7b50b6589b9", "05cd0db8d5454c50a834566c654b595f", "96cfbcf9941d4c09bd682f2887a53e3d", "74a38ec226964271bdb46fa9f080a05e", "4d44029b9712489ca6042614c3414ab2", 
"3b50df4fb6df450e8f57b8ada8125c3b", "d552f29b66f24bc9b429b347c5562f35", "cdc30b26be62495e82fc42f452271589", "9a9623a8fbb6472ab60d95552f89d1f5", "ef5b7e48774b49298485864449ceaec5", "8bebb4b60e39453d945f0c866fb30df1", "f25bc171d21243148c943e7449cc50c4", "43e13f9927914c5fa705649471b8865c", "7dcf8e449b80424db5f2c893e66b01c0", "3529a8894a1b4c02ab17dce6f450ef6b", "88869f08c27945988a2c98f00e145e13", "50c5538e45384154aef286a353e60599", "70ef1e4f07d34a55be9a62d3cb89bea0", "619c824a930d4f5d8915a2f9713a9c12", "b731684a0cf14bf88ad9f72c61064468", "a0f6b817052b454a85df6503f1f5abc0", "9fa75d799cc54a1baa8ca41ffa70dfbc", "12c6f2c36c874523b4729e638638e1af", "71dc080ad3bd47b0a8766529f18c909c", "2fa81f6fec7442c9a1827997602f5b03", "43aaaa917a0f408b990012020dd6b54b", "bf89f9c59b824641b08df0a69e3969b2", "6771ceb8cabe48949c2492f280023ef2", "4b257b3fca3b4f2ab2395ca0e13bea5c", "35222095c77b4a9db8761c282220b54e", "ee0934d4fe84427399921e402a666334", "4d6ba958396f4d12b6d4f219c82ef28e", "0c6c22ee2d6747ada94eee6dc5f2c363", "aa32abc4794f4659b08b67db865be5fa", "d0ca8ec2265f44b8aed930f04c27e14b", "724dfb01717c4c7fb3d8555a469d9f44", "8df5b8a513ed4847b6daef1bd32182ba", "7ea9e5670ef646b3ba25976dd4bff17d", "3898d9ca91ca47ce986153a8c6011e21", "108ef7a051194d5cb47f00f060304649", "9a314b7d965c4f7b979e066f50521031", "d725bdc43bbd46d38e98fa44544a3c16", "bbfef33c7c304147aefa970d6886c367", "4b0094155028425aa830609ade048e06", "2dec55ee60164d27b7c826463207bb58", "a2254c98ef534445b187121948f728b6", "29ae64fba46d40158297062f52d4d1e2", "62977e7a23754c87939dfdcdf8721c32", "8444ee2884ca4bf8bb088d7aecb34adf", "204318d3224c4ca9904c84f0424649ba", "b55c8cc08b624316beb9bf6b97b9c452", "b457c87a64fb47e6b9855331a3162203", "75bb2423d4f847b288a10c9296cef3df", "dae7845af4024a418414d13af6417cc8", "bd986d339a6144aabad81dba873c1d8a", "56560d1778894952a2ee40258e81a6e1", "f312630ded8d4dc8813ec5f3abf8e5cc", "ec002559d5274e5ab713c33f5811dee5", "1977299f6a024f0e94fe98117c1dd5f3", "dc7241c573724cabbaf84263b73d80b7", 
"4932104f206d496fb1a58e4186401843", "cd45f05fc3f842ce97e9820a4b4f0952", "cde9ae853e5944d7b77e14ef086c4635", "afd7efad7c3a4355843ef896b1e0979b", "69e67ef3f2cf410789554547cb6d330c", "4cd69e8e2b224128b9f3982a9daa0bc0", "fab236c672094fed8c4ad4b0972da767", "afe28ff37bfe4624a451fc5092cf20f0", "689b8ed5d76a4a88935d2a2422d16cef", "0da299c9a6a448cf992b3f88b5e9ec37", "7708c25be5f548428ddc3e0fd648fb19", "7a14c3d10ce041bbb88e11d5a2de43a5", "7097aac95da14b019f61bbe163e3a80c", "ebcf9b306b14412d963e2e4e7c0b57f0", "8802ac3e68654f0b91490df255561d11", "3f4105a0ae37491fa5a551d93eebc152", "5e4a3e34dbea47f9987bb8e6060b8671", "91c9ee8b4e384455ba46683ade998126", "0e0d7d7f982845718c786e72e010141e", "5f52c38d9c0c4bc983518c3dda4eaba1", "c029ffe381074306bf6d784468af3805", "bb9a428aaed7467bb7608cf8f739cbcf", "1666e1cd01844fb4ad25880a7ba5693e", "a422521eacf74798a8c89291363d7b8c", "0e9a9d00bce948cd86f29f2d7672e19a", "60e7e3a65d904a62b774344a1178d9e7", "30193ec08fdb4e9d9bfdb09194451a24", "a344044166604d88b1e3c9832b009669", "92567443bd96457a9ae37acfc325d921", "049bf1f0dcd344199dbcc163b8ca93e9", "0e64d9c8711a475c9eef57e41d2d4299", "78f672de02154d8a9dcb78b393bb3931", "6d1ecad30e804ea0b62a08e71f48b004", "56458b0264cc485187827f19c1f514ee", "07fc7e01f8584bdb83b9ed725e9acf93", "4dd180cab8574589b643ff82ebef3fbf", "9fc053b0a17445c586e06632176aa795", "800d5902eed740d380bee0bcad70d14b", "3f3576ee77ad4017bdf49f4344edab21", "d70f6b7476b64a63ab38f8f4ddd837db", "66f6a75c1dce43beb50d86a216e03921", "6bbf5d86460a414f9a45eb76bd3d788e", "6b8ad873b45b4fc0bab361934b807405", "6af7f908174941f994ee649015bd10ae", "c1204cefb0d24c1f83cfcad989f8833b", "ab1bd4708eec4a9e8df4a48aedfd265c", "a41e514b726746e4a7aa1a15d073396e", "4934ceb6a0d74f5599b5aa7001340bb5", "70f1655d7d15476cab15e5c512212e99", "7b87d3e5be1c4b959872c930e0c05418", "cd534cdf735d4747806b7a01d1316864", "0314713dcb4a440dbc1eb0020bf41248", "857953bca3a942829b3c7f52941c8446", "97861feac61a4c2985cf351e4c0b1aa9", "4f91ac60616f4ca88ce3b5111150aa50", 
"f9016e5b1aae4eaba6d37a4ed77818fe", "6f07e5343cc14635b93620c7aaa3f569", "65761d7e1e074140970f8d0fdc5e80a7", "6a70a8f2fa594e44a000b0b84a5cbe55", "7a2fac68478742578094aff44aacc642", "0730161566ed4fb7ad1a8ac8ceb6be0e", "36f7a59cf64440a39038eb78a18142d9", "d833f2151d1e42738d82605ed5b2d81c", "9628225334324db5a2e325af01d48ba0", "609c1c8ddff44a2a8c42c24a2ae9a018", "8092dc33cc3f4b459aa5aa5f8761f542", "5559a160c38c4beb81c29d8558dbcc95", "35f91f0a27d948c7a7dd52a938e1ca59", "b7a47b81e4d94a85bcb7b85ce5b8589d", "83706e4d6e45443582c66314880a5b03", "0d6afa226e074fd99b56204a58cd0e01", "aef10e22a8f541b397ecfc503218615d", "bca6a1626461434b882c3b229dcf5790", "629e6d079cd64a81b6b9dde4d13e7e5d", "2cedc6bd40b34c8c984b2846f4938fa7", "ab8f529e4ef74591ab8dcf3ed969ae81", "273b83c53b7c4047a1cea1ceee912e69", "0db6f606abdc45d0af75a8e07f994f10", "54f30f5dcf9c4760ba08032dc84be3e9", "27121fa0b55747e7917713768515f73d", "567812abaf494bc4b8124948b8ded2ab", "908704b3229b4ec6ae12d2b772e37ee7", "2fadd567ea3f43abbeb584687d275021", "e35ac5e88b3445c4bedfb3442f5378f6", "a6241b2846b24072a658b6bdb088affe", "e8d847373911492eb5486e32c44e334c", "a5f54a1d001e478ca27da65b9668ba5f", "ec0e69603e4f485990cdeac448beeb28", "a0576ef7b0ce4253b796a09e07783377", "b3b4f9910acf447e8ce0590baba3482c", "25a79a7a5cab4b6da10c2af890d43a03", "d8339fa4975d48ccb84847c77b1e7bd2", "abbebe10b049449f94925457e01fefaa", "c2e68fe8b2424a51a44c9c4bac230267", "e3fd59c6355c48e3b78a7dc6e6185593", "1b3e9bc5ae1644c1ba32a5f202332fae", "dde71b0ad6e54f8ab1dc3c2d6465dbce", "f5bbd9fc1967420482c721ef9d7d1853", "2cebc308159c472ba6511263c615f720", "ed1bdfa1a2d64cffb5b254fbe97524a5", "e5f64ca96aea4c2e87986c39f01a1b9d", "c4c5366cc895425fbd4919876803a2b5", "7d1b3445427b413f99b352b7e31d2563", "080ea2f89c2b463a984376b00aeffd06", "c6bcae97b35a47a79006e00cab73b0a8", "ad81620bb6644a7c8932d92e50f4c36e", "0d0a4d9828e34749b6b1be7e1c1aa6e2", "e629518398864748ba44394860c39d5c", "5aaae03df7274c1d865368353359053f", "43f9f150bc284a258d9b58dc681beb7f", 
"d6fed3d574b14de5a406084882077a07", "a92ea4d4e00a4ce0ba90744aae3fce7c", "e4c5e8a9fae643e78ff360e52621b462", "0c257d7fc4544928b8a0bef6625bffa8", "ceb99175650446b49f14a308892cd26b", "f1cb640fda8943189704bcc882a2fc32", "9632a9c6f5f54154aea53fffda591153", "f73e9e0c4b8b4dc5aeefe113c9ac121d", "8dff74cba3d842cd8ede01e48d2fb078", "e21190d9e9ca42fda569f669931d0567", "9a07e4dc566a4eb8827fcc6ba022dac5", "7f9e97055a7d47ffbeb07b5e7f5cd538", "718a5152889f4a138ceb94b954eb0728", "24519795e8e1421299873007755d3d81", "1e6d05e0ecbf493e8433b6edf62dc126", "19bef8b3134f4ea39a1cfb2d4649c23c", "8208dd56897a4ac083639e8ce4c7880a", "a451d0460e82432eb5513839bf8bee9f", "f07a0c05ee6742e5a1b8e06849ea1c03", "2885e7811222420d9fbc1bef2c95e7da", "81a4ea335ee945e6a7620899dbb78e13", "31eae0d8d84840ffb99c8b051274d70a", "d6e78fc409314bb191dff6499acd55ab", "921295acfc2946ec861cf976fb89b39b", "e8b366a9fe894de78af8ad2951520583", "54be328ec7474e839e0ae997fcccaa33", "cb44fa2a478c43a7af8fc273508278dc", "f5c5320385ae4c87a18a61acdd9941e8", "5b4144f66d7a4e97b43dfc08d4c6834f", "88ee57f2828042aaafb28a2830012f12", "f8bb54f8891948b2818ac3f4ff14420a", "d351a34ac84c4f1a97a7ba19fd119237", "0fa6c738a58540c1805f87f218df35fd", "ef807aefef784f32866ce1d959635497", "d6ad8fd856ca4a149dcbad7f9ac80e67", "d801219748aa4ae88fea7062866e3694", "dce03373e1824770abfcf2c2c65dce22", "a1bc006900584ca3aee0a9b98c4ec02d", "aac85c81dcdf445c88a644137fc96e60", "a7a83ce1fccd4deb8b493c647fdd5eb9", "e9ce7fbe79cb41a8a5f30eb1e64e5c02", "f5b2e0384c324227b473c677351c9681", "ab28680e224449fd8d17999bf4bcf1d8", "1af9218e43104b8b86c3537f59daf698", "e38e853e60f64e21946e28a77dfd7597", "aede5897281245cc9e372f5f7bd5c9b2", "8e8e25cda0ba49389fd2995b6b5f43f4", "02e5f048ffae4e5e90bfdc65389ae9af", "82d51008eda84be4ad03c3133bfa29fb", "48801010753943108afc3177dce8f855", "3b3c7724b569418189dd18eaf0a075fa", "e2875131145541be8bf858bc3a441131", "f126575d466747338a859562fbd7479c" ] }, "id": "EGDblHKaZzOE", "outputId": "3958d701-d7d9-4175-cdf6-75edcf65727a" }, "outputs": 
[ { "output_type": "display_data", "data": { "text/plain": [ "preprocessor_config.json: 0%| | 0.00/350 [00:0012} {n}\")\n", "\n", "# Save the PRE-TRAIN trainable state for later base-vs-FT comparison\n", "save_initial_trainable_state(model, OUTPUT_DIR)" ] }, { "cell_type": "markdown", "metadata": { "id": "GzOZAJihaG_E" }, "source": [ "## Train\n", "\n", "Time to fine-tune! We use HuggingFace's `Trainer` with the following settings:\n", "\n", "| Setting | Value | Notes |\n", "|---------|-------|-------|\n", "| Epochs | 1 | |\n", "| Batch size | 1 | Per GPU |\n", "| Gradient accumulation | 8 | Effective batch = 8 |\n", "| Learning rate | 5e-6 | Standard for LoRA fine-tuning |\n", "| Precision | bf16 | Faster training, lower memory |\n", "\n", "Expect ~2-3 hours on A100\n", "\n", "The model checkpoint and LoRA weights will be saved to `./outputs_visual_jigsaw/`." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 1000 }, "id": "QjGgsJjzaHJS", "outputId": "82d12a5c-26f3-457c-87f0-650ec1cdcf92" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Train size: 17519\n", "Eval size : 256\n" ] }, { "output_type": "display_data", "data": { "text/plain": [ "" ], "text/html": [ "\n", "
\n", " \n", " \n", " [2189/2189 1:27:05, Epoch 0/1]\n", "
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", 
" \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
StepTraining Loss
2074.215400
4073.051700
6073.284700
8069.921500
10068.856500
12068.805700
14065.283300
16062.040900
18062.603200
20059.966400
22059.769700
24058.763800
26058.071400
28054.955500
30054.997300
32055.678800
34054.260200
36053.613800
38052.474200
40052.671100
42052.163400
44051.484000
46052.211500
48052.576900
50052.568000
52051.872700
54050.699600
56049.514000
58051.186100
60048.643500
62050.923600
64049.254300
66049.956400
68049.023300
70050.039900
72049.281800
74049.348700
76047.275300
78048.755700
80048.802600
82048.922300
84048.866400
86048.138600
88048.962400
90048.140700
92048.373100
94048.867900
96048.381200
98047.425400
100048.110500
102048.289400
104048.828000
106048.521500
108047.279900
110048.097300
112048.242800
114048.392300
116049.316700
118047.752100
120048.042000
122047.196800
124047.514800
126047.917400
128047.487600
130048.974700
132048.147600
134046.374100
136047.151900
138047.777600
140047.540200
142047.462300
144047.420800
146048.025200
148047.306500
150047.101700
152048.389300
154047.891500
156047.502500
158048.424300
160047.898100
162046.529600
164048.400500
166047.289800
168047.384200
170048.224500
172047.300500
174047.739700
176047.792600
178047.794200
180047.899300
182048.186200
184047.395200
186047.592100
188048.399700
190047.406000
192046.739000
194047.278800
196048.537100
198047.272100
200047.797400
202047.651100
204047.461500
206047.185100
208047.916000
210047.069500
212047.966300
214047.573000
216045.989600
218046.839600

" ] }, "metadata": {} }, { "output_type": "stream", "name": "stdout", "text": [ "TrainOutput(global_step=2189, training_loss=50.79135859573525, metrics={'train_runtime': 5229.0577, 'train_samples_per_second': 3.35, 'train_steps_per_second': 0.419, 'total_flos': 6.002255702801996e+17, 'train_loss': 50.79135859573525, 'epoch': 0.9996004338147154})\n", "Training complete. Model saved to ./outputs_visual_jigsaw\n" ] } ], "source": [ "# Full-data one-epoch run\n", "train_dataset = VisualReasoningDataset(split[\"train\"], processor)\n", "eval_dataset = VisualReasoningDataset(split[\"test\"].select(range(min(256, len(split[\"test\"])))), processor)\n", "\n", "print(\"Train size:\", len(train_dataset))\n", "print(\"Eval size :\", len(eval_dataset))\n", "\n", "model.config.text_loss_weight = 1.0\n", "model.config.img_loss_weight = 0.1\n", "\n", "try:\n", " model.gradient_checkpointing_disable()\n", "except Exception:\n", " pass\n", "\n", "model.config.use_cache = False\n", "\n", "args = TrainingArguments(\n", " output_dir=OUTPUT_DIR,\n", " num_train_epochs=1,\n", " per_device_train_batch_size=1,\n", " gradient_accumulation_steps=8,\n", " learning_rate=5e-6,\n", " warmup_steps=20,\n", " logging_steps=20,\n", " save_strategy=\"steps\",\n", " save_steps=500,\n", " save_total_limit=1,\n", " bf16=True,\n", " remove_unused_columns=False,\n", " report_to=\"none\",\n", " dataloader_num_workers=2,\n", " label_names=[\"labels\"],\n", " gradient_checkpointing=False,\n", " eval_strategy=\"no\",\n", " load_best_model_at_end=False,\n", ")\n", "\n", "trainer = Trainer(\n", " model=model,\n", " args=args,\n", " train_dataset=train_dataset,\n", " eval_dataset=eval_dataset,\n", " data_collator=collate_fn,\n", ")\n", "\n", "train_result = trainer.train()\n", "print(train_result)\n", "\n", "trainer.save_model(OUTPUT_DIR)\n", "latent = model.get_model().latent_queries.detach().cpu()\n", "torch.save({\"latent_queries\": latent}, os.path.join(OUTPUT_DIR, \"latent_queries.pt\"))\n", 
"print(f\"Training complete. Model saved to {OUTPUT_DIR}\")" ] }, { "cell_type": "markdown", "metadata": { "id": "HGsGG9J8aIyl" }, "source": [ "## Compare Before vs After\n", "\n", "Let's see the impact of fine-tuning!" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "RKqiMg8Ezdr5", "colab": { "base_uri": "https://localhost:8080/" }, "outputId": "a236c17f-a577-4901-ddcd-f9e146469b1e" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Base avg loss : 9.086186123390991\n", "FT avg loss : 5.9237147619326915\n", "Avg total delta : -3.1624713614582998\n", "Avg text delta : -3.129117973148823\n", "Avg image delta : -0.33353377444048715\n", "\n", "Improved samples: 64\n", "Worsened samples: 0\n", "Flat samples : 0\n" ] } ], "source": [ "import os\n", "import numpy as np\n", "import torch\n", "import gc\n", "\n", "torch.cuda.empty_cache()\n", "gc.collect()\n", "\n", "def seeded_eval_losses(model, batch, seed):\n", " torch.manual_seed(seed)\n", " if torch.cuda.is_available():\n", " torch.cuda.manual_seed_all(seed)\n", " return eval_losses(model, batch)\n", "\n", "n_eval = min(64, len(eval_dataset))\n", "indices = list(range(n_eval))\n", "seeds = [0, 1, 2]\n", "\n", "initial_state = torch.load(os.path.join(OUTPUT_DIR, \"initial_trainable_state.pt\"), map_location=\"cpu\")\n", "ft_state = get_trainable_state(model)\n", "\n", "base_total, ft_total = [], []\n", "base_text, ft_text = [], []\n", "base_img, ft_img = [], []\n", "\n", "improved = 0\n", "worsened = 0\n", "flat = 0\n", "\n", "for idx in indices:\n", " batch = move_batch_to_cuda(collate_fn([eval_dataset[idx]]))\n", "\n", " base_runs = []\n", " ft_runs = []\n", "\n", " for seed in seeds:\n", " load_trainable_state(model, initial_state)\n", " base_vals = seeded_eval_losses(model, batch, seed)\n", " base_runs.append(base_vals)\n", "\n", " load_trainable_state(model, ft_state)\n", " ft_vals = seeded_eval_losses(model, batch, seed)\n", " ft_runs.append(ft_vals)\n", "\n", " 
base_loss = np.mean([x[\"loss\"] for x in base_runs])\n", " ft_loss = np.mean([x[\"loss\"] for x in ft_runs])\n", "\n", " base_t = np.mean([x[\"text_loss\"] for x in base_runs])\n", " ft_t = np.mean([x[\"text_loss\"] for x in ft_runs])\n", "\n", " base_i = np.mean([x[\"img_loss\"] for x in base_runs])\n", " ft_i = np.mean([x[\"img_loss\"] for x in ft_runs])\n", "\n", " base_total.append(base_loss)\n", " ft_total.append(ft_loss)\n", " base_text.append(base_t)\n", " ft_text.append(ft_t)\n", " base_img.append(base_i)\n", " ft_img.append(ft_i)\n", "\n", " delta = ft_loss - base_loss\n", " if delta < -1e-4:\n", " improved += 1\n", " elif delta > 1e-4:\n", " worsened += 1\n", " else:\n", " flat += 1\n", "\n", "print(\"Base avg loss :\", float(np.mean(base_total)))\n", "print(\"FT avg loss :\", float(np.mean(ft_total)))\n", "print(\"Avg total delta :\", float(np.mean(ft_total) - np.mean(base_total)))\n", "print(\"Avg text delta :\", float(np.mean(ft_text) - np.mean(base_text)))\n", "print(\"Avg image delta :\", float(np.mean(ft_img) - np.mean(base_img)))\n", "print()\n", "print(\"Improved samples:\", improved)\n", "print(\"Worsened samples:\", worsened)\n", "print(\"Flat samples :\", flat)" ] }, { "cell_type": "markdown", "source": [ "What did we learn? After stabilizing the BLIP3o training path and evaluating on a 64-example held-out slice with repeated seeded loss estimates, the fine-tuned adapter reduced average loss from 9.09 to 5.92, improving all 64 evaluated examples relative to the adapter’s initialization baseline. Most of the gain came from lower text loss, with a smaller but still positive improvement in image loss." ], "metadata": { "id": "Rd2Ac_rJ6UNp" } } ], "metadata": { "accelerator": "GPU", "colab": { "gpuType": "G4", "provenance": [], "toc_visible": true, "machine_shape": "hm" }, "kernelspec": { "display_name": "Python 3", "name": "python3" }, "language_info": { "name": "python" } }, "nbformat": 4, "nbformat_minor": 0 }