import os
import sys
import torch
import pandas as pd
import requests
import random
import argparse
from pathlib import Path
from torch.utils.data import Dataset
from torchvision.models import resnet18
import torchvision.transforms as transforms
# --- Configuration --------------------------------------------------------
BASE = Path(__file__).parent          # directory containing this script
PUB_PATH = BASE / "pub.pt"            # public dataset (pickled dataset object)
PRIV_PATH = BASE / "priv.pt"          # private dataset (pickled dataset object)
MODEL_PATH = BASE / "model.pt"        # target model state_dict
OUTPUT_CSV = BASE / "submission.csv"  # membership scores written here
BASE_URL = "http://34.63.153.158"  # scoring server address -- DO NOT CHANGE
API_KEY = "YOUR_API_KEY_HERE"         # replace with your personal API key
TASK_ID = "01-mia"  # task identifier on the server -- DO NOT CHANGE
# dataset classes
class TaskDataset(Dataset):
    """In-memory dataset of ``(id, image, label)`` triples.

    Samples are held in three parallel lists (``ids``, ``imgs``,
    ``labels``); an optional callable ``transform`` is applied to the
    image each time a sample is fetched.
    """

    def __init__(self, transform=None):
        # Parallel lists: ids[i], imgs[i], labels[i] describe sample i.
        self.ids = []
        self.imgs = []
        self.labels = []
        self.transform = transform

    def __getitem__(self, index):
        """Return the ``(id, img, label)`` triple for sample ``index``."""
        img = self.imgs[index]
        img = img if self.transform is None else self.transform(img)
        return self.ids[index], img, self.labels[index]

    def __len__(self):
        """Number of samples currently stored."""
        return len(self.ids)
class MembershipDataset(TaskDataset):
    """``TaskDataset`` extended with a per-sample membership flag.

    ``membership[i]`` is stored in a fourth parallel list alongside the
    id/image/label lists of the base class.
    """

    def __init__(self, transform=None):
        super().__init__(transform)
        self.membership = []

    def __getitem__(self, index):
        """Return ``(id, img, label, membership)`` for sample ``index``."""
        triple = super().__getitem__(index)
        return (*triple, self.membership[index])
# --- Load datasets --------------------------------------------------------
print("Loading datasets...")
# weights_only=False is required because the .pt files hold pickled dataset
# objects, not bare tensors.  NOTE(review): unpickling executes arbitrary
# code -- only load files from a trusted source.
pub_ds = torch.load(PUB_PATH, weights_only=False)
priv_ds = torch.load(PRIV_PATH, weights_only=False)
# Normalization statistics (per the original note, the same values used
# when the target model was trained).
MEAN = [0.7406, 0.5331, 0.7059]
STD = [0.1491, 0.1864, 0.1301]
transform = transforms.Compose([
    transforms.Resize(32),
    transforms.Normalize(mean=MEAN, std=STD),
])
# NOTE(review): no ToTensor step -- assumes the stored images are already
# tensors; confirm against the dataset contents.
pub_ds.transform = transform
priv_ds.transform = transform
# --- Load the target model ------------------------------------------------
print("Loading model...")
model = resnet18(weights=None)  # random init; real weights loaded below
# Small-image ResNet-18 surgery: 3x3 stem conv (stride 1, pad 1) and the
# initial max-pool disabled, so 32x32 inputs are not downsampled too early.
model.conv1 = torch.nn.Conv2d(3, 64, 3, 1, 1, bias=False)
model.maxpool = torch.nn.Identity()
model.fc = torch.nn.Linear(512, 9)  # 9 output classes
model.load_state_dict(torch.load(MODEL_PATH, map_location="cpu"))
model.eval()  # inference mode: fixes batch-norm stats, disables dropout
# --- Build a placeholder submission ---------------------------------------
# Assigns a uniform-random membership score to every private-set id.
# Remove this section once real attack scores are computed, or it will
# overwrite the actual submission CSV.
print("Creating random submission...")
ids = [str(i) for i in priv_ds.ids]
df = pd.DataFrame({
    "id": ids,
    "score": [random.random() for _ in ids]
})
df.to_csv(OUTPUT_CSV, index=False)
print("Saved:", OUTPUT_CSV)
# submit
def die(msg):
    """Write *msg* to stderr and terminate the process with exit code 1."""
    sys.stderr.write(f"{msg}\n")
    sys.exit(1)
# --- Submit the CSV to the scoring server ---------------------------------
# argparse defines no options; parsing still makes unexpected CLI
# arguments fail fast with a usage message.
parser = argparse.ArgumentParser(description="Submit a CSV file to the server.")
args = parser.parse_args()
submit_path = OUTPUT_CSV
if not submit_path.exists():
    die(f"File not found: {submit_path}")
try:
    with open(submit_path, "rb") as f:
        resp = requests.post(
            f"{BASE_URL}/submit/{TASK_ID}",
            headers={"X-API-Key": API_KEY},
            files={"file": (submit_path.name, f, "application/csv")},
            timeout=(10, 600),  # (connect, read) seconds; long read for scoring
        )
    # Decode the body defensively: the server may return non-JSON on error.
    try:
        body = resp.json()
    except Exception:
        body = {"raw_text": resp.text}
    if resp.status_code == 413:
        die("Upload rejected: file too large (HTTP 413).")
    resp.raise_for_status()  # other 4xx/5xx -> handled by the except below
    print("Successfully submitted.")
    print("Server response:", body)
    submission_id = body.get("submission_id")
    if submission_id:
        print(f"Submission ID: {submission_id}")
except requests.exceptions.RequestException as e:
    # Surface whatever the server said (JSON or plain text), then fail.
    detail = getattr(e, "response", None)
    print(f"Submission error: {e}")
    if detail is not None:
        try:
            print("Server response:", detail.json())
        except Exception:
            print("Server response (text):", detail.text)
    sys.exit(1)