vlordier committed
Commit 600d133 · verified · 1 Parent(s): 2adf024

Upload hf_job_nsfw.py with huggingface_hub

Files changed (1)
  1. hf_job_nsfw.py +179 -0
hf_job_nsfw.py ADDED
@@ -0,0 +1,179 @@
+ #!/usr/bin/env python3
+ """
+ NSFW Classification Job - Process images with EraX YOLO
+ Outputs: Per-image NSFW detections with bboxes and confidence scores
+ """
+ import argparse
+ import os
+ from pathlib import Path
+ import warnings
+ warnings.filterwarnings('ignore')
+ import logging
+ import sys
+
+ logging.basicConfig(
+     level=logging.INFO,
+     format='[%(asctime)s] %(levelname)s: %(message)s',
+     datefmt='%Y-%m-%d %H:%M:%S',
+     stream=sys.stdout,
+     force=True
+ )
+ logger = logging.getLogger(__name__)
+
+ import numpy as np
+ import torch
+ from datasets import load_dataset, Dataset as HFDataset, Features, Value, Sequence
+ from PIL import Image
+ import json
+ from huggingface_hub import snapshot_download
+ from ultralytics import YOLO
+
+
+ def process_batch(batch):
+     """Process batch of images with NSFW detection"""
+     images = batch['image']
+     image_paths = batch.get('image_path', [f'img_{i:06d}' for i in range(len(images))])
+
+     results_list = []
+
+     # Convert all images to numpy arrays
+     crops = []
+     for image_pil in images:
+         image_rgb = np.array(image_pil.convert('RGB'))
+         crops.append(image_rgb)
+
+     # Batch inference
+     if crops:
+         try:
+             yolo_results = model(crops, conf=0.2, iou=0.3, verbose=False)
+
+             for idx, result in enumerate(yolo_results):
+                 image_id = Path(image_paths[idx]).stem if image_paths[idx] else f'img_{idx:06d}'
+                 img_width, img_height = images[idx].size
+
+                 detections = []
+                 if result.boxes:
+                     for box in result.boxes:
+                         class_id = int(box.cls.item())
+                         confidence = box.conf.item()
+                         class_names = ['anus', 'make_love', 'nipple', 'penis', 'vagina']
+                         class_name = class_names[class_id] if class_id < len(class_names) else f'class_{class_id}'
+
+                         x1, y1, x2, y2 = box.xyxy[0].tolist()
+
+                         detections.append({
+                             'class': class_name,
+                             'confidence': confidence,
+                             'bbox': [x1, y1, x2, y2]
+                         })
+
+                 if not detections:
+                     detections = [{'class': 'safe', 'confidence': 1.0, 'bbox': [0, 0, img_width, img_height]}]
+
+                 results_list.append({
+                     'image_id': image_id,
+                     'detections': detections,
+                     'num_detections': len(detections)
+                 })
+         except Exception as e:
+             logger.error(f"NSFW batch failed: {e}")
+             # Fallback: mark all as safe
+             for idx, image_path in enumerate(image_paths):
+                 image_id = Path(image_path).stem if image_path else f'img_{idx:06d}'
+                 img_width, img_height = images[idx].size
+                 results_list.append({
+                     'image_id': image_id,
+                     'detections': [{'class': 'safe', 'confidence': 1.0, 'bbox': [0, 0, img_width, img_height]}],
+                     'num_detections': 1
+                 })
+
+     return {
+         'image_id': [r['image_id'] for r in results_list],
+         'detections_json': [json.dumps(r['detections']) for r in results_list],
+         'num_detections': [r['num_detections'] for r in results_list]
+     }
+
+
+ def main():
+     global model
+
+     logger.info("="*60)
+     logger.info("NSFW Classification with EraX YOLO")
+     logger.info("="*60)
+
+     ap = argparse.ArgumentParser()
+     ap.add_argument('--input-dataset', type=str, required=True)
+     ap.add_argument('--output-dataset', type=str, required=True)
+     ap.add_argument('--split', type=str, default='train')
+     ap.add_argument('--batch-size', type=int, default=8)
+     ap.add_argument('--shard-index', type=int, default=0)
+     ap.add_argument('--num-shards', type=int, default=1)
+     args = ap.parse_args()
+
+     logger.info(f"Arguments: {vars(args)}")
+
+     device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
+     logger.info(f"Using device: {device}")
+
+     # Download and load NSFW model
+     logger.info("Downloading EraX-NSFW model...")
+     snapshot_download(repo_id="erax-ai/EraX-NSFW-V1.0", local_dir="./", force_download=False)
+
+     logger.info("Loading YOLO model...")
+     model = YOLO('erax_nsfw_yolo11m.pt')
+     logger.info("✓ Model loaded")
+
+     # Load dataset
+     logger.info(f"Loading dataset {args.input_dataset}...")
+     ds = load_dataset(args.input_dataset, split=args.split, streaming=True)
+
+     if args.num_shards > 1:
+         ds = ds.shard(num_shards=args.num_shards, index=args.shard_index)
+         logger.info(f"Using shard {args.shard_index+1}/{args.num_shards}")
+
+     # Process with batching
+     logger.info(f"Processing with batch_size={args.batch_size}")
+
+     processed_ds = ds.map(
+         process_batch,
+         batched=True,
+         batch_size=args.batch_size,
+         remove_columns=ds.column_names
+     )
+
+     # Collect results
+     results = []
+     for batch_idx, item in enumerate(processed_ds):
+         results.append(item)
+
+         if (batch_idx + 1) % 100 == 0:
+             logger.info(f"Processed {batch_idx + 1} images")
+
+     logger.info(f"✓ Processed {len(results)} images")
+
+     # Create output dataset
+     features = Features({
+         'image_id': Value('string'),
+         'detections_json': Value('string'),
+         'num_detections': Value('int32')
+     })
+
+     output_ds = HFDataset.from_dict({
+         'image_id': [r['image_id'] for r in results],
+         'detections_json': [r['detections_json'] for r in results],
+         'num_detections': [r['num_detections'] for r in results]
+     }, features=features)
+
+     # Upload
+     logger.info(f"Uploading to {args.output_dataset}...")
+     output_ds.push_to_hub(
+         args.output_dataset,
+         split=args.split,
+         token=os.environ.get('HF_TOKEN'),
+         private=True
+     )
+     logger.info("✓ Upload complete")
+
+
+ if __name__ == '__main__':
+     main()
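
The --shard-index and --num-shards arguments let several parallel jobs each process a slice of the input dataset. A minimal local sketch of that pattern (this is not the HF Jobs launch command; the repo ids, shard count, and per-shard output names are placeholders chosen for illustration):

import subprocess
import sys

NUM_SHARDS = 4  # assumption: four parallel workers

# Launch one process per shard; each shard pushes to its own output repo
# so the uploads do not overwrite one another.
procs = [
    subprocess.Popen([
        sys.executable, "hf_job_nsfw.py",
        "--input-dataset", "your-username/raw-images",        # placeholder
        "--output-dataset", f"your-username/nsfw-shard-{i}",  # placeholder
        "--num-shards", str(NUM_SHARDS),
        "--shard-index", str(i),
    ])
    for i in range(NUM_SHARDS)
]
for p in procs:
    p.wait()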
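
Each row of the pushed dataset stores its detections as a JSON string in detections_json. A minimal sketch of reading the results back, assuming a placeholder output repo id and the same HF_TOKEN that was used for the private upload:

import json
import os

from datasets import load_dataset

# Placeholder repo id -- substitute whatever was passed as --output-dataset.
ds = load_dataset("your-username/nsfw-detections", split="train",
                  token=os.environ.get("HF_TOKEN"))

for row in ds:
    # json.loads restores the list of {'class', 'confidence', 'bbox'} dicts
    # that process_batch serialized.
    detections = json.loads(row["detections_json"])
    flagged = [d for d in detections if d["class"] != "safe"]
    if flagged:
        print(row["image_id"], row["num_detections"], flagged[0]["class"])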