DrAbbas committed
Commit 976b5ad · verified · 1 Parent(s): c17695a

Update app.py

Files changed (1):
  app.py  +209 −176

app.py CHANGED
@@ -1,37 +1,39 @@
 """
 ╔═══════════════════════════════════════════════════════════╗
- ║ 🔱 SONAR-AI v17.2 — Swin-V2 Upgrade Patch                 ║
- ║ Apply this code on top of the original v16.8              ║
 ╠═══════════════════════════════════════════════════════════╣
- ║ ✅ Keeps everything from v16.8 (Physics + PRMI + VTA)      ║
- ║ ✅ Adds Swin-V2 Classification (94.1% F1, 43 classes)      ║
- ║ ✅ Adds Swin-V2 Concealment Detection (98.9% F1)           ║
- ║ ✅ Adds Swin-V2 Risk Assessment (97.2% F1)                 ║
- ║ ✅ Shows the Swin-V2 results on the analysis page          ║
 ╚═══════════════════════════════════════════════════════════╝
-
- Instructions:
- ─────────────
- 1. Open the original app.py (v16.8) on HuggingFace
- 2. Search for every "=== ADD HERE ===" and add the code at the marked spot
- 3. Change VERSION from "16.8" to "17.2"
 """

- # ═══════════════════════════════════════════════════════════
- # Edit 1: change the version
- # Search for:  VERSION = "16.8"
- # Replace with:
- # ═══════════════════════════════════════════════════════════

- VERSION = "17.2"  # ← change this line

 # ═══════════════════════════════════════════════════════════
- # Edit 2: add SWINV2_CATEGORIES
- # Search for:  HS_CHAPTERS = {
- # Add the following code immediately before it:
 # ═══════════════════════════════════════════════════════════

- # 🆕 Trained Swin-V2 categories (43 classes, sorted alphabetically)
 SWINV2_CATEGORIES = sorted([
     'appliances', 'auto_parts', 'bags', 'banana', 'batteries', 'beverages',
     'cables', 'canned_food', 'ceramic', 'chemicals', 'cleaning', 'clothes',
@@ -45,206 +47,237 @@ SWINV2_CATEGORIES = sorted([
 SWINV2_CONCEALMENT_CLASSES = ['match', 'no_match']
 SWINV2_RISK_CLASSES = ['critical', 'high', 'low', 'medium', 'safe']

-
 # ═══════════════════════════════════════════════════════════
- # Edit 3: add a transforms function specific to Swin-V2
- # Search for:  def get_transforms():
- # Add immediately after it:
 # ═══════════════════════════════════════════════════════════

- def get_swin_transforms():
-     """Transforms for the Swin-V2 trained models (256×256)"""
-     if torch is None:
          return None
      return transforms.Compose([
-         transforms.Resize((256, 256)),
          transforms.ToTensor(),
          transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
      ])

- # ═══════════════════════════════════════════════════════════
- # Edit 4: load the Swin-V2 models in load_all_models
- # Search for:  MODELS_LOADED = len(CLASSIFICATION_MODELS) > 0
- # Add the following code before it:
- # ═══════════════════════════════════════════════════════════

- # === ADD BEFORE "MODELS_LOADED = len(CLASSIFICATION_MODELS) > 0" ===

- # 🆕 Swin-V2 Classification (43 classes, 94.1% F1)
- SWIN_CLS_MODEL = None
- path = find_model("swin_v2_cls_best.pt")
 if path:
     try:
         import timm
-         nc_swin = len(SWINV2_CATEGORIES)
-         m = timm.create_model('swinv2_tiny_window8_256', pretrained=False, num_classes=nc_swin)
-         checkpoint = torch.load(path, map_location=DEVICE)
-         if isinstance(checkpoint, dict) and 'model_state_dict' in checkpoint:
-             m.load_state_dict(checkpoint['model_state_dict'])
         else:
-             m.load_state_dict(checkpoint)
-         m.to(DEVICE).eval()
-         CLASSIFICATION_MODELS['Swin-V2'] = m
-         SWIN_CLS_MODEL = m
-         print(f"✅ Swin-V2 Classification loaded ({nc_swin}cls, 94.1% F1)")
     except Exception as e:
         print(f"⚠️ Swin-V2 Classification: {e}")
-
- # 🆕 Swin-V2 Concealment Detection (98.9% F1)
- global CONCEALMENT_MODEL
- CONCEALMENT_MODEL = None
- path = find_model("swin_v2_concealment_best.pt")
- if path:
     try:
         import timm
-         m = timm.create_model('swinv2_tiny_window8_256', pretrained=False, num_classes=2)
-         checkpoint = torch.load(path, map_location=DEVICE)
-         if isinstance(checkpoint, dict) and 'model_state_dict' in checkpoint:
-             m.load_state_dict(checkpoint['model_state_dict'])
         else:
-             m.load_state_dict(checkpoint)
-         m.to(DEVICE).eval()
-         CONCEALMENT_MODEL = m
-         print(f"✅ Swin-V2 Concealment loaded (98.9% F1)")
     except Exception as e:
-         print(f"⚠️ Swin-V2 Concealment: {e}")
-
- # 🆕 Swin-V2 Risk Assessment (5 levels, 97.2% F1)
- global RISK_ASSESSMENT_MODEL
- RISK_ASSESSMENT_MODEL = None
- path = find_model("swin_v2_risk_best.pt")
- if path:
     try:
         import timm
         m = timm.create_model('swinv2_tiny_window8_256', pretrained=False, num_classes=5)
-         checkpoint = torch.load(path, map_location=DEVICE)
-         if isinstance(checkpoint, dict) and 'model_state_dict' in checkpoint:
-             m.load_state_dict(checkpoint['model_state_dict'])
         else:
-             m.load_state_dict(checkpoint)
-         m.to(DEVICE).eval()
-         RISK_ASSESSMENT_MODEL = m
         print(f"✅ Swin-V2 Risk loaded (5 levels, 97.2% F1)")
     except Exception as e:
         print(f"⚠️ Swin-V2 Risk: {e}")

- # ═══════════════════════════════════════════════════════════
- # Edit 5: fix classify_image for Swin-V2
- # Search for "if name in ('YOLOv10x-cls', 'YOLO11x-cls'):" in the classify_image function
- # Replace the else branch (the one that handles PyTorch models) with:
- # ═══════════════════════════════════════════════════════════
-
- # === REPLACE the "else:" block in classify_image with: ===
- else:
-     # Choose the correct transforms and categories per model
-     if name == 'Swin-V2':
-         img_transform = get_swin_transforms()
-         cats = SWINV2_CATEGORIES
-     else:
-         img_transform = get_transforms()
-         cats = CATEGORIES
-
-     input_t = img_transform(img).unsqueeze(0).to(DEVICE)
-     with torch.no_grad():
-         out = model(input_t)
-     probs = torch.softmax(out, dim=1)[0].cpu().numpy()
-     for idx in range(len(probs)):
-         if idx < len(cats):
-             cat = cats[idx]
-             all_preds.setdefault(cat, []).append(float(probs[idx]))
-     top_idx = probs.argmax()
-     if top_idx < len(cats):
-         model_results[name] = {'top1': cats[top_idx], 'confidence': float(probs[top_idx])}
-
-
- # ═══════════════════════════════════════════════════════════
- # Edit 6: show Swin-V2 in analyze_image
- # Search for "ar_desc = " in analyze_image
- # Add the following code after it to render the Swin-V2 results:
- # ═══════════════════════════════════════════════════════════

- # === ADD AFTER "ar_desc = ..." line ===

- # 🆕 Swin-V2 Concealment + Risk Assessment
- swin_concealment_html = ""
- swin_risk_html = ""

- if 'CONCEALMENT_MODEL' in dir() and CONCEALMENT_MODEL is not None:
     try:
-         swin_t = get_swin_transforms()
-         img_t = swin_t(img.convert('RGB')).unsqueeze(0).to(DEVICE)
         with torch.no_grad():
-             out = CONCEALMENT_MODEL(img_t)
         probs = torch.softmax(out, dim=1)[0].cpu().numpy()
         conc_idx = probs.argmax()
         conc_label = SWINV2_CONCEALMENT_CLASSES[conc_idx]
         conc_conf = probs[conc_idx]

         if conc_label == 'no_match':
-             is_match = False
-             conc_color = '#E53935'
-             conc_text = f"⚠️ غير مطابق ({conc_conf:.1%})"  # Arabic: "no match"
         else:
-             conc_color = '#2E7D32'
-             conc_text = f"✅ مطابق ({conc_conf:.1%})"  # Arabic: "match"
-
-         swin_concealment_html = f"<div style='background:{conc_color}15;border:1px solid {conc_color}40;border-radius:8px;padding:6px 10px;margin-top:4px;font-size:12px;'><b style='color:{conc_color};'>🔍 Swin-V2 Concealment (98.9% F1): {conc_text}</b></div>"
-     except:
-         pass

- if 'RISK_ASSESSMENT_MODEL' in dir() and RISK_ASSESSMENT_MODEL is not None:
     try:
-         swin_t = get_swin_transforms()
-         img_t = swin_t(img.convert('RGB')).unsqueeze(0).to(DEVICE)
         with torch.no_grad():
-             out = RISK_ASSESSMENT_MODEL(img_t)
         probs = torch.softmax(out, dim=1)[0].cpu().numpy()
-         risk_idx = probs.argmax()
-         risk_label = SWINV2_RISK_CLASSES[risk_idx]
-         risk_conf = probs[risk_idx]

         risk_ar = {'critical': 'حرج', 'high': 'عالي', 'low': 'منخفض', 'medium': 'متوسط', 'safe': 'آمن'}  # Arabic level names
-         risk_colors = {'critical': '#B71C1C', 'high': '#D84315', 'medium': '#F9A825', 'low': '#558B2F', 'safe': '#2E7D32'}
-         r_c = risk_colors.get(risk_label, '#666')

-         swin_risk_html = f"<div style='background:{r_c}15;border:1px solid {r_c}40;border-radius:8px;padding:6px 10px;margin-top:4px;font-size:12px;'><b style='color:{r_c};'>⚡ Swin-V2 Risk (97.2% F1): {risk_ar.get(risk_label, risk_label)} ({risk_conf:.1%})</b></div>"
-     except:
-         pass
-
-
- # ═══════════════════════════════════════════════════════════
- # Edit 7: add the Swin-V2 HTML to the HTML template
- # Search for:  {anomaly_html}
- # Add after it:  {swin_concealment_html}
- #                {swin_risk_html}
- # ═══════════════════════════════════════════════════════════

- # In the HTML template, search for:
- #     {anomaly_html}
- # and replace it with:
- #     {anomaly_html}
- #     {swin_concealment_html}
- #     {swin_risk_html}

- # ═══════════════════════════════════════════════════════════
- # Edit 8: add Swin-V2 to the app title
- # Search for:   6 Classification + Detection
- # Replace with: 6+1 Classification + 3 Swin-V2 + Detection
- # ═══════════════════════════════════════════════════════════

- # ═══════════════════════════════════════════════════════════
- # Summary of the edits:
- # 1. VERSION = "17.2"
- # 2. Add SWINV2_CATEGORIES + the CONCEALMENT and RISK classes
- # 3. Add get_swin_transforms()
- # 4. Load the 3 Swin-V2 models in load_all_models
- # 5. Fix classify_image to use Swin-V2-specific transforms
- # 6. Add concealment detection + risk assessment in analyze_image
- # 7. Render the results in the HTML
- # 8. Update the title
- #
- # All of the physics analysis (8 techniques + PRMI + VTA) stays as-is!
- # ═══════════════════════════════════════════════════════════

 """
 ╔═══════════════════════════════════════════════════════════╗
+ ║ 🔱 swin_v2_addon.py — Swin-V2 Module for SONAR-AI         ║
+ ║ ✅ Place this file next to app.py on HuggingFace          ║
 ╠═══════════════════════════════════════════════════════════╣
+ ║ In the original app.py (v16.8), add just 3 lines:         ║
+ ║                                                           ║
+ ║ 1. At the top of the file:                                ║
+ ║        from swin_v2_addon import *                        ║
+ ║                                                           ║
+ ║ 2. At the end of load_all_models(), before MODELS_LOADED: ║
+ ║        load_swinv2_models(CLASSIFICATION_MODELS, DEVICE)  ║
+ ║                                                           ║
+ ║ 3. In analyze_image(), before en_desc:                    ║
+ ║        swin_html = run_swinv2_analysis(img, DEVICE)       ║
+ ║    then add {swin_html} to the HTML after {anomaly_html}  ║
 ╚═══════════════════════════════════════════════════════════╝
 """
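For orientation, a minimal sketch of how those three hook points sit inside app.py; the names load_all_models, analyze_image, CLASSIFICATION_MODELS, DEVICE, and anomaly_html come from the v16.8 app described above, and the stub bodies here are illustrative only:

    from swin_v2_addon import *  # 1. top of file

    CLASSIFICATION_MODELS = {}   # stub; v16.8 defines the real dict
    DEVICE = ADDON_DEVICE        # stub; v16.8 defines its own DEVICE

    def load_all_models():
        # ... the existing v16.8 loaders fill CLASSIFICATION_MODELS here ...
        load_swinv2_models(CLASSIFICATION_MODELS, DEVICE)  # 2. before MODELS_LOADED
        return len(CLASSIFICATION_MODELS) > 0

    def analyze_image(img, anomaly_html=""):
        swin_html = run_swinv2_analysis(img, DEVICE)  # 3. before en_desc
        # the addon panels render right after the existing anomaly panel
        return f"<div>{anomaly_html}{swin_html}</div>"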
 
+ import numpy as np

+ # Try importing torch
+ try:
+     import torch
+     import torch.nn as nn
+     from torchvision import transforms
+     TORCH_OK = True
+     ADDON_DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
+ except ImportError:
+     TORCH_OK = False
+     ADDON_DEVICE = None

 # ═══════════════════════════════════════════════════════════
+ # 🏷️ Categories
 # ═══════════════════════════════════════════════════════════

 SWINV2_CATEGORIES = sorted([
     'appliances', 'auto_parts', 'bags', 'banana', 'batteries', 'beverages',
     'cables', 'canned_food', 'ceramic', 'chemicals', 'cleaning', 'clothes',

 SWINV2_CONCEALMENT_CLASSES = ['match', 'no_match']
 SWINV2_RISK_CLASSES = ['critical', 'high', 'low', 'medium', 'safe']

 # ═══════════════════════════════════════════════════════════
+ # Global model references
 # ═══════════════════════════════════════════════════════════

+ SWINV2_CLASSIFICATION = None
+ SWINV2_CONCEALMENT = None
+ SWINV2_RISK = None


+ def get_swinv2_transforms(img_size=256):
+     if not TORCH_OK:
          return None
      return transforms.Compose([
+         transforms.Resize((img_size, img_size)),
          transforms.ToTensor(),
          transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
      ])

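As a quick sanity check, this transform turns any PIL image into a normalized 1×3×img_size×img_size float tensor; a minimal sketch, assuming Pillow and torchvision are available (the blank 640×480 image is a stand-in for a real scan):

    from PIL import Image

    t = get_swinv2_transforms(256)
    x = t(Image.new('RGB', (640, 480))).unsqueeze(0)  # add a batch dimension
    print(x.shape)  # torch.Size([1, 3, 256, 256]), a normalized float tensor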
+ def find_swin_model(name):
+     """Find a model file locally, or download it from the HuggingFace Hub."""
+     import os
+     MODEL_REPO = "DrAbbas/SONAR-AI-Models"
+
+     # Check next to this file (and in the Space's /app dir) before downloading
+     for base in [".", "/app", os.path.dirname(os.path.abspath(__file__))]:
+         path = os.path.join(base, name)
+         if os.path.exists(path):
+             return path
+     try:
+         from huggingface_hub import hf_hub_download
+         return hf_hub_download(repo_id=MODEL_REPO, filename=name)
+     except Exception:
+         pass
+     return None

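For example (a hypothetical call; hf_hub_download stores the file in the local Hugging Face cache and returns that cached path):

    p = find_swin_model("risk/best_swinv2_risk.pth")
    print(p or "model not found locally or on the Hub")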
+ def load_swinv2_models(classification_models_dict, device=None):
+     """
+     Load the 3 Swin-V2 models.
+     Call this inside load_all_models() in app.py:
+         load_swinv2_models(CLASSIFICATION_MODELS, DEVICE)
+     """
+     global SWINV2_CLASSIFICATION, SWINV2_CONCEALMENT, SWINV2_RISK
+
+     if not TORCH_OK:
+         print("⚠️ Swin-V2: PyTorch not available")
+         return
+
+     if device is None:
+         device = ADDON_DEVICE
+
+     # 1. Swin-V2 Classification (43 classes, 94.1% F1)
+     path = find_swin_model("classification/best_swinv2_43cls.pth")
      if path:
          try:
              import timm
+             nc = len(SWINV2_CATEGORIES)
+             m = timm.create_model('swinv2_tiny_window8_256', pretrained=False, num_classes=nc)
+             ckpt = torch.load(path, map_location=device)
+             if isinstance(ckpt, dict) and 'model_state_dict' in ckpt:
+                 m.load_state_dict(ckpt['model_state_dict'])
              else:
+                 m.load_state_dict(ckpt)
+             m.to(device).eval()
+             classification_models_dict['Swin-V2'] = m
+             SWINV2_CLASSIFICATION = m
+             print(f"✅ Swin-V2 Classification loaded ({nc}cls, 94.1% F1)")
          except Exception as e:
              print(f"⚠️ Swin-V2 Classification: {e}")
+
+     # 2. Concealment Detection (98.9% F1); the EVA-02 checkpoint is preferred
+     conc_path = find_swin_model("concealment/best_eva02.pth") or find_swin_model("concealment/best_swinv2.pth")
+     if conc_path:
          try:
              import timm
+             if 'eva02' in conc_path:
+                 m = timm.create_model('eva02_tiny_patch14_224', pretrained=False, num_classes=2)
+                 img_size_conc = 224
+             else:
+                 m = timm.create_model('swinv2_tiny_window8_256', pretrained=False, num_classes=2)
+                 img_size_conc = 256
+             ckpt = torch.load(conc_path, map_location=device)
+             if isinstance(ckpt, dict) and 'model_state_dict' in ckpt:
+                 m.load_state_dict(ckpt['model_state_dict'])
              else:
+                 m.load_state_dict(ckpt)
+             m.to(device).eval()
+             m._conc_img_size = img_size_conc  # remember the input size for inference
+             SWINV2_CONCEALMENT = m
+             print("✅ Concealment loaded (98.9% F1)")
          except Exception as e:
+             SWINV2_CONCEALMENT = None
+             print(f"⚠️ Concealment: {e}")
+
+     # 3. Risk Assessment (5 levels, 97.2% F1)
+     risk_path = find_swin_model("risk/best_swinv2_risk.pth")
+     if risk_path:
          try:
              import timm
              m = timm.create_model('swinv2_tiny_window8_256', pretrained=False, num_classes=5)
+             ckpt = torch.load(risk_path, map_location=device)
+             if isinstance(ckpt, dict) and 'model_state_dict' in ckpt:
+                 m.load_state_dict(ckpt['model_state_dict'])
              else:
+                 m.load_state_dict(ckpt)
+             m.to(device).eval()
+             SWINV2_RISK = m
              print(f"✅ Swin-V2 Risk loaded (5 levels, 97.2% F1)")
          except Exception as e:
+             SWINV2_RISK = None
              print(f"⚠️ Swin-V2 Risk: {e}")
+
+     swin_count = sum(1 for x in [SWINV2_CLASSIFICATION, SWINV2_CONCEALMENT, SWINV2_RISK] if x is not None)
+     print(f"🔱 Swin-V2: {swin_count}/3 models loaded")
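The loader above accepts two checkpoint layouts: a bare state_dict, or a training checkpoint that wraps it under a 'model_state_dict' key. A minimal sketch of writing each compatible file, assuming timm and torch are installed (the file names are illustrative):

    import timm, torch

    m = timm.create_model('swinv2_tiny_window8_256', pretrained=False, num_classes=5)

    # Layout 1: bare state_dict, loaded via m.load_state_dict(ckpt)
    torch.save(m.state_dict(), "bare.pth")

    # Layout 2: wrapped checkpoint, loaded via ckpt['model_state_dict']
    torch.save({'epoch': 10, 'model_state_dict': m.state_dict()}, "wrapped.pth")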

+ def classify_swinv2(img, device=None):
+     """
+     Classify an image with Swin-V2; returns (category, confidence).
+     """
+     if SWINV2_CLASSIFICATION is None or not TORCH_OK:
+         return None, 0.0
+
+     if device is None:
+         device = ADDON_DEVICE
+
+     try:
+         t = get_swinv2_transforms(256)
+         input_t = t(img.convert('RGB')).unsqueeze(0).to(device)
+         with torch.no_grad():
+             out = SWINV2_CLASSIFICATION(input_t)
+         probs = torch.softmax(out, dim=1)[0].cpu().numpy()
+         top_idx = probs.argmax()
+         if top_idx < len(SWINV2_CATEGORIES):
+             return SWINV2_CATEGORIES[top_idx], float(probs[top_idx])
+     except Exception:
+         pass
+     return None, 0.0
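Typical use, assuming the models were loaded first ("scan.jpg" is a placeholder path):

    from PIL import Image

    load_swinv2_models({}, ADDON_DEVICE)
    cat, conf = classify_swinv2(Image.open("scan.jpg"))
    if cat is not None:
        print(f"{cat}: {conf:.1%}")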
 
 

+ def run_swinv2_analysis(img, device=None):
+     """
+     Run the 3 Swin-V2 models and return an HTML block.
+     Call this inside analyze_image() in app.py:
+         swin_html = run_swinv2_analysis(img, DEVICE)
+     then add {swin_html} to the HTML after {anomaly_html}.
+     """
+     if img is None or not TORCH_OK:
+         return ""
+
+     if device is None:
+         device = ADDON_DEVICE
+
+     html = ""
+
+     # Swin-V2 Classification
+     if SWINV2_CLASSIFICATION is not None:
+         try:
+             t = get_swinv2_transforms(256)
+             input_t = t(img.convert('RGB')).unsqueeze(0).to(device)
+             with torch.no_grad():
+                 out = SWINV2_CLASSIFICATION(input_t)
+             probs = torch.softmax(out, dim=1)[0].cpu().numpy()
+             top_idx = probs.argmax()
+             if top_idx < len(SWINV2_CATEGORIES):
+                 cat = SWINV2_CATEGORIES[top_idx]
+                 conf = probs[top_idx]
+                 # Pull the HS code info from app.py if CARGO_DATABASE is available
+                 try:
+                     from __main__ import CARGO_DATABASE, get_hs_info
+                     info = get_hs_info(cat)
+                     hs_txt = f" — {info['hs']} {info['ar']}"
+                 except Exception:
+                     hs_txt = ""
+
+                 html += f"<div style='background:#E8EAF6;border:2px solid #3F51B5;border-radius:10px;padding:8px;margin-top:6px;'>"
+                 html += f"<div style='background:#3F51B5;color:white;padding:4px 10px;border-radius:6px;font-size:11px;font-weight:bold;margin-bottom:4px;'>🔱 Swin-V2 Classification — 94.1% F1 (43 classes)</div>"
+                 html += f"<div style='display:flex;align-items:center;gap:8px;'>"
+                 html += f"<div style='background:#3F51B5;color:white;padding:6px 14px;border-radius:8px;font-weight:bold;font-size:14px;'>{cat.upper()}{hs_txt}</div>"
+                 html += f"<div style='flex:1;background:#ddd;border-radius:10px;height:20px;overflow:hidden;'><div style='background:#3F51B5;height:100%;width:{conf*100:.0f}%;border-radius:10px;'></div></div>"
+                 html += f"<div style='font-weight:bold;color:#3F51B5;'>{conf:.0%}</div>"
+                 html += f"</div></div>"
+         except Exception as e:
+             print(f"⚠️ Swin-V2 cls display: {e}")

+     # Concealment Detection
+     if SWINV2_CONCEALMENT is not None:
          try:
+             conc_size = getattr(SWINV2_CONCEALMENT, '_conc_img_size', 256)
+             t = get_swinv2_transforms(conc_size)
+             input_t = t(img.convert('RGB')).unsqueeze(0).to(device)
              with torch.no_grad():
+                 out = SWINV2_CONCEALMENT(input_t)
              probs = torch.softmax(out, dim=1)[0].cpu().numpy()
              conc_idx = probs.argmax()
              conc_label = SWINV2_CONCEALMENT_CLASSES[conc_idx]
              conc_conf = probs[conc_idx]

              if conc_label == 'no_match':
+                 cc = '#E53935'
+                 ct = f"⚠️ غير مطابق ({conc_conf:.1%})"  # Arabic: "no match"
              else:
+                 cc = '#2E7D32'
+                 ct = f"✅ مطابق ({conc_conf:.1%})"  # Arabic: "match"
+
+             html += f"<div style='background:{cc}15;border:1px solid {cc}40;border-radius:8px;padding:6px 10px;margin-top:4px;font-size:12px;'>"
+             html += f"<b style='color:{cc};'>🔍 Concealment Detection (98.9% F1): {ct}</b></div>"
+         except Exception as e:
+             print(f"⚠️ Concealment display: {e}")

+     # Risk Assessment
+     if SWINV2_RISK is not None:
          try:
+             t = get_swinv2_transforms(256)
+             input_t = t(img.convert('RGB')).unsqueeze(0).to(device)
              with torch.no_grad():
+                 out = SWINV2_RISK(input_t)
              probs = torch.softmax(out, dim=1)[0].cpu().numpy()
+             ridx = probs.argmax()
+             rlabel = SWINV2_RISK_CLASSES[ridx]
+             rconf = probs[ridx]

              risk_ar = {'critical': 'حرج', 'high': 'عالي', 'low': 'منخفض', 'medium': 'متوسط', 'safe': 'آمن'}  # Arabic level names
+             risk_clrs = {'critical': '#B71C1C', 'high': '#D84315', 'medium': '#F9A825', 'low': '#558B2F', 'safe': '#2E7D32'}
+             rc2 = risk_clrs.get(rlabel, '#666')

+             html += f"<div style='background:{rc2}15;border:1px solid {rc2}40;border-radius:8px;padding:6px 10px;margin-top:4px;font-size:12px;'>"
+             html += f"<b style='color:{rc2};'>⚡ Risk Assessment (97.2% F1): {risk_ar.get(rlabel, rlabel)} ({rconf:.1%})</b></div>"
+         except Exception as e:
+             print(f"⚠️ Risk display: {e}")
+
+     return html
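For completeness, a hypothetical standalone smoke test for the addon, run from the same directory as swin_v2_addon.py ("container_scan.jpg" is a placeholder path):

    from PIL import Image
    from swin_v2_addon import load_swinv2_models, run_swinv2_analysis, ADDON_DEVICE

    models = {}
    load_swinv2_models(models, ADDON_DEVICE)  # prints a ✅/⚠️ line per model
    html = run_swinv2_analysis(Image.open("container_scan.jpg"), ADDON_DEVICE)
    print(html or "no Swin-V2 models loaded")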