Jasur05 commited on
Commit
7b34aa7
·
1 Parent(s): 10d5153

initial commit

Browse files
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ effnetb2_traffic_sign_recognition.pth filter=lfs diff=lfs merge=lfs -text
37
+ *.pth filter=lfs diff=lfs merge=lfs -text
app.py ADDED
@@ -0,0 +1,97 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""Gradio traffic-sign classifier: model setup and class labels."""

import gradio as gr
import os
import torch
from model import create_effnetb2_model
from timeit import default_timer as timer
from typing import Tuple, Dict

# GTSRB (German Traffic Sign Recognition Benchmark) class names,
# index-aligned with the model's 43 output logits.
class_classes = [
    "Speed limit (20km/h)",
    "Speed limit (30km/h)",
    "Speed limit (50km/h)",
    "Speed limit (60km/h)",
    "Speed limit (70km/h)",
    "Speed limit (80km/h)",
    "End of speed limit (80km/h)",
    "Speed limit (100km/h)",
    "Speed limit (120km/h)",
    "No passing",
    "No passing for vehicles over 3.5 metric tons",
    "Right-of-way at the next intersection",
    "Priority road",
    "Yield",
    "Stop",
    "No vehicles",
    "Vehicles over 3.5 metric tons prohibited",
    "No entry",
    "General caution",
    "Dangerous curve to the left",
    "Dangerous curve to the right",
    "Double curve",
    "Bumpy road",
    "Slippery road",
    "Road narrows on the right",
    "Road work",
    "Traffic signals",
    "Pedestrians",
    "Children crossing",
    "Bicycles crossing",
    "Beware of ice/snow",
    "Wild animals crossing",
    "End of all speed and passing limits",
    "Turn right ahead",
    "Turn left ahead",
    "Ahead only",
    "Go straight or right",
    "Go straight or left",
    "Keep right",
    "Keep left",
    "Roundabout mandatory",
    "End of no passing",
    "End of no passing by vehicles over 3.5 metric tons",
]

# 2. Model and transforms preparation.
# BUG FIX: the head was built with only 3 classes, but the checkpoint and
# class list have 43 — load_state_dict would fail with a shape mismatch.
# Derive the head size from the class list so they can never diverge.
effnetb2, effnetb2_transforms = create_effnetb2_model(num_classes=len(class_classes))

# BUG FIX: the checkpoint is committed at the repository root (see the
# commit's file list), not under traffic_sign_classification/. Load on CPU
# so the app works on CPU-only Spaces hardware.
effnetb2.load_state_dict(
    torch.load(
        f="effnetb2_traffic_sign_recognition.pth",
        map_location=torch.device("cpu"),
    )
)
# Predict function used as the Gradio inference callback.

def predict(
    img,
    model=effnetb2,
    transform=effnetb2_transforms,
    class_classes: list[str] = class_classes,  # 43 human-readable names
    k: int = 3,
) -> Tuple[Dict[str, float], float]:
    """Classify a traffic-sign image and time the inference.

    Args:
        img: Input image (anything `transform` accepts, e.g. a PIL image).
        model: Classifier producing one logit per class.
        transform: Preprocessing pipeline producing a CHW tensor.
        class_classes: Index-aligned human-readable class names.
        k: Number of top predictions to return.

    Returns:
        Tuple of:
        * dict of top-k {label: prob}, highest probability first
        * inference time in seconds, rounded to 4 decimal places

    BUG FIX: the annotation was `List[str]`, but `List` was never imported
    (only `Tuple, Dict`), so defining this function raised a NameError.
    The builtin generic `list[str]` needs no import.
    """
    start = timer()

    # 1. Pre-process: add a batch dimension -> [1, C, H, W].
    img_t = transform(img).unsqueeze(0)

    # 2. Forward pass in inference mode (no autograd bookkeeping).
    model.eval()
    with torch.inference_mode():
        logits = model(img_t)
        probs = torch.softmax(logits, dim=1).squeeze(0)  # shape [num_classes]

    # 3. Top-k. `topk` returns values sorted descending, so the dict is
    # already ordered best-first.
    top_probs, top_idxs = probs.topk(k)
    pred_topk = {
        class_classes[int(idx)]: float(prob)
        for idx, prob in zip(top_idxs, top_probs)
    }

    pred_time = round(timer() - start, 4)
    return pred_topk, pred_time
# Collect the bundled demo images; Gradio's `examples` parameter expects a
# list of per-example argument lists, hence the one-element inner lists.
example_list = [[f"examples/{fname}"] for fname in os.listdir("examples")]
# 4. gradio app
effnetb2_traffic_sign_recognition.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:020a082f6adb11370ecb526c2b7a169162a9d9417cbe95972cc6211027f32839
3
+ size 31513082
examples/00130.ppm ADDED
examples/00323.ppm ADDED
examples/00838.ppm ADDED
model.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""EfficientNet-B2 model factory for the traffic-sign classifier."""

import torch
import torchvision
from torch import nn

# Resolve the compute device once at import time. (A bare `device`
# expression — a leftover notebook cell that evaluated to nothing — was
# removed; it had no effect outside a REPL.)
device = "cuda" if torch.cuda.is_available() else "cpu"
def create_effnetb2_model(num_classes=43,
                          seed: int = 42):
    """Build a frozen EfficientNet-B2 with a fresh classification head.

    Args:
        num_classes: Output size of the new head (43 = GTSRB classes).
        seed: Random seed applied before creating the head so its initial
            weights are reproducible.

    Returns:
        (model, transforms): the EfficientNet-B2 with its backbone frozen
        and a new `num_classes`-way classifier, plus the preprocessing
        transforms matching the pretrained weights.
    """
    # 1. Pretrained ImageNet weights and their matching preprocessing.
    # (The redundant function-local re-imports of torch/nn/torchvision and
    # the unused datasets/transforms/ToTensor imports were removed — the
    # module-level imports already provide everything needed.)
    weights = torchvision.models.EfficientNet_B2_Weights.DEFAULT

    # 2. Transforms that reproduce the training-time preprocessing.
    # Renamed from `transforms` to avoid shadowing torchvision.transforms.
    effnet_transforms = weights.transforms()

    # 3. Instantiate the backbone (downloads the weights on first use).
    model = torchvision.models.efficientnet_b2(weights=weights)

    # 4. Freeze the backbone so only the new head trains (zero grad flow).
    for param in model.parameters():
        param.requires_grad = False

    # 5. BUG FIX: `seed` was accepted but never used. Seed the RNG here so
    # the new head's random initialization is reproducible, which is the
    # evident purpose of the parameter.
    torch.manual_seed(seed)

    # EfficientNet-B2's feature extractor outputs 1408 channels.
    model.classifier = nn.Sequential(
        nn.Dropout(p=0.3, inplace=True),
        nn.Linear(in_features=1408,
                  out_features=num_classes,
                  bias=True),
    )
    return model, effnet_transforms
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ torch==2.6.0
2
+ torchvision==0.21.0
3
+ gradio==5.35
4
+