udayjawheri commited on
Commit
ad5f807
·
verified ·
1 Parent(s): 913aef8

Upload folder using huggingface_hub

Browse files
Files changed (7) hide show
  1. .gitattributes +3 -35
  2. README.md +2 -8
  3. app.ipynb +134 -0
  4. app.py +85 -0
  5. main.ipynb +0 -0
  6. model.h5 +3 -0
  7. requirements.txt +0 -0
.gitattributes CHANGED
@@ -1,35 +1,3 @@
1
- *.7z filter=lfs diff=lfs merge=lfs -text
2
- *.arrow filter=lfs diff=lfs merge=lfs -text
3
- *.bin filter=lfs diff=lfs merge=lfs -text
4
- *.bz2 filter=lfs diff=lfs merge=lfs -text
5
- *.ckpt filter=lfs diff=lfs merge=lfs -text
6
- *.ftz filter=lfs diff=lfs merge=lfs -text
7
- *.gz filter=lfs diff=lfs merge=lfs -text
8
- *.h5 filter=lfs diff=lfs merge=lfs -text
9
- *.joblib filter=lfs diff=lfs merge=lfs -text
10
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
- *.model filter=lfs diff=lfs merge=lfs -text
13
- *.msgpack filter=lfs diff=lfs merge=lfs -text
14
- *.npy filter=lfs diff=lfs merge=lfs -text
15
- *.npz filter=lfs diff=lfs merge=lfs -text
16
- *.onnx filter=lfs diff=lfs merge=lfs -text
17
- *.ot filter=lfs diff=lfs merge=lfs -text
18
- *.parquet filter=lfs diff=lfs merge=lfs -text
19
- *.pb filter=lfs diff=lfs merge=lfs -text
20
- *.pickle filter=lfs diff=lfs merge=lfs -text
21
- *.pkl filter=lfs diff=lfs merge=lfs -text
22
- *.pt filter=lfs diff=lfs merge=lfs -text
23
- *.pth filter=lfs diff=lfs merge=lfs -text
24
- *.rar filter=lfs diff=lfs merge=lfs -text
25
- *.safetensors filter=lfs diff=lfs merge=lfs -text
26
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
- *.tar.* filter=lfs diff=lfs merge=lfs -text
28
- *.tar filter=lfs diff=lfs merge=lfs -text
29
- *.tflite filter=lfs diff=lfs merge=lfs -text
30
- *.tgz filter=lfs diff=lfs merge=lfs -text
31
- *.wasm filter=lfs diff=lfs merge=lfs -text
32
- *.xz filter=lfs diff=lfs merge=lfs -text
33
- *.zip filter=lfs diff=lfs merge=lfs -text
34
- *.zst filter=lfs diff=lfs merge=lfs -text
35
- *tfevents* filter=lfs diff=lfs merge=lfs -text
 
1
+ # Auto detect text files and perform LF normalization
2
+ * text=auto
3
+ model.h5 filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
README.md CHANGED
@@ -1,12 +1,6 @@
1
  ---
2
- title: Trafic Sing
3
- emoji: 🏆
4
- colorFrom: red
5
- colorTo: blue
6
  sdk: gradio
7
  sdk_version: 4.19.2
8
- app_file: app.py
9
- pinned: false
10
  ---
11
-
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
  ---
2
+ title: trafic_sing
3
+ app_file: app.py
 
 
4
  sdk: gradio
5
  sdk_version: 4.19.2
 
 
6
  ---
 
 
app.ipynb ADDED
@@ -0,0 +1,134 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import gradio as gr\n",
10
+ "import tensorflow as tf\n",
11
+ "import numpy as np\n",
12
+ "from PIL import Image"
13
+ ]
14
+ },
15
+ {
16
+ "cell_type": "code",
17
+ "execution_count": null,
18
+ "metadata": {},
19
+ "outputs": [],
20
+ "source": [
21
+ "# Loading trained model\n",
22
+ "model = tf.keras.models.load_model(\"model.h5\") "
23
+ ]
24
+ },
25
+ {
26
+ "cell_type": "code",
27
+ "execution_count": null,
28
+ "metadata": {},
29
+ "outputs": [],
30
+ "source": [
31
+ "\n",
32
+ "# Label Overview\n",
33
+ "class_num = { 0:'Speed limit (20km/h)',\n",
34
+ " 1:'Speed limit (30km/h)', +\n",
35
+ " 2:'Speed limit (50km/h)', \n",
36
+ " 3:'Speed limit (60km/h)', \n",
37
+ " 4:'Speed limit (70km/h)', \n",
38
+ " 5:'Speed limit (80km/h)', \n",
39
+ " 6:'End of speed limit (80km/h)', \n",
40
+ " 7:'Speed limit (100km/h)', \n",
41
+ " 8:'Speed limit (120km/h)', \n",
42
+ " 9:'No passing', \n",
43
+ " 10:'No passing veh over 3.5 tons', \n",
44
+ " 11:'Right-of-way at intersection', \n",
45
+ " 12:'Priority road', \n",
46
+ " 13:'Yield', \n",
47
+ " 14:'Stop', \n",
48
+ " 15:'No vehicles', \n",
49
+ " 16:'Veh > 3.5 tons prohibited', \n",
50
+ " 17:'No entry', \n",
51
+ " 18:'General caution', \n",
52
+ " 19:'Dangerous curve left', \n",
53
+ " 20:'Dangerous curve right', \n",
54
+ " 21:'Double curve', \n",
55
+ " 22:'Bumpy road', \n",
56
+ " 23:'Slippery road', \n",
57
+ " 24:'Road narrows on the right', \n",
58
+ " 25:'Road work', \n",
59
+ " 26:'Traffic signals', \n",
60
+ " 27:'Pedestrians', \n",
61
+ " 28:'Children crossing', \n",
62
+ " 29:'Bicycles crossing', \n",
63
+ " 30:'Beware of ice/snow',\n",
64
+ " 31:'Wild animals crossing', \n",
65
+ " 32:'End speed + passing limits', \n",
66
+ " 33:'Turn right ahead', \n",
67
+ " 34:'Turn left ahead', \n",
68
+ " 35:'Ahead only', \n",
69
+ " 36:'Go straight or right', \n",
70
+ " 37:'Go straight or left', \n",
71
+ " 38:'Keep right', \n",
72
+ " 39:'Keep left', \n",
73
+ " 40:'Roundabout mandatory', \n",
74
+ " 41:'End of no passing', \n",
75
+ " 42:'End no passing veh > 3.5 tons' }\n"
76
+ ]
77
+ },
78
+ {
79
+ "cell_type": "code",
80
+ "execution_count": null,
81
+ "metadata": {},
82
+ "outputs": [],
83
+ "source": [
84
+ "\n",
85
+ "# Define a function to preprocess input image\n",
86
+ "def preprocess_image(image):\n",
87
+ " # Resize the image to the required dimensions\n",
88
+ " image = image.resize((30, 30))\n",
89
+ " # Convert the PIL image to a NumPy array\n",
90
+ " image_array = np.array(image)\n",
91
+ " # Normalize pixel values to be between 0 and 1\n",
92
+ " image_array = image_array / 255.0\n",
93
+ " # Add batch dimension to the image\n",
94
+ " image_array = np.expand_dims(image_array, axis=0)\n",
95
+ " return image_array\n",
96
+ "\n",
97
+ "# Define a function to make predictions\n",
98
+ "def predict_class(image):\n",
99
+ " # Preprocess the input image\n",
100
+ " processed_image = preprocess_image(image)\n",
101
+ " # Make predictions using the loaded model\n",
102
+ " predictions = model.predict(processed_image)\n",
103
+ " # Get the predicted class index\n",
104
+ " predicted_class_index = np.argmax(predictions)\n",
105
+ " # Return the predicted class label and class number\n",
106
+ " return f\"{class_num[predicted_class_index]} (Class {predicted_class_index})\"\n",
107
+ "\n",
108
+ "# Create Gradio interface\n",
109
+ "iface = gr.Interface(\n",
110
+ " fn=predict_class,\n",
111
+ " inputs=gr.Image(type=\"pil\", label=\"Upload an image\"),\n",
112
+ " outputs=\"text\"\n",
113
+ ")\n"
114
+ ]
115
+ },
116
+ {
117
+ "cell_type": "code",
118
+ "execution_count": null,
119
+ "metadata": {},
120
+ "outputs": [],
121
+ "source": [
122
+ "# Launch the Gradio interface\n",
123
+ "iface.launch()\n"
124
+ ]
125
+ }
126
+ ],
127
+ "metadata": {
128
+ "language_info": {
129
+ "name": "python"
130
+ }
131
+ },
132
+ "nbformat": 4,
133
+ "nbformat_minor": 2
134
+ }
app.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import tensorflow as tf
3
+ import numpy as np
4
+ from PIL import Image
5
+
6
# Load the trained traffic-sign classifier.
# compile=False: this script only runs inference (model.predict), so skip
# restoring the training configuration/optimizer state — faster load and
# avoids the "no training configuration" warning for inference-only use.
model = tf.keras.models.load_model("model.h5", compile=False)
8
+
9
# Label overview: maps the model's output class index (0-42) to the
# human-readable sign name (GTSRB traffic-sign dataset ordering).
# NOTE: the original had a stray '+' after entry 1, which silently turned
# the next key into the unary-plus expression `+ 2` — same value, but a
# typo waiting to bite; removed here.
class_num = {
    0: 'Speed limit (20km/h)',
    1: 'Speed limit (30km/h)',
    2: 'Speed limit (50km/h)',
    3: 'Speed limit (60km/h)',
    4: 'Speed limit (70km/h)',
    5: 'Speed limit (80km/h)',
    6: 'End of speed limit (80km/h)',
    7: 'Speed limit (100km/h)',
    8: 'Speed limit (120km/h)',
    9: 'No passing',
    10: 'No passing veh over 3.5 tons',
    11: 'Right-of-way at intersection',
    12: 'Priority road',
    13: 'Yield',
    14: 'Stop',
    15: 'No vehicles',
    16: 'Veh > 3.5 tons prohibited',
    17: 'No entry',
    18: 'General caution',
    19: 'Dangerous curve left',
    20: 'Dangerous curve right',
    21: 'Double curve',
    22: 'Bumpy road',
    23: 'Slippery road',
    24: 'Road narrows on the right',
    25: 'Road work',
    26: 'Traffic signals',
    27: 'Pedestrians',
    28: 'Children crossing',
    29: 'Bicycles crossing',
    30: 'Beware of ice/snow',
    31: 'Wild animals crossing',
    32: 'End speed + passing limits',
    33: 'Turn right ahead',
    34: 'Turn left ahead',
    35: 'Ahead only',
    36: 'Go straight or right',
    37: 'Go straight or left',
    38: 'Keep right',
    39: 'Keep left',
    40: 'Roundabout mandatory',
    41: 'End of no passing',
    42: 'End no passing veh > 3.5 tons',
}
53
+
54
# Preprocess an uploaded image into the tensor shape the model expects.
def preprocess_image(image):
    """Convert a PIL image to a normalized float array of shape (1, 30, 30, 3).

    Args:
        image: a PIL.Image as delivered by the Gradio image input.

    Returns:
        numpy array of shape (1, 30, 30, 3) with values in [0, 1].
    """
    # Force 3-channel RGB: uploads may be RGBA (PNG) or grayscale, which
    # would otherwise produce a 4- or 1-channel array and break inference.
    image = image.convert("RGB")
    # Resize to the 30x30 resolution the model was trained on.
    image = image.resize((30, 30))
    # PIL -> NumPy, then scale uint8 [0, 255] down to [0, 1].
    image_array = np.array(image) / 255.0
    # Add a leading batch dimension for model.predict.
    return np.expand_dims(image_array, axis=0)
65
+
66
# Classify one uploaded image and format the result for the UI.
def predict_class(image):
    """Return the predicted sign as '<label> (Class <index>)'.

    Args:
        image: a PIL.Image from the Gradio input component.

    Returns:
        str: human-readable label plus the numeric class index.
    """
    # Preprocess into the (1, 30, 30, 3) batch the model expects.
    processed_image = preprocess_image(image)
    # Per-class scores for the single-image batch.
    predictions = model.predict(processed_image)
    # int(): np.argmax returns a NumPy scalar; cast so the dict lookup and
    # the f-string use a plain Python int (clean "5", not a numpy repr).
    predicted_class_index = int(np.argmax(predictions))
    return f"{class_num[predicted_class_index]} (Class {predicted_class_index})"
76
+
77
# Wire up the web UI: one image input, one text output, classified by
# predict_class. (fn, inputs, outputs are gr.Interface's first three
# positional parameters.)
iface = gr.Interface(
    predict_class,
    gr.Image(type="pil", label="Upload an image"),
    "text",
)

# Start the Gradio server.
iface.launch()
main.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
model.h5 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1ef22e9ce57a458756d8ab375a2386c42df42d108c5e7f176f7bb189a2a56028
3
+ size 8478464
requirements.txt ADDED
Binary file (3.29 kB). View file