A-I4All committed on
Commit
a477e16
·
verified ·
1 Parent(s): 93e2977

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -31
app.py CHANGED
@@ -1,18 +1,21 @@
1
  import os
2
 
3
- # ✅ Use temp directory for safe model caching (fixes permission issues)
4
  os.environ["TRANSFORMERS_CACHE"] = "/tmp/hf_cache"
5
 
6
  import streamlit as st
7
  from PIL import Image
8
  import random
9
  import torch
10
- from transformers import AutoImageProcessor, SiglipForImageClassification
11
 
12
- # --- Constants ---
 
 
 
13
  MODEL_NAME = "prithivMLmods/Recycling-Net-11"
14
 
15
- # Sustainability Tips
16
  TIPS = [
17
  "Rinse containers before recycling to avoid contamination.",
18
  "Avoid using plastic bags for recyclables – use bins or boxes.",
@@ -26,7 +29,7 @@ TIPS = [
26
  "Avoid single-use plastics whenever possible.",
27
  ]
28
 
29
- # Government recycling resources
30
  GOVERNMENT_LINKS = {
31
  "Pakistan": "https://environment.gov.pk/",
32
  "India": "https://www.cpcb.nic.in/",
@@ -38,15 +41,20 @@ GOVERNMENT_LINKS = {
38
  "Germany": "https://www.bmu.de/en/topics/water-waste-soil/waste-management",
39
  }
40
 
41
- # --- Load Model ---
42
- @st.cache_resource(show_spinner=False)
43
  def load_model():
44
- processor = AutoImageProcessor.from_pretrained(MODEL_NAME)
45
- model = SiglipForImageClassification.from_pretrained(MODEL_NAME)
46
- model.eval()
47
- return processor, model
48
-
49
- # --- Predict Function ---
 
 
 
 
 
50
  def predict(image: Image.Image, processor, model):
51
  inputs = processor(images=image, return_tensors="pt")
52
  with torch.no_grad():
@@ -58,9 +66,9 @@ def predict(image: Image.Image, processor, model):
58
  confidence = conf.item()
59
  return class_name, confidence
60
 
61
- # --- Suggestion Function ---
62
  def get_suggestion(label: str) -> str:
63
- tips = {
64
  "aluminium": "Rinse and recycle aluminum cans. They are infinitely recyclable.",
65
  "batteries": "Do not throw in the trash. Use proper e-waste collection centers.",
66
  "cardboard": "Flatten and keep dry. Avoid greasy pizza boxes.",
@@ -72,17 +80,16 @@ def get_suggestion(label: str) -> str:
72
  "soft plastics": "Often require store drop-off. Don’t mix with other recyclables.",
73
  "takeaway cups": "Check local rules. Many are lined and not recyclable curbside.",
74
  }
75
- return tips.get(label, "Please check your local rules for proper disposal of this item.")
76
 
77
- # --- Streamlit App ---
78
  def main():
79
  st.set_page_config(page_title="♻️ Recycling Helper AI", layout="centered")
80
  st.title("♻️ Recycling Helper AI")
81
  st.subheader("An AI-powered app to identify recyclable materials and promote sustainability.")
82
-
83
  st.markdown("---")
84
 
85
- # Sidebar content
86
  with st.sidebar:
87
  st.header("📘 About This App")
88
  st.markdown(
@@ -102,10 +109,10 @@ def main():
102
  tip = random.choice(TIPS)
103
  st.success(tip)
104
 
105
- # Load the model
106
  processor, model = load_model()
107
 
108
- # File upload
109
  st.markdown("### 📤 Upload Waste Image")
110
  uploaded_file = st.file_uploader("Upload an image of a recyclable item", type=["png", "jpg", "jpeg"])
111
 
@@ -114,24 +121,22 @@ def main():
114
  image = Image.open(uploaded_file).convert("RGB")
115
  st.image(image, caption="Uploaded Image", use_column_width=True)
116
 
117
- with st.spinner("Analyzing with AI model..."):
118
- class_name, confidence = predict(image, processor, model)
119
-
120
- st.success(f"**Predicted Material:** `{class_name}` \n**Confidence:** `{confidence:.2%}`")
121
 
122
- suggestion = get_suggestion(class_name)
123
- st.info(f"**Tip:** {suggestion}")
124
 
125
  except Exception as e:
126
- st.error(f"Something went wrong during prediction: {e}")
 
127
 
128
- # Optional: Show class labels
129
- with st.expander("🔍 Show Model Classes"):
130
  st.write(model.config.id2label)
131
 
132
  st.markdown("---")
133
  st.caption("Made with 💚 for a sustainable future | Hackathon 2025")
134
 
135
- # Run app
136
  if __name__ == "__main__":
137
  main()
 
1
  import os
2
 
3
+ # ✅ Use temp dir for safe model caching in Spaces/Docker
4
  os.environ["TRANSFORMERS_CACHE"] = "/tmp/hf_cache"
5
 
6
  import streamlit as st
7
  from PIL import Image
8
  import random
9
  import torch
10
+ from transformers import AutoImageProcessor, SiglipForImageClassification, logging
11
 
12
+ # Quiet transformers logging: only error-level messages are shown
13
+ logging.set_verbosity_error()
14
+
15
+ # Constants
16
  MODEL_NAME = "prithivMLmods/Recycling-Net-11"
17
 
18
+ # Daily sustainability tips
19
  TIPS = [
20
  "Rinse containers before recycling to avoid contamination.",
21
  "Avoid using plastic bags for recyclables – use bins or boxes.",
 
29
  "Avoid single-use plastics whenever possible.",
30
  ]
31
 
32
+ # Government recycling links
33
  GOVERNMENT_LINKS = {
34
  "Pakistan": "https://environment.gov.pk/",
35
  "India": "https://www.cpcb.nic.in/",
 
41
  "Germany": "https://www.bmu.de/en/topics/water-waste-soil/waste-management",
42
  }
43
 
44
+ # Load model and processor
45
+ @st.cache_resource(show_spinner="🔄 Loading AI model...")
46
  def load_model():
47
+ try:
48
+ processor = AutoImageProcessor.from_pretrained(MODEL_NAME, revision="main")
49
+ model = SiglipForImageClassification.from_pretrained(MODEL_NAME, revision="main")
50
+ model.eval()
51
+ return processor, model
52
+ except Exception as e:
53
+ st.error("❌ Failed to load the model. Please check the model name or your connection.")
54
+ st.exception(e)
55
+ raise e
56
+
57
+ # Prediction function
58
  def predict(image: Image.Image, processor, model):
59
  inputs = processor(images=image, return_tensors="pt")
60
  with torch.no_grad():
 
66
  confidence = conf.item()
67
  return class_name, confidence
68
 
69
+ # Recycling tip per label
70
  def get_suggestion(label: str) -> str:
71
+ suggestions = {
72
  "aluminium": "Rinse and recycle aluminum cans. They are infinitely recyclable.",
73
  "batteries": "Do not throw in the trash. Use proper e-waste collection centers.",
74
  "cardboard": "Flatten and keep dry. Avoid greasy pizza boxes.",
 
80
  "soft plastics": "Often require store drop-off. Don’t mix with other recyclables.",
81
  "takeaway cups": "Check local rules. Many are lined and not recyclable curbside.",
82
  }
83
+ return suggestions.get(label, "Please check your local rules for proper disposal of this item.")
84
 
85
+ # Main app
86
  def main():
87
  st.set_page_config(page_title="♻️ Recycling Helper AI", layout="centered")
88
  st.title("♻️ Recycling Helper AI")
89
  st.subheader("An AI-powered app to identify recyclable materials and promote sustainability.")
 
90
  st.markdown("---")
91
 
92
+ # Sidebar
93
  with st.sidebar:
94
  st.header("📘 About This App")
95
  st.markdown(
 
109
  tip = random.choice(TIPS)
110
  st.success(tip)
111
 
112
+ # Load model
113
  processor, model = load_model()
114
 
115
+ # Upload image
116
  st.markdown("### 📤 Upload Waste Image")
117
  uploaded_file = st.file_uploader("Upload an image of a recyclable item", type=["png", "jpg", "jpeg"])
118
 
 
121
  image = Image.open(uploaded_file).convert("RGB")
122
  st.image(image, caption="Uploaded Image", use_column_width=True)
123
 
124
+ with st.spinner("🔍 Classifying image..."):
125
+ label, confidence = predict(image, processor, model)
 
 
126
 
127
+ st.success(f"**Predicted Material:** `{label}` \n**Confidence:** `{confidence:.2%}`")
128
+ st.info(f"**Disposal Tip:** {get_suggestion(label)}")
129
 
130
  except Exception as e:
131
+ st.error("An error occurred during prediction.")
132
+ st.exception(e)
133
 
134
+ with st.expander("🔍 Show All Recognizable Materials"):
 
135
  st.write(model.config.id2label)
136
 
137
  st.markdown("---")
138
  st.caption("Made with 💚 for a sustainable future | Hackathon 2025")
139
 
140
+ # Run
141
  if __name__ == "__main__":
142
  main()