Spaces:
Runtime error
Runtime error
Commit ·
d9d1fcf
1
Parent(s): df5ae61
Added project
Browse files- README.md +56 -5
- SuEatableLife_Food_Fooprint_database.xlsx +0 -0
- app.py +327 -0
- carbon_model.py +41 -0
- mcp_server.py +62 -0
- meal_planner.py +142 -0
- parser.py +71 -0
- requirements.txt +7 -0
- spoilage_data.json +323 -0
- spoilage_model.py +74 -0
README.md
CHANGED
|
@@ -1,13 +1,64 @@
|
|
| 1 |
---
|
| 2 |
title: EcoChef
|
| 3 |
-
emoji:
|
| 4 |
-
|
| 5 |
-
|
|
|
|
| 6 |
sdk: gradio
|
| 7 |
sdk_version: 5.33.1
|
| 8 |
app_file: app.py
|
| 9 |
-
pinned:
|
| 10 |
short_description: Eco-Friendly AI Meal Planner with Food Carbon Insights
|
| 11 |
---
|
| 12 |
|
| 13 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
---
|
| 2 |
title: EcoChef
|
| 3 |
+
emoji: 🌍
|
| 4 |
+
tag: mcp-server-track
|
| 5 |
+
colorFrom: purple
|
| 6 |
+
colorTo: green
|
| 7 |
sdk: gradio
|
| 8 |
sdk_version: 5.33.1
|
| 9 |
app_file: app.py
|
| 10 |
+
pinned: true
|
| 11 |
short_description: Eco-Friendly AI Meal Planner with Food Carbon Insights
|
| 12 |
---
|
| 13 |
|
| 14 |
+
# 🥗 EcoChef
|
| 15 |
+
|
| 16 |
+
**Eco-Friendly AI Meal Planner with Food Carbon Insights**
|
| 17 |
+
|
| 18 |
+
---
|
| 19 |
+
|
| 20 |
+
### 🚀 Purpose
|
| 21 |
+
|
| 22 |
+
EcoChef helps reduce food waste and carbon emissions by analyzing your fridge ingredients, predicting spoilage risk, estimating environmental impact, and suggesting optimized meal plans using AI.
|
| 23 |
+
|
| 24 |
+
[Watch EcoChef in action](https://www.loom.com/share/5b76af542a7b4149a9797dea0e26f789?sid=4a35d86a-c73b-4179-90f6-ddfef1b85023)
|
| 25 |
+
|
| 26 |
+
---
|
| 27 |
+
|
| 28 |
+
### ✨ Key Features
|
| 29 |
+
|
| 30 |
+
* 🧊 Spoilage risk detection for common food items
|
| 31 |
+
* 🌍 Carbon footprint estimation (CO₂e per ingredient)
|
| 32 |
+
* 🍽️ AI-generated meal plans using LLMs
|
| 33 |
+
* 🔄 Smart substitutions to minimize waste
|
| 34 |
+
* 🧾 Interactive Gradio UI + MCP integration
|
| 35 |
+
|
| 36 |
+
---
|
| 37 |
+
|
| 38 |
+
### 🧠 Models and APIs Used
|
| 39 |
+
|
| 40 |
+
* **LLM:** meta-llama/Llama-3.3-70B-Instruct model via Nebius API
|
| 41 |
+
* **Carbon Footprint Data:** Taken from Su-EATABLE LIFE dataset which provides standardized carbon and water footprint values for thousands of food items based on harmonized LCA methodologies
|
| 42 |
+
* **Spoilage Model:** Custom rules-based model + JSON dataset of food ingredients curated specifically for this project.
|
| 43 |
+
* **Nebius API:** Used to run inference on the Llama model to suggest meals.
|
| 44 |
+
|
| 45 |
+
---
|
| 46 |
+
|
| 47 |
+
### 🛠️ Functionality Overview
|
| 48 |
+
|
| 49 |
+
1. Input your fridge ingredients
|
| 50 |
+
2. EcoChef estimates spoilage and carbon impact
|
| 51 |
+
3. Generates optimized recipes and substitutes
|
| 52 |
+
4. Visualizes results in a user-friendly interface
|
| 53 |
+
|
| 54 |
+
---
|
| 55 |
+
|
| 56 |
+
### 🧪 Install dependencies with:
|
| 57 |
+
|
| 58 |
+
```bash
|
| 59 |
+
pip install -r requirements.txt
|
| 60 |
+
```
|
| 61 |
+
|
| 62 |
+
Made with ❤️ using Gradio, Llama, Nebius APIs for [Gradio Agents & MCP Hackathon 2025](https://huggingface.co/Agents-MCP-Hackathon) on [Hugging Face](https://huggingface.co)🤗
|
| 63 |
+
|
| 64 |
+
Created by [tejasashinde](https://huggingface.co/tejasashinde)
|
SuEatableLife_Food_Fooprint_database.xlsx
ADDED
|
Binary file (25.3 kB). View file
|
|
|
app.py
ADDED
|
@@ -0,0 +1,327 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import gradio as gr
from mcp_server import handle_tool_call
import re
import json

# --- Load Shelf Life Data ---
# NOTE(review): shelf_life_data is loaded here but never referenced anywhere
# in this module's visible code (parser.py loads the same file for its own
# lookups) — confirm before removing.
with open("spoilage_data.json", "r") as f:
    shelf_life_data = json.load(f)
# --- Backend Logic Wrapping ---
def run_all_tools(text):
    """Run the full analysis pipeline on a free-text ingredient list.

    Parses the text, predicts per-item spoilage risk, estimates carbon
    footprint, and requests a meal plan from the planner tool.

    Args:
        text: Raw comma-separated ingredient description from the user.

    Returns:
        Tuple ``(parsed, spoilage, carbon, meal_plan)`` as produced by the
        underlying tools (shapes are whatever those tools return).
    """
    parsed = handle_tool_call("parse_ingredients", {"text": text})
    spoilage = handle_tool_call("predict_spoilage", {"items": parsed})
    carbon = handle_tool_call("estimate_carbon", {"items": parsed})

    # Attach each item's spoilage prediction; relies on predict_spoilage
    # returning results in the same order as its input items.
    items_with_risk = [
        {**item, "risk": spoilage[i]} for i, item in enumerate(parsed)
    ]

    meal_plan = handle_tool_call(
        "generate_meal_plan",
        {"items": items_with_risk, "spoilage": spoilage, "carbon": carbon, "parsed": parsed}
    )
    # Fixed: removed a leftover debug print(meal_plan) that dumped the whole
    # LLM response to stdout on every call.

    return parsed, spoilage, carbon, meal_plan
# --- Output Formatters ---

def format_items_table(meal_plan_json):
    """Render the ingredient table embedded in the meal-plan payload as HTML.

    The LLM reply is expected to contain a fenced ```json block whose payload
    is either a list of ingredient rows or a dict with an "ingredientDetails"
    list. Each row may carry "Ingredient", "Expiry / Shelf Life",
    "Spoilage Risk", and "Carbon Footprint" keys.

    Args:
        meal_plan_json: List of dicts (or a JSON string encoding one) whose
            string values may embed the fenced JSON block.

    Returns:
        An HTML table string, or a gray placeholder <div> when no ingredient
        data could be extracted.
    """
    if isinstance(meal_plan_json, str):
        try:
            meal_plan_json = json.loads(meal_plan_json)
        except json.JSONDecodeError:
            pass  # fall through; non-list input yields the placeholder below

    ingredient_data = []

    if isinstance(meal_plan_json, list):
        for block in meal_plan_json:
            if isinstance(block, dict):
                for val in block.values():
                    if isinstance(val, str):
                        # Prefer an explicit ```json fence, fall back to any fence.
                        match = re.search(r"```json\n(.*?)```", val, re.DOTALL)
                        if not match:
                            match = re.search(r"```(.*?)```", val, re.DOTALL)
                        if match:
                            try:
                                extracted_json = json.loads(match.group(1))
                                if isinstance(extracted_json, dict):
                                    ingredient_data = extracted_json.get("ingredientDetails", [])
                                elif isinstance(extracted_json, list):
                                    ingredient_data = extracted_json
                                break
                            except json.JSONDecodeError:
                                continue

    if not ingredient_data:
        return "<div style='color: gray;'>No ingredient data available in meal plan.</div>"

    html = """
    <style>
    table {
        width: 100%;
        border-collapse: collapse;
        font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
        margin-bottom: 30px;
    }
    th, td {
        border: 1px solid #ddd;
        padding: 10px;
        text-align: left;
    }
    th {
        background-color: #2a9d8f;
        color: white;
    }
    .expiry-short { color: #d62828; font-weight: bold; }
    .expiry-medium { color: #f77f00; font-weight: bold; }
    .expiry-long { color: #2a9d8f; font-weight: bold; }
    </style>
    <table>
    <thead>
    <tr>
    <th>Ingredient</th>
    <th>Expiry / Shelf Life</th>
    <th>Spoilage Risk</th>
    <th>Carbon Footprint (kg CO₂e/kg)</th>
    </tr>
    </thead>
    <tbody>
    """

    for item in ingredient_data:
        if not isinstance(item, dict):
            # Fixed: LLM occasionally emits non-dict rows; skip instead of crashing.
            continue
        # Fixed: coerce to str before .title()/.capitalize() — the LLM may emit
        # null or numeric values for these fields.
        name = str(item.get("Ingredient", "Unknown")).title()
        expiry_display = str(item.get("Expiry / Shelf Life", "N/A"))
        risk = str(item.get("Spoilage Risk", "Unknown")).capitalize()
        carbon_fp_str = str(item.get("Carbon Footprint", "N/A")).replace(" kg CO₂e/kg", "").strip()

        # Color-code by the first number found in the expiry text (days left).
        days_match = re.search(r"(\d+)", expiry_display)
        if days_match:
            days = int(days_match.group(1))
            if days <= 2:
                expiry_class = "expiry-short"
                icon = "⏰"
            elif days <= 5:
                expiry_class = "expiry-medium"
                icon = "📆"
            else:
                expiry_class = "expiry-long"
                icon = "✅"
        else:
            expiry_class = ""
            icon = "❓"

        expiry_html = f'<span class="{expiry_class}">{icon} {expiry_display}</span>'

        risk_color = {"High": "red", "Medium": "orange", "Low": "green"}.get(risk, "gray")

        try:
            carbon_value = float(carbon_fp_str)
            # One globe per 0.5 kg CO₂e/kg, capped at five.
            intensity = "🌍" * int(min(carbon_value / 0.5, 5))
            carbon_html = f"{carbon_value:.2f} {intensity}"
        except ValueError:
            carbon_html = "🌫️ N/A"

        html += f"""
        <tr>
        <td><b>{name}</b></td>
        <td>{expiry_html}</td>
        <td style="color:{risk_color}; font-weight:bold;">{risk}</td>
        <td>{carbon_html}</td>
        </tr>
        """

    html += "</tbody></table>"
    return html
def format_meal_output(meal_plan_json):
    """Render the prose sections of the meal plan as dark-mode accordions.

    Concatenates all string values from the plan blocks and extracts the
    "### (b)".."### (e)" sections the LLM was prompted to produce.

    Args:
        meal_plan_json: List of dicts (or a JSON string encoding one).

    Returns:
        HTML string with one <details> accordion per non-empty section, or a
        red error <div> when a string input is not valid JSON.
    """
    # Fixed: removed an unused local `import uuid`.
    if isinstance(meal_plan_json, str):
        try:
            meal_plan_json = json.loads(meal_plan_json)
        except json.JSONDecodeError:
            return "<div style='color: red;'>Invalid meal plan format.</div>"

    combined_text = ""
    for block in meal_plan_json:
        if not isinstance(block, dict):
            # Fixed: guard — the original crashed with AttributeError on
            # non-dict entries.
            continue
        for val in block.values():
            if isinstance(val, str):
                combined_text += "\n" + val

    sections = {
        "b": {"title": "🌍 Carbon Footprint Overview", "content": ""},
        "c": {"title": "📋 Suggested Meals", "content": ""},
        "d": {"title": "🔁 Alternative Ingredients", "content": ""},
        "e": {"title": "🧠 Reducing Waste & Emissions", "content": ""}
    }

    # Each section runs from its "### (x)" heading to the next heading or EOF.
    matches = re.findall(r"### \((b|c|d|e)\) (.+?)\n(.*?)(?=\n### \(|\Z)", combined_text, re.DOTALL)
    for code, heading, content in matches:
        if code in sections:
            sections[code]["content"] = content.strip()

    # Dark-mode accordion style
    style = """
    <style>
    .accordion {
        background-color: #121212;
        border-radius: 8px;
        margin-bottom: 10px;
        border: 1px solid #333;
        overflow: hidden;
    }
    .accordion summary {
        font-weight: bold;
        cursor: pointer;
        padding: 15px;
        font-size: 1.1rem;
        background: #1e1e1e;
        color: #fff;
    }
    .accordion p {
        padding: 15px;
        margin: 0;
        background: #1a1a1a;
        color: #ddd;
        line-height: 1.6;
    }
    </style>
    """

    html_parts = []
    for sec in ["b", "c", "d", "e"]:
        content = sections[sec]["content"]
        if not content:
            continue
        content_html = content.replace("\n", "<br>")
        html_parts.append(f"""
        <details class="accordion">
        <summary>{sections[sec]["title"]}</summary>
        <p>{content_html}</p>
        </details>
        """)

    return style + "\n".join(html_parts)
def combined_output_func(text, api_key):
    """Full pipeline behind the "Analyze Ingredients" button.

    Parses the free-text ingredient list, scores spoilage and carbon impact,
    asks the LLM for a meal plan, and renders everything as one HTML report.

    Args:
        text: Comma-separated ingredient description from the textbox.
        api_key: Nebius API key entered by the user.

    Returns:
        ``(status_markdown, html)`` tuple for the `status` and
        `combined_output` Gradio components.
    """
    # Fixed: removed dead code — an initial "⏳ Processing..." status that was
    # always overwritten, and an unreachable duplicate success path that
    # followed a return in every branch ("Continue as usual" block).
    parsed = handle_tool_call("parse_ingredients", {"text": text})
    spoilage = handle_tool_call("predict_spoilage", {"items": parsed})
    carbon = handle_tool_call("estimate_carbon", {"items": parsed})

    # Attach per-item risk; relies on predict_spoilage preserving input order.
    items_with_risk = [
        {**item, "risk": spoilage[i]} for i, item in enumerate(parsed)
    ]

    meal_plan = handle_tool_call(
        "generate_meal_plan",
        {
            "items": items_with_risk,
            "spoilage": spoilage,
            "carbon": carbon,
            "parsed": parsed,
            "api_key": api_key
        }
    )

    # meal_planner reports transport/auth failures as a sentinel list inside
    # the 'recipes' entry; surface that as an actionable error banner.
    if (
        isinstance(meal_plan, list) and
        len(meal_plan) >= 2 and
        isinstance(meal_plan[1], dict) and
        "recipes" in meal_plan[1] and
        meal_plan[1]["recipes"] == ['Error generating recipes: Connection error.']
    ):
        status_text = "❌ Connection error: Please input correct API key"
        error_html = """
        <div style="background-color: #ffdddd; color: #900; padding: 16px; margin-bottom: 20px; border-left: 6px solid #f44336; border-radius: 6px;">
        ⚠️ <strong>Connection error:</strong> Please input a correct Nebius API key and try again.
        </div>
        """
        return status_text, error_html

    status_text = "✅ Success! EcoChef has generated your meal plan and sustainability report."
    success_banner = """
    <div style="background-color: #ddffdd; color: #064b1a; padding: 16px; margin-bottom: 20px; border-left: 6px solid #4CAF50; border-radius: 6px;">
    🌱 <strong>Success!</strong> Your meal plan and carbon footprint report have been generated.
    </div>
    """

    # The planner may return the plan as a JSON string; normalise to a list.
    if isinstance(meal_plan, str):
        try:
            meal_plan = json.loads(meal_plan)
        except json.JSONDecodeError:
            meal_plan = []

    table_html = format_items_table(meal_plan)
    meal_html = format_meal_output(meal_plan)

    full_html = f"""
    {success_banner}
    {table_html}
    <hr style="margin:40px 0; border-top: 2px solid #ccc;">
    {meal_html}
    """
    return status_text, full_html
# --- Gradio UI ---
with gr.Blocks(title="EcoChef Dashboard", theme=gr.themes.Base(primary_hue="green", secondary_hue="blue")) as demo:
    gr.Markdown("# 🥗 **EcoChef — Eco-Friendly AI Meal Planner with Food Carbon Insights**")
    gr.Markdown("> _Reduce food waste. Eat smarter and Save the planet._")

    with gr.Row():
        # Left column: inputs plus attribution notes.
        with gr.Column(scale=1):
            user_input = gr.Textbox(
                lines=4,
                placeholder="e.g. 2 bananas, LOBSTER, 6 eggs, rice, lentils",
                value="Bananas 2, Lobsters 2, rice 1kg, lentils 500gm, yogurt expiring on 12th june, pasta expiring on 20th june",
                label="🧊 What's in your fridge or pantry?"
            )
            gr.Markdown("_Note: **You may add expiry date of a product as: expiring on**_")

            nebius_key_input = gr.Textbox(
                placeholder="Paste your Nebius AI API key here...",
                label="🔐 Enter Nebius AI API Key",
                type="password"
            )
            submit_btn = gr.Button("✨ Analyze Ingredients")
            # Hidden until the click handler writes a status message into it.
            status = gr.Markdown("⏳", visible=False)

            gr.Markdown(" Carbon footprint data taken from the SU-EATABLE LIFE dataset.")
            gr.Markdown(" 🤖 Created for Gradio Agents & MCP Hackathon 2025")

        # Right column: the combined HTML report (table + accordions).
        with gr.Column(scale=2):
            combined_output = gr.HTML(label="Ingredients & Meal Plan")

    # Wire the button to the backend pipeline.
    submit_btn.click(
        combined_output_func,
        inputs=[user_input, nebius_key_input],
        outputs=[status, combined_output]
    )

if __name__ == "__main__":
    # mcp_server=True additionally exposes the app's functions as MCP tools.
    demo.launch(mcp_server=True)
carbon_model.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import pandas as pd
from rapidfuzz import process, fuzz

# SU-EATABLE LIFE spreadsheet, read once at import time.
excel_path = "SuEatableLife_Food_Fooprint_database.xlsx"
sheet_name = "SEL CF for users"

df = pd.read_excel(excel_path, sheet_name=sheet_name, engine='openpyxl')

# Normalise header whitespace so the column lookups below are reliable.
df.columns = df.columns.str.strip()

# Column used for fuzzy matching (item name) and the value we report.
item_col = "Food commodity ITEM"
carbon_col = "Carbon Footprint kg CO2eq/kg or l of food ITEM"

# Keep only complete rows. `choices` and `data` below stay index-aligned
# because both are derived from this same filtered frame — estimate_carbon
# depends on that alignment.
df = df[[item_col, carbon_col]].dropna()

choices = df[item_col].str.lower().tolist()
data = df.to_dict(orient="records")
# Main function to estimate carbon footprint
def estimate_carbon(item, data_lookup=None, top_n=3, score_threshold=80):
    """Fuzzy-match an ingredient name against the SU-EATABLE LIFE table.

    Args:
        item: Dict with at least a "name" key.
        data_lookup: Optional record list to read the matched row from;
            defaults to the module-level `data`. WARNING(review): match
            indices come from `choices`, which is index-aligned with `data`
            only — mcp_server.py passes a `carbon_data` list built from a
            different sheet without the dropna/subset filtering, so indices
            can point at the wrong row there; confirm and align the callers.
        top_n: Number of fuzzy candidates to consider.
        score_threshold: Minimum token_set_ratio score (0-100) to accept.

    Returns:
        Dict with "name" and "estimated_kg_CO2e_per_kg" ("unknown" when no
        acceptable match is found).
    """
    # Fixed: default was `data_lookup=data`, evaluated at def time; `None`
    # sentinel keeps the same behaviour while making the default explicit.
    if data_lookup is None:
        data_lookup = data

    name = item.get("name", "").strip().lower()

    top_matches = process.extract(name, choices, scorer=fuzz.token_set_ratio, limit=top_n)

    for matched_name, score, match_index in top_matches:
        # Fixed: bounds guard — a caller-supplied data_lookup shorter than
        # `choices` previously raised IndexError.
        if score >= score_threshold and 0 <= match_index < len(data_lookup):
            match = data_lookup[match_index]
            return {
                "name": item["name"],
                "estimated_kg_CO2e_per_kg": match.get(carbon_col, "unknown")
            }

    return {
        "name": item["name"],
        "estimated_kg_CO2e_per_kg": "unknown"
    }
mcp_server.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# mcp_server.py
import sys
import json
import pandas as pd
from parser import parse_ingredients
from spoilage_model import predict_spoilage
from carbon_model import estimate_carbon
from meal_planner import generate_meal_plan

# Load Excel dataset for carbon model once at startup.
# NOTE(review): this reads sheet 0, while carbon_model.py builds its fuzzy
# index from the "SEL CF for users" sheet after filtering — row order here
# may not line up with that index when carbon_data is passed to
# estimate_carbon; confirm before relying on index-based lookups.
carbon_df = pd.read_excel("SuEatableLife_Food_Fooprint_database.xlsx", sheet_name=0)
carbon_data = carbon_df.to_dict(orient="records")
def handle_tool_call(tool_name, arguments):
    """Dispatch a named tool call to the matching backend function.

    Args:
        tool_name: One of "parse_ingredients", "predict_spoilage",
            "estimate_carbon", "generate_meal_plan".
        arguments: Dict of tool-specific arguments.

    Returns:
        The tool's result, or ``{"error": ...}`` for an unknown tool name.
    """
    if tool_name == "parse_ingredients":
        user_input = arguments.get("text", "")
        return parse_ingredients(user_input)

    elif tool_name == "predict_spoilage":
        items = arguments.get("items", [])
        return predict_spoilage(items)

    elif tool_name == "estimate_carbon":
        items = arguments.get("items", [])
        return [estimate_carbon(item, carbon_data) for item in items]

    elif tool_name == "generate_meal_plan":
        items = arguments.get("items", [])
        spoilage = arguments.get("spoilage", [])
        carbon = arguments.get("carbon", [])
        parsed = arguments.get("parsed", [])
        # Fixed: default was an empty *list*, which is never a valid API key;
        # an empty string fails fast and unambiguously downstream.
        api_key = arguments.get("api_key", "")
        return generate_meal_plan(items, spoilage, carbon, parsed, api_key)

    else:
        return {"error": f"Unknown tool: {tool_name}"}
def main():
    """Serve line-delimited JSON tool requests over stdin/stdout.

    Each input line is a JSON object with "tool", "input", and optional "id"
    keys; each response line echoes the id alongside "output" or "error".
    Loops until stdin reaches EOF.
    """
    while True:
        line = sys.stdin.readline()
        if not line:
            break  # EOF: client closed the pipe
        request_id = None
        try:
            request = json.loads(line)
            request_id = request.get("id")
            tool_name = request.get("tool")
            arguments = request.get("input", {})

            result = handle_tool_call(tool_name, arguments)
            response = {
                "id": request_id,
                "output": result,
            }
            print(json.dumps(response), flush=True)

        except Exception as e:
            # Broad catch is intentional at this protocol boundary: one bad
            # request must not kill the server loop.
            # Fixed: include the request id (when we parsed far enough to get
            # one) so clients can correlate errors with requests.
            error_response = {
                "id": request_id,
                "error": str(e)
            }
            print(json.dumps(error_response), flush=True)

if __name__ == "__main__":
    main()
meal_planner.py
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os
from openai import OpenAI
import json

def call_llm(prompt, api_key):
    """Ask the Nebius-hosted Llama model for an eco-conscious meal plan.

    Args:
        prompt: Fully assembled user prompt (ingredient/spoilage/carbon data).
        api_key: Nebius API key for the OpenAI-compatible endpoint.

    Returns:
        The response text (str) on success, ``None`` if the response carried
        no content, or a single-element list ``["Error generating recipes: ..."]``
        on failure. NOTE(review): the mixed return type is preserved because
        app.py's connection-error check matches that exact list shape.
    """
    try:
        client = OpenAI(
            base_url="https://api.studio.nebius.com/v1/",
            api_key=api_key
        )
        response = client.chat.completions.create(
            model="meta-llama/Llama-3.3-70B-Instruct",
            max_tokens=1024,
            temperature=0.6,
            top_p=0.9,
            extra_body={"top_k": 50},
            messages=[
                {
                    "role": "system",
                    "content": """You are an eco-conscious recipe planner. Use the provided ingredient data to minimize food waste and carbon footprint.

### Instructions:
1. Provide json values with the following keys for given data:
  • Ingredient (Qty: if specified)
  • Expiry / Shelf Life (use 'X days left' if available, else 'Unknown')
  • Spoilage Risk (high, medium, low — assume only if missing)
  • Carbon Footprint (kg CO₂e/kg — assume only if missing)
- Only assume values **if the field is null or missing**, and **only if the ingredient is a known food item**.
- When assuming missing values, refer to other similar ingredients in the data as examples for expected format and typical values.
- Fields eligible for assumption (only under above condition):
  • spoilage → risk
  • carbon → estimated_kg_CO2e_per_kg
  • parsed → days_left
- Use all other values exactly as provided without change.
2. Summary of carbon footprint based on provided ingredients in friendly tone.
3. Provide new 2-3 simple recipe ideas (bulleted or numbered) for ingredients focusing more on high risk to reduce food spoilage. Keep recipes simple, using common household ingredients and easy steps. Give everything as JSON format.
4. Present your answer in clear sections as below:
(a) Json ingredients table with all data.
(b) Carbon footprint overview and impact of ingredients present in json data.
(c) Suggested meals to reduce food spoilage: i. recipe name, ii. ingredients, iii. preparation steps
(d) Alternative ingredients to reduce carbon emissions from your fridge/pantry (if possible). Also, If possible suggest ingredient substitutions reasons (e.g. high carbon footprint).
(e) End with single sentence why and how your recomendation reduces food wastage and also helps user carbon emissions. give appropriate examples with slight pun (like might save trips to some destination)"""
                },
                {
                    "role": "user",
                    "content": prompt  # user-provided ingredient data
                }
            ]
        )

        # Fixed: read the content directly from the SDK objects instead of
        # round-tripping through response.to_json()/json.loads.
        try:
            content = response.choices[0].message.content
        except (AttributeError, IndexError) as e:
            content = None
            print("Failed to get content from LLM:", e)

        return content

    except Exception as e:
        # Sentinel list shape is load-bearing — app.py matches it verbatim.
        return [f"Error generating recipes: {str(e)}"]
# === LLM-powered meal planner ===
def generate_meal_plan(parsed_items, spoilage, carbon, parsed, api_key):
    """Build the LLM prompt from the analysis results and collect the plan.

    Args:
        parsed_items: Parsed ingredients, each dict carrying at least "name".
        spoilage: List of dicts with "name" and "risk" per ingredient.
        carbon: List of dicts with "name" and "estimated_kg_CO2e_per_kg".
        parsed: Raw parser output (quantities/expiry), embedded in the prompt.
        api_key: Nebius API key, forwarded to call_llm.

    Returns:
        ``[{"note": ...}, {"recipes": ...}]`` — the note summarises at-risk
        items; "recipes" is whatever call_llm returned (str, None, or the
        error-sentinel list). With no input items, a single-element note list.
    """
    if not parsed_items:
        return [{"note": "No ingredients found to suggest recipes."}]

    # Fixed: removed dead code — an unused first note_lines build (which also
    # listed low-risk items), unused helpers format_item/co2_label, and a
    # parsed_ingredient_info string that was built but never added to the
    # prompt. Behaviour (prompt text and return value) is unchanged.
    parsed_names = [item.get("name", "").lower() for item in parsed_items]
    spoilage_dict = {item["name"].lower(): item["risk"].lower() for item in spoilage}
    carbon_dict = {
        item["name"].lower(): item.get("estimated_kg_CO2e_per_kg", "unknown")
        for item in carbon
    }

    # Categorize by spoilage risk; low-risk items are deliberately omitted
    # from the note, matching the previous output.
    high_risk = [name for name in parsed_names if spoilage_dict.get(name) == "high"]
    medium_risk = [name for name in parsed_names if spoilage_dict.get(name) == "medium"]

    # Construct prompt with all context
    prompt = (
        "You are an eco-conscious recipe planner. Use the provided ingredient data to minimize food waste and carbon footprint.\n\n"
        "### Data for processing is as follows:\n"
        "- Spoilage info:\n"
        f"{json.dumps(spoilage, indent=2)}\n\n"
        "- Carbon footprint info:\n"
        f"{json.dumps(carbon, indent=2)}\n\n"
        "- Parsed ingredient details:\n"
        f"{json.dumps(parsed, indent=2)}\n\n"
    )

    recipes = call_llm(prompt, api_key)

    note_lines = ["Suggesting recipes based on spoilage risk and carbon footprint:"]
    for name in high_risk:
        cf = carbon_dict.get(name, "N/A")
        note_lines.append(f"- {name.title()} (Risk: High, CO₂e: {cf} kg/kg)")
    for name in medium_risk:
        cf = carbon_dict.get(name, "N/A")
        note_lines.append(f"- {name.title()} (Risk: Medium, CO₂e: {cf} kg/kg)")
    if not high_risk and not medium_risk:
        note_lines.append("- No items at spoilage risk. Showing general ideas.")

    return [
        {"note": "\n".join(note_lines)},
        {"recipes": recipes}
    ]
parser.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import dateparser
import re
from datetime import datetime
import json
import os
import inflect

# Shelf-life lookup table (ingredient name -> typical shelf life), loaded
# once at import time from the same JSON file app.py reads.
with open("spoilage_data.json", "r") as f:
    shelf_life_data = json.load(f)

# Inflect engine used to singularise ingredient names for lookups.
p = inflect.engine()
| 14 |
+
def parse_ingredients(text):
    """Parse a comma-separated ingredient string into structured records.

    Each comma-separated item may carry a quantity, a unit, and an expiry
    phrase, e.g. "2 kg rice", "milk 1L", "eggs use by 2025-06-10".

    Returns a list of dicts with keys: raw, name, quantity, unit,
    expiry_date (ISO string or None), days_left (int or None), note.
    """
    lines = [item.strip() for item in text.split(',')]
    parsed = []

    for line in lines:
        raw_line = line
        expiry = None
        expiry_phrase = None
        # Defaults when no quantity/unit can be extracted.
        quantity = "1"
        unit = ""

        # Extract expiry phrase: everything from an expiry keyword to the
        # end of the item is treated as a (free-form) date phrase.
        keywords = r"(?:expiring|expire|exp|exp dt|expiration date|use by|best before|by|from|on)"
        expiry_match = re.search(rf'\b{keywords}\b\s*(.*)', line, flags=re.IGNORECASE)

        if expiry_match:
            expiry_phrase = expiry_match.group(0)
            possible_date_str = expiry_match.group(1)
            # dateparser handles free-form dates ("tomorrow", "10 June", ...).
            maybe_date = dateparser.parse(possible_date_str)
            if maybe_date:
                expiry = maybe_date.date()
            # Strip the whole keyword+date phrase from the item so it does
            # not pollute the name/quantity parse below.
            line = line.replace(expiry_phrase, '').strip()

        # Extract quantity + unit + name.
        # First alternative: leading quantity ("2 kg rice", "2 eggs");
        # second: trailing quantity ("milk 1L"). Exactly one alternative
        # can match, so name_clean is always bound when match is truthy.
        match = re.match(r"(?:(\d+)\s*([a-zA-Z]+)?\s+([a-zA-Z ]+))|([a-zA-Z ]+)\s+(\d+)([a-zA-Z]*)", line)
        if match:
            if match.group(1):
                quantity = match.group(1)
                unit = match.group(2) or ""
                name_clean = match.group(3).strip()
            elif match.group(4):
                name_clean = match.group(4).strip()
                quantity = match.group(5)
                unit = match.group(6) or ""
        else:
            # No quantity pattern: drop any digits and what follows them.
            name_clean = re.sub(r'\d+.*', '', line).strip()

        # Calculate days left.
        if expiry:
            days_left = (expiry - datetime.today().date()).days
        else:
            # Estimate from shelf life data; try the plural form first,
            # then the singularized name.
            shelf_key = name_clean.lower()
            singular_key = p.singular_noun(shelf_key) or shelf_key
            shelf_life = shelf_life_data.get(shelf_key) or shelf_life_data.get(singular_key)
            days_left = shelf_life if isinstance(shelf_life, int) else None

        parsed.append({
            "raw": raw_line,
            "name": name_clean.lower(),
            "quantity": quantity,
            "unit": unit.lower(),
            "expiry_date": expiry.isoformat() if expiry else None,
            "days_left": days_left,
            "note": "ok"
        })

    return parsed
|
requirements.txt
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
openai
|
| 2 |
+
gradio
|
| 3 |
+
dateparser
|
| 4 |
+
inflect
|
| 5 |
+
pandas
|
| 6 |
+
rapidfuzz
|
| 7 |
+
openpyxl
|
spoilage_data.json
ADDED
|
@@ -0,0 +1,323 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"buffalo bone free meat": 3,
|
| 3 |
+
"beef bone free meat": 3,
|
| 4 |
+
"lamb bone free meat": 3,
|
| 5 |
+
"lobster": 2,
|
| 6 |
+
"beef meat with bone": 3,
|
| 7 |
+
"pork sausages": 2,
|
| 8 |
+
"pecorino": 30,
|
| 9 |
+
"lamb meat with bone": 3,
|
| 10 |
+
"grana padano": 30,
|
| 11 |
+
"prawns shrimps f": 2,
|
| 12 |
+
"turbot": 2,
|
| 13 |
+
"sole f": 2,
|
| 14 |
+
"megrim": 2,
|
| 15 |
+
"swordfish": 2,
|
| 16 |
+
"parmigiano reggiano": 30,
|
| 17 |
+
"anglerfish": 2,
|
| 18 |
+
"porbeagle": 2,
|
| 19 |
+
"shark": 2,
|
| 20 |
+
"ceddar": 30,
|
| 21 |
+
"hake": 2,
|
| 22 |
+
"asiago": 30,
|
| 23 |
+
"cheese": 30,
|
| 24 |
+
"catfish": 2,
|
| 25 |
+
"cheese semi-hard": 30,
|
| 26 |
+
"butter": 30,
|
| 27 |
+
"rhombus": 2,
|
| 28 |
+
"goat cheese": 20,
|
| 29 |
+
"coffee parchment": 180,
|
| 30 |
+
"mozzarella": 7,
|
| 31 |
+
"emmental": 30,
|
| 32 |
+
"fork beard": 2,
|
| 33 |
+
"pork ham": 10,
|
| 34 |
+
"octopus f": 2,
|
| 35 |
+
"camembert": 14,
|
| 36 |
+
"raspberry g": 3,
|
| 37 |
+
"prawns shrimps": 2,
|
| 38 |
+
"rock fish": 2,
|
| 39 |
+
"squid": 2,
|
| 40 |
+
"pomfret": 2,
|
| 41 |
+
"chocolate": 60,
|
| 42 |
+
"ling": 2,
|
| 43 |
+
"flatfish": 2,
|
| 44 |
+
"plaice": 2,
|
| 45 |
+
"diamond fish": 2,
|
| 46 |
+
"octopus": 2,
|
| 47 |
+
"pork bone free meat": 3,
|
| 48 |
+
"mascarpone": 7,
|
| 49 |
+
"krill": 2,
|
| 50 |
+
"cream": 7,
|
| 51 |
+
"sole": 2,
|
| 52 |
+
"coffee ground": 180,
|
| 53 |
+
"cuttlefish": 2,
|
| 54 |
+
"fish mixed": 2,
|
| 55 |
+
"stracchino": 14,
|
| 56 |
+
"almond covered with chocolate": 60,
|
| 57 |
+
"trout f": 2,
|
| 58 |
+
"rabbit meat with bone": 3,
|
| 59 |
+
"vanilla": 90,
|
| 60 |
+
"trout": 2,
|
| 61 |
+
"kangaroo meat": 3,
|
| 62 |
+
"duck meat bone free": 3,
|
| 63 |
+
"turkey meat bone free": 3,
|
| 64 |
+
"bacon": 7,
|
| 65 |
+
"neck": 3,
|
| 66 |
+
"eel": 2,
|
| 67 |
+
"pepper g": 7,
|
| 68 |
+
"haddock f": 2,
|
| 69 |
+
"chicken bone free meat": 3,
|
| 70 |
+
"tuna": 2,
|
| 71 |
+
"milk chocolate": 60,
|
| 72 |
+
"sea-bass": 2,
|
| 73 |
+
"buffalo milk": 7,
|
| 74 |
+
"alfonsino": 2,
|
| 75 |
+
"yogurt lactose free": 14,
|
| 76 |
+
"pork meat with bone": 3,
|
| 77 |
+
"hazelnut covered with chocolate": 60,
|
| 78 |
+
"ricotta": 7,
|
| 79 |
+
"salmon": 2,
|
| 80 |
+
"yeast dried": 180,
|
| 81 |
+
"olive oil": 180,
|
| 82 |
+
"fig juice i": 5,
|
| 83 |
+
"eggs": 21,
|
| 84 |
+
"egg pasta": 3,
|
| 85 |
+
"haddock": 2,
|
| 86 |
+
"cod": 2,
|
| 87 |
+
"cod f": 2,
|
| 88 |
+
"blueberry juice i": 5,
|
| 89 |
+
"almond paste": 90,
|
| 90 |
+
"mango juice i": 5,
|
| 91 |
+
"eggplant g": 7,
|
| 92 |
+
"peanut oil": 180,
|
| 93 |
+
"strawberry g": 3,
|
| 94 |
+
"cranberry juice i": 5,
|
| 95 |
+
"yogurt flavoured": 14,
|
| 96 |
+
"apple juice i": 5,
|
| 97 |
+
"kiwi juice i": 5,
|
| 98 |
+
"mealworms": 3,
|
| 99 |
+
"chicken meat with bone": 3,
|
| 100 |
+
"pineapple juice i": 5,
|
| 101 |
+
"green beans f": 5,
|
| 102 |
+
"pesto without garlic": 7,
|
| 103 |
+
"pesto": 7,
|
| 104 |
+
"dark chocolate": 60,
|
| 105 |
+
"hazelnut cream": 60,
|
| 106 |
+
"strawberry juice i": 5,
|
| 107 |
+
"lettuce g": 5,
|
| 108 |
+
"whiting": 2,
|
| 109 |
+
"cornflakes": 180,
|
| 110 |
+
"tuna f": 2,
|
| 111 |
+
"flathead": 2,
|
| 112 |
+
"quorne": 7,
|
| 113 |
+
"palm oil": 180,
|
| 114 |
+
"exotic fruit g": 5,
|
| 115 |
+
"exotic fruit i": 5,
|
| 116 |
+
"tomato g": 7,
|
| 117 |
+
"barnacle": 2,
|
| 118 |
+
"tofu": 7,
|
| 119 |
+
"fish stick f": 2,
|
| 120 |
+
"rice": 180,
|
| 121 |
+
"pollock stick f": 2,
|
| 122 |
+
"cod fish stick": 2,
|
| 123 |
+
"hake fish stick": 2,
|
| 124 |
+
"cakes and croissant": 3,
|
| 125 |
+
"turkey meat with bone": 3,
|
| 126 |
+
"gerkin g": 7,
|
| 127 |
+
"tuna in can": 180,
|
| 128 |
+
"duck meat with bone": 3,
|
| 129 |
+
"mushroom": 7,
|
| 130 |
+
"walnut": 90,
|
| 131 |
+
"mayonnaise": 14,
|
| 132 |
+
"buckwheat flour": 180,
|
| 133 |
+
"cocoa cakes and croissant": 3,
|
| 134 |
+
"almond": 90,
|
| 135 |
+
"peanut paste": 90,
|
| 136 |
+
"mackerel": 2,
|
| 137 |
+
"soybean oil": 180,
|
| 138 |
+
"rapeseed oil": 180,
|
| 139 |
+
"coconut i": 90,
|
| 140 |
+
"zucchini g": 7,
|
| 141 |
+
"beans f": 5,
|
| 142 |
+
"carp": 2,
|
| 143 |
+
"honey": 365,
|
| 144 |
+
"yogurt white": 14,
|
| 145 |
+
"hazelnut paste": 90,
|
| 146 |
+
"soy cream": 14,
|
| 147 |
+
"salmon f": 2,
|
| 148 |
+
"carrots canned": 180,
|
| 149 |
+
"pistachio": 90,
|
| 150 |
+
"chocolate or cream filled cookies": 60,
|
| 151 |
+
"emu bone free meat": 3,
|
| 152 |
+
"mung bean flour": 180,
|
| 153 |
+
"soy burger": 7,
|
| 154 |
+
"tomato chopped": 7,
|
| 155 |
+
"ketchup": 180,
|
| 156 |
+
"green beans in can": 180,
|
| 157 |
+
"goat milk": 7,
|
| 158 |
+
"melon g": 5,
|
| 159 |
+
"pollock": 2,
|
| 160 |
+
"simple cookies": 60,
|
| 161 |
+
"margarine": 30,
|
| 162 |
+
"cashew nut": 90,
|
| 163 |
+
"millet flour": 180,
|
| 164 |
+
"soy yogurt": 14,
|
| 165 |
+
"corn canned": 180,
|
| 166 |
+
"zucchini f": 7,
|
| 167 |
+
"plain crackers": 180,
|
| 168 |
+
"crispbread": 180,
|
| 169 |
+
"sorghum flour": 180,
|
| 170 |
+
"azuki bean flour": 180,
|
| 171 |
+
"bread frozen f": 30,
|
| 172 |
+
"maize flour": 180,
|
| 173 |
+
"cow milk": 7,
|
| 174 |
+
"tomato peeled": 7,
|
| 175 |
+
"herring f": 2,
|
| 176 |
+
"tomato arrabbiata": 7,
|
| 177 |
+
"herring": 2,
|
| 178 |
+
"pasta": 7,
|
| 179 |
+
"raspberries f": 3,
|
| 180 |
+
"pomegranate i": 5,
|
| 181 |
+
"sunflower seed": 90,
|
| 182 |
+
"peanut butter": 90,
|
| 183 |
+
"tomato basil": 7,
|
| 184 |
+
"hazelnut": 90,
|
| 185 |
+
"carrob": 7,
|
| 186 |
+
"tempe": 7,
|
| 187 |
+
"pilchard": 2,
|
| 188 |
+
"avocado i": 5,
|
| 189 |
+
"mixed fish f": 2,
|
| 190 |
+
"tomato puree": 7,
|
| 191 |
+
"lentil": 180,
|
| 192 |
+
"ice cream": 7,
|
| 193 |
+
"makerel fish stick": 2,
|
| 194 |
+
"mackerel f": 2,
|
| 195 |
+
"sunflower oil": 180,
|
| 196 |
+
"quinoa": 180,
|
| 197 |
+
"wholegrain crackers": 180,
|
| 198 |
+
"beer in glass": 180,
|
| 199 |
+
"beet sugar": 365,
|
| 200 |
+
"flavored crackers": 180,
|
| 201 |
+
"sardine": 2,
|
| 202 |
+
"pollock f": 2,
|
| 203 |
+
"bread plain": 30,
|
| 204 |
+
"sorghum": 180,
|
| 205 |
+
"sesam seed": 90,
|
| 206 |
+
"ginger": 180,
|
| 207 |
+
"wine red": 180,
|
| 208 |
+
"oat meal": 180,
|
| 209 |
+
"cranberry": 5,
|
| 210 |
+
"millet": 180,
|
| 211 |
+
"gooseberry": 5,
|
| 212 |
+
"strawberry i": 3,
|
| 213 |
+
"peanut": 90,
|
| 214 |
+
"asparagus": 5,
|
| 215 |
+
"yeast compressed": 21,
|
| 216 |
+
"anchovy": 2,
|
| 217 |
+
"banana i": 5,
|
| 218 |
+
"raisin": 180,
|
| 219 |
+
"palm nut": 90,
|
| 220 |
+
"chilly": 180,
|
| 221 |
+
"rape seed": 180,
|
| 222 |
+
"soy milk": 7,
|
| 223 |
+
"blueberry": 5,
|
| 224 |
+
"bread whole": 30,
|
| 225 |
+
"chickpea": 180,
|
| 226 |
+
"snails": 3,
|
| 227 |
+
"garlic": 180,
|
| 228 |
+
"bread multicereal": 30,
|
| 229 |
+
"zucchini": 7,
|
| 230 |
+
"beer in can": 180,
|
| 231 |
+
"oat": 180,
|
| 232 |
+
"rice milk": 7,
|
| 233 |
+
"pepper": 7,
|
| 234 |
+
"currant": 5,
|
| 235 |
+
"olives": 90,
|
| 236 |
+
"raspberry": 3,
|
| 237 |
+
"cane sugar": 365,
|
| 238 |
+
"soy meal": 180,
|
| 239 |
+
"melon": 5,
|
| 240 |
+
"strawberry": 3,
|
| 241 |
+
"date": 180,
|
| 242 |
+
"coffee drip filtered l": 180,
|
| 243 |
+
"wheat": 180,
|
| 244 |
+
"broccoli": 5,
|
| 245 |
+
"soybean": 180,
|
| 246 |
+
"espresso l": 180,
|
| 247 |
+
"pineapple i": 5,
|
| 248 |
+
"avocado": 5,
|
| 249 |
+
"vegetables": 7,
|
| 250 |
+
"beer modular can": 180,
|
| 251 |
+
"wheat plain flour": 180,
|
| 252 |
+
"watermelon": 5,
|
| 253 |
+
"green bean fresh": 5,
|
| 254 |
+
"pomelo": 5,
|
| 255 |
+
"yeast liquid": 21,
|
| 256 |
+
"kiwi": 5,
|
| 257 |
+
"mineral water": 180,
|
| 258 |
+
"pear juice": 5,
|
| 259 |
+
"coffee green": 180,
|
| 260 |
+
"cowpea": 180,
|
| 261 |
+
"barley": 180,
|
| 262 |
+
"maize": 180,
|
| 263 |
+
"fennel": 7,
|
| 264 |
+
"graham flour": 180,
|
| 265 |
+
"mixed nuts": 90,
|
| 266 |
+
"durum wheat": 180,
|
| 267 |
+
"mussels": 2,
|
| 268 |
+
"orange juice": 5,
|
| 269 |
+
"cherry": 5,
|
| 270 |
+
"tomato": 7,
|
| 271 |
+
"mandarin": 5,
|
| 272 |
+
"pea": 180,
|
| 273 |
+
"wine white": 180,
|
| 274 |
+
"chestnut": 90,
|
| 275 |
+
"bean": 180,
|
| 276 |
+
"blackberry": 3,
|
| 277 |
+
"almond milk": 7,
|
| 278 |
+
"coconut milk": 7,
|
| 279 |
+
"lettuce": 5,
|
| 280 |
+
"clementine": 5,
|
| 281 |
+
"rockmelon": 5,
|
| 282 |
+
"peach": 5,
|
| 283 |
+
"tangerin": 5,
|
| 284 |
+
"rye": 180,
|
| 285 |
+
"whole wheat flour": 180,
|
| 286 |
+
"turnip": 7,
|
| 287 |
+
"banana": 5,
|
| 288 |
+
"spinach": 5,
|
| 289 |
+
"coffee soluble powder l": 180,
|
| 290 |
+
"artichoke": 7,
|
| 291 |
+
"pomegranate": 5,
|
| 292 |
+
"cucumber g": 7,
|
| 293 |
+
"celery": 7,
|
| 294 |
+
"grapes": 5,
|
| 295 |
+
"mango": 5,
|
| 296 |
+
"cauliflower": 7,
|
| 297 |
+
"orange": 5,
|
| 298 |
+
"apricot": 5,
|
| 299 |
+
"starchy roots": 180,
|
| 300 |
+
"swede": 7,
|
| 301 |
+
"pineapple": 5,
|
| 302 |
+
"guava": 5,
|
| 303 |
+
"green bean fresh g": 5,
|
| 304 |
+
"pear": 5,
|
| 305 |
+
"sweet potato": 180,
|
| 306 |
+
"cucumber": 7,
|
| 307 |
+
"carrots f": 7,
|
| 308 |
+
"fig": 5,
|
| 309 |
+
"plum": 5,
|
| 310 |
+
"apple": 5,
|
| 311 |
+
"beetroot": 7,
|
| 312 |
+
"potato": 180,
|
| 313 |
+
"pumpkin": 7,
|
| 314 |
+
"carrot": 7,
|
| 315 |
+
"cabagge": 7,
|
| 316 |
+
"onion": 180,
|
| 317 |
+
"lemon": 7,
|
| 318 |
+
"lime": 7,
|
| 319 |
+
"gerkin": 7,
|
| 320 |
+
"eggplant": 7,
|
| 321 |
+
"radish": 7,
|
| 322 |
+
"beans in can": 180
|
| 323 |
+
}
|
spoilage_model.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime, date
|
| 2 |
+
import json
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import inflect
|
| 6 |
+
|
| 7 |
+
# Inflect engine used to singularize ingredient names before table lookups.
p = inflect.engine()
# JSON table mapping food names to a typical shelf life in days.
SHELF_LIFE_DATA_PATH = "spoilage_data.json"

# Fail fast at import time if the data file is missing.
if not os.path.exists(SHELF_LIFE_DATA_PATH):
    raise FileNotFoundError(f"Could not find {SHELF_LIFE_DATA_PATH}")

with open(SHELF_LIFE_DATA_PATH, "r") as f:
    shelf_life_data = json.load(f)

# Normalize keys so lookups are case- and whitespace-insensitive.
shelf_life_data = {k.lower().strip(): v for k, v in shelf_life_data.items()}
|
| 17 |
+
|
| 18 |
+
def parse_expiry_date(date_str):
    """Parse an expiry-date string into a datetime.date.

    Accepts ISO "YYYY-MM-DD" (e.g. "2025-06-10") or a day-plus-month
    phrase such as "10 June" or "10 Jun", which is pinned to the current
    calendar year. Surrounding whitespace is tolerated.

    Returns None when date_str is falsy or cannot be parsed.
    """
    if not date_str:
        return None
    text = date_str.strip()
    try:
        return datetime.strptime(text, "%Y-%m-%d").date()
    except ValueError:
        pass
    # Day + month-name forms; %B is the full name, %b the abbreviation.
    for fmt in ("%d %B", "%d %b"):
        try:
            dt = datetime.strptime(text, fmt)
            # Pin to the current year. replace() can raise ValueError
            # (e.g. 29 Feb in a non-leap year); treat that as unparseable
            # rather than letting it propagate.
            return dt.replace(year=date.today().year).date()
        except ValueError:
            continue
    return None
|
| 31 |
+
|
| 32 |
+
def calculate_days_left(expiry_date):
    """Return how many days remain until *expiry_date*.

    Negative values mean the date has already passed. A missing
    (falsy) expiry_date yields None.
    """
    if not expiry_date:
        return None
    remaining = expiry_date - date.today()
    return remaining.days
|
| 36 |
+
|
| 37 |
+
def classify_spoilage_risk(days_left):
    """Map a days-until-expiry count onto a risk bucket.

    None -> "unknown"; negative -> "expired"; 0-2 days -> "high";
    3-5 days -> "medium"; anything longer -> "low".
    """
    if days_left is None:
        return "unknown"
    for threshold, label in ((-1, "expired"), (2, "high"), (5, "medium")):
        if days_left <= threshold:
            return label
    return "low"
|
| 48 |
+
|
| 49 |
+
def singularize_name(name):
    """Singularize every whitespace-separated word in *name* via inflect.

    Words that are already singular (inflect returns False) are kept as-is.
    """
    words = []
    for word in name.split():
        singular = p.singular_noun(word)
        words.append(singular if singular else word)
    return " ".join(words)
|
| 51 |
+
|
| 52 |
+
def predict_spoilage(items):
    """Classify the spoilage risk of each parsed ingredient.

    items: list of dicts carrying at least "name" and optionally
    "expiry_date" (string). Returns a list of {"name", "risk"} dicts,
    where risk is "unknown"/"expired"/"high"/"medium"/"low".
    """
    report = []
    for entry in items:
        original_name = entry.get("name", "")
        base = original_name.lower().strip()
        # Drop any trailing quantity (first digit onward).
        base = re.sub(r'\d+.*', '', base).strip()
        singular = singularize_name(base)

        expiry = parse_expiry_date(entry.get("expiry_date"))
        remaining = calculate_days_left(expiry)

        # No usable expiry date: fall back to the typical shelf life table,
        # preferring the singularized key.
        if remaining is None:
            fallback = shelf_life_data.get(singular) or shelf_life_data.get(base)
            if isinstance(fallback, int):
                remaining = fallback

        report.append({
            "name": original_name,
            "risk": classify_spoilage_risk(remaining)
        })

    return report
|