# TESTTTS / app.py
# thameem022 — Update app.py
# commit 31ce339 (verified)
import gradio as gr
from huggingface_hub import InferenceClient
# Emission factors (units unstated in source — presumably kg CO2 per km
# travelled / per meal eaten; TODO confirm against the original data source).
EMISSIONS_FACTORS = {
    "transportation": {
        "car": 2.3,
        "bus": 0.1,
        "train": 0.04,
        "plane": 0.25,
    },
    "food": {
        "meat": 6.0,
        "vegetarian": 1.5,
        "vegan": 1.0,
    }
}


def calculate_footprint(car_km, bus_km, train_km, air_km, meat_meals, vegetarian_meals, vegan_meals):
    """Estimate a carbon footprint from travel distances and meal counts.

    Returns a ``(total_kg_co2, stats)`` pair, where ``stats`` maps the
    equivalence labels "trees", "flights" and "driving100km" to rounded
    counts.

    NOTE(review): the UI collects air travel per month but everything
    else per week, so the sum mixes time bases — confirm intent.
    """
    transport = EMISSIONS_FACTORS["transportation"]
    food = EMISSIONS_FACTORS["food"]
    distances = {"car": car_km, "bus": bus_km, "train": train_km, "plane": air_km}
    meals = {"meat": meat_meals, "vegetarian": vegetarian_meals, "vegan": vegan_meals}
    total = sum(km * transport[mode] for mode, km in distances.items())
    total += sum(count * food[diet] for diet, count in meals.items())
    # Rough equivalences; divisors are presumably kg CO2 absorbed per
    # tree-year / emitted per short flight / per 100 km driven — TODO confirm.
    stats = {
        "trees": round(total / 21),
        "flights": round(total / 500),
        "driving100km": round(total / 230),
    }
    return total, stats
# Response generator
def respond(
    message,
    history: list[dict[str, str]],
    system_message,
    car_km,
    bus_km,
    train_km,
    air_km,
    meat_meals,
    vegetarian_meals,
    vegan_meals,
    max_tokens,
    temperature,
    top_p,
    hf_token_textbox,
):
    """Stream a chat reply personalized with the user's carbon footprint.

    Computes the footprint from the UI inputs, embeds it (with the
    tree/flight equivalences) into the system prompt, then streams
    completions from the hosted model, yielding the growing response
    string so the Gradio chat UI updates incrementally.
    """
    client = InferenceClient(token=hf_token_textbox, model="openai/gpt-oss-20b")
    footprint, stats = calculate_footprint(
        car_km, bus_km, train_km, air_km, meat_meals, vegetarian_meals, vegan_meals
    )
    custom_prompt = f"""
The user's estimated weekly footprint is **{footprint:.1f} kg CO2**.
That's equivalent to planting about {stats['trees']} trees 🌳 or taking {stats['flights']} short flights ✈️.
Their breakdown includes both transportation and food habits.
Your job is to give them personalized, practical, and encouraging suggestions to reduce this footprint.
{system_message}
"""
    messages = [{"role": "system", "content": custom_prompt}]
    messages.extend(history)
    messages.append({"role": "user", "content": message})
    response = ""
    # Use a distinct name for stream chunks — the original shadowed the
    # `message` parameter here, which is error-prone.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        choices = chunk.choices
        # Some stream chunks carry empty choices or a None delta; skip those
        # but still yield so the UI stays responsive.
        if choices and choices[0].delta.content:
            response += choices[0].delta.content
        yield response
# Chat UI
chatbot = gr.ChatInterface(
    fn=respond,
    chatbot=gr.Chatbot(),
    type="messages",
    # Extra controls rendered under the chat box; their values are passed
    # positionally to respond() after (message, history).
    additional_inputs=[
        gr.Textbox(value="You are Sustainable.ai, a friendly and practical climate coach.", label="System message"),
        gr.Number(value=0, label="🚘 Car Travel (km/week)"),
        gr.Number(value=0, label="🚌 Bus Travel (km/week)"),
        # Several labels below were mojibake (UTF-8 emoji decoded with the
        # wrong codec); restored to the intended characters.
        gr.Number(value=0, label="🚆 Train Travel (km/week)"),
        gr.Number(value=0, label="✈️ Air Travel (km/month)"),
        gr.Number(value=0, label="🥩 Meat Meals (per week)"),
        gr.Number(value=0, label="🥗 Vegetarian Meals (per week)"),
        gr.Number(value=0, label="🌱 Vegan Meals (per week)"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
        gr.Textbox(value="", label="🔐 Hugging Face Token (paste here)", type="password"),
    ],
)
# Launch with public link
if __name__ == "__main__":
    # share=True asks Gradio to open a temporary public tunnel URL in
    # addition to the local server.
    chatbot.launch(share=True)