# Solar System Calculator — Streamlit app that sizes a solar installation
# from appliance counts and answers questions via the Groq LLM API.
import os
import streamlit as st
import numpy as np
from groq import Groq

# Initialize Groq client.
# SECURITY: the original hard-coded a live API key in source. A committed key
# is leaked and must be rotated; read it from the environment instead so the
# secret never lives in the repository.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
# Function to calculate power requirements
def calculate_solar_requirements(appliances, *, backup_hours=6,
                                 system_voltage=24, panel_watts=585):
    """Size a solar system for the given appliance load.

    Args:
        appliances: Mapping of appliance name -> {"count": int,
            "wattage": int (watts per unit)}. May be empty, in which
            case all results are zero.
        backup_hours: Hours of battery backup to provision (default 6).
        system_voltage: Battery bank voltage in volts (default 24).
        panel_watts: Rated wattage of one solar panel (default 585).

    Returns:
        Tuple of (total_load_kw, battery_ah, solar_panels) where
        total_load_kw is the combined load in kW, battery_ah is the
        battery capacity in amp-hours at ``system_voltage``, and
        solar_panels is the panel count (ceiling) to cover the load.
    """
    # Sum watt totals per appliance type, then convert once to kW.
    total_load_kw = sum(
        details["count"] * details["wattage"]
        for details in appliances.values()
    ) / 1000
    # Energy for the backup window (Wh) divided by bank voltage gives Ah.
    battery_ah = (total_load_kw * backup_hours * 1000) / system_voltage
    # Round up: a fractional panel is not purchasable.
    solar_panels = np.ceil(total_load_kw * 1000 / panel_watts)
    return total_load_kw, battery_ah, solar_panels
# --- Streamlit UI ---
st.title("Solar System Calculator")

# Input Section: one numeric counter per supported appliance type.
st.header("Input Appliance Details")

# Default per-unit wattage for each supported appliance.
appliance_list = {
    "Fan": 75,
    "Light": 20,
    "Water Pump": 500,
    "Air Conditioner": 1500,
    "LCD": 100,
    "Computer": 200,
}

# Collect only appliances the user actually owns (count > 0).
appliances = {}
for name, watts in appliance_list.items():
    qty = st.number_input(f"Number of {name}s", min_value=0, value=0)
    if qty > 0:
        appliances[name] = {"count": qty, "wattage": watts}
# Calculate Button: run the sizing math and display the results.
if st.button("Calculate"):
    if not appliances:
        st.warning("Please enter at least one appliance.")
    else:
        load_kw, batt_ah, panels = calculate_solar_requirements(appliances)
        st.subheader("Results")
        st.write(f"Total Load: {load_kw:.2f} kW")
        st.write(f"Required Battery Capacity: {batt_ah:.0f} Ah (24V)")
        st.write(f"Number of Solar Panels (585W): {panels:.0f}")
# Interaction with Groq: free-form solar Q&A backed by an LLM.
st.header("AI-Powered Help")
user_query = st.text_input("Ask the AI about Solar Systems:")
if st.button("Get AI Response"):
    if not user_query.strip():
        st.warning("Please enter a query.")
    else:
        # Single-turn chat completion; the user's text is the only message.
        completion = client.chat.completions.create(
            messages=[{"role": "user", "content": user_query}],
            model="llama3-8b-8192",
        )
        st.write("AI Response:")
        st.write(completion.choices[0].message.content)