import os

import requests
import gradio as gr
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser


def get_weather_and_insight(city):
    """Fetch current weather for *city* and return an AI-generated report string.

    Queries the OpenWeatherMap API for temperature, humidity and a textual
    description, then asks a Groq-hosted LLM (via LangChain) to compose a
    short summary. Always returns a user-facing string (error messages
    included) so the Gradio output textbox has something to display.

    Requires env vars OPENWEATHER_API_KEY and GROQ_API_KEY.
    """
    url = (
        "http://api.openweathermap.org/data/2.5/weather"
        f"?q={city}&appid={os.getenv('OPENWEATHER_API_KEY')}&units=metric"
    )
    try:
        # Timeout so a stalled API call cannot hang the Gradio worker forever.
        res = requests.get(url, timeout=10)
    except requests.RequestException:
        return "Failed to reach the weather service. Please try again later."

    # BUG FIX: check the status code BEFORE parsing the body. The original
    # called res.json() first, which can raise on a non-JSON error response
    # and never reach the friendly message below.
    if res.status_code != 200:
        return "Failed to retrieve weather data. Please check the city name and try again."
    data = res.json()

    humidity = data.get('main', {}).get('humidity')
    temp = data.get('main', {}).get('temp')
    # Default to "" so the substring tests below never see None (TypeError).
    description = data.get('weather', [{}])[0].get('description') or ""

    # Generate a creative weather summary using the LangChain LLM chain.
    groq_api_key = os.getenv("GROQ_API_KEY")
    system = (
        f"You are a weather expert reporter of {city}. You must compile a short "
        f"weather summary based on information like {temp} degree celsius is the "
        f"current temperature of the city, {humidity} percent is the humidity "
        f"percentage for the city. The weather of the city can be best described "
        f"as {description}."
    )
    human = "{text}"
    prompt = ChatPromptTemplate.from_messages([("system", system), ("human", human)])
    chat = ChatGroq(api_key=groq_api_key, model_name="llama3-70b-8192")
    chain = prompt | chat | StrOutputParser()
    output = chain.invoke({"text": city})

    # Pick an emoji for visual flair, matching on the description text.
    if "clear" in description:
        weather_emoji = "☀️"
    elif "cloud" in description:
        weather_emoji = "☁️"
    elif "rain" in description:
        weather_emoji = "🌧️"
    elif "snow" in description:
        weather_emoji = "❄️"
    else:
        weather_emoji = "🌫️"

    return (
        f"🌍 {city.upper()} Weather Insight: {output} {weather_emoji}\n"
        f"🌡️ Temp: {temp}°C, 💧 Humidity: {humidity}%."
    )


# Gradio interface: one text input (city name), one text output (report).
iface = gr.Interface(
    fn=get_weather_and_insight,
    inputs=gr.Textbox(label="Enter City Name", placeholder="Type city here..."),
    outputs=gr.Textbox(label="Weather Report and Insight"),
    title="WeatherAssistantApp",
    description="Enter a city name to get a detailed weather report with an AI-generated insight.",
)

if __name__ == "__main__":
    # Guard so importing this module does not immediately start a web server.
    iface.launch()