import os

import requests
import gradio as gr
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser

# Ordered (substring, emoji) pairs; first match wins, last entry is the fallback.
_WEATHER_EMOJIS = [
    ("clear", "☀️"),
    ("cloud", "☁️"),
    ("rain", "🌧️"),
    ("snow", "❄️"),
]
_FALLBACK_EMOJI = "🌫️"


def get_weather_and_insight(city):
    """Fetch current weather for *city* and return an AI-written summary string.

    Queries the OpenWeatherMap current-weather endpoint (metric units), then
    asks a Groq-hosted LLM (via LangChain) to write a short reporter-style
    blurb from the temperature, humidity, and description.

    Args:
        city: City name as typed by the user.

    Returns:
        A formatted report string, or a human-readable error message when the
        weather lookup fails (bad city name, network error, etc.).
    """
    url = (
        "http://api.openweathermap.org/data/2.5/weather"
        f"?q={city}&appid={os.getenv('OPENWEATHER_API_KEY')}&units=metric"
    )
    try:
        # Always set a timeout: without it a stalled connection hangs the UI.
        res = requests.get(url, timeout=10)
    except requests.RequestException:
        return "Failed to retrieve weather data. Please check your connection and try again."

    # Check the status BEFORE parsing: error responses may not be valid JSON.
    if res.status_code != 200:
        return "Failed to retrieve weather data. Please check the city name and try again."
    data = res.json()

    humidity = data.get('main', {}).get('humidity')
    temp = data.get('main', {}).get('temp')
    # Default to "" (not None) so the substring checks below never TypeError.
    description = data.get('weather', [{}])[0].get('description') or ""

    # Generating a creative weather summary using LangChain LLM
    groq_api_key = os.getenv("GROQ_API_KEY")
    system = (
        f"You are a local weather reporter bringing the latest update for {city}. "
        f"Right now, it's \n🌡️ {temp}°C, degrees Celsius with about 💧 {humidity}% "
        f"percent humidity. The weather of the city can be best described as "
        f"{description} with a short fun fact."
    )
    human = "{text}"
    prompt = ChatPromptTemplate.from_messages([("system", system), ("human", human)])
    chat = ChatGroq(api_key=groq_api_key, model_name="llama3-70b-8192")
    chain = prompt | chat | StrOutputParser()
    output = chain.invoke({"text": city})

    # Pick an emoji from the description; fall back to mist if nothing matches.
    weather_emoji = next(
        (emoji for keyword, emoji in _WEATHER_EMOJIS if keyword in description),
        _FALLBACK_EMOJI,
    )

    return f"🌍 {city.upper()} Weather Insight: {output} {weather_emoji}"


# Gradio Interface
iface = gr.Interface(
    fn=get_weather_and_insight,
    inputs=gr.Textbox(label="Enter City Name", placeholder="Type city here..."),
    outputs=gr.Textbox(label="Weather Report and Insight"),
    title="WeatherAssistantApp",
    description="Enter a city name to get a detailed weather report with an AI-generated insight.",
)

# Guard the launch so importing this module (e.g. for tests) doesn't start a server.
if __name__ == "__main__":
    iface.launch()