import os

import gradio as gr
from groq import Groq

# 1. Initialize the Groq client.
# The API key is read from the environment (e.g. a Hugging Face Spaces
# secret named "Mech_Veh_fault") so it is never hard-coded in the source.
api_key = os.environ.get("Mech_Veh_fault")

# Friendly hint for anyone running locally without the secret configured.
if not api_key:
    print("Warning: Mech_Veh_fault not found in environment variables.")

client = Groq(api_key=api_key)

# 2. Define the System Prompt.
# This instructs the model to act as a universal mechanical-fault expert
# and to answer in a fixed three-part structure.
SYSTEM_PROMPT = """
You are the "Failure Diagnosis Bot," an expert level Mechanical Engineer and Fault Analyzer.
Your goal is to help users diagnose mechanical system failures for ANY machine (gears, motors, pumps, engines, conveyors, hydraulics, etc.).

For every user query, follow this strict structure in your response:
1. 🔍 **Potential Causes**: List 2-3 most likely technical causes based on the symptoms.
2. 🛠️ **Recommended Fixes**: specific, actionable steps to rectify the issue.
3. 🧰 **Tools/Inspection**: Mention tools needed (e.g., multimeter, vibration analyzer, feeler gauge) or what to inspect visually.

Guidelines:
- If the user provides vague symptoms (e.g., "It's making a noise"), ASK clarifying questions first (e.g., "Is it a grinding, clicking, or humming noise?").
- Be concise and technical but easy to understand.
- Prioritize safety warnings (e.g., "Lockout/Tagout before inspection") where relevant.
"""

# 3.
# 3. Define the Chat Function
def _history_to_messages(history):
    """Convert Gradio chat history into OpenAI-style message dicts.

    Handles both history formats Gradio may pass to the chat function:
    - legacy "tuples" format: a list of (user_msg, bot_msg) pairs
    - "messages" format: a list of {"role": ..., "content": ...} dicts
      (the default in recent Gradio versions)

    Returns:
        list[dict]: messages with "role" and "content" keys, in order.
    """
    messages = []
    for item in history:
        if isinstance(item, dict):
            # Already in {"role", "content"} form (Gradio type="messages").
            messages.append({"role": item["role"], "content": item["content"]})
        else:
            user_msg, bot_msg = item
            messages.append({"role": "user", "content": user_msg})
            # bot_msg is None for a turn the bot has not answered yet.
            if bot_msg is not None:
                messages.append({"role": "assistant", "content": bot_msg})
    return messages


def respond(message, history):
    """Stream a fault diagnosis from the Groq LLM for the user's message.

    Args:
        message: The current user message (str).
        history: Prior conversation turns from Gradio, in either the
            tuple-pair or the role/content-dict format.

    Yields:
        str: the progressively accumulated assistant reply, so Gradio can
        render a streaming response. On API failure, yields a single
        error string instead of raising.
    """
    # System prompt first, then remembered context, then the new message.
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    messages.extend(_history_to_messages(history))
    messages.append({"role": "user", "content": message})

    # Call Groq API (Using Llama 3 70B for high intelligence)
    try:
        completion = client.chat.completions.create(
            model="llama-3.3-70b-versatile",
            messages=messages,
            temperature=0.5,
            max_tokens=1024,
            top_p=1,
            stream=True,
            stop=None,
        )

        # Stream the response back to Gradio.
        response_text = ""
        for chunk in completion:
            content = chunk.choices[0].delta.content
            if content:  # delta.content can be None on keep-alive chunks
                response_text += content
                yield response_text
    except Exception as e:
        # Surface the failure in the chat UI rather than crashing the app.
        yield f"Error: {str(e)}. Please check your API Key."


# 4. Build the Gradio Interface
demo = gr.ChatInterface(
    fn=respond,
    title="⚙️ Failure Diagnosis Bot",
    description="Describe your machine fault (e.g., 'Motor overheating', 'Gearbox grinding'). I will diagnose causes and suggest fixes.",
    examples=[
        ["My centrifugal pump is vibrating excessively."],
        ["The hydraulic system pressure drops suddenly when under load."],
        ["I hear a loud clicking noise from the conveyor belt rollers."],
        ["Diesel engine emits black smoke and loses power."]
    ]
)

# 5. Launch
if __name__ == "__main__":
    demo.launch()