# NOVATech / app.py
# (Hugging Face Space header, kept as comments — originally:
#  "AroojImtiaz's picture / Update app.py / 829ccad verified".
#  These lines were not valid Python and would break the module at import time.)
import os

import chainlit as cl
import google.generativeai as genai

# 🔐 API key: prefer the GEMINI_API_KEY environment variable.
# SECURITY(review): the fallback literal below was committed to a public repo —
# it must be treated as compromised and revoked/rotated in the Google AI console.
# The fallback is kept only so existing deployments keep working until the env
# var is configured.
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY", "AIzaSyBo1mx-ghrH3EZcj1WrLT7x4L5etHx_Zws")
genai.configure(api_key=GEMINI_API_KEY)

# 🧠 In-memory chat memory: maps message author -> {"role": str, "history": str}.
# Lives only for the process lifetime (lost on restart).
memory = {}

# 🎭 System-prompt preambles, keyed by the role names users may type.
ROLES = {
    "customer": "You are NovaTech’s polite and empathetic customer support assistant.",
    "employee": "You are NovaTech’s helpful internal assistant for staff.",
    "manager": "You are NovaTech’s insightful business assistant. Be analytical and clear."
}
# ✅ This ensures Chainlit initializes properly
# ✅ Chainlit session-start hook: greet the user and ask for their role.
@cl.on_chat_start
async def on_chat_start():
    """Send the welcome message prompting the user to pick a role."""
    greeting = (
        "👋 Welcome to **NovaTech Solutions Virtual Assistant!**\n"
        "Please tell me your role: customer, employee, or manager."
    )
    await cl.Message(content=greeting).send()
@cl.on_message
async def on_message(message: cl.Message):
    """Handle one incoming chat message.

    If the message text is a known role name ("customer", "employee",
    "manager"), record it for this author and confirm. Otherwise build a
    role-aware prompt from the stored conversation history, ask Gemini for a
    reply, append the exchange to the history, and send the reply back.
    """
    user = message.author or "guest"
    user_state = memory.get(user, {"role": "customer", "history": ""})
    text = message.content.strip().lower()

    # Role-selection command: store it and acknowledge, no model call needed.
    if text in ROLES:
        user_state["role"] = text
        memory[user] = user_state
        await cl.Message(content=f"✅ Got it! You are a **{text}**. How can I assist you today?").send()
        return

    # Build role-aware prompt from the system preamble + prior turns.
    prompt = f"""
{ROLES[user_state['role']]}
Conversation so far:
{user_state['history']}
User: {message.content}
Reply as NovaTech’s assistant with professionalism and empathy.
"""
    try:
        model = genai.GenerativeModel("gemini-1.5-flash")
        response = model.generate_content(prompt)
        reply = response.text.strip()
    except Exception as e:
        # Best-effort: surface the failure to the user instead of crashing the hook.
        reply = f"⚠️ Something went wrong: {e}"

    # Update conversation memory. FIX: the history previously grew without
    # bound per user (a slow memory leak, and an ever-larger prompt sent to
    # the model) — keep only the most recent lines.
    user_state["history"] += f"User: {message.content}\nAI: {reply}\n"
    max_history_lines = 40  # ~20 user/AI exchange pairs
    lines = user_state["history"].splitlines(keepends=True)
    if len(lines) > max_history_lines:
        user_state["history"] = "".join(lines[-max_history_lines:])
    memory[user] = user_state
    await cl.Message(content=reply).send()
# 👇 This line ensures the app initializes correctly when running in Hugging Face
# NOTE(review): Chainlit apps are normally launched with `chainlit run app.py`,
# under which this guard never executes; confirm that `cl.main()` actually
# exists in the installed chainlit version — it is not part of the documented
# public API, so running this file directly may raise AttributeError.
if __name__ == "__main__":
    cl.main()