File size: 3,644 Bytes
75ed8a1
 
5cd09ba
75ed8a1
 
bfe315f
5cd09ba
 
 
75ed8a1
 
 
5cd09ba
75ed8a1
5cd09ba
bfe315f
75ed8a1
 
bfe315f
75ed8a1
 
 
ec583a3
75ed8a1
 
5cd09ba
75ed8a1
 
 
 
5cd09ba
75ed8a1
ee36a65
5cd09ba
ee36a65
513fbaf
75ed8a1
 
 
 
 
 
 
 
 
 
 
 
3d473c5
75ed8a1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5cd09ba
75ed8a1
 
 
 
5cd09ba
75ed8a1
 
 
 
 
 
 
 
 
 
 
 
 
5cd09ba
 
75ed8a1
 
 
 
513fbaf
5cd09ba
 
75ed8a1
 
 
 
3d473c5
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
import os
import asyncio
import logging
from dotenv import load_dotenv
import chainlit as cl

# Set up logging
logging.basicConfig(level=logging.INFO)

# Attempt to import google.generativeai (optional dependency).
# If the SDK is absent we keep running and fall back to echo mode.
try:
    import google.generativeai as genai
except ImportError:
    genai = None
    logging.warning("google.generativeai not installed or failed to import. Gemini will be unavailable.")

# Load .env (won't override Space secrets)
load_dotenv()

# Read config from the environment; MODEL_NAME has a sensible default.
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
MODEL_NAME = os.getenv("MODEL_NAME", "gemini-2.0-flash")

# Try to configure Gemini if possible.
# `model` stays None when the SDK is missing, the key is absent, or init
# fails — the chat handlers below check this and fall back to echo mode.
model = None
if genai is not None:
    if GOOGLE_API_KEY:
        try:
            genai.configure(api_key=GOOGLE_API_KEY)
            model = genai.GenerativeModel(MODEL_NAME)
            logging.info(f"Gemini model initialized: {MODEL_NAME}")
        except Exception as e:
            logging.error(f"Failed to initialize Gemini model '{MODEL_NAME}': {e}")
    else:
        # Fix: this case used to be silent — the SDK was installed but the
        # key was missing and nothing was logged at startup.
        logging.warning("GOOGLE_API_KEY is not set; Gemini will be unavailable.")
else:
    logging.error("google.generativeai is not available.")

# System prompt seeding each session's message history.
SYSTEM_PROMPT = "You are a helpful assistant."

async def _call_gemini_generate(prompt: str):
    """Dispatch the blocking Gemini generate_content call to a worker thread.

    Raises:
        RuntimeError: if the module-level model was never initialized.
    """
    if model is not None:
        return await asyncio.to_thread(model.generate_content, prompt)
    raise RuntimeError("Gemini model not initialized")

@cl.on_chat_start
async def on_chat_start():
    """Seed the session's message history and send a greeting."""
    # Every session starts with the system prompt as the first history entry.
    cl.user_session.set("message_history", [{"role": "system", "content": SYSTEM_PROMPT}])

    if model is not None:
        await cl.Message(content="Hello — Gemini bot ready. Ask me anything!").send()
        return

    # Gemini unavailable: tell the owner what to do, but keep the chat usable.
    fallback_notice = (
        "Hello — the app is running. Gemini is not available because GOOGLE_API_KEY is not set or the model failed to init.\n\n"
        "Owner action: add a `GOOGLE_API_KEY` secret in Space Settings and rebuild the Space.\n\n"
        "You can still test the chat using the echo fallback below."
    )
    await cl.Message(content=fallback_notice).send()

@cl.on_message
async def handle_message(message: cl.Message):
    """Handle one user turn: record it, query Gemini (or echo), reply.

    The assistant reply replaces the "(Thinking...)" progress message when
    possible; otherwise it is sent as a fresh message.
    """
    user_text = (message.content or "").strip()
    if not user_text:
        await cl.Message(content="Please type a question or prompt.").send()
        return

    # Store message in session history
    history = cl.user_session.get("message_history", [])
    history.append({"role": "user", "content": user_text})
    cl.user_session.set("message_history", history)

    # Send a progress message
    progress = cl.Message(content="(Thinking...)")
    await progress.send()

    if model is None:
        reply = (
            "Gemini not initialized (GOOGLE_API_KEY missing or model failed to init).\n\n"
            "Echoing your input:\n\n" + user_text + "\n\n"
            "To enable Gemini responses: open Space Settings → Secrets → add `GOOGLE_API_KEY` with your key, then rebuild the Space."
        )
    else:
        try:
            resp = await _call_gemini_generate(user_text)
            # Prefer the convenience .text accessor; fall back to repr for
            # unusual response shapes (e.g. blocked prompts with no text).
            reply = getattr(resp, "text", None) or str(resp)
        except RuntimeError as e:
            reply = f"[Runtime Error] {e}"
        except Exception as e:
            reply = f"[Error calling Gemini API] {e}"

    # Update the progress message with final reply.
    # Fix: modern Chainlit's Message.update() takes no arguments — content is
    # assigned on the object first. The old update(content=...) call raised
    # TypeError on every turn and left the "(Thinking...)" bubble dangling.
    try:
        progress.content = reply
        await progress.update()
    except TypeError:
        # Older Chainlit releases expected content as a keyword argument.
        await progress.update(content=reply)
    except Exception:
        # Last resort: deliver the reply as a new message.
        await cl.Message(content=reply).send()

    # Save assistant reply to history
    history.append({"role": "assistant", "content": reply})
    cl.user_session.set("message_history", history)

# NOTE: Chainlit apps are normally launched via `chainlit run app.py`, which
# imports this module — this guard only fires when the file is executed
# directly, and then it just logs; no server is started here.
if __name__ == "__main__":
    logging.info("Starting the application...")