muhammadharis222 commited on
Commit
d1972b2
·
verified ·
1 Parent(s): db6d236

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -72
app.py CHANGED
@@ -1,85 +1,39 @@
1
- # app.py
2
  import os
3
  import gradio as gr
4
  from google import genai
5
 
6
- # Read API key from env (set this as a Space secret on HF: GEMINI_API_KEY)
7
- api_key = os.environ.get("GEMINI_API_KEY")
8
- client = genai.Client(api_key=api_key)
9
- MODEL = os.environ.get("GEMINI_MODEL", "gemini-2.5-flash")
10
 
 
 
 
11
 
12
- def call_gemini(prompt: str) -> str:
13
- """Call Gemini synchronously and return text reply (safe fallback)."""
 
 
14
  try:
15
- response = client.models.generate_content(
16
- model=MODEL,
17
- contents=prompt,
18
- )
19
- # SDK commonly exposes textual output as .text
20
- return getattr(response, "text", str(response))
21
  except Exception as e:
22
- return f"[Error calling Gemini API: {e}]"
23
-
24
-
25
- def generate_reply(message: str, history: list) -> list:
26
- """
27
- Given a user message and current `history` (list of role/content dicts),
28
- append the user's message, call Gemini, append the assistant reply, and
29
- return the updated history (in Gradio 'messages' format).
30
- """
31
- if history is None:
32
- history = []
33
-
34
- user_message = message.strip()
35
- if not user_message:
36
- return history
37
-
38
- # Append user message (messages format)
39
- history.append({"role": "user", "content": user_message})
40
-
41
- # Option A: send only the current user message as prompt:
42
- # reply_text = call_gemini(user_message)
43
-
44
- # Option B: (commented) send the full conversation as a single prompt for more context.
45
- # Uncomment if you want multi-turn context included.
46
- # combined_prompt = "\n".join(
47
- # f"{m['role']}: {m['content']}" for m in history
48
- # )
49
- # reply_text = call_gemini(combined_prompt)
50
-
51
- # Using Option A by default:
52
- reply_text = call_gemini(user_message)
53
-
54
- # Append assistant response
55
- history.append({"role": "assistant", "content": reply_text})
56
- return history
57
 
58
 
59
  with gr.Blocks(title="Gemini Chatbot") as demo:
60
- gr.Markdown("# Gemini Chatbot (Gradio messages format)")
61
-
62
- # Use the new 'messages' type so Gradio won't warn about tuples->messages deprecation
63
- chatbot = gr.Chatbot(label="Gemini", type="messages")
64
- state = gr.State([]) # will hold the list of {"role","content"} dicts
65
-
66
- with gr.Row():
67
- txt = gr.Textbox(
68
- show_label=False,
69
- placeholder="Type your message and press Enter...",
70
- lines=1,
71
- )
72
-
73
- def user_submit(message, history):
74
- # Ensure history is a list
75
- if history is None:
76
- history = []
77
- updated = generate_reply(message, history)
78
- # Gradio expects (chatbot, state) outputs; we return the updated messages list for both
79
- return updated, updated
80
-
81
- txt.submit(fn=user_submit, inputs=[txt, state], outputs=[chatbot, state])
82
- gr.Button("Clear").click(lambda: ([], []), None, [chatbot, state], queue=False)
83
 
84
  if __name__ == "__main__":
85
- demo.launch(server_name="0.0.0.0", server_port=int(os.environ.get("PORT", 7860)))
 
 
1
  import os
2
  import gradio as gr
3
  from google import genai
4
 
5
# ---- Load API key safely ----
# Fail fast at startup when the Space secret is absent, instead of
# erroring on the first chat request.
API_KEY = os.environ.get("GEMINI_API_KEY")
if not API_KEY:
    raise ValueError("Environment variable GEMINI_API_KEY not found or empty")

# ---- Initialize Gemini client ----
# Model id is overridable through GEMINI_MODEL; defaults to a fast tier.
client = genai.Client(api_key=API_KEY)
MODEL = os.environ.get("GEMINI_MODEL", "gemini-2.5-flash")
13
 
14
+
15
def chat_fn(message, history):
    """Handle one chat turn for the Gradio UI.

    Appends the user message and the model (or error) reply to *history*
    and returns it twice — once for the Chatbot display, once for the
    State component.

    Args:
        message: Raw text from the input textbox (may be None or blank).
        history: Conversation so far as a list of {"role", "content"}
            dicts in Gradio "messages" format, or None.

    Returns:
        (history, history) tuple matching the [chatbot, state] outputs.
    """
    # Normalize up front so a None history never leaks to the UI.
    history = history or []
    # Guard against None (cleared component) as well as whitespace-only input;
    # use the stripped text consistently below, matching this emptiness check.
    text = (message or "").strip()
    if not text:
        return history, history
    try:
        # Send only the current turn; prior context is not forwarded.
        result = client.models.generate_content(model=MODEL, contents=text)
        # The SDK exposes the textual answer as `.text`; fall back to repr.
        reply = getattr(result, "text", str(result))
    except Exception as e:  # boundary handler: surface the error in-chat
        reply = f"[Error: {e}]"
    # Build a new list rather than mutating the caller's history in place.
    history = history + [
        {"role": "user", "content": text},
        {"role": "assistant", "content": reply},
    ]
    return history, history
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
 
29
 
30
with gr.Blocks(title="Gemini Chatbot") as demo:
    gr.Markdown("## Gemini Chatbot (Gradio + Hugging Face Space)")
    # "messages" type expects {"role", "content"} dicts and avoids the
    # deprecated tuple-based chat format.
    chatbot = gr.Chatbot(type="messages", label="Gemini")
    state = gr.State([])  # conversation history shared across turns
    txt = gr.Textbox(placeholder="Ask something...", show_label=False)
    # Fix: clear the textbox once the reply is rendered, so the user does
    # not have to delete the previous message by hand.
    txt.submit(chat_fn, [txt, state], [chatbot, state]).then(
        lambda: "", None, txt
    )
    # Fix: reset the input box along with the chat display and state.
    gr.Button("Clear").click(lambda: ([], [], ""), None, [chatbot, state, txt])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
37
 
38
if __name__ == "__main__":
    # Bind to all interfaces; HF Spaces injects PORT (7860 when run locally).
    port = int(os.environ.get("PORT", "7860"))
    demo.launch(server_name="0.0.0.0", server_port=port)