julesbonnard committed on
Commit
94a1d5e
·
1 Parent(s): 1e22cda

works with gemini

Browse files
Files changed (2) hide show
  1. README.md +1 -4
  2. app.py +78 -59
README.md CHANGED
@@ -7,8 +7,5 @@ sdk: gradio
7
  sdk_version: 5.42.0
8
  app_file: app.py
9
  pinned: false
10
- hf_oauth: true
11
- hf_oauth_scopes:
12
- - inference-api
13
  ---
14
- An example chatbot using [Gradio](https://gradio.app), [`huggingface_hub`](https://huggingface.co/docs/huggingface_hub/v0.22.2/en/index), and the [Hugging Face Inference API](https://huggingface.co/docs/api-inference/index).
 
7
  sdk_version: 5.42.0
8
  app_file: app.py
9
  pinned: false
 
 
 
10
  ---
11
+ AskNews-powered news assistant built with [Gradio](https://gradio.app), [`google-genai`](https://pypi.org/project/google-genai/), and the [AskNews SDK](https://pypi.org/project/asknews/).
app.py CHANGED
@@ -9,10 +9,11 @@ from dotenv import load_dotenv
9
  load_dotenv()
10
 
11
  import gradio as gr
12
- from huggingface_hub import InferenceClient, InferenceTimeoutError
 
13
  from asknews_sdk import AskNewsSDK
14
 
15
- DEFAULT_MODEL = "openai/gpt-oss-20b"
16
 
17
 
18
  LOG_LEVEL = os.getenv("ASKNEWS_LOG_LEVEL", "INFO").upper()
@@ -35,7 +36,7 @@ def get_asknews_sdk() -> Optional[AskNewsSDK]:
35
  client_id=client_id,
36
  client_secret=client_secret,
37
  scopes=["news"]
38
- )
39
  logger.info("AskNews SDK initialised successfully.")
40
  return sdk
41
  except Exception as exc:
@@ -102,7 +103,6 @@ def respond(
102
  max_tokens: int,
103
  temperature: float,
104
  top_p: float,
105
- hf_token: gr.OAuthToken,
106
  model_name: str = DEFAULT_MODEL,
107
  use_asknews: bool = True,
108
  asknews_hours_back: int = 24*30,
@@ -110,13 +110,26 @@ def respond(
110
  asknews_domains_csv: str = "afp.com",
111
  ):
112
  """
113
- Stream chat responses from HF, enriching with AskNews context when enabled.
114
  """
115
- # Validate OAuth token for HF
116
- if hf_token is None or hf_token.token is None or hf_token.token.strip() == "":
117
- yield "Veuillez vous connecter à Hugging Face via le bouton Login dans la barre latérale."
 
 
 
 
 
 
 
 
 
 
118
  return
119
 
 
 
 
120
  # Prepare AskNews SDK if requested
121
  sdk = get_asknews_sdk() if use_asknews else None
122
  asknews_context = ""
@@ -130,7 +143,7 @@ def respond(
130
  )
131
  asknews_context = fetch_asknews_context(
132
  sdk=sdk,
133
- query=message,
134
  hours_back=asknews_hours_back,
135
  n_articles=asknews_n_articles,
136
  domains=domains,
@@ -140,40 +153,11 @@ def respond(
140
  else:
141
  logger.warning("AskNews context is empty after fetch.")
142
 
143
- # Build messages
144
- messages: List[Dict[str, str]] = []
145
  base_system = system_message.strip() if system_message else "You are a helpful assistant."
146
- messages.append({"role": "system", "content": base_system})
147
 
148
- # If we have AskNews context, inject it as an additional system guidance
149
- if asknews_context:
150
- messages.append({
151
- "role": "system",
152
- "content": (
153
- "Use the following news context when answering. If the user's query is unrelated, ignore it.\n\n"
154
- f"{asknews_context}"
155
- ),
156
- })
157
-
158
- # Include history (roles should be valid)
159
- for m in history or []:
160
- role = m.get("role")
161
- content = m.get("content", "")
162
- if role in ("user", "assistant", "system") and content is not None:
163
- messages.append({"role": role, "content": str(content)})
164
-
165
- # Current user message
166
- if message is None or str(message).strip() == "":
167
  yield "Veuillez saisir un message."
168
  return
169
- messages.append({"role": "user", "content": str(message).strip()})
170
-
171
- # Initialize HF client
172
- try:
173
- client = InferenceClient(token=hf_token.token, model=model_name)
174
- except Exception as e:
175
- yield f"Échec d'initialisation du client d'inférence HF: {e}"
176
- return
177
 
178
  response_accum = ""
179
  # Optional prefix informing about context usage (not counted by model, only displayed)
@@ -191,33 +175,69 @@ def respond(
191
  # response_accum += "[Contexte AskNews]\n" + context_display + "\n\n"
192
  # yield response_accum
193
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
194
  try:
195
- for chunk in client.chat_completion(
196
- messages=messages,
197
- max_tokens=max_tokens,
198
- stream=True,
199
- temperature=temperature,
200
- top_p=top_p,
201
- ):
202
  try:
203
- choices = getattr(chunk, "choices", [])
204
- token = ""
205
- if choices and getattr(choices[0], "delta", None) is not None:
206
- token_piece = getattr(choices[0].delta, "content", None)
207
- if token_piece:
208
- token = token_piece
 
 
 
 
 
209
  if token:
210
  response_accum += token
211
  yield response_accum
212
  except Exception:
213
  continue
214
- except InferenceTimeoutError:
215
- yield response_accum + "\n\n[Temps dépassé. Réessayez ou réduisez 'Max new tokens'.]"
216
  except Exception as e:
 
217
  if response_accum:
218
  yield response_accum + f"\n\n[Erreur: {e}]"
219
  else:
220
- yield f"Erreur de l'API d'inférence: {e}"
221
 
222
 
223
  # ---- Gradio UI ----
@@ -263,14 +283,13 @@ Style :
263
  )
264
 
265
  with gr.Blocks() as demo:
266
- gr.Markdown("# Chatbot HF avec contexte AskNews")
267
  with gr.Sidebar():
268
- gr.LoginButton()
269
  gr.Markdown(
270
- "Connectez-vous avec votre compte Hugging Face.\n\n"
271
- "Pour activer AskNews, définissez les variables d'environnement:\n"
272
  "- ASKNEWS_CLIENT_ID\n"
273
  "- ASKNEWS_CLIENT_SECRET\n\n"
 
274
  "Ajustez les paramètres pour contrôler le contexte (heures, domaines, nombre d'articles)."
275
  )
276
  chatbot.render()
 
9
  load_dotenv()
10
 
11
  import gradio as gr
12
+ from google import genai
13
+ from google.genai import types
14
  from asknews_sdk import AskNewsSDK
15
 
16
+ DEFAULT_MODEL = "gemini-2.5-pro"
17
 
18
 
19
  LOG_LEVEL = os.getenv("ASKNEWS_LOG_LEVEL", "INFO").upper()
 
36
  client_id=client_id,
37
  client_secret=client_secret,
38
  scopes=["news"]
39
+ )
40
  logger.info("AskNews SDK initialised successfully.")
41
  return sdk
42
  except Exception as exc:
 
103
  max_tokens: int,
104
  temperature: float,
105
  top_p: float,
 
106
  model_name: str = DEFAULT_MODEL,
107
  use_asknews: bool = True,
108
  asknews_hours_back: int = 24*30,
 
110
  asknews_domains_csv: str = "afp.com",
111
  ):
112
  """
113
+ Stream chat responses from Google Gemini, enriching with AskNews context when enabled.
114
  """
115
+ api_key = os.getenv("GOOGLE_API_KEY", "").strip()
116
+ if not api_key:
117
+ logger.warning("Missing Google API key.")
118
+ yield (
119
+ "Définissez GOOGLE_API_KEY dans votre environnement ou saisissez la clé API Google Gemini dans le champ dédié."
120
+ )
121
+ return
122
+
123
+ try:
124
+ genai_client = genai.Client(api_key=api_key)
125
+ except Exception as exc:
126
+ logger.exception("Failed to initialise Google GenAI client: %s", exc)
127
+ yield f"Échec d'initialisation du client Google GenAI: {exc}"
128
  return
129
 
130
+ user_message_raw = "" if message is None else str(message)
131
+ user_message = user_message_raw.strip()
132
+
133
  # Prepare AskNews SDK if requested
134
  sdk = get_asknews_sdk() if use_asknews else None
135
  asknews_context = ""
 
143
  )
144
  asknews_context = fetch_asknews_context(
145
  sdk=sdk,
146
+ query=user_message,
147
  hours_back=asknews_hours_back,
148
  n_articles=asknews_n_articles,
149
  domains=domains,
 
153
  else:
154
  logger.warning("AskNews context is empty after fetch.")
155
 
 
 
156
  base_system = system_message.strip() if system_message else "You are a helpful assistant."
 
157
 
158
+ if not user_message:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
159
  yield "Veuillez saisir un message."
160
  return
 
 
 
 
 
 
 
 
161
 
162
  response_accum = ""
163
  # Optional prefix informing about context usage (not counted by model, only displayed)
 
175
  # response_accum += "[Contexte AskNews]\n" + context_display + "\n\n"
176
  # yield response_accum
177
 
178
+ system_instruction = base_system.strip()
179
+ if asknews_context:
180
+ system_instruction += (
181
+ "\n\nUtilise le contexte AskNews suivant pour ta réponse. Si la question est sans rapport, ignore ce contexte.\n"
182
+ f"{asknews_context}"
183
+ )
184
+
185
+ conversation: List[types.Content] = []
186
+ for msg in history or []:
187
+ role = msg.get("role")
188
+ content = str(msg.get("content", "")).strip()
189
+ if not content or role not in ("user", "assistant"):
190
+ continue
191
+ if role == "user":
192
+ conversation.append(
193
+ types.Content(role="user", parts=[types.Part.from_text(text=content)])
194
+ )
195
+ else:
196
+ conversation.append(
197
+ types.Content(role="model", parts=[types.Part.from_text(text=content)])
198
+ )
199
+
200
+ conversation.append(
201
+ types.Content(role="user", parts=[types.Part.from_text(text=user_message)])
202
+ )
203
+
204
+ generation_config = types.GenerateContentConfig(
205
+ systemInstruction=system_instruction,
206
+ temperature=float(temperature),
207
+ topP=float(top_p),
208
+ maxOutputTokens=int(max_tokens),
209
+ )
210
+
211
  try:
212
+ stream = genai_client.models.generate_content_stream(
213
+ model=model_name,
214
+ contents=conversation,
215
+ config=generation_config,
216
+ )
217
+ for chunk in stream:
 
218
  try:
219
+ token = getattr(chunk, "text", None)
220
+ if not token and getattr(chunk, "candidates", None):
221
+ parts: List[str] = []
222
+ for candidate in chunk.candidates:
223
+ content = getattr(candidate, "content", None)
224
+ if content and getattr(content, "parts", None):
225
+ for part in content.parts:
226
+ piece = getattr(part, "text", None)
227
+ if piece:
228
+ parts.append(piece)
229
+ token = "".join(parts)
230
  if token:
231
  response_accum += token
232
  yield response_accum
233
  except Exception:
234
  continue
 
 
235
  except Exception as e:
236
+ logger.exception("Google GenAI generation failed: %s", e)
237
  if response_accum:
238
  yield response_accum + f"\n\n[Erreur: {e}]"
239
  else:
240
+ yield f"Erreur de génération Gemini: {e}"
241
 
242
 
243
  # ---- Gradio UI ----
 
283
  )
284
 
285
  with gr.Blocks() as demo:
286
+ gr.Markdown("# Chatbot Gemini avec contexte AskNews")
287
  with gr.Sidebar():
 
288
  gr.Markdown(
289
+ "Définissez dans votre environnement les variables d'environnement suivantes :\n"
 
290
  "- ASKNEWS_CLIENT_ID\n"
291
  "- ASKNEWS_CLIENT_SECRET\n\n"
292
+ "Configurer la clé Google Gemini via GOOGLE_API_KEY.\n\n"
293
  "Ajustez les paramètres pour contrôler le contexte (heures, domaines, nombre d'articles)."
294
  )
295
  chatbot.render()