bambadij committed on
Commit
a750588
·
1 Parent(s): 1dd35ac
Files changed (1) hide show
  1. app.py +76 -82
app.py CHANGED
@@ -1,94 +1,88 @@
1
  import gradio as gr
2
  import pandas as pd
3
- from utils import summarize_dataframe, validate_columns
4
- from llm_adapter import llm_generate
5
- from planner import plan_from_llm
6
- from viz import plot_bar, plot_line, plot_scatter, plot_hist
 
 
 
 
7
 
8
def load_file(file):
    """Read an uploaded CSV and produce everything the UI needs to display it.

    Returns a 5-tuple: (DataFrame, status message, text summary, 10-row
    preview, dropdown update carrying the column names). When no file was
    uploaded, placeholder values are returned instead.
    """
    if file is None:
        return None, "Aucun fichier.", "", None, None

    frame = pd.read_csv(file.name)
    return (
        frame,
        "✅ Dataset chargé",
        summarize_dataframe(frame),
        frame.head(10),
        gr.update(choices=list(frame.columns)),
    )
16
 
17
def ask_llm(summary, question):
    """Answer a free-form question about the loaded dataset via the LLM.

    Requires both a dataset summary and a question; otherwise returns a
    French hint asking the user to provide them.
    """
    # Guard: the prompt is meaningless without both pieces of context.
    if not (summary and question):
        return "Charge un dataset et pose une question."

    prompt = f"""Voici un résumé de données suivi d'une question.
Réponds en puces (6-10) + mini conclusion actionnable.

[DATA SUMMARY]
{summary}

[QUESTION]
{question}
"""
    return llm_generate(prompt)
 
 
 
 
 
 
 
 
 
 
 
 
30
 
31
def ask_to_chart(df, summary, question, bins):
    """Ask the LLM for a chart plan, validate it, and draw the matching figure.

    Returns a (figure_or_None, status_message) pair suitable for a
    gr.Plot + gr.Markdown output pair.
    """
    if df is None:
        return None, "Charge un dataset."

    columns = list(df.columns)
    plan = plan_from_llm(summary, question, columns)

    # Extract the plan fields, defaulting chart type and aggregation.
    x = plan.get("x")
    y = plan.get("y")
    chart = plan.get("chart", "bar")
    agg = plan.get("agg", "count")

    # NOTE(review): validate_columns may return fewer entries than requested
    # — guard each index (exact semantics live in utils; confirm there).
    validated = validate_columns(df, [x, y])
    x_col = validated[0] if len(validated) > 0 else None
    y_col = validated[1] if len(validated) > 1 else None
    if x_col is None:
        return None, f"Colonne X invalide. Colonnes disponibles: {columns}"

    # Route to the plotting helper matching the planned chart type.
    if chart == "bar":
        fig = plot_bar(df, x_col, y_col, agg)
    elif chart == "line":
        if y_col is None:
            return None, "Pour une courbe, X et Y sont requis."
        fig = plot_line(df, x_col, y_col, agg)
    elif chart == "scatter":
        if y_col is None:
            return None, "Pour un scatter, X et Y sont requis."
        fig = plot_scatter(df, x_col, y_col)
    elif chart == "hist":
        fig = plot_hist(df, x_col, bins or 20)
    else:
        return None, f"Type de graphique non supporté: {chart}"

    return fig, f"OK → plan: {plan}"
58
with gr.Blocks(title="Assistant analytique LLM + Gradio") as demo:
    gr.Markdown("## De la donnée brute aux insights visuels — LLM + Gradio")

    # --- Upload section ---------------------------------------------------
    with gr.Row():
        file = gr.File(label="Uploader un CSV")
        load_btn = gr.Button("Charger")

    # Holds the loaded DataFrame between callbacks.
    df_state = gr.State(None)
    status = gr.Markdown()
    summary_box = gr.Textbox(label="Résumé (schéma + stats)", lines=12)
    preview = gr.Dataframe(label="Aperçu (10 premières lignes)", interactive=False)
    # Hidden component used only to carry the column list between callbacks.
    cols_dropdown = gr.Dropdown(choices=[], visible=False)

    # load_file already returns the outputs in the expected order, so it is
    # wired directly (the original routed it through a passthrough wrapper).
    load_btn.click(load_file, [file], [df_state, status, summary_box, preview, cols_dropdown])

    # --- Free-form Q&A ----------------------------------------------------
    gr.Markdown("---")
    gr.Markdown("### 🧠 Questions à l'IA")
    question = gr.Textbox(label="Pose une question (FR/EN)")
    answer = gr.Markdown()
    ask_btn = gr.Button("Demander à l'IA")
    ask_btn.click(ask_llm, [summary_box, question], [answer])

    # --- LLM-planned charting ---------------------------------------------
    gr.Markdown("---")
    gr.Markdown("### 📊 Ask → Chart (par LLM)")
    bins = gr.Slider(5, 100, value=20, step=1, label="Bins (hist)")
    plot = gr.Plot(label="Graphique")
    plot_msg = gr.Markdown()
    chart_btn = gr.Button("Proposer et tracer le graphique")
    chart_btn.click(ask_to_chart, [df_state, summary_box, question, bins], [plot, plot_msg])

if __name__ == "__main__":
    demo.launch()
 
 
1
  import gradio as gr
2
  import pandas as pd
3
+ import numpy as np
4
+ import random
5
+ import time
6
+ from openai import OpenAI
7
+ from dotenv import load_dotenv
8
+ import logging
9
+ import os
10
+ import requests
11
 
12
# Module-level setup: logger first, then credentials from the environment
# (a local .env file is honoured via python-dotenv).
logger = logging.getLogger(__name__)
load_dotenv()

# Gemini REST endpoint and API key; both are expected as environment variables.
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
GEMINI_URL = os.getenv("GEMINI_URL")
 
 
 
 
18
 
19
def call_grok(user_msg: str, history: list[tuple[str, str]]):
    """Send the chat history plus the new user message to the Gemini REST API.

    Args:
        user_msg: The latest user message.
        history: Prior (user, assistant) turn pairs; may be None or empty.

    Returns:
        The model's text reply, stripped of surrounding whitespace.

    Raises:
        gr.Error: when the API key is missing, the HTTP request fails, the
            API answers with a non-200 status, or the response has an
            unexpected shape — so the failure surfaces in the Gradio UI
            instead of silently returning None.
    """
    if not GEMINI_API_KEY:
        raise gr.Error("Please set the GEMINI_API_KEY environment variable.")

    SYSTEM_PROMPT = "Tu es un assistant utile, concis et amical. Réponds en français."

    # Rebuild the conversation in Gemini's "contents" format: the assistant
    # role is named "model" and every turn carries a list of text parts.
    contents = []
    for u, a in (history or []):
        if u:
            contents.append({"role": "user", "parts": [{"text": u}]})
        if a:
            contents.append({"role": "model", "parts": [{"text": a}]})
    contents.append({"role": "user", "parts": [{"text": user_msg}]})

    url = f"{GEMINI_URL}?key={GEMINI_API_KEY}"
    headers = {"Content-Type": "application/json"}
    payload = {
        # The one and only system message is passed here, not in "contents".
        "systemInstruction": {
            "role": "system",
            "parts": [{"text": SYSTEM_PROMPT}],  # a single part
        },
        "contents": contents,
    }

    try:
        # Timeout added so a hung connection cannot block the UI forever.
        resp = requests.post(url, json=payload, headers=headers, timeout=60)
    except requests.RequestException as e:
        # Bug fix: the original only logged here and then fell through to
        # `resp.status_code`, crashing with NameError on the unbound `resp`.
        logger.error(f"Error calling Gemini: {e}")
        raise gr.Error(f"Network error calling Gemini: {e}") from e

    if resp.status_code == 200:
        response = resp.json()
        logger.info(f"Response from Gemini: {response}")
        try:
            return response["candidates"][0]["content"]["parts"][0]["text"].strip()
        except (KeyError, IndexError, TypeError) as e:
            # Defensive: e.g. safety-blocked answers have no candidates/parts.
            logger.error(f"Unexpected Gemini response shape: {response}")
            raise gr.Error("Unexpected response from Gemini.") from e

    # Bug fix: the original returned None on non-200, which Gradio renders as
    # an empty chat bubble; raise so the user actually sees the failure.
    logger.error(f"Error calling Gemini: {resp.status_code} {resp.text}")
    raise gr.Error(f"Gemini API error {resp.status_code}.")
78
def chat_fn(message, history):
    """Gradio ChatInterface callback: forward the turn to the Gemini helper."""
    turns = history or []
    return call_grok(message, turns)
81
 
 
 
 
 
 
 
 
82
 
83
# Build the UI: a single chat panel backed by chat_fn.
with gr.Blocks() as demo:
    chat = gr.ChatInterface(chat_fn, title="MasterClass LLM + Gradio")

# Launch only when executed as a script, not when imported.
if __name__ == "__main__":
    demo.launch()