File size: 3,000 Bytes
5088582
 
a750588
 
 
 
 
 
 
 
5088582
a750588
 
2df1170
 
5088582
a750588
5088582
a750588
 
 
 
 
 
 
 
 
 
5088582
a750588
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5088582
a750588
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5088582
a750588
 
 
 
 
 
 
 
 
5088582
 
a750588
 
5088582
 
a750588
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
import gradio as gr
import pandas as pd
import numpy as np
import random
import time
from openai import OpenAI
from dotenv import load_dotenv
import logging
import os
import requests

logger = logging.getLogger(__name__)
load_dotenv()

# SECURITY: never hard-code API keys in source. Read the key from the
# environment (populated from a .env file by load_dotenv above). The key
# previously committed here must be considered leaked and rotated.
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY", "")
GEMINI_URL = "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent"

def call_grok(user_msg: str, history: list[tuple[str, str]]):
    """Send one chat turn to the Gemini ``generateContent`` REST API.

    Parameters
    ----------
    user_msg : str
        The new user message.
    history : list[tuple[str, str]]
        Prior (user, assistant) turns; a falsy element in a pair is skipped.

    Returns
    -------
    str
        The model's reply text, stripped of surrounding whitespace.

    Raises
    ------
    gr.Error
        If the API key is missing, the HTTP request fails, or the API
        returns a non-200 status.
    """
    if not GEMINI_API_KEY:
        raise gr.Error("Please set the GEMINI_API_KEY environment variable.")

    SYSTEM_PROMPT = "Tu es un assistant utile, concis et amical. Réponds en français."

    # Gemini expects alternating user/model turns in "contents".
    contents = []
    for u, a in (history or []):
        if u:
            contents.append({"role": "user",  "parts": [{"text": u}]})
        if a:
            contents.append({"role": "model", "parts": [{"text": a}]})
    contents.append({"role": "user", "parts": [{"text": user_msg}]})

    url = f"{GEMINI_URL}?key={GEMINI_API_KEY}"
    headers = {"Content-Type": "application/json"}
    payload = {
        # The one and only system message is passed here, not in "contents".
        "systemInstruction": {
            "role": "system",
            "parts": [{"text": SYSTEM_PROMPT}],
        },
        "contents": contents,
    }

    try:
        # Timeout so a hung connection cannot block the UI indefinitely.
        resp = requests.post(url, json=payload, headers=headers, timeout=30)
    except requests.RequestException as e:
        # Bug fix: the original logged here and then fell through to use an
        # unbound `resp` (NameError). Surface the failure to the UI instead.
        logger.error(f"Error calling Gemini: {e}")
        raise gr.Error("Network error while calling the Gemini API.") from e

    if resp.status_code == 200:
        response = resp.json()
        logger.info(f"Response from Gemini: {response}")
        return response["candidates"][0]["content"]["parts"][0]["text"].strip()

    # Bug fix: the original returned None here, which Gradio rendered as an
    # empty assistant reply; raise so the user actually sees the failure.
    logger.error(f"Error calling Gemini: {resp.status_code} {resp.text}")
    raise gr.Error(f"Gemini API error: HTTP {resp.status_code}")
def chat_fn(message, history):
    """Gradio ChatInterface callback: delegate a single turn to call_grok."""
    return call_grok(message, history or [])


# Build the Gradio UI: a single chat interface backed by chat_fn.
with gr.Blocks() as demo:
    gr.ChatInterface(chat_fn, title="MasterClass LLM + Gradio")
# Start the local web server only when run as a script, not on import.
if __name__ == "__main__":
    demo.launch()