RougeVertBleu committed on
Commit
2081b5b
·
verified ·
1 Parent(s): dd1df87

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -28
app.py CHANGED
@@ -1,38 +1,32 @@
import torch
from transformers import pipeline

# Text-generation pipeline backed by Mistral-7B-Instruct-v0.3,
# loaded in bfloat16 with automatic device placement.
# Alternative model kept for reference:
# pipe = pipeline("text-generation", "meta-llama/Meta-Llama-3-8B-Instruct", torch_dtype=torch.bfloat16, device_map="auto")
pipe = pipeline(
    "text-generation",
    "mistralai/Mistral-7B-Instruct-v0.3",
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
  import gradio as gr
 
10
 
 
 
 
11
 
 
 
 
12
 
13
# Prediction function
def predict(question, context):
    """Run the chat pipeline and return the assistant's reply text.

    Parameters:
        question: the user's question (user turn).
        context: instructions/background used as the system prompt.

    Returns:
        The generated assistant reply as a string.

    NOTE(review): with a chat-format input, the transformers text-generation
    pipeline returns the whole conversation in 'generated_text' (input turns
    plus the new assistant turn). The original code stringified that full
    list; we extract the final turn's content instead.
    """
    chat = [
        {"role": "system", "content": context},
        {"role": "user", "content": question},
    ]
    response = pipe(chat, max_new_tokens=512)
    # Last message is the newly generated assistant turn.
    answer = response[0]['generated_text'][-1]['content']
    return f"{answer}"
24
 
25
# Gradio interface: question + context textboxes in, model answer out.
question_box = gr.Textbox(label="Entrez votre question")
context_box = gr.Textbox(label="Entrez le contexte")
answer_box = gr.Textbox(label="Réponse du modèle")

iface = gr.Interface(
    fn=predict,
    inputs=[question_box, context_box],
    outputs=answer_box,
)

# Launch the interface when executed as a script.
if __name__ == "__main__":
    iface.launch()
 
 
 
 
 
 
 
 
 
import gradio as gr
import torch

# Question-Answering backend: a remote chat endpoint reached through
# the Hugging Face Inference client.
from huggingface_hub import InferenceClient

_ENDPOINT_URL = 'https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
model = InferenceClient(base_url=_ENDPOINT_URL)
7
 
def answer_question(context, question):
    """Answer *question* against *context* via the remote chat endpoint.

    The context becomes the system prompt and the question the user turn;
    returns the assistant's reply text, or a French prompt asking for both
    fields when either is blank.
    """
    # Guard: both fields must contain non-whitespace text.
    if not (context.strip() and question.strip()):
        return "Veuillez fournir un contexte et une question."

    messages = [
        {"role": "system", "content": context},
        {"role": "user", "content": question},
    ]
    response = model.chat_completion(messages, max_tokens=512)
    return response['choices'][0]['message']['content']
 
 
 
# Gradio interface: context + question in, model answer out.
with gr.Blocks() as demo:
    gr.Markdown("## Chatbot avec Contexte et Question")

    context_input = gr.Textbox(label="Contexte", placeholder="Entrez un contexte ici...")
    question_input = gr.Textbox(label="Question", placeholder="Posez une question sur le contexte...")
    output = gr.Textbox(label="Réponse", interactive=False)

    submit_button = gr.Button("Obtenir la Réponse")

    submit_button.click(answer_question, inputs=[context_input, question_input], outputs=output)

# Launch the interface only when run as a script (the previous revision of
# this file used the same guard; launching unconditionally at import time
# is a side effect importers should not trigger).
if __name__ == "__main__":
    demo.launch()