Initial commit
Browse files- app.py +15 -0
- requirements.txt +7 -0
app.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
+
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 3 |
+
|
| 4 |
+
# Load the fine-tuned Mistral checkpoint and its matching tokenizer
# from the Hugging Face Hub (downloads on first run, then uses the cache).
model = AutoModelForCausalLM.from_pretrained("Nac31/Sacha-Mistral-0")
tokenizer = AutoTokenizer.from_pretrained("Nac31/Sacha-Mistral-0")
+
def predict(prompt):
    """Generate a text continuation for *prompt* with the loaded Mistral model.

    Args:
        prompt: Raw user text from the Gradio textbox.

    Returns:
        The decoded model output as a plain string, without special tokens.
    """
    inputs = tokenizer(prompt, return_tensors="pt")
    # Pass the attention mask along with the input ids (dropping it triggers
    # warnings and can degrade generation), and use max_new_tokens instead of
    # max_length: max_length counts the prompt itself, so a prompt of 100+
    # tokens would produce no new text at all.
    outputs = model.generate(**inputs, max_new_tokens=100)
    # skip_special_tokens keeps <s>/</s> markers out of the UI output.
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
|
| 12 |
+
|
| 13 |
+
# Wire the prediction function into a minimal Gradio text-to-text UI
# and start the local web server.
interface = gr.Interface(fn=predict, inputs="text", outputs="text")
interface.launch()
requirements.txt
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
transformers
|
| 2 |
+
torch
|
| 3 |
+
accelerate
|
| 4 |
+
datasets
|
| 5 |
+
sentencepiece
|
| 6 |
+
tokenizers
|
| 7 |
+
gradio
|