# IoPrototype / app.py
# Author: Mauriciotuks — commit 26c8960 (verified), 874 bytes
# (Hugging Face page chrome from the raw view — "raw / history / blame /
#  contribute / delete" — converted to a comment so the file parses as Python.)
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
def main():
    """Load Llama 3.1 8B and sample a short continuation of a fixed French prompt.

    Downloads/loads the tokenizer and model from the Hugging Face Hub
    (requires gated-repo access for meta-llama models), runs one sampled
    generation, and prints the decoded text. No value is returned.
    """
    model_id = "meta-llama/Llama-3.1-8B"

    print("Chargement du tokenizer...")
    tokenizer = AutoTokenizer.from_pretrained(model_id)

    print("Chargement du modèle...")
    # float16 halves memory vs float32; device_map="auto" lets accelerate place
    # layers on the available GPU(s)/CPU automatically.
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        torch_dtype=torch.float16,
        device_map="auto",
    )

    prompt = "Bonjour, je suis une IA super intelligente appelée io,"
    # Move input tensors to wherever the (first) model shard lives.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

    print("Génération du texte...")
    outputs = model.generate(
        **inputs,
        # max_new_tokens bounds only the *generated* tokens; the original
        # max_length=100 also counted the prompt, shortening the real output.
        max_new_tokens=100,
        do_sample=True,
        temperature=0.7,
        top_p=0.9,
        num_return_sequences=1,
        # Llama defines no pad token; setting it explicitly silences the
        # "Setting pad_token_id to eos_token_id" warning from generate().
        pad_token_id=tokenizer.eos_token_id,
    )

    # outputs[0] includes the prompt tokens followed by the sampled continuation.
    text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    print("\nTexte généré :\n", text)
# Run the demo only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()