# chatfinance.io / app.py — Flask front-end for the salmapm/llama2_salma model.
# (Removed non-Python residue scraped from the HuggingFace Space page:
#  "salmapm's picture / Update app.py / a4b419c verified".)
from flask import Flask, request, jsonify, render_template
from transformers import AutoModelForCausalLM, AutoTokenizer
app = Flask(__name__)
# Load the model and tokenizer from Hugging Face at import time; the first
# request therefore waits for the (potentially large) model download/load.
model_name = "salmapm/llama2_salma"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Text-generation helper built on the module-level model/tokenizer.
def generate_text(prompt):
    """Generate a continuation of *prompt* with the causal LM.

    Args:
        prompt: Input text to continue.

    Returns:
        The decoded output sequence (note: includes the prompt text,
        since the full generated sequence is decoded).
    """
    inputs = tokenizer(prompt, return_tensors="pt")
    # Fixes over the original call:
    # - do_sample=True: without it generation is greedy and the
    #   temperature argument is silently ignored by transformers.
    # - max_new_tokens=50: the original max_length=50 counted the prompt
    #   tokens too, so prompts near/over 50 tokens left no room to generate.
    outputs = model.generate(
        **inputs,
        max_new_tokens=50,
        num_return_sequences=1,
        do_sample=True,
        temperature=0.7,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
@app.route('/')
def home():
    """Serve the landing page (the chat UI in templates/index.html)."""
    template_name = 'index.html'
    return render_template(template_name)
@app.route('/generate', methods=['POST'])
def generate():
    """POST endpoint: generate text from a JSON body ``{"prompt": "..."}``.

    Returns:
        JSON ``{"generated_text": ...}`` on success, or a 400 JSON error
        when the prompt is missing/empty or the body is not valid JSON.
    """
    # request.json raises/returns None for non-JSON bodies, which made
    # data.get(...) crash with AttributeError. silent=True yields None
    # instead of aborting, so we can answer with a consistent JSON error.
    data = request.get_json(silent=True) or {}
    prompt = data.get('prompt', '')
    if not prompt:
        # Error message (French): "Please enter some text."
        return jsonify({"error": "Veuillez entrer du texte."}), 400
    return jsonify({"generated_text": generate_text(prompt)})
if __name__ == '__main__':
    # SECURITY NOTE(review): debug=True enables the Werkzeug interactive
    # debugger (arbitrary code execution via the browser) and auto-reload;
    # it must not be left on when this app is deployed publicly.
    app.run(debug=True)