"""Flask microservice exposing a local Hugging Face text-classification model.

POST /analyze with JSON body {"text": "..."} returns the classifier's
prediction as JSON. The model is loaded once at import time from
LOCAL_MODEL_PATH; if loading fails the service stays up but /analyze
answers 500.
"""

import logging
import os

from flask import Flask, jsonify, request
from flask_cors import CORS
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    pipeline,
)

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = Flask(__name__)
CORS(app)  # allow cross-origin calls (browser front-ends)

# Directory containing the model files; "." means the current directory.
LOCAL_MODEL_PATH = "."

logger.info("Loading model from local path: %s", LOCAL_MODEL_PATH)

# Load the model/tokenizer once at startup. A failure leaves `classifier`
# as None so the app stays importable and /analyze can report the error.
try:
    tokenizer = AutoTokenizer.from_pretrained(LOCAL_MODEL_PATH)
    model = AutoModelForSequenceClassification.from_pretrained(LOCAL_MODEL_PATH)
    classifier = pipeline("text-classification", model=model, tokenizer=tokenizer)
    logger.info("Model loaded successfully.")
except Exception:
    # Broad catch is deliberate at this startup boundary; log the full
    # traceback instead of a bare print so failures are diagnosable.
    logger.exception("Error loading model")
    classifier = None


@app.route('/analyze', methods=['POST'])
def analyze_sentiment():
    """Classify the sentiment of the request's JSON 'text' field.

    Returns:
        200 with the classifier's JSON result on success,
        400 if the body has no non-empty 'text',
        500 if the model failed to load or inference raises.
    """
    if classifier is None:
        return jsonify({"error": "Model could not be loaded. Check server logs."}), 500

    # BUG FIX: `request.json` aborts with an unhandled 415/400 (or returns
    # None) on a non-JSON body, which previously crashed `.get` with an
    # AttributeError. get_json(silent=True) lets us send our own 400.
    data = request.get_json(silent=True) or {}
    text = data.get('text', '')
    if not text:
        return jsonify({"error": "No text provided"}), 400

    try:
        result = classifier(text)
        return jsonify(result)
    except Exception as e:
        logger.exception("Error during analysis")
        return jsonify({"error": str(e)}), 500


if __name__ == '__main__':
    # PORT is conventionally injected by the hosting platform; default 8080.
    port = int(os.environ.get("PORT", 8080))
    app.run(host='0.0.0.0', port=port)