| | from flask import Flask, request, jsonify
|
| | from flask_cors import CORS
|
| | import os
|
| | from dotenv import load_dotenv
|
| | from transformers import pipeline
|
| | import feedparser
|
| | import json
|
| | from dateutil import parser
|
| | import re
|
| |
|
# Load environment variables (e.g. PORT) from a local .env file, if present.
load_dotenv()

# Hugging Face pipeline: English sentiment classifier returning a
# POSITIVE/NEGATIVE label plus a confidence score per input sentence.
# NOTE: loads (and on first run downloads) the model at import time,
# so server startup is slow.
sentiment_analysis = pipeline(
    "sentiment-analysis", model="siebert/sentiment-roberta-large-english"
)
# Serve the built front-end from ./dist at the site root ("" static path).
app = Flask(__name__, static_url_path="", static_folder="dist")

# Allow cross-origin requests (the front-end may be served from another origin
# during development).
CORS(app)
|
| |
|
| |
|
@app.route("/")
def index():
    """Serve the built front-end's entry page from the static folder."""
    return app.send_static_file("index.html")
|
| |
|
| |
|
@app.route("/api/news")
def get_news():
    """Return sentiment-scored entries for an RSS/Atom feed, with a file cache.

    Query params:
        feed_url: URL of the feed to fetch and score (required).

    The cache file name is derived from the feed URL. Cached predictions are
    reused unless the feed reports a newer ``updated`` timestamp than the
    cached one; otherwise every entry title is re-scored and the cache is
    rewritten.
    """
    feed_url = request.args.get("feed_url")
    if not feed_url:
        # Guard: without this, re.split(None) below raises a TypeError.
        return jsonify({"error": "No feed_url provided"}), 400

    # Flatten the URL into a flat, filesystem-safe cache file name
    # (strip the scheme, dots and slashes).
    file_name = "".join(re.split(r"https://|\.|/", feed_url))
    cache_path = f"{file_name}_cache.json"

    feed_entries = get_feed(feed_url)

    try:
        with open(cache_path) as file:
            cache = json.load(file)
    except (FileNotFoundError, json.JSONDecodeError):
        # Missing or corrupt cache: treat as empty and recompute below.
        cache = {}

    print("new date", feed_entries["last_update"])
    print("old date", cache.get("last_update", "None"))

    # cache.get avoids a KeyError when the cache file exists but is missing
    # the timestamp (the original indexed cache["last_update"] directly).
    cached_update = cache.get("last_update")
    if not cached_update or parser.parse(feed_entries["last_update"]) > parser.parse(
        cached_update
    ):
        print("Updating cache with new predictions")
        titles = [entry["title"] for entry in feed_entries["entries"]]

        predictions = [sentiment_analysis(sentence) for sentence in titles]

        # Collapse pipeline output to one signed score per title:
        # NEGATIVE label -> negative value, otherwise positive.
        predictions = [
            -prediction[0]["score"]
            if prediction[0]["label"] == "NEGATIVE"
            else prediction[0]["score"]
            for prediction in predictions
        ]

        entries_predictions = [
            {**entry, "sentiment": prediction}
            for entry, prediction in zip(feed_entries["entries"], predictions)
        ]
        output = {
            "entries": entries_predictions,
            "last_update": feed_entries["last_update"],
        }

        with open(cache_path, "w") as file:
            json.dump(output, file)

        return jsonify(output)
    else:
        print("Returning cached predictions")
        return jsonify(cache)
|
| |
|
| |
|
@app.route("/api/predict", methods=["POST"])
def predict():
    """Score sentiment for a list of sentences posted as JSON.

    Expects a body like ``{"sentences": ["...", ...]}``. Returns a JSON list
    of ``{"sentence": ..., "sentiment": signed_score}`` objects, where the
    score is negated for NEGATIVE-labelled sentences.
    """
    # The route already restricts to POST, so the original
    # `if request.method == "POST"` check was redundant and is dropped.
    # silent=True makes a malformed or absent JSON body yield None instead
    # of raising, so we can answer with our own error payload.
    data = request.get_json(silent=True)
    sentences = data.get("sentences") if data else None
    if sentences is None:
        return jsonify({"error": "No text provided"})

    predictions = [sentiment_analysis(sentence) for sentence in sentences]

    # Collapse pipeline output to one signed score per sentence:
    # NEGATIVE label -> negative value, otherwise positive.
    predictions = [
        -prediction[0]["score"]
        if prediction[0]["label"] == "NEGATIVE"
        else prediction[0]["score"]
        for prediction in predictions
    ]
    output = [
        dict(sentence=sentence, sentiment=prediction)
        for sentence, prediction in zip(sentences, predictions)
    ]

    return jsonify(output)
|
| |
|
| |
|
def get_feed(feed_url):
    """Fetch and parse the feed at *feed_url*.

    Returns a dict holding the raw entry list and the feed-level ``updated``
    timestamp (assumes every feed provides one — TODO confirm; feeds without
    it would raise a KeyError here).
    """
    parsed = feedparser.parse(feed_url)
    return {
        "entries": parsed["entries"],
        "last_update": parsed["feed"]["updated"],
    }
|
| |
|
| |
|
if __name__ == "__main__":
    # Bind on all interfaces; port comes from the environment (e.g. set by
    # the hosting platform), defaulting to 7860.
    port = int(os.environ.get("PORT", 7860))
    app.run(host="0.0.0.0", port=port)