from flask import Flask, jsonify
from flask_cors import CORS
import threading
import time
import subprocess
import sys
import os
import json
# Flask application instance; the routes below serve aggregated JSON data.
app = Flask(__name__)
CORS(app) # Enable CORS so the React frontend can fetch data
def run_scrapers_periodically():
    """Background thread: run every scraper script, then sleep 60 minutes, forever.

    Each scraper is executed as a child process with the current interpreter.
    A failure in any scraper is logged and the loop continues, so the thread
    never dies while the server is up.
    """
    # One place to add/remove scrapers instead of three copy-pasted calls.
    scraper_scripts = ["aggregator.py", "ai_training_scraper.py", "upwork_scraper.py"]
    while True:
        print(f"\n[{time.strftime('%Y-%m-%d %H:%M:%S')}] Starting scheduled background aggregation...")
        try:
            for script in scraper_scripts:
                # check=True raises CalledProcessError on a non-zero exit,
                # which aborts the remaining scrapers for this cycle.
                subprocess.run([sys.executable, script], check=True)
            print(f"[{time.strftime('%Y-%m-%d %H:%M:%S')}] All scrapers synced successfully.")
        except Exception as e:
            # Broad catch is deliberate: this long-lived loop must survive
            # any scraper failure and try again next cycle.
            print(f"[{time.strftime('%Y-%m-%d %H:%M:%S')}] Aggregation error: {e}")
        # Wait for 60 minutes
        time.sleep(3600)
@app.route("/")
def health_check():
    """Liveness endpoint reporting that the service is running."""
    payload = {"status": "running", "platform": "Firstify Engine"}
    return jsonify(payload)
@app.route("/api/startups")
def get_startups():
    """Serve the aggregated startup listings from data.json.

    Returns an empty JSON list when the file is missing, unreadable, or not
    valid JSON, so the frontend always receives a well-formed response.
    """
    try:
        with open("data.json", "r", encoding="utf-8") as f:
            return jsonify(json.load(f))
    except (OSError, json.JSONDecodeError):
        # File not created yet by the scrapers, or mid-write/corrupt —
        # fall back to an empty list instead of a 500. (Narrowed from a
        # bare except, which also swallowed SystemExit/KeyboardInterrupt.)
        return jsonify([])
@app.route("/api/training")
def get_training():
    """Serve AI-training job listings from training_data.json.

    Returns an empty JSON list when the file is missing, unreadable, or not
    valid JSON, so the frontend always receives a well-formed response.
    """
    try:
        with open("training_data.json", "r", encoding="utf-8") as f:
            return jsonify(json.load(f))
    except (OSError, json.JSONDecodeError):
        # File not created yet by the scrapers, or mid-write/corrupt —
        # fall back to an empty list instead of a 500. (Narrowed from a
        # bare except, which also swallowed SystemExit/KeyboardInterrupt.)
        return jsonify([])
@app.route("/api/upwork")
def get_upwork():
    """Serve Upwork listings from upwork_data.json.

    Returns an empty JSON list when the file is missing, unreadable, or not
    valid JSON, so the frontend always receives a well-formed response.
    """
    try:
        with open("upwork_data.json", "r", encoding="utf-8") as f:
            return jsonify(json.load(f))
    except (OSError, json.JSONDecodeError):
        # File not created yet by the scrapers, or mid-write/corrupt —
        # fall back to an empty list instead of a 500. (Narrowed from a
        # bare except, which also swallowed SystemExit/KeyboardInterrupt.)
        return jsonify([])
if __name__ == "__main__":
    # Start the scraping thread
    # daemon=True: the thread must not keep the process alive after the
    # Flask server shuts down.
    threading.Thread(target=run_scrapers_periodically, daemon=True).start()
    # Start the Flask server
    # Hugging Face Spaces use port 7860 by default
    port = int(os.environ.get("PORT", 7860))
    # 0.0.0.0 so the server is reachable from outside the container.
    app.run(host="0.0.0.0", port=port)