# dhruv575
# CORS
# 6672d0b
# NOTE(review): the three lines above look like repository-page scrape residue
# (author / commit message / commit hash); as bare names they would raise
# NameError at import time, so they are preserved here as comments.
"""
PolyCorr Backend Server
Flask API for handling Polymarket correlation analysis
"""
from flask import Flask, request, jsonify
from flask_cors import CORS
from dotenv import load_dotenv
import os
import sys
from polymarket_api import PolymarketAPI
from time_utils import TimeConverter
from llm_service import LLMService
from data_analysis import DataAnalyzer
# Load environment variables
load_dotenv()
app = Flask(__name__)

# Configure CORS to allow requests from frontend
# Allow production (Vercel), preview deployments, and local development
# NOTE(review): flask-cors treats origin strings as regex patterns, so the
# r"https://.*\.vercel\.app" entry is presumably intended to match every
# Vercel preview deployment -- confirm against flask-cors docs.
CORS(app,
     origins=[
         "https://polycorr.vercel.app",
         r"https://.*\.vercel\.app",  # Allow all Vercel preview deployments
         "http://localhost:3000",
         "http://localhost:5173",  # Vite default port
         "http://127.0.0.1:3000",
         "http://127.0.0.1:5173"
     ],
     methods=["GET", "POST", "OPTIONS"],
     allow_headers=["Content-Type", "Authorization"],
     supports_credentials=True)

# Initialize API clients (module-level singletons shared by every request)
polymarket_api = PolymarketAPI()
time_converter = TimeConverter()
data_analyzer = DataAnalyzer()

# LLM service will be initialized on first use to avoid startup errors
# (see get_llm_service below); None means "not yet created".
_llm_service = None
def get_llm_service():
    """Get or create the shared LLM service instance (lazy initialization).

    Constructing the service on first use means a missing or invalid
    OPENAI_API_KEY fails the relevant request rather than server startup.

    Returns:
        LLMService: the cached singleton instance.

    Raises:
        RuntimeError: if the service cannot be initialized; the original
            error is chained as ``__cause__`` for debugging.
    """
    global _llm_service
    if _llm_service is None:
        try:
            _llm_service = LLMService()
        except Exception as e:
            # RuntimeError (instead of bare Exception) is still caught by the
            # endpoints' `except Exception` handlers, and `from e` preserves
            # the root cause instead of flattening it into a string only.
            raise RuntimeError(
                f"Failed to initialize LLM service: {str(e)}. "
                "Make sure OPENAI_API_KEY is set correctly."
            ) from e
    return _llm_service
@app.route('/', methods=['GET'])
def root():
    """Root endpoint - API information"""
    endpoint_map = {
        "health": "/health",
        "market_info": "/api/market-info",
        "analyze_full": "/api/analyze-full",
        "add_asset": "/api/add-asset",
        "fetch_asset_prices": "/api/fetch-asset-prices",
        "analyze_correlations": "/api/analyze-correlations",
        "price_history": "/api/price-history",
        "time_conversion": "/api/time/est-to-unix, /api/time/unix-to-est"
    }
    payload = {
        "name": "PolyCorr Backend API",
        "status": "running",
        "version": "1.0.0",
        "endpoints": endpoint_map,
        "documentation": "See README.md for API documentation"
    }
    return jsonify(payload), 200
@app.route('/health', methods=['GET'])
def health_check():
    """Health check endpoint; always reports the server as alive."""
    status_body = {"status": "ok"}
    return jsonify(status_body), 200
@app.route('/api/market-info', methods=['POST'])
def get_market_info():
    """
    Get market information from a Polymarket URL

    Request body:
    {
        "url": "https://polymarket.com/event/..."
    }

    Response:
    {
        "id": "...",
        "question": "...",
        "conditionId": "...",
        "clobTokenIds": [...],
        "firstTokenId": "..."
    }
    """
    try:
        # silent=True: a missing or malformed JSON body yields None instead of
        # raising, so the client gets a clean 400 rather than a 500 from
        # `None.get(...)`.
        data = request.get_json(silent=True) or {}
        url = data.get('url')
        if not url:
            return jsonify({"error": "URL is required"}), 400

        # Look up the market, then extract the first CLOB token id from it.
        market_info = polymarket_api.get_market_info(url)
        first_token_id = polymarket_api.get_first_token_id(market_info)

        response = {
            "id": market_info['id'],
            "question": market_info['question'],
            "conditionId": market_info['conditionId'],
            "clobTokenIds": market_info['clobTokenIds'],
            "firstTokenId": first_token_id
        }
        return jsonify(response), 200
    except ValueError as e:
        # Validation errors from the Polymarket client map to client errors.
        return jsonify({"error": str(e)}), 400
    except Exception as e:
        return jsonify({"error": f"Internal server error: {str(e)}"}), 500
@app.route('/api/time/est-to-unix', methods=['POST'])
def est_to_unix():
    """
    Convert EST datetime to Unix timestamp

    Request body:
    {
        "datetime": "YYYY-MM-DD HH:MM:SS" or "MM/DD/YYYY HH:MM:SS"
    }

    Response:
    {
        "unix_timestamp": 1234567890,
        "est_datetime": "YYYY-MM-DD HH:MM:SS EST"
    }
    """
    try:
        # silent=True: malformed/missing JSON becomes None (-> 400), not a 500.
        data = request.get_json(silent=True) or {}
        datetime_str = data.get('datetime')
        if not datetime_str:
            return jsonify({"error": "datetime is required"}), 400

        unix_timestamp = time_converter.est_to_unix(datetime_str)
        # Round-trip back to EST so the client can confirm the parse.
        est_str = time_converter.unix_to_est(unix_timestamp)

        return jsonify({
            "unix_timestamp": unix_timestamp,
            "est_datetime": est_str
        }), 200
    except ValueError as e:
        # Unparseable datetime strings are client errors.
        return jsonify({"error": str(e)}), 400
    except Exception as e:
        return jsonify({"error": f"Internal server error: {str(e)}"}), 500
@app.route('/api/time/unix-to-est', methods=['POST'])
def unix_to_est():
    """
    Convert Unix timestamp to EST datetime

    Request body:
    {
        "timestamp": 1234567890
    }

    Response:
    {
        "unix_timestamp": 1234567890,
        "est_datetime": "YYYY-MM-DD HH:MM:SS EST"
    }
    """
    try:
        # silent=True: malformed/missing JSON becomes None (-> 400), not a 500.
        data = request.get_json(silent=True) or {}
        timestamp = data.get('timestamp')
        # 0 is a valid Unix timestamp, so check for None explicitly.
        if timestamp is None:
            return jsonify({"error": "timestamp is required"}), 400

        est_str = time_converter.unix_to_est(int(timestamp))

        return jsonify({
            "unix_timestamp": timestamp,
            "est_datetime": est_str
        }), 200
    except (TypeError, ValueError) as e:
        # TypeError covers non-numeric JSON values (e.g. a list) passed as
        # "timestamp"; both cases are client errors, not server failures.
        return jsonify({"error": str(e)}), 400
    except Exception as e:
        return jsonify({"error": f"Internal server error: {str(e)}"}), 500
@app.route('/api/price-history', methods=['POST'])
def get_price_history():
    """
    Get price history for a Polymarket token

    Request body:
    {
        "token_id": "52607315900507156846622820770453728082833251091510131025984187712529448877245",
        "start_time": "2025-11-15 10:00:00",  // EST datetime
        "end_time": "2025-11-15 18:00:00",    // EST datetime
        "fidelity": 1,                        // minutes (optional, default: 1)
        "granularity": 1                      // minutes (optional, default: 1)
    }

    Response:
    {
        "token_id": "...",
        "start_unix": 1234567890,
        "end_unix": 1234567900,
        "fidelity": 1,
        "raw_history": [...],   // sparse data from API
        "timeseries": [         // reconstructed complete timeseries
            {
                "timestamp": 1234567890,
                "price": 0.26
            },
            ...
        ]
    }
    """
    try:
        # silent=True: malformed/missing JSON becomes None (-> 400), not a 500.
        data = request.get_json(silent=True) or {}
        token_id = data.get('token_id')
        start_time_str = data.get('start_time')
        end_time_str = data.get('end_time')
        fidelity = data.get('fidelity', 1)
        granularity = data.get('granularity', 1)

        # Validate required fields
        if not token_id:
            return jsonify({"error": "token_id is required"}), 400
        if not start_time_str:
            return jsonify({"error": "start_time is required"}), 400
        if not end_time_str:
            return jsonify({"error": "end_time is required"}), 400

        # Convert times to Unix and reject inverted/invalid ranges up front.
        start_unix = time_converter.est_to_unix(start_time_str)
        end_unix = time_converter.est_to_unix(end_time_str)
        time_converter.validate_time_range(start_unix, end_unix)

        # Get price history from CLOB API (sparse: only points with trades).
        raw_history = polymarket_api.get_price_history(
            token_id,
            start_unix,
            end_unix,
            fidelity
        )

        # Reconstruct a complete, evenly-spaced timeseries from sparse data.
        timeseries = polymarket_api.reconstruct_timeseries(
            raw_history,
            start_unix,
            end_unix,
            granularity
        )

        response = {
            "token_id": token_id,
            "start_unix": start_unix,
            "end_unix": end_unix,
            "start_time": time_converter.unix_to_est(start_unix),
            "end_time": time_converter.unix_to_est(end_unix),
            "fidelity": fidelity,
            "granularity": granularity,
            "raw_history_count": len(raw_history),
            "timeseries_count": len(timeseries),
            "raw_history": raw_history,
            "timeseries": timeseries
        }
        return jsonify(response), 200
    except ValueError as e:
        return jsonify({"error": str(e)}), 400
    except Exception as e:
        return jsonify({"error": f"Internal server error: {str(e)}"}), 500
@app.route('/api/analyze-correlations', methods=['POST'])
def analyze_correlations():
    """
    Get correlated and inversely correlated assets for a market question

    Request body:
    {
        "question": "Will the Supreme Court rule in favor of Trump's tariffs?"
    }

    Response:
    {
        "question": "...",
        "correlated": [
            {"ticker": "AAPL", "reason": "..."},
            ...
        ],
        "inversely_correlated": [
            {"ticker": "GLD", "reason": "..."},
            ...
        ]
    }
    """
    try:
        # silent=True: malformed/missing JSON becomes None (-> 400), not a 500.
        data = request.get_json(silent=True) or {}
        question = data.get('question')
        if not question:
            return jsonify({"error": "question is required"}), 400

        # Ask the (lazily created) LLM service for candidate assets.
        llm_service = get_llm_service()
        result = llm_service.get_correlated_assets(question)

        response = {
            "question": question,
            "correlated": result['correlated'],
            "inversely_correlated": result['inversely_correlated']
        }
        return jsonify(response), 200
    except ValueError as e:
        return jsonify({"error": str(e)}), 400
    except Exception as e:
        return jsonify({"error": f"Internal server error: {str(e)}"}), 500
@app.route('/api/fetch-asset-prices', methods=['POST'])
def fetch_asset_prices():
    """
    Fetch asset prices from Yahoo Finance

    Request body:
    {
        "ticker": "AAPL",
        "start_time": "2025-11-15 10:00:00",  // EST datetime
        "end_time": "2025-11-15 18:00:00",    // EST datetime
        "interval": "1h"                      // optional: 1m, 5m, 15m, 30m, 1h, 1d
    }

    Response:
    {
        "ticker": "AAPL",
        "data_points": 150,
        "timeseries": [
            {"timestamp": 1234567890, "price": 150.25},
            ...
        ]
    }
    """
    try:
        # silent=True: malformed/missing JSON becomes None (-> 400), not a 500.
        data = request.get_json(silent=True) or {}
        ticker = data.get('ticker')
        start_time_str = data.get('start_time')
        end_time_str = data.get('end_time')
        interval = data.get('interval', '1h')

        if not ticker:
            return jsonify({"error": "ticker is required"}), 400
        if not start_time_str:
            return jsonify({"error": "start_time is required"}), 400
        if not end_time_str:
            return jsonify({"error": "end_time is required"}), 400

        # Convert times to Unix and validate the range (consistent with
        # /api/price-history and /api/add-asset).
        start_unix = time_converter.est_to_unix(start_time_str)
        end_unix = time_converter.est_to_unix(end_time_str)
        time_converter.validate_time_range(start_unix, end_unix)

        # Fetch asset prices over the requested window.
        timeseries = data_analyzer.fetch_asset_prices(
            ticker,
            start_unix,
            end_unix,
            interval
        )

        response = {
            "ticker": ticker,
            "start_time": start_time_str,
            "end_time": end_time_str,
            "interval": interval,
            "data_points": len(timeseries),
            "timeseries": timeseries
        }
        return jsonify(response), 200
    except ValueError as e:
        return jsonify({"error": str(e)}), 400
    except Exception as e:
        return jsonify({"error": f"Internal server error: {str(e)}"}), 500
@app.route('/api/analyze-full', methods=['POST'])
def analyze_full():
    """
    Complete analysis: Get Polymarket data, LLM correlations, and calculate actual correlations

    Request body:
    {
        "url": "https://polymarket.com/event/...",
        "start_time": "2025-11-15 10:00:00",
        "end_time": "2025-11-15 18:00:00",
        "granularity": 60  // minutes (optional, default: 60)
    }

    Response:
    {
        "market_info": {...},
        "polymarket_data": [...],
        "correlated_assets": [
            {
                "ticker": "AAPL",
                "reason": "...",
                "correlation": 0.85,
                "timeseries": [...],
                "normalized_timeseries": [...]
            },
            ...
        ],
        "inversely_correlated_assets": [...]
    }
    """
    try:
        # silent=True: malformed/missing JSON becomes None (-> 400), not a 500.
        data = request.get_json(silent=True) or {}
        url = data.get('url')
        start_time_str = data.get('start_time')
        end_time_str = data.get('end_time')
        granularity = data.get('granularity', 60)

        # Validate required fields
        if not url:
            return jsonify({"error": "url is required"}), 400
        if not start_time_str:
            return jsonify({"error": "start_time is required"}), 400
        if not end_time_str:
            return jsonify({"error": "end_time is required"}), 400

        # Step 1: Get market info
        market_info = polymarket_api.get_market_info(url)
        token_id = polymarket_api.get_first_token_id(market_info)

        # Step 2: Convert times and reject inverted/invalid ranges before any
        # expensive work (consistent with /api/price-history and /api/add-asset).
        start_unix = time_converter.est_to_unix(start_time_str)
        end_unix = time_converter.est_to_unix(end_time_str)
        time_converter.validate_time_range(start_unix, end_unix)

        # Step 3: Get Polymarket price history
        raw_history = polymarket_api.get_price_history(
            token_id,
            start_unix,
            end_unix,
            fidelity=granularity
        )
        polymarket_timeseries = polymarket_api.reconstruct_timeseries(
            raw_history,
            start_unix,
            end_unix,
            granularity
        )

        # Step 4: Get correlated assets from LLM
        llm_service = get_llm_service()
        llm_result = llm_service.get_correlated_assets(market_info['question'])

        # Step 5: Analyze correlated assets.
        # Use appropriate interval based on user's granularity choice.
        interval = "1h" if granularity >= 60 else "5m"

        correlated_results = data_analyzer.analyze_multiple_assets(
            polymarket_timeseries,
            llm_result['correlated'],
            start_unix,
            end_unix,
            interval
        )
        inversely_correlated_results = data_analyzer.analyze_multiple_assets(
            polymarket_timeseries,
            llm_result['inversely_correlated'],
            start_unix,
            end_unix,
            interval
        )

        # Normalize Polymarket data so it can be charted alongside assets.
        normalized_polymarket = data_analyzer.normalize_timeseries(
            polymarket_timeseries,
            "min-max"
        )

        response = {
            "market_info": {
                "id": market_info['id'],
                "question": market_info['question'],
                "token_id": token_id
            },
            "time_range": {
                "start": start_time_str,
                "end": end_time_str,
                "start_unix": start_unix,
                "end_unix": end_unix,
                "granularity": granularity
            },
            "polymarket_data": polymarket_timeseries,
            "polymarket_data_normalized": normalized_polymarket,
            "correlated_assets": correlated_results,
            "inversely_correlated_assets": inversely_correlated_results
        }
        return jsonify(response), 200
    except ValueError as e:
        return jsonify({"error": str(e)}), 400
    except Exception as e:
        return jsonify({"error": f"Internal server error: {str(e)}"}), 500
@app.route('/api/add-asset', methods=['POST'])
def add_asset():
    """
    Add a custom asset to the analysis after initial load.

    Request body:
    {
        "token_id": "...",
        "start_time": "2025-11-10 10:00:00",
        "end_time": "2025-11-14 18:00:00",
        "granularity": 60,
        "ticker": "NVDA",
        "category": "correlated" | "inverse",
        "reason": "User provided reason"
    }

    Response:
    {
        "ticker": "...",
        "reason": "...",
        "correlation": 0.45,
        "data_points": 120,
        "timeseries": [...],
        "normalized_timeseries": [...],
        "aligned_timeseries": [...],
        "aligned_normalized_timeseries": [...],
        "success": true,
        "category": "correlated"
    }
    """
    try:
        # silent=True: malformed/missing JSON becomes None (-> 400), not a 500.
        data = request.get_json(silent=True) or {}
        token_id = data.get('token_id')
        start_time_str = data.get('start_time')
        end_time_str = data.get('end_time')
        granularity = data.get('granularity', 60)
        ticker = data.get('ticker')
        category = data.get('category', 'correlated')
        reason = data.get('reason', '')

        if not token_id:
            return jsonify({"error": "token_id is required"}), 400
        if not ticker:
            return jsonify({"error": "ticker is required"}), 400
        if not start_time_str or not end_time_str:
            return jsonify({"error": "start_time and end_time are required"}), 400

        # Convert times and reject inverted/invalid ranges up front.
        start_unix = time_converter.est_to_unix(start_time_str)
        end_unix = time_converter.est_to_unix(end_time_str)
        time_converter.validate_time_range(start_unix, end_unix)

        # Fetch Polymarket data and rebuild a complete timeseries so the new
        # asset is compared against the same baseline as the initial analysis.
        raw_history = polymarket_api.get_price_history(
            token_id,
            start_unix,
            end_unix,
            granularity
        )
        polymarket_timeseries = polymarket_api.reconstruct_timeseries(
            raw_history,
            start_unix,
            end_unix,
            granularity
        )

        # Same interval heuristic as /api/analyze-full.
        interval = "1h" if granularity >= 60 else "5m"
        assets = [{"ticker": ticker, "reason": reason}]
        results = data_analyzer.analyze_multiple_assets(
            polymarket_timeseries,
            assets,
            start_unix,
            end_unix,
            interval
        )

        if not results:
            return jsonify({"error": "Could not analyze asset"}), 500

        # Only one asset was submitted, so only one result comes back.
        asset_result = results[0]
        asset_result["category"] = category
        return jsonify(asset_result), 200
    except ValueError as e:
        return jsonify({"error": str(e)}), 400
    except Exception as e:
        return jsonify({"error": f"Internal server error: {str(e)}"}), 500
# For local development, run the server
# For HuggingFace Spaces, Gunicorn will handle the server (see Dockerfile)
if __name__ == '__main__':
    port = int(os.environ.get('PORT', 5000))
    # Debug defaults to on for local development; set FLASK_DEBUG=0 to turn it
    # off. WARNING: the Werkzeug debugger must never be exposed publicly --
    # this branch only runs when the file is executed directly, not under
    # Gunicorn, but binding 0.0.0.0 with debug on is risky on shared networks.
    debug_enabled = os.environ.get('FLASK_DEBUG', '1').lower() not in ('0', 'false', 'no')
    app.run(debug=debug_enabled, host='0.0.0.0', port=port)