igriv committed on
Commit
15e1317
·
1 Parent(s): 453dafe

Prepare for HF Spaces deployment

Browse files
Files changed (4) hide show
  1. app.py +7 -0
  2. cryptoindex.py +2 -2
  3. index_interface_supabase.py +233 -0
  4. requirements.txt +12 -0
app.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ """
2
+ Hugging Face Spaces entry point
3
+ This file is required for HF Spaces deployment
4
+ """
5
+ from index_interface_supabase import *
6
+
7
+ # The app will auto-launch when imported with iface.launch()
cryptoindex.py CHANGED
@@ -35,7 +35,7 @@ def format_output(mymean, mystandarddeviation, mysharpe):
35
  return output
36
 
37
  import os
38
- api_key = os.getenv("POLYGON_KEY")
39
 
40
  import pandas as pd
41
  from polygon import RESTClient
@@ -149,4 +149,4 @@ def update_weights(fname="/tmp/wts.csv", **kwargs):
149
  _, dfs = get_crypto_index(crypto_data)
150
  retval = dfs[dfs.date == dfs.date.max()]
151
  retval.to_csv(fname, index = False)
152
- return retval
 
35
  return output
36
 
37
  import os
38
+ api_key = os.getenv("POLYGON_API_KEY")
39
 
40
  import pandas as pd
41
  from polygon import RESTClient
 
149
  _, dfs = get_crypto_index(crypto_data)
150
  retval = dfs[dfs.date == dfs.date.max()]
151
  retval.to_csv(fname, index = False)
152
+ return retval
index_interface_supabase.py ADDED
@@ -0,0 +1,233 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import plotly.express as px
3
+ from cryptoindex import *
4
+ import pandas as pd
5
+ from updater import *
6
+ from time import sleep
7
+ from functools import partial
8
+ import argparse
9
+ import os
10
+ from supabase import create_client, Client
11
+ from datetime import datetime
12
+ import hashlib
13
+ import json
14
+ import uuid
15
+ from dotenv import load_dotenv
16
+
17
load_dotenv()

# Supabase credentials come from the environment (.env locally, Space secrets on HF).
supabase_url = os.getenv("SUPABASE_URL", "")
supabase_key = os.getenv("SUPABASE_KEY", "")

# Log truncated credentials only — never print the full key.
print(f"Supabase URL: {supabase_url[:30]}..." if supabase_url else "No Supabase URL")
print(f"Supabase Key: {supabase_key[:20]}..." if supabase_key else "No Supabase Key")

try:
    # Require BOTH the URL and the key: the original only checked the URL, so a
    # missing key still attempted client creation and always failed at runtime.
    supabase: Client = (
        create_client(supabase_url, supabase_key)
        if supabase_url and supabase_key
        else None
    )
    if supabase:
        # Probe the cache table so a bad URL/key is surfaced at startup,
        # not on the first user request.
        test_response = supabase.table('index_cache').select('id').limit(1).execute()
        print("Supabase connection test successful")
except Exception as e:
    # Fall back to running without persistence rather than crashing the app.
    print(f"Supabase initialization error: {e}")
    supabase = None
34
+
35
+
36
def get_cache_key(start_date: str, end_date: str, locale: str, market_type: str) -> str:
    """Build a deterministic cache key for a query.

    The four parameters are joined with underscores and MD5-hashed so that
    identical queries always map to the same Supabase cache row.
    """
    raw = "_".join([start_date, end_date, locale, market_type])
    digest = hashlib.md5(raw.encode())
    return digest.hexdigest()
40
+
41
+
42
def get_or_create_session(request: gr.Request) -> str:
    """Return a stable per-user session id.

    Prefers Gradio's session hash; falls back to a random UUID. The original
    returned None when the attribute existed but held None (gradio sets
    session_hash=None outside a live session), and crashed intent aside,
    make_graph can also pass request=None — getattr covers both cases.
    """
    session_hash = getattr(request, 'session_hash', None)
    if session_hash:
        return session_hash
    return str(uuid.uuid4())
47
+
48
+
49
def fetch_from_cache(cache_key: str):
    """Fetch cached index data from Supabase.

    Returns a DataFrame with a DatetimeIndex on a cache hit, or None on a
    miss, on any Supabase error, or when the client is not configured.
    """
    if not supabase:
        print("Supabase client not initialized")
        return None

    try:
        print(f"Fetching cache for key: {cache_key}")
        response = supabase.table('index_cache').select('*').eq('cache_key', cache_key).execute()
        if response.data and len(response.data) > 0:
            data = response.data[0]
            # Wrap the stored JSON string in StringIO: passing a literal
            # string to read_json is deprecated since pandas 2.1 (the
            # pinned version) and removed in later releases.
            from io import StringIO
            v_data = pd.read_json(StringIO(data['index_data']))
            v_data.index = pd.to_datetime(v_data.index)
            print(f"Cache hit for key: {cache_key}")
            return v_data
        else:
            print(f"Cache miss for key: {cache_key}")
    except Exception as e:
        print(f"Cache fetch error: {e}")
        print(f"Error type: {type(e)}")
        if hasattr(e, 'response'):
            print(f"Response: {e.response}")

    return None
73
+
74
+
75
def save_to_cache(cache_key: str, v_data: pd.DataFrame, start_date: str, end_date: str,
                  locale: str, market_type: str):
    """Persist a computed index series to the Supabase cache.

    Best-effort: any error (including serialization) is logged and swallowed
    so a cache failure never breaks the request that produced the data.
    """
    if not supabase:
        return

    try:
        # Built inside try on purpose: to_json failures are swallowed too.
        row = {
            'cache_key': cache_key,
            'start_date': start_date,
            'end_date': end_date,
            'locale': locale,
            'market_type': market_type,
            'index_data': v_data.to_json(),
            'created_at': datetime.now().isoformat(),
        }
        supabase.table('index_cache').upsert(row).execute()
    except Exception as err:
        print(f"Cache save error: {err}")
95
+
96
+
97
def get_user_weights(session_id: str, locale: str, market_type: str):
    """Load the most recent weights saved for this session/locale/market.

    Returns a DataFrame, or None when Supabase is unavailable, no matching
    row exists, or the query fails.
    """
    if not supabase:
        return None

    try:
        response = supabase.table('user_weights').select('*').eq('session_id', session_id).eq('locale', locale).eq('market_type', market_type).order('created_at', desc=True).limit(1).execute()

        if response.data and len(response.data) > 0:
            weights_data = response.data[0]['weights_data']
            # StringIO wrapper: literal-string input to read_json is
            # deprecated since pandas 2.1 (the pinned version).
            from io import StringIO
            return pd.read_json(StringIO(weights_data))
    except Exception as e:
        print(f"Error fetching user weights: {e}")

    return None
112
+
113
+
114
def save_user_weights(session_id: str, weights_df: pd.DataFrame, locale: str, market_type: str):
    """Insert a new weights snapshot for this session into Supabase.

    Best-effort: errors (including serialization) are logged and swallowed.
    Rows accumulate per session; readers take the newest by created_at.
    """
    if not supabase:
        return

    try:
        # Built inside try on purpose: to_json failures are swallowed too.
        row = {
            'session_id': session_id,
            'locale': locale,
            'market_type': market_type,
            'weights_data': weights_df.to_json(),
            'created_at': datetime.now().isoformat(),
        }
        supabase.table('user_weights').insert(row).execute()
    except Exception as err:
        print(f"Error saving user weights: {err}")
131
+
132
+
133
def plot_index_prices(start_date, end_date, request: gr.Request, **kwargs):
    """Plot historical index prices, using the Supabase cache when possible.

    Returns (plotly line figure with a range slider, sharpe-stats string).
    `request` is kept for interface compatibility (Gradio injects it); the
    original computed an unused session_id from it, which is removed here.
    """
    locale = kwargs.get('locale', 'global')
    market_type = kwargs.get('market_type', 'crypto')

    cache_key = get_cache_key(start_date, end_date, locale, market_type)
    v = fetch_from_cache(cache_key)

    if v is None:
        # Cache miss: recompute the index from raw market data and store it.
        cryptodf = fetch_crypto_data(start_date=start_date, end_date=end_date, **kwargs)
        v, _ = get_crypto_index(cryptodf, func=np.sqrt)
        save_to_cache(cache_key, v, start_date, end_date, locale, market_type)

    _, _, _, output = do_sharpe(v.close)
    fig = px.line(v, x=v.index, y='close', title='Index Prices')
    fig.update_xaxes(rangeslider_visible=True)
    return fig, output
152
+
153
+
154
def realtime_update_weighted_prices(request: gr.Request, locale='global', market_type='crypto'):
    """Plot today's real-time index using this session's stored weights."""
    session_id = get_or_create_session(request)

    # Refresh and persist weights when the scheduler says they are stale.
    if should_update_weights():
        fresh = update_weights(locale=locale, market_type=market_type)
        save_user_weights(session_id, fresh, locale, market_type)

    last_day = get_user_weights(session_id, locale, market_type)

    # No stored weights for this session yet (or Supabase is down):
    # compute them now and attempt to persist them.
    if last_day is None:
        fresh = update_weights(locale=locale, market_type=market_type)
        save_user_weights(session_id, fresh, locale, market_type)
        last_day = fresh

    prices = update_day(last_day)
    _, _, _, output = do_sharpe(prices, days=False)
    fig = px.line(prices, x=prices.index, y=prices.values, title='Index Today')
    return fig, output
173
+
174
+
175
def make_graph(choice, start_date=None, end_date=None, request: gr.Request = None, **kwargs):
    """Build the plot and stats components for the selected graph type.

    "Historical" routes to the cached date-range plot; anything else falls
    through to the real-time per-session view.
    """
    wants_history = (choice == "Historical")
    if wants_history:
        fig, stats = plot_index_prices(start_date, end_date, request, **kwargs)
    else:
        fig, stats = realtime_update_weighted_prices(request, **kwargs)
    return gr.Plot(fig), gr.Markdown(stats)
183
+
184
+
185
def initialize_database():
    """Report Supabase connectivity at startup.

    Tables are expected to already exist; this only logs whether the app is
    running Supabase-backed or in local (no-persistence) mode.
    """
    if supabase:
        print("Supabase connected successfully!")
    else:
        print("Supabase not configured. Running in local mode.")
192
+
193
+
194
+ if __name__ == "__main__":
195
+ parser = argparse.ArgumentParser()
196
+ parser.add_argument("--locale", default='global', help="the locale")
197
+ parser.add_argument("--market_type", default='crypto', help="the market type")
198
+ parser.add_argument("--share", action="store_true", help="share the interface")
199
+ args = parser.parse_args()
200
+
201
+ initialize_database()
202
+
203
+ with gr.Blocks() as iface:
204
+ gr.Markdown("# Crypto Index Tracker (Supabase Edition)")
205
+ gr.Markdown("Each user session has isolated data and computations are cached.")
206
+
207
+ startdatebox = gr.Textbox(label="Start Date", placeholder="YYYY-MM-DD")
208
+ enddatebox = gr.Textbox(label="End Date", placeholder="YYYY-MM-DD")
209
+ radio = gr.Radio(choices=["Historical", "Real-time"], label="Graph Type", value="Historical")
210
+ update_button = gr.Button("Update Graph")
211
+
212
+ theplot = gr.Plot()
213
+ thestats = gr.Markdown()
214
+
215
+ make_graph_partial = partial(make_graph, locale=args.locale, market_type=args.market_type)
216
+
217
+ radio.change(
218
+ fn=make_graph_partial,
219
+ inputs=[radio, startdatebox, enddatebox],
220
+ outputs=[theplot, thestats]
221
+ )
222
+
223
+ update_button.click(
224
+ fn=make_graph_partial,
225
+ inputs=[radio, startdatebox, enddatebox],
226
+ outputs=[theplot, thestats]
227
+ )
228
+
229
+ # Detect if running on Hugging Face Spaces
230
+ if os.getenv("SPACE_ID"):
231
+ iface.launch()
232
+ else:
233
+ iface.launch(server_port=7860, server_name="0.0.0.0", share=args.share)
requirements.txt ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Core dependencies
2
+ gradio==4.19.2
3
+ plotly==5.18.0
4
+ pandas==2.1.4
5
+ numpy==1.26.3
6
+ polygon-api-client==1.13.6
7
+
8
+ # Supabase integration
9
+ supabase==2.3.0
10
+
11
+ # Optional but recommended
12
+ python-dotenv==1.0.0