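"""Refresh cached Alpha Vantage stock data for a fixed list of tickers.

The script pulls the existing stocks.db from a Google Drive folder, updates it
with fresh quote, history, and overview data for each ticker, and uploads the
result back to Drive. Credentials and the API key are read from environment
variables (Secrets).
"""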
import requests
import json
import sqlite3
import pandas as pd
from datetime import datetime
import os
import time
from google.oauth2 import service_account
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
import io
from googleapiclient.errors import HttpError

# Database path
DB_PATH = "stocks.db"

# Google Drive sync credentials and folder (use Secrets in production)
GOOGLE_SERVICE_ACCOUNT_JSON = os.environ.get("GOOGLE_SERVICE_ACCOUNT_JSON", "YOUR_GOOGLE_SERVICE_ACCOUNT_JSON")
GOOGLE_DRIVE_FOLDER_ID = os.environ.get("GOOGLE_DRIVE_FOLDER_ID", "YOUR_GOOGLE_DRIVE_FOLDER_ID")

# Alpha Vantage API key (set as environment variable or hardcode temporarily for testing, update for Spaces)
ALPHA_VANTAGE_API_KEY = os.environ.get("ALPHA_VANTAGE_API_KEY", "YOUR_ALPHA_VANTAGE_API_KEY")

# List of tickers to refresh ("FB" was renamed to "META", so only the current symbol is kept)
popular_stocks = ["AAPL", "GOOGL", "GOOG", "MSFT", "AMZN", "TSLA", "NVDA", "IBM", "INTC", "ORCL", "META"]

def fetch_alpha_vantage_quote(ticker, api_key):
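    """Return the "Global Quote" payload for ticker from Alpha Vantage, or None on error."""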
    url = f"https://www.alphavantage.co/query?function=GLOBAL_QUOTE&symbol={ticker}&apikey={api_key}"
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        data = response.json()
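        # Rate-limited responses come back as HTTP 200 with a "Note"/"Information"
        # key instead of "Global Quote", so the check below also covers that case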
        if "Global Quote" in data:
            return data["Global Quote"]
        return None
    except requests.exceptions.RequestException as e:
        print(f"Error fetching quote for {ticker}: {str(e)}")
        return None

def fetch_alpha_vantage_history(ticker, api_key):
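    """Fetch the daily time series for ticker and return Close/Volume as a JSON string, or None on error."""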
    url = f"https://www.alphavantage.co/query?function=TIME_SERIES_DAILY&symbol={ticker}&outputsize=compact&apikey={api_key}"
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        data = response.json()
        if "Time Series (Daily)" in data:
            daily_data = data["Time Series (Daily)"]
            df = pd.DataFrame.from_dict(daily_data, orient='index')
            df = df.astype(float)
            df.index = pd.to_datetime(df.index)
            df = df.sort_index(ascending=True)
            df.columns = ['Open', 'High', 'Low', 'Close', 'Volume']
            return df[['Close', 'Volume']].to_json()
        return None
    except requests.exceptions.RequestException as e:
        print(f"Error fetching history for {ticker}: {str(e)}")
        return None

def fetch_alpha_vantage_overview(ticker, api_key):
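    """Fetch the company OVERVIEW payload for ticker, or None if no usable data is returned."""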
    url = f"https://www.alphavantage.co/query?function=OVERVIEW&symbol={ticker}&apikey={api_key}"
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        data = response.json()
        # OVERVIEW returns an empty dict (or a "Note"/"Information" payload when
        # rate-limited) without a "Symbol" key; treat those responses as missing data
        if "Symbol" in data:
            return data
        return None
    except requests.exceptions.RequestException as e:
        print(f"Error fetching overview for {ticker}: {str(e)}")
        return None

def sync_database_to_drive():
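    """Upload the local stocks.db to the configured Google Drive folder."""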
    try:
        # Parse service account JSON from Secrets
        service_account_info = json.loads(GOOGLE_SERVICE_ACCOUNT_JSON)
        credentials = service_account.Credentials.from_service_account_info(
            service_account_info, scopes=["https://www.googleapis.com/auth/drive"]
        )
        drive_service = build("drive", "v3", credentials=credentials)

        # Reuse an existing stocks.db in the folder if there is one, so repeated
        # runs update a single file instead of piling up duplicates
        media = MediaFileUpload(DB_PATH, mimetype="application/octet-stream")
        existing = drive_service.files().list(
            q=f"'{GOOGLE_DRIVE_FOLDER_ID}' in parents and name='stocks.db' and trashed=false",
            fields="files(id)"
        ).execute().get("files", [])
        if existing:
            file = drive_service.files().update(fileId=existing[0]["id"], media_body=media, fields="id").execute()
        else:
            file_metadata = {"name": "stocks.db", "parents": [GOOGLE_DRIVE_FOLDER_ID]}
            file = drive_service.files().create(body=file_metadata, media_body=media, fields="id").execute()
        print(f"Successfully synced SQLite database to Google Drive (File ID: {file.get('id')})")
    except HttpError as e:
        if e.resp.status == 404:
            print("Google Drive folder not found. Check GOOGLE_DRIVE_FOLDER_ID and permissions.")
            # Optionally, create a new file or folder, but this requires additional logic
        else:
            print(f"Error syncing database to Google Drive: {str(e)}")
    except Exception as e:
        print(f"Error syncing database to Google Drive: {str(e)}")

def sync_database_from_drive():
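    """Download stocks.db from Google Drive, or create an empty local database if none exists."""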
    try:
        # Parse service account JSON from Secrets
        service_account_info = json.loads(GOOGLE_SERVICE_ACCOUNT_JSON)
        credentials = service_account.Credentials.from_service_account_info(
            service_account_info, scopes=["https://www.googleapis.com/auth/drive"]
        )
        drive_service = build("drive", "v3", credentials=credentials)

        # Download the most recently modified stocks.db from the Drive folder
        file_metadata = drive_service.files().list(
            q=f"'{GOOGLE_DRIVE_FOLDER_ID}' in parents and name='stocks.db' and trashed=false",
            orderBy="modifiedTime desc",
            fields="files(id)"
        ).execute()
        files = file_metadata.get("files", [])
        if files:
            request = drive_service.files().get_media(fileId=files[0]["id"])
            fh = io.BytesIO()
            downloader = MediaIoBaseDownload(fh, request)
            done = False
            while not done:
                status, done = downloader.next_chunk()
            with open(DB_PATH, "wb") as f:
                f.write(fh.getvalue())
            print("Successfully synced SQLite database from Google Drive.")
        else:
            print("No stocks.db found in Google Drive. Creating a new database.")
            conn = sqlite3.connect(DB_PATH)
            conn.close()
    except HttpError as e:
        if e.resp.status == 404:
            print("stocks.db not found in Google Drive. Creating a new local database.")
            conn = sqlite3.connect(DB_PATH)
            conn.close()
        else:
            print(f"Error syncing database from Google Drive: {str(e)}")
    except Exception as e:
        print(f"Error syncing database from Google Drive: {str(e)}")

def update_database():
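    """Refresh all tickers: pull the DB from Drive, update it via Alpha Vantage, and push it back."""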
    # Sync database from Google Drive before updating
    sync_database_from_drive()

    # Connect to SQLite database
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    # Create table if it doesn't exist
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS stocks (
            ticker TEXT PRIMARY KEY,
            quote TEXT,
            history TEXT,
            overview TEXT,
            last_updated TEXT
        )
    """)

    # Fetch and store data for each ticker, pausing 12 seconds after every API
    # call so the three requests per ticker stay within the free-tier limit of
    # 5 requests per minute
    for ticker in popular_stocks:
        quote = fetch_alpha_vantage_quote(ticker, ALPHA_VANTAGE_API_KEY)
        time.sleep(12)
        history = fetch_alpha_vantage_history(ticker, ALPHA_VANTAGE_API_KEY)
        time.sleep(12)
        overview = fetch_alpha_vantage_overview(ticker, ALPHA_VANTAGE_API_KEY)
        time.sleep(12)

        if quote and history and overview:
            cursor.execute("""
                REPLACE INTO stocks (ticker, quote, history, overview, last_updated)
                VALUES (?, ?, ?, ?, ?)
            """, (ticker, json.dumps(quote), history, json.dumps(overview), datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
        else:
            print(f"Skipping {ticker} due to missing data")

    conn.commit()
    conn.close()

    # Sync updated database to Google Drive
    sync_database_to_drive()

if __name__ == "__main__":
    update_database()