|
|
from minio import Minio |
|
|
from minio.error import S3Error |
|
|
import os |
|
|
import argparse |
|
|
import pandas as pd |
|
|
from dotenv import load_dotenv |
|
|
from datetime import datetime |
|
|
from utils import HIST_DIR, ROOT_DIR, TMP_DIR |
|
|
|
|
|
# Pull CLOUD_ACCESS_KEY / CLOUD_SECRET_KEY (read below) from a local .env file.
load_dotenv()
|
|
|
|
|
|
|
|
# MinIO connection settings. Credentials come from the environment
# (populated by load_dotenv above); they are None when unset, which
# will fail at authentication time rather than here.
MINIO_ENDPOINT = "minio.autonolas.tech"
ACCESS_KEY = os.environ.get("CLOUD_ACCESS_KEY", None)
SECRET_KEY = os.environ.get("CLOUD_SECRET_KEY", None)
# Target bucket and the object-key prefix ("folder") used for all uploads.
BUCKET_NAME = "weekly-stats"
FOLDER_NAME = "historical_data"
# File counts covering each time window.
# NOTE(review): none of these are referenced in this module — presumably
# consumed by importers; confirm before removing.
FILES_IN_TWO_MONTHS = 16
FILES_IN_FOUR_MONTHS = 30
FILES_IN_SIX_MONTHS = 40
FILES_IN_EIGHT_MONTHS = 48
FILES_IN_TEN_MONTHS = 56
|
|
|
|
|
|
|
|
def initialize_client():
    """Create and return a TLS-enabled MinIO client for the configured endpoint."""
    return Minio(
        MINIO_ENDPOINT,
        access_key=ACCESS_KEY,
        secret_key=SECRET_KEY,
        secure=True,
    )
|
|
|
|
|
|
|
|
def upload_file(
    client, filename: str, file_path: str, extra_folder: str = None
) -> bool:
    """Upload a local file into the bucket under the FOLDER_NAME prefix.

    :param client: initialized Minio client.
    :param filename: object name (without the folder prefix).
    :param file_path: local path of the file to upload.
    :param extra_folder: optional sub-folder inserted between FOLDER_NAME
        and the filename.
    :return: True on success, False when the upload raises an S3Error.
    """
    try:
        # snake_case locals (were UPPER_SNAKE, which PEP 8 reserves for constants)
        if extra_folder is not None:
            object_name = f"{FOLDER_NAME}/{extra_folder}/{filename}"
        else:
            object_name = f"{FOLDER_NAME}/{filename}"
        # FIX: the {filename} placeholder was missing from the log message.
        print(
            f"filename={filename}, object_name={object_name} and file_path={file_path}"
        )
        # 10 MiB multipart chunk size.
        client.fput_object(
            BUCKET_NAME, object_name, file_path, part_size=10 * 1024 * 1024
        )
        print(f"File '{file_path}' uploaded as '{object_name}'.")
        return True
    except S3Error as err:
        print(f"Error uploading file: {err}")
        return False
|
|
|
|
|
|
|
|
def download_file(client, filename: str):
    """Fetch one object from the bucket into a local 'downloaded_<filename>' file."""
    object_name = FOLDER_NAME + "/" + filename
    local_name = filename
    try:
        client.fget_object(BUCKET_NAME, object_name, "downloaded_" + local_name)
        print(f"File '{object_name}' downloaded as 'downloaded_{local_name}'.")
    except S3Error as err:
        print(f"Error downloading file: {err}")
|
|
|
|
|
|
|
|
def load_historical_file(client, filename: str, extra_folder: str = None) -> bool:
    """Upload one file from the local historical-data directory (HIST_DIR).

    :param client: initialized Minio client.
    :param filename: file name inside HIST_DIR, also used as the object name.
    :param extra_folder: optional sub-folder passed through to upload_file.
    :return: True on success, False otherwise (see upload_file).
    """
    # FIX: removed a dead `file_path = filename` assignment that was
    # immediately overwritten by the line below.
    file_path = HIST_DIR / filename
    return upload_file(client, filename, file_path, extra_folder)
|
|
|
|
|
|
|
|
def upload_historical_file(filename: str):
    """Convenience wrapper: build a client and upload a single historical file."""
    minio_client = initialize_client()
    load_historical_file(client=minio_client, filename=filename)
|
|
|
|
|
|
|
|
def process_historical_files(client):
    """Upload every .parquet file found in HIST_DIR to cloud storage.

    Non-parquet entries are skipped; a failure on one file is logged and
    does not stop the remaining uploads.

    :param client: initialized Minio client.
    """
    for filename in os.listdir(HIST_DIR):
        # Guard clause instead of nesting the whole body under the check.
        if not filename.endswith(".parquet"):
            continue
        try:
            # FIX: restored the missing {filename} placeholder in both
            # log messages below.
            if load_historical_file(client, filename):
                print(f"Successfully processed {filename}")
            else:
                print("Error loading the files")
        except Exception as e:
            print(f"Error processing {filename}: {str(e)}")
|
|
|
|
|
|
|
|
def download_tools_historical_files(client, skip_files_count: int) -> "str | None":
    """Download recent "tools" parquet files from the bucket into TMP_DIR.

    Lists every object under FOLDER_NAME/, keeps the .parquet objects whose
    name contains "tools", takes the newest (skip_files_count + 2) of them,
    drops the newest skip_files_count, and downloads the remainder.

    NOTE(review): the original return annotation was pd.DataFrame, but the
    code returns the local filename string of the last file downloaded (the
    oldest of the selected set), or None on shortage/error — corrected here.

    :param client: initialized Minio client.
    :param skip_files_count: how many of the most recent tools files to skip.
    :return: local filename of the last downloaded file, or None.
    """
    try:
        # Fetch two files beyond the skipped prefix.
        nr_files = skip_files_count + 2
        print(f"Downloading the last {nr_files} tools files from cloud storage")

        objects = client.list_objects(
            BUCKET_NAME, prefix=FOLDER_NAME + "/", recursive=True
        )
        all_objects = list(objects)
        print(f"Total objects found: {len(all_objects)}")

        # Keep only parquet objects whose key mentions "tools".
        tool_files = [
            obj.object_name
            for obj in all_objects
            if obj.object_name.endswith(".parquet") and "tools" in obj.object_name
        ]
        print(f"tool files found: {tool_files}")
        # Bail out when there are not enough files (one fewer is tolerated).
        if len(tool_files) < nr_files - 1:
            return None

        # Lexicographic sort; assumes object names embed sortable dates so
        # this orders chronologically — TODO confirm the naming scheme.
        tool_files.sort()
        selected_files = tool_files[-nr_files:]

        print(f"Selected files: {selected_files}")

        # Newest first, then drop the first skip_files_count entries.
        selected_files.reverse()

        selected_files = selected_files[skip_files_count:]

        for filename in selected_files:
            # Strip the folder prefix to obtain a flat local filename.
            local_filename = filename.replace("historical_data/", "")
            print(f"Downloading {local_filename}")
            download_path = TMP_DIR / local_filename
            client.fget_object(BUCKET_NAME, filename, str(download_path))
        # NOTE(review): if selected_files is empty, local_filename is
        # unbound here and this raises NameError (not caught below).
        return local_filename
    except S3Error as err:
        print(f"Error downloading files: {err}")

    return None
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Manual run: pull the latest tools files without skipping any.
    minio_client = initialize_client()
    download_tools_historical_files(minio_client, skip_files_count=0)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|