File size: 6,005 Bytes
cd451ea
 
 
 
807709a
36c053a
807709a
 
cd451ea
36c053a
 
 
cd451ea
 
 
 
 
599eb7e
 
02cbfc5
 
 
cd451ea
 
 
 
 
 
 
 
 
 
 
 
 
3881446
 
 
cd451ea
 
3881446
 
 
 
cd451ea
 
 
 
 
 
 
 
 
 
 
 
 
3881446
cd451ea
 
 
3881446
cd451ea
 
 
 
 
 
3881446
cd451ea
 
 
3881446
cd451ea
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
599eb7e
807709a
599eb7e
807709a
599eb7e
807709a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
599eb7e
807709a
 
599eb7e
 
807709a
 
 
 
 
 
 
 
 
 
 
cd451ea
 
 
 
 
 
 
 
 
 
 
807709a
4409d1e
 
3881446
 
d954b1b
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
from minio import Minio
from minio.error import S3Error
import os
import argparse
import pandas as pd
from dotenv import load_dotenv
from datetime import datetime
from utils import HIST_DIR, ROOT_DIR, TMP_DIR

load_dotenv()


# MinIO connection settings; credentials are read from the environment
# (populated from a local .env file by load_dotenv above).
MINIO_ENDPOINT = "minio.autonolas.tech"
ACCESS_KEY = os.environ.get("CLOUD_ACCESS_KEY", None)  # None when not configured
SECRET_KEY = os.environ.get("CLOUD_SECRET_KEY", None)  # None when not configured
BUCKET_NAME = "weekly-stats"
FOLDER_NAME = "historical_data"
# Approximate file counts per look-back window; the upload cadence changed
# over time, so the older windows assume fewer files per week.
FILES_IN_TWO_MONTHS = 16  # 2 files per week
FILES_IN_FOUR_MONTHS = 30  # four months ago we did not have two files per week but one
FILES_IN_SIX_MONTHS = 40  # 1 file per week
FILES_IN_EIGHT_MONTHS = 48
FILES_IN_TEN_MONTHS = 56


def initialize_client():
    """Build and return a MinIO client for the configured endpoint."""
    # secure=True means HTTPS; flip to False only for plain-HTTP deployments.
    return Minio(
        MINIO_ENDPOINT,
        access_key=ACCESS_KEY,
        secret_key=SECRET_KEY,
        secure=True,
    )


def upload_file(
    client, filename: str, file_path: str, extra_folder: str = None
) -> bool:
    """Upload a file to the bucket.

    :param client: initialized MinIO client
    :param filename: object base name to store under FOLDER_NAME
    :param file_path: local path of the file to upload
    :param extra_folder: optional subfolder inside FOLDER_NAME
    :return: True on success, False when the upload raises an S3Error
    """
    try:
        if extra_folder is not None:
            object_name = FOLDER_NAME + "/" + extra_folder + "/" + filename
        else:
            object_name = FOLDER_NAME + "/" + filename
        # Bug fix: the log line previously printed the literal "(unknown)"
        # instead of interpolating the filename.
        print(
            f"filename={filename}, object_name={object_name} and file_path={file_path}"
        )
        # Multipart upload with 10MB parts to handle large parquet files.
        client.fput_object(
            BUCKET_NAME, object_name, file_path, part_size=10 * 1024 * 1024
        )
        print(f"File '{file_path}' uploaded as '{object_name}'.")
        return True
    except S3Error as err:
        print(f"Error uploading file: {err}")
        return False


def download_file(client, filename: str):
    """Fetch one object from the bucket into the working directory.

    The object is looked up under FOLDER_NAME and saved locally with a
    'downloaded_' prefix; S3 errors are reported but not re-raised.
    """
    object_name = FOLDER_NAME + "/" + filename
    local_target = "downloaded_" + filename
    try:
        client.fget_object(BUCKET_NAME, object_name, local_target)
    except S3Error as err:
        print(f"Error downloading file: {err}")
    else:
        print(f"File '{object_name}' downloaded as '{local_target}'.")


def load_historical_file(client, filename: str, extra_folder: str = None) -> bool:
    """Upload one file from the local HIST_DIR into cloud storage.

    :param client: initialized MinIO client
    :param filename: name of the file inside HIST_DIR
    :param extra_folder: optional subfolder inside FOLDER_NAME
    :return: True if the upload succeeded
    """
    # Removed a dead assignment (`file_path = filename`) that was
    # immediately overwritten by the HIST_DIR path below.
    file_path = HIST_DIR / filename
    return upload_file(client, filename, file_path, extra_folder)


def upload_historical_file(filename: str):
    """Convenience wrapper: create a client and push one historical file."""
    minio_client = initialize_client()
    load_historical_file(client=minio_client, filename=filename)


def process_historical_files(client):
    """Upload every parquet file found in HIST_DIR to cloud storage.

    Non-parquet files are skipped; a failure on one file is reported and
    does not stop the remaining uploads.

    :param client: initialized MinIO client
    """
    for filename in os.listdir(HIST_DIR):
        # Only parquet files belong to the historical dataset.
        if filename.endswith(".parquet"):
            try:
                # Bug fix: both log lines previously printed the literal
                # "(unknown)" instead of interpolating the filename.
                if load_historical_file(client, filename):
                    print(f"Successfully processed {filename}")
                else:
                    print("Error loading the files")
            except Exception as e:
                print(f"Error processing {filename}: {str(e)}")


def download_tools_historical_files(client, skip_files_count: int) -> "str | None":
    """Download the most recent eligible tools parquet file from cloud storage.

    Lists all objects under FOLDER_NAME, keeps those whose name contains
    "tools" and ends in ".parquet", sorts them by their timestamped
    filename, skips the newest ``skip_files_count`` of them, and downloads
    the next one into TMP_DIR.

    :param client: initialized MinIO client
    :param skip_files_count: how many of the newest tools files to skip
    :return: local filename of the downloaded file, or None when there are
        not enough files in the bucket or an S3 error occurs

    NOTE(review): despite the plural name and docstring below, the loop
    returns after the first download — exactly ONE file is fetched per
    call. The original ``-> pd.DataFrame`` annotation was wrong (the
    function returns a str or None); confirm callers expect a filename.
    """

    try:
        nr_files = skip_files_count + 2
        print(f"Downloading the last {nr_files} tools files from cloud storage")
        # Use recursive=True to get all objects including those in subfolders
        objects = client.list_objects(
            BUCKET_NAME, prefix=FOLDER_NAME + "/", recursive=True
        )
        all_objects = list(objects)
        print(f"Total objects found: {len(all_objects)}")

        tool_files = [
            obj.object_name
            for obj in all_objects
            if obj.object_name.endswith(".parquet") and "tools" in obj.object_name
        ]
        print(f"tool files found: {tool_files}")
        if len(tool_files) < nr_files - 1:  # at least one file to collect
            return None
        # format of the filename is tools_YYYYMMDD_HHMMSS.parquet
        # get the last nr_files by sorting the tool_files by the YYYYMMDD_HHMMSS part
        tool_files.sort()  # Sort files by name (assumed to be timestamped)
        selected_files = tool_files[-nr_files:]  # Get the last nr_files

        print(f"Selected files: {selected_files}")
        # traverse the selected files in reverse order
        selected_files.reverse()
        # skip the first FILES_IN_TWO_MONTHS files
        selected_files = selected_files[skip_files_count:]  # limit to last two months

        for filename in selected_files:
            # if exclude_filename and exclude_filename in filename:
            #     continue
            # Strip the bucket folder prefix to get the plain local filename.
            local_filename = filename.replace("historical_data/", "")
            print(f"Downloading {local_filename}")
            download_path = TMP_DIR / local_filename
            client.fget_object(BUCKET_NAME, filename, str(download_path))
            # Unconditional return: only the first file in the (reversed,
            # skipped) list is ever downloaded.
            return local_filename
    except S3Error as err:
        print(f"Error downloading files: {err}")

    return None


if __name__ == "__main__":
    # Ad-hoc entry point: the commented-out lines below are kept as usage
    # examples for the various one-off maintenance tasks this script covers.
    # parser = argparse.ArgumentParser(
    #     description="Load files to the cloud storate for historical data"
    # )
    # parser.add_argument("param_1", type=str, help="Name of the file to upload")

    # # Parse the arguments
    # args = parser.parse_args()
    # filename = args.param_1

    client = initialize_client()
    # download_file(client, "all_trades_profitability_20250103_162106.parquet")
    # Current task: fetch the most recent tools parquet file into TMP_DIR.
    download_tools_historical_files(client, skip_files_count=0)
    # load_historical_file(client, "all_trades_profitability_20250826_102759.parquet")
    # process_historical_files(client)
    # checking files at the cloud storage
    # files = ["data_delivers_22_04_2024.csv", "data_tools_22_04_2024.csv"]
    # for old_file in files:
    #     # download_file(client=client, filename=tools_file)
    #     load_historical_file(
    #         client=client, filename=old_file, extra_folder=APRIL_FOLDER
    #     )