rafat0421 commited on
Commit
ffa380e
·
1 Parent(s): 5a89ac2

Create functions.py

Browse files
Files changed (1) hide show
  1. functions.py +249 -0
functions.py ADDED
@@ -0,0 +1,249 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ import requests
3
+ import os
4
+ import joblib
5
+ import pandas as pd
6
+
7
+ import json
8
+
9
+ from dotenv import load_dotenv
10
+ load_dotenv()
11
+
12
+
13
def decode_features(df, feature_view):
    """Decode (inverse-transform) features in *df* using the Hopsworks
    Feature Store transformation functions attached to *feature_view*.

    Supports the built-in ``min_max_scaler``, ``standard_scaler`` and
    ``label_encoder`` transformations; columns without a registered
    transformation (or with an unknown one) are left untouched.

    Args:
        df: DataFrame whose columns may contain transformed feature values.
        feature_view: Hopsworks feature view whose batch scoring server
            carries the fitted transformation functions.

    Returns:
        A new DataFrame with the listed transformations inverted; the
        input DataFrame is not modified.
    """
    import inspect

    df_res = df.copy()
    td_transformation_functions = feature_view._batch_scoring_server._transformation_functions

    for feature_name, td_transformation_function in td_transformation_functions.items():
        if feature_name not in df_res.columns:
            continue
        sig = inspect.signature(td_transformation_function.transformation_fn)
        # The fitted statistics (min/max, mean/std_dev, value_to_index)
        # are stored as default parameter values on the transformation fn.
        # Use the public inspect.Parameter.empty sentinel with an identity
        # check instead of the private `inspect._empty` and `!=`.
        param_dict = {
            param.name: param.default
            for param in sig.parameters.values()
            if param.default is not inspect.Parameter.empty
        }
        if td_transformation_function.name == "min_max_scaler":
            df_res[feature_name] = df_res[feature_name].map(
                lambda x: x * (param_dict["max_value"] - param_dict["min_value"]) + param_dict["min_value"])
        elif td_transformation_function.name == "standard_scaler":
            df_res[feature_name] = df_res[feature_name].map(
                lambda x: x * param_dict["std_dev"] + param_dict["mean"])
        elif td_transformation_function.name == "label_encoder":
            # Invert the value->index mapping to recover the original labels.
            index_to_value = {v: k for k, v in param_dict["value_to_index"].items()}
            df_res[feature_name] = df_res[feature_name].map(
                lambda x: index_to_value[x])
    return df_res
41
+
42
+
43
def get_model(project, model_name, evaluation_metric, sort_metrics_by):
    """Return the desired model, reusing a locally cached copy if present.

    Searches the current directory tree for an already-downloaded
    ``model.pkl``; if none is found, downloads the best model (ranked by
    *evaluation_metric* / *sort_metrics_by*) from the Hopsworks Model
    Registry into this directory and loads it.

    Args:
        project: Opened Hopsworks project handle.
        model_name: Name of the model in the Model Registry.
        evaluation_metric: Metric name used to pick the best version.
        sort_metrics_by: Sort direction for the metric.

    Returns:
        The deserialized (joblib) model object.
    """
    TARGET_FILE = "model.pkl"
    list_of_files = [os.path.join(dirpath, filename)
                     for dirpath, _, filenames in os.walk('.')
                     for filename in filenames if filename == TARGET_FILE]

    if list_of_files:
        # Reuse the cached model instead of re-downloading.
        return joblib.load(list_of_files[0])

    # Not cached: fetch the best version from the Model Registry.
    # (The original re-checked os.path.exists(TARGET_FILE) here; that guard
    # was redundant -- os.walk('.') above already covers the cwd -- and if it
    # had ever been False, `model` would be unbound, raising
    # UnboundLocalError at the return.)
    mr = project.get_model_registry()
    model = mr.get_best_model(model_name,
                              evaluation_metric,
                              sort_metrics_by)
    model_dir = model.download()
    return joblib.load(model_dir + "/model.pkl")
65
+
66
+
67
def get_air_json(AIR_QUALITY_API_KEY):
    """Fetch the current Helsinki air-quality feed from the WAQI API.

    Returns the ``data`` payload of the JSON response.
    """
    url = f'https://api.waqi.info/feed/Helsinki/?token={AIR_QUALITY_API_KEY}'
    response = requests.get(url)
    return response.json()['data']
69
+
70
+
71
+
72
def get_air_quality_data1():
    """Fetch a reduced set of current air-quality readings for Helsinki.

    Returns ``[date, pm25, pm10, o3, no2]`` read from the top level of the
    WAQI ``data`` payload.

    NOTE(review): the sibling get_air_quality_data() reads pollutant values
    from data['iaqi'][...]['v'] and the date from data['time']['s'] -- the
    top-level keys used here may not exist in the real API response;
    verify against a live response before relying on this variant.
    """
    AIR_QUALITY_API_KEY = os.getenv('AIR_QUALITY_API_KEY')
    # Renamed from `json` to avoid shadowing the module-level json import.
    data = get_air_json(AIR_QUALITY_API_KEY)

    return [
        data['date'],  # AQI
        data['pm25'],
        data['pm10'],
        data['o3'],
        data['no2'],
    ]
88
+
89
def get_air_quality_data():
    """Fetch current air quality for Helsinki from the WAQI API.

    Returns a flat list ordered to match get_air_quality_df's columns:
    [aqi, date, iaqi_h, iaqi_p, iaqi_pm10, iaqi_t,
     o3_avg/max/min, pm10_avg/max/min, pm25_avg/max/min, uvi_avg/max/min].
    """
    AIR_QUALITY_API_KEY = os.getenv('AIR_QUALITY_API_KEY')
    # `data`, not `json`: avoid shadowing the module-level json import.
    data = get_air_json(AIR_QUALITY_API_KEY)
    print(data)  # debug output; consider switching to logging
    iaqi = data['iaqi']
    forecast = data['forecast']['daily']
    return [
        data['aqi'],             # AQI
        data['time']['s'][:10],  # Date (YYYY-MM-DD)
        iaqi['h']['v'],
        iaqi['p']['v'],
        iaqi['pm10']['v'],
        iaqi['t']['v'],
        forecast['o3'][0]['avg'],
        forecast['o3'][0]['max'],
        forecast['o3'][0]['min'],
        forecast['pm10'][0]['avg'],
        forecast['pm10'][0]['max'],
        forecast['pm10'][0]['min'],
        forecast['pm25'][0]['avg'],
        forecast['pm25'][0]['max'],
        forecast['pm25'][0]['min'],
        forecast['uvi'][0]['avg'],
        # BUG FIX: the original read ['avg'] three times; the DataFrame
        # columns (uvi_max, uvi_min) and the o3/pm10/pm25 pattern above
        # show max/min were intended here.
        forecast['uvi'][0]['max'],
        forecast['uvi'][0]['min'],
    ]
115
+
116
def get_air_quality_df1(data):
    """Wrap raw air-quality rows in a DataFrame, converting dates to epoch ms.

    Expected column order: aqi, date, pm25, pm10, o3, no2.
    NOTE(review): get_air_quality_data1() returns only five values (no
    'aqi') -- confirm which producer actually feeds this function.
    """
    col_names = [
        'aqi',
        'date',
        'pm25',
        'pm10',
        'o3',
        'no2',
    ]

    frame = pd.DataFrame(data, columns=col_names)
    # Normalize the date column to a unix timestamp in milliseconds.
    frame.date = frame.date.apply(timestamp_2_time1)
    return frame
134
+
135
def get_air_quality_df(data):
    """Build a typed DataFrame from raw air-quality rows.

    Column order mirrors the list produced by get_air_quality_data();
    the 'date' column is converted to a unix timestamp in milliseconds.
    """
    col_names = [
        'aqi',
        'date',
        'iaqi_h',
        'iaqi_p',
        'iaqi_pm10',
        'iaqi_t',
        'o3_avg',
        'o3_max',
        'o3_min',
        'pm10_avg',
        'pm10_max',
        'pm10_min',
        'pm25_avg',
        'pm25_max',
        'pm25_min',
        'uvi_avg',
        'uvi_max',
        'uvi_min',
    ]

    frame = pd.DataFrame(data, columns=col_names)
    # Normalize the date column to a unix timestamp in milliseconds.
    frame.date = frame.date.apply(timestamp_2_time1)
    return frame
164
+
165
+
166
def get_weather_json(date, WEATHER_API_KEY):
    """Query the Visual Crossing timeline API for Helsinki weather on *date*.

    Returns the parsed JSON response (metric units, daily resolution).
    """
    url = (
        'https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/'
        f'timeline/helsinki/{date}?unitGroup=metric&include=days&key={WEATHER_API_KEY}&contentType=json'
    )
    return requests.get(url).json()
168
+
169
+
170
def get_weather_data(date):
    """Fetch the daily weather record for Helsinki on *date*.

    Args:
        date: Date string accepted by the Visual Crossing timeline API.

    Returns:
        A flat list ordered to match get_weather_df's columns:
        [city, date, tempmax, tempmin, temp, feelslikemax, feelslikemin,
         feelslike, dew, humidity, precip, precipprob, precipcover, snow,
         snowdepth, windgust, windspeed, winddir, pressure, cloudcover,
         visibility, solarradiation, solarenergy, uvindex, conditions].
    """
    WEATHER_API_KEY = os.getenv('WEATHER_API_KEY')
    # `payload`, not `json`: avoid shadowing the module-level json import.
    payload = get_weather_json(date, WEATHER_API_KEY)
    day = payload['days'][0]

    return [
        payload['address'].capitalize(),
        day['datetime'],
        day['tempmax'],
        day['tempmin'],
        day['temp'],
        day['feelslikemax'],
        day['feelslikemin'],
        day['feelslike'],
        day['dew'],
        day['humidity'],
        day['precip'],
        day['precipprob'],
        day['precipcover'],
        day['snow'],
        day['snowdepth'],
        day['windgust'],
        day['windspeed'],
        day['winddir'],
        day['pressure'],
        day['cloudcover'],
        day['visibility'],
        day['solarradiation'],
        day['solarenergy'],
        day['uvindex'],
        day['conditions']
    ]
202
+
203
+
204
def get_weather_df(data):
    """Build a typed DataFrame from raw daily-weather rows.

    Column order mirrors the list produced by get_weather_data();
    the 'date' column is converted to a unix timestamp in milliseconds.
    """
    col_names = [
        'city',
        'date',
        'tempmax',
        'tempmin',
        'temp',
        'feelslikemax',
        'feelslikemin',
        'feelslike',
        'dew',
        'humidity',
        'precip',
        'precipprob',
        'precipcover',
        'snow',
        'snowdepth',
        'windgust',
        'windspeed',
        'winddir',
        'pressure',
        'cloudcover',
        'visibility',
        'solarradiation',
        'solarenergy',
        'uvindex',
        'conditions'
    ]

    frame = pd.DataFrame(data, columns=col_names)
    # Normalize the date column to a unix timestamp in milliseconds.
    frame.date = frame.date.apply(timestamp_2_time1)
    return frame
240
+
241
def timestamp_2_time1(x):
    """Convert a 'YYYY-MM-DD' date string to a unix timestamp in milliseconds.

    NOTE(review): datetime.timestamp() on a naive datetime uses the local
    timezone, so results differ between machines in different timezones --
    confirm whether UTC was intended.
    """
    parsed = datetime.strptime(str(x), '%Y-%m-%d')
    return int(parsed.timestamp() * 1000)
245
+
246
def timestamp_2_time(x):
    """Convert a 'MM/DD/YYYY' date string to a unix timestamp in milliseconds.

    NOTE(review): datetime.timestamp() on a naive datetime uses the local
    timezone, so results differ between machines in different timezones --
    confirm whether UTC was intended.
    """
    parsed = datetime.strptime(str(x), '%m/%d/%Y')
    return int(parsed.timestamp() * 1000)