Yasaman committed
Commit 0c42bb1 · 1 Parent(s): 7391b42

Create functions.py

Files changed (1)
  1. functions.py +199 -0
functions.py ADDED
@@ -0,0 +1,199 @@
+ import inspect
+ import os
+ from datetime import datetime
+
+ import joblib
+ import pandas as pd
+ import requests
+
+ from dotenv import load_dotenv
+ load_dotenv()
+
+
+ def decode_features(df, feature_view):
+     """Decode features in the input DataFrame using the corresponding
+     Hopsworks Feature Store transformation functions (inverse transform)."""
+     df_res = df.copy()
+
+     td_transformation_functions = feature_view._batch_scoring_server._transformation_functions
+
+     for feature_name in td_transformation_functions:
+         if feature_name in df_res.columns:
+             td_transformation_function = td_transformation_functions[feature_name]
+             sig = inspect.signature(td_transformation_function.transformation_fn)
+             # Collect the default parameters attached to the transformation function
+             # (e.g. min/max values, mean/std, value-to-index mapping).
+             param_dict = {param.name: param.default
+                           for param in sig.parameters.values()
+                           if param.default is not inspect.Parameter.empty}
+
+             if td_transformation_function.name == "min_max_scaler":
+                 # Invert min-max scaling back to the original value range.
+                 df_res[feature_name] = df_res[feature_name].map(
+                     lambda x: x * (param_dict["max_value"] - param_dict["min_value"]) + param_dict["min_value"])
+             elif td_transformation_function.name == "standard_scaler":
+                 # Invert standardisation back to the original mean and spread.
+                 df_res[feature_name] = df_res[feature_name].map(
+                     lambda x: x * param_dict["std_dev"] + param_dict["mean"])
+             elif td_transformation_function.name == "label_encoder":
+                 # Map encoded integer labels back to their original values.
+                 index_to_value = {v: k for k, v in param_dict["value_to_index"].items()}
+                 df_res[feature_name] = df_res[feature_name].map(
+                     lambda x: index_to_value[x])
+
+     return df_res
+
+
+ def get_model(project, model_name, evaluation_metric, sort_metrics_by):
+     """Retrieve the desired model, or download it from the Hopsworks Model Registry.
+     In the latter case, the model is downloaded into the current directory."""
+     TARGET_FILE = "model.pkl"
+     # Look for an already-downloaded model anywhere under the working directory.
+     list_of_files = [os.path.join(dirpath, filename)
+                      for dirpath, _, filenames in os.walk('.')
+                      for filename in filenames if filename == TARGET_FILE]
+
+     if list_of_files:
+         model_path = list_of_files[0]
+         model = joblib.load(model_path)
+     else:
+         mr = project.get_model_registry()
+         # Get the best model based on the custom metric.
+         model = mr.get_best_model(model_name,
+                                   evaluation_metric,
+                                   sort_metrics_by)
+         model_dir = model.download()
+         model = joblib.load(model_dir + "/model.pkl")
+
+     return model
+
+
+ def get_air_json(city_name, AIR_QUALITY_API_KEY):
+     return requests.get(
+         f'https://api.waqi.info/feed/{city_name.lower()}/?token={AIR_QUALITY_API_KEY}').json()['data']
+
+
+ def get_air_quality_data(city_name):
+     AIR_QUALITY_API_KEY = os.getenv('AIR_QUALITY_API_KEY')
+     json = get_air_json(city_name, AIR_QUALITY_API_KEY)
+     iaqi = json['iaqi']
+     forecast = json['forecast']['daily']
+     return [
+         city_name,
+         json['aqi'],             # AQI
+         json['time']['s'][:10],  # Date
+         iaqi['h']['v'],          # Humidity
+         iaqi['p']['v'],          # Pressure
+         iaqi['pm10']['v'],       # PM10
+         iaqi['t']['v'],          # Temperature
+         forecast['o3'][0]['avg'],
+         forecast['o3'][0]['max'],
+         forecast['o3'][0]['min'],
+         forecast['pm10'][0]['avg'],
+         forecast['pm10'][0]['max'],
+         forecast['pm10'][0]['min'],
+         forecast['pm25'][0]['avg'],
+         forecast['pm25'][0]['max'],
+         forecast['pm25'][0]['min']
+     ]
+
+ def get_air_quality_df(data):
+     col_names = [
+         'city',
+         'aqi',
+         'date',
+         'iaqi_h',
+         'iaqi_p',
+         'iaqi_pm10',
+         'iaqi_t',
+         'o3_avg',
+         'o3_max',
+         'o3_min',
+         'pm10_avg',
+         'pm10_max',
+         'pm10_min',
+         'pm25_avg',
+         'pm25_max',
+         'pm25_min'
+     ]
+
+     new_data = pd.DataFrame(
+         data,
+         columns=col_names
+     )
+     new_data.date = new_data.date.apply(timestamp_2_time)
+
+     return new_data
+
+
+ def get_weather_json(city, date, WEATHER_API_KEY):
+     return requests.get(
+         f'https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline/{city.lower()}/{date}'
+         f'?unitGroup=metric&include=days&key={WEATHER_API_KEY}&contentType=json').json()
+
+
+ def get_weather_data(date):
+     WEATHER_API_KEY = os.getenv('WEATHER_API_KEY')
+     json = get_weather_json("Malmo", date, WEATHER_API_KEY)
+     data = json['days'][0]
+
+     return [
+         json['address'].capitalize(),
+         data['datetime'],
+         data['tempmax'],
+         data['tempmin'],
+         data['temp'],
+         data['feelslikemax'],
+         data['feelslikemin'],
+         data['feelslike'],
+         data['dew'],
+         data['humidity'],
+         data['precip'],
+         data['precipprob'],
+         data['precipcover'],
+         data['snow'],
+         data['snowdepth'],
+         data['windgust'],
+         data['windspeed'],
+         data['winddir'],
+         data['pressure'],
+         data['cloudcover'],
+         data['visibility'],
+         data['solarradiation'],
+         data['solarenergy'],
+         data['uvindex'],
+         data['conditions']
+     ]
+
+
+ def get_weather_df(data):
+     col_names = [
+         'city',
+         'date',
+         'tempmax',
+         'tempmin',
+         'temp',
+         'feelslikemax',
+         'feelslikemin',
+         'feelslike',
+         'dew',
+         'humidity',
+         'precip',
+         'precipprob',
+         'precipcover',
+         'snow',
+         'snowdepth',
+         'windgust',
+         'windspeed',
+         'winddir',
+         'pressure',
+         'cloudcover',
+         'visibility',
+         'solarradiation',
+         'solarenergy',
+         'uvindex',
+         'conditions'
+     ]
+
+     new_data = pd.DataFrame(
+         data,
+         columns=col_names
+     )
+     new_data.date = new_data.date.apply(timestamp_2_time)
+
+     return new_data
+
+ def timestamp_2_time(x):
+     """Convert a 'YYYY-MM-DD' date string to a Unix timestamp in milliseconds."""
+     dt_obj = datetime.strptime(str(x), '%Y-%m-%d')
+     dt_obj = dt_obj.timestamp() * 1000
+     return int(dt_obj)
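
For context, a minimal usage sketch (not part of the commit) of how the fetching helpers above might be wired together to pull one day of data. It assumes `functions.py` is importable from the working directory, that `AIR_QUALITY_API_KEY` and `WEATHER_API_KEY` are set in a local `.env` file, and it uses the city `'malmo'` to match the hard-coded defaults in the file.

from datetime import date

from functions import (get_air_quality_data, get_air_quality_df,
                       get_weather_data, get_weather_df)

today = date.today().strftime('%Y-%m-%d')

# Each helper returns one flat row, so wrap it in a list to build a one-row DataFrame.
air_quality_row = get_air_quality_data('malmo')
weather_row = get_weather_data(today)

air_quality_df = get_air_quality_df([air_quality_row])
weather_df = get_weather_df([weather_row])

print(air_quality_df.head())
print(weather_df.head())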
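
Similarly, a hedged sketch of the model-serving path through `get_model` and `decode_features`. The feature view name (`'air_quality_fv'`), model name (`'air_quality_model'`), and metric (`'rmse'`) are placeholders rather than names taken from this repository, and the snippet assumes a Hopsworks API key is already configured for `hopsworks.login()`.

import hopsworks

from functions import decode_features, get_model

project = hopsworks.login()
fs = project.get_feature_store()

# Placeholder feature view / model names -- adjust to the actual project assets.
feature_view = fs.get_feature_view(name='air_quality_fv', version=1)
batch_data = feature_view.get_batch_data()

model = get_model(project,
                  model_name='air_quality_model',
                  evaluation_metric='rmse',
                  sort_metrics_by='min')

predictions = model.predict(batch_data)

# Map scaled/encoded feature values back to their original, human-readable form.
decoded = decode_features(batch_data, feature_view=feature_view)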