Upload functions.py
functions.py  ADDED  (+191 -0)
@@ -0,0 +1,191 @@
from datetime import datetime
import requests
import os
import joblib
import pandas as pd

from dotenv import load_dotenv

# Read API keys (AIR_QUALITY_API_KEY, WEATHER_API_KEY) from a local .env file.
load_dotenv()

def decode_features(df, feature_view):
    """Decodes features in the input DataFrame using the corresponding
    Hopsworks Feature Store transformation functions."""
    df_res = df.copy()

    import inspect

    td_transformation_functions = feature_view._batch_scoring_server._transformation_functions

    for feature_name in td_transformation_functions:
        if feature_name in df_res.columns:
            td_transformation_function = td_transformation_functions[feature_name]
            sig = inspect.signature(td_transformation_function.transformation_fn)
            # The fitted statistics are stored as default arguments of the transformation function.
            param_dict = {
                param.name: param.default
                for param in sig.parameters.values()
                if param.default is not inspect.Parameter.empty
            }
            if td_transformation_function.name == "min_max_scaler":
                df_res[feature_name] = df_res[feature_name].map(
                    lambda x: x * (param_dict["max_value"] - param_dict["min_value"]) + param_dict["min_value"])

            elif td_transformation_function.name == "standard_scaler":
                df_res[feature_name] = df_res[feature_name].map(
                    lambda x: x * param_dict["std_dev"] + param_dict["mean"])

            elif td_transformation_function.name == "label_encoder":
                dictionary = param_dict["value_to_index"]
                dictionary_ = {v: k for k, v in dictionary.items()}
                df_res[feature_name] = df_res[feature_name].map(
                    lambda x: dictionary_[x])
    return df_res

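# Illustrative usage of decode_features() (a sketch, not part of the original upload).
# `air_quality_fv` and `preds_df` are hypothetical names for a feature view whose batch
# scoring server has been initialised (e.g. after get_batch_data()) and a DataFrame of
# model outputs in transformed (scaled/encoded) space:
#
#   import hopsworks
#   project = hopsworks.login()
#   fs = project.get_feature_store()
#   air_quality_fv = fs.get_feature_view(name="air_quality_fv", version=1)
#   readable_preds = decode_features(preds_df, feature_view=air_quality_fv)
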
def get_model(project, model_name, evaluation_metric, sort_metrics_by):
    """Retrieve the desired model from the local directory, or download it
    from the Hopsworks Model Registry.

    In the second case, the model is downloaded to the current working directory."""
    TARGET_FILE = "model.pkl"
    list_of_files = [os.path.join(dirpath, filename) for dirpath, _, filenames
                     in os.walk('.') for filename in filenames if filename == TARGET_FILE]

    if list_of_files:
        # A model.pkl is already on disk - reuse it.
        model_path = list_of_files[0]
        model = joblib.load(model_path)
    else:
        # Get the best model version based on the custom metric from the Model Registry.
        mr = project.get_model_registry()
        model = mr.get_best_model(model_name,
                                  evaluation_metric,
                                  sort_metrics_by)
        model_dir = model.download()
        model = joblib.load(model_dir + "/model.pkl")

    return model

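# Illustrative usage of get_model() (a sketch, not part of the original upload);
# "gradient_boost_model", "f1_score" and "max" are placeholder arguments for whatever
# model, metric and sort direction actually exist in the project's Model Registry:
#
#   import hopsworks
#   project = hopsworks.login()
#   model = get_model(project,
#                     model_name="gradient_boost_model",
#                     evaluation_metric="f1_score",
#                     sort_metrics_by="max")
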
def get_air_json(city_name, AIR_QUALITY_API_KEY):
    return requests.get(f'https://api.waqi.info/feed/{city_name}/?token={AIR_QUALITY_API_KEY}').json()['data']

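# Illustrative call (not part of the original upload); requires a WAQI token from
# https://aqicn.org/data-platform/token/ exported as AIR_QUALITY_API_KEY. The function
# returns the 'data' object of the feed response:
#
#   raw = get_air_json("london", os.getenv("AIR_QUALITY_API_KEY"))
#   print(raw["aqi"], raw["time"]["s"])
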
def get_air_quality_data(city_name):
    AIR_QUALITY_API_KEY = os.getenv('AIR_QUALITY_API_KEY')
    json = get_air_json(city_name, AIR_QUALITY_API_KEY)

    iaqi = json['iaqi']
    forecast = json['forecast']['daily']  # fetched but not used below
    return [
        json['time']['s'][:10],  # date
        iaqi['pm25']['v'],
        iaqi['pm10']['v'],
        iaqi['o3']['v'],
        iaqi['no2']['v'],
        iaqi['so2']['v'],
        iaqi['co']['v'],
        json['aqi'],  # overall AQI
    ]

def get_air_quality_df(data):
    col_names = [
        'date',
        'pm25',
        'pm10',
        'o3',
        'no2',
        'so2',
        'co',
        'aqi'
    ]

    new_data = pd.DataFrame(
        data,
        columns=col_names
    )
    # Convert the 'YYYY-MM-DD' date string into a Unix timestamp in milliseconds.
    new_data.date = new_data.date.apply(timestamp_2_time_weather)

    return new_data

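# Illustrative end-to-end call (a sketch, not part of the original upload).
# get_air_quality_data() returns one flat list of values, so wrap it in an outer
# list to form a single DataFrame row:
#
#   row = get_air_quality_data("london")        # needs AIR_QUALITY_API_KEY in .env
#   air_quality_df = get_air_quality_df([row])  # one-row DataFrame, date in epoch ms
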
def get_weather_json(city, date, WEATHER_API_KEY):
    return requests.get(f'https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline/{city.lower()}/{date}?unitGroup=metric&include=days&key={WEATHER_API_KEY}&contentType=json').json()

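# Illustrative call (not part of the original upload); needs a Visual Crossing key
# exported as WEATHER_API_KEY. The Timeline API response carries a 'days' array with
# one record per requested day:
#
#   raw = get_weather_json("london", "2023-01-15", os.getenv("WEATHER_API_KEY"))
#   print(raw["days"][0]["temp"])
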
def get_weather_data(city_name, date):
    WEATHER_API_KEY = os.getenv('WEATHER_API_KEY')
    json = get_weather_json(city_name, date, WEATHER_API_KEY)
    data = json['days'][0]

    return [
        data['datetime'],
        data['tempmax'],
        data['tempmin'],
        data['temp'],
        data['feelslikemax'],
        data['feelslikemin'],
        data['feelslike'],
        data['dew'],
        data['humidity'],
        data['precip'],
        data['precipprob'],
        data['precipcover'],
        data['windgust'],
        data['windspeed'],
        data['winddir'],
        data['pressure'],
        data['cloudcover'],
        data['visibility'],
        data['solarradiation'],
        data['solarenergy'],
        data['uvindex'],
        data['conditions']
    ]

def get_weather_df(data):
    col_names = [
        'date',
        'tempmax',
        'tempmin',
        'temp',
        'feelslikemax',
        'feelslikemin',
        'feelslike',
        'dew',
        'humidity',
        'precip',
        'precipprob',
        'precipcover',
        'windgust',
        'windspeed',
        'winddir',
        'sealevelpressure',  # filled from the API's 'pressure' field (see get_weather_data)
        'cloudcover',
        'visibility',
        'solarradiation',
        'solarenergy',
        'uvindex',
        'conditions'
    ]

    new_data = pd.DataFrame(
        data,
        columns=col_names
    )
    # Convert the 'YYYY-MM-DD' date string into a Unix timestamp in milliseconds.
    new_data.date = new_data.date.apply(timestamp_2_time_weather)

    return new_data

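# Illustrative end-to-end call (a sketch, not part of the original upload); the city
# and date are placeholders and WEATHER_API_KEY must be set. As with the air-quality
# helpers, the flat list is wrapped to form one DataFrame row:
#
#   row = get_weather_data("london", "2023-01-15")
#   weather_df = get_weather_df([row])
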
def timestamp_2_time(x):
    # Parse a 'YYYY/MM/DD' string and return the Unix timestamp in milliseconds.
    dt_obj = datetime.strptime(str(x), '%Y/%m/%d')
    dt_obj = dt_obj.timestamp() * 1000
    return int(dt_obj)


def timestamp_2_time_weather(x):
    # Parse a 'YYYY-MM-DD' string and return the Unix timestamp in milliseconds.
    dt_obj = datetime.strptime(str(x), '%Y-%m-%d')
    dt_obj = dt_obj.timestamp() * 1000
    return int(dt_obj)
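

if __name__ == "__main__":
    # Quick sanity check of the date helpers (added example, not part of the original
    # upload). With the local timezone set to UTC both calls print 1673740800000,
    # i.e. 2023-01-15 00:00:00 in epoch milliseconds; other timezones shift the result
    # by the local UTC offset, since .timestamp() treats the naive datetime as local time.
    print(timestamp_2_time("2023/01/15"))
    print(timestamp_2_time_weather("2023-01-15"))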