Spaces:
Sleeping
Sleeping
avojarot
committed on
Commit
•
fabbccf
1
Parent(s):
494696a
Add application file
Browse files- Dockerfile +13 -0
- app.py +198 -0
- data.py +78 -0
- models/model_air_conditioning.txt +0 -0
- models/model_circulation_pump.txt +0 -0
- models/model_cooling_aggregate.txt +0 -0
- models/model_dishwasher.txt +0 -0
- models/model_facility.txt +0 -0
- models/model_freezer.txt +0 -0
- models/model_heat_pump.txt +0 -0
- models/model_refrigerator.txt +0 -0
- models/model_storage_charge.txt +0 -0
- models/model_ventilation.txt +0 -0
- models/model_washing_machine.txt +0 -0
- requirements.txt +7 -0
Dockerfile
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Base image for the FastAPI inference service.
FROM python:3.9

# Create a non-root user (uid 1000) so the container does not run as root.
RUN useradd -m -u 1000 user

WORKDIR /app

# Install dependencies first so Docker layer caching skips reinstalls
# when only application code changes.
COPY --chown=user ./requirements.txt requirements.txt
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# BUG FIX: ".." pointed outside the build context (forbidden by Docker);
# copy the build-context root instead.
COPY --chown=user . /app

# Actually drop privileges to the user created above.
USER user

CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
|
app.py
ADDED
@@ -0,0 +1,198 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import StreamingResponse
import lightgbm as lgb
from pydantic import BaseModel, validator
from datetime import datetime
import pandas as pd
import json
import io
import zipfile
import matplotlib.pyplot as plt
from data import get_all_features

import warnings
# Silence library warnings (e.g. pandas/lightgbm deprecation noise) in server logs.
warnings.filterwarnings("ignore")
|
15 |
+
|
16 |
+
|
17 |
+
class DataPoint(BaseModel):
    """One hourly smart-meter reading: per-device consumption plus the total.

    Field names correspond to the lower-cased keys produced by the request
    pre-processing in this module.
    """

    timestamp: datetime
    storage_charge: float
    heat_pump: float
    circulation_pump: float
    air_conditioning: float
    ventilation: float
    dishwasher: float
    washing_machine: float
    refrigerator: float
    freezer: float
    cooling_aggregate: float
    facility: float
    total: float

    @validator('timestamp', pre=True)
    def parse_timestamp(cls, value):
        """Accept ISO-8601 strings as well as datetime objects."""
        if not isinstance(value, str):
            return value
        return datetime.fromisoformat(value)
|
37 |
+
|
38 |
+
|
39 |
+
app = FastAPI()

# Appliances for which a dedicated LightGBM model file exists under models/.
devices = [
    'storage_charge',
    'heat_pump',
    'circulation_pump',
    'air_conditioning',
    'ventilation',
    'dishwasher',
    'washing_machine',
    'refrigerator',
    'freezer',
    'cooling_aggregate',
    'facility',
]

# Load one pre-trained booster per device at startup.
models = {device: lgb.Booster(model_file=f"models/model_{device}.txt")
          for device in devices}
|
55 |
+
|
56 |
+
def lowercase_keys_and_copy_values(list_of_dicts):
    """Return a new list of dicts with every key lower-cased, values unchanged."""
    normalized = []
    for record in list_of_dicts:
        normalized.append({key.lower(): value for key, value in record.items()})
    return normalized
|
58 |
+
@app.get("/")
def greet_json():
    """Health-check endpoint: shows the loaded models to confirm startup."""
    loaded = str(models)
    return {"Hello": loaded}
|
61 |
+
|
62 |
+
|
63 |
+
async def get_data(request: Request):
    """Parse the JSON request body into a DataFrame of validated readings.

    Returns the DataFrame plus an empty per-device prediction accumulator
    (one empty list per device, keyed by device name).
    """
    payload = await request.json()
    payload = lowercase_keys_and_copy_values(payload)
    # Validate every record through the pydantic model before framing.
    validated = [DataPoint(**record).dict() for record in payload]
    df = pd.DataFrame(validated)

    predictions = {device: [] for device in devices}
    return df, predictions
|
76 |
+
|
77 |
+
async def get_plots(request: Request, mode):
    """Render one consumption line plot per device (plus 'total') and return
    them bundled as a ZIP of PNG files.

    Parameters:
        request: FastAPI Request whose JSON body is a list of records with
            one numeric column per device and a 'total' column.
        mode: 1 selects matplotlib's dark style; anything else the default.

    Returns:
        StreamingResponse with media type application/zip.
    """
    res = await request.json()
    df = pd.DataFrame(res)
    if mode == 1:
        plt.style.use('dark_background')
    else:
        plt.style.use('default')

    plots = []
    d = devices + ['total']
    for i in d:
        buf = io.BytesIO()
        plt.figure()
        plt.plot(list(range(1, len(df)+1)), df[i])
        plt.xticks(rotation=60)
        plt.xlabel('Hour')
        plt.ylabel('kWh')
        plt.title(f'Energy consumption of {i}')
        plt.savefig(buf, format='png', bbox_inches='tight')
        # BUG FIX: close each figure. Without this, figures accumulate in
        # matplotlib's global state and leak memory on every request.
        plt.close()
        buf.seek(0)
        plots.append(buf)

    # Bundle every rendered PNG into a single in-memory ZIP archive.
    zip_buf = io.BytesIO()
    with zipfile.ZipFile(zip_buf, 'w', zipfile.ZIP_DEFLATED) as z:
        for i, plot_buf in enumerate(plots):
            z.writestr(f"{d[i]}.png", plot_buf.getvalue())
    zip_buf.seek(0)

    return StreamingResponse(zip_buf,
                             media_type="application/zip",
                             headers={"Content-Disposition": "attachment; filename=plots.zip"})
|
109 |
+
|
110 |
+
async def get_prediction(request, H):
    """Autoregressively forecast the next H hours of per-device consumption.

    Each step rebuilds features from the (growing) history, predicts one hour
    ahead for every device, then appends the predicted row back onto the
    history so the next step can use it.

    Parameters:
        request: FastAPI Request whose JSON body is the historical readings.
        H: forecast horizon in hours (24, 72, or 168 for the public routes).

    Returns:
        {"dataframe": <JSON-serialized frame of per-device + total predictions>}
    """
    df, predictions = await get_data(request)
    predictions['total'] = []
    for _ in range(H):
        # Recompute time/rolling features over the history extended with the
        # rows predicted so far (autoregressive rollout).
        res = get_all_features(df, devices)
        p = dict()  # the single predicted row to append to the history
        predictions['total'].append(0)
        for i in devices:
            # NOTE(review): the 0.8 factor looks like an empirical calibration
            # (same scaling appears in get_anomalies) — confirm before changing.
            # Assumes the booster accepts the last feature row as-is — TODO confirm.
            pred = (models[i].predict(res[i].iloc[-1]) * 0.8)
            predictions[i].append(pred[0])
            # The 'total' forecast is the sum of per-device forecasts.
            predictions['total'][-1] += pred[0]
            p[i] = pred
        # Stamp the synthetic row one hour after the last known timestamp.
        p['timestamp'] = df.iloc[-1]['timestamp'] + pd.to_timedelta(1, unit='h')
        df = pd.concat([df, pd.DataFrame(p)], ignore_index=True)
    return {"dataframe": pd.DataFrame(predictions).to_json()}
|
125 |
+
|
126 |
+
|
127 |
+
async def get_anomalies(request):
    """Flag readings whose absolute deviation from the model prediction
    exceeds a fixed threshold.

    Adds one boolean ``is_anomaly_<device>`` column per device and returns
    the augmented frame serialized as JSON records.
    """
    df, _ = await get_data(request)
    res = get_all_features(df, devices)
    for i in devices:
        # Absolute residual between observed consumption and the 0.8-scaled
        # prediction. NOTE(review): predict() is fed only the LAST feature
        # row but the result is subtracted from the WHOLE column — confirm
        # this broadcasting is intended rather than a per-row prediction.
        pred = (df[i] - models[i].predict(res[i].iloc[-1]) * 0.8).abs()
        # Hard-coded anomaly threshold of 3 (presumably kWh — TODO confirm).
        df[f"is_anomaly_{i}"] = pred > 3

    return {"dataframe": df.to_json(orient='records')}
|
135 |
+
|
136 |
+
@app.post("/anomalies")
async def anomalies(request: Request):
    """POST /anomalies — return the input readings annotated with per-device
    anomaly flags. Any failure surfaces as HTTP 500 with the error message.

    FIX: renamed from `predict` — six handlers in this module shared that
    name, each redefinition shadowing the previous one at module level.
    The route path is unchanged.
    """
    try:
        res = await get_anomalies(request)
        return res
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
143 |
+
|
144 |
+
@app.post("/statistcks")  # NOTE(review): route typo ("statistics") kept for client compatibility
async def statistcks(request: Request):
    """POST /statistcks — return the engineered feature tables, one
    JSON-encoded frame per device, wrapped in a single JSON object.
    """
    try:
        df, _ = await get_data(request)
        feature_frames = get_all_features(df, devices)
        serialized = {name: frame.to_json() for name, frame in feature_frames.items()}
        json_object = json.dumps(serialized, indent=4)
        return {"dataframe": json_object}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
154 |
+
|
155 |
+
@app.post("/predict/day")
async def predict_day(request: Request):
    """POST /predict/day — forecast the next 24 hours for every device.

    FIX: renamed from `predict` — six handlers in this module shared that
    name, each redefinition shadowing the previous one at module level.
    The route path is unchanged.
    """
    try:
        H = 24
        res = await get_prediction(request, H)
        return res
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
163 |
+
|
164 |
+
@app.post("/predict/three_day")
async def predict_three_day(request: Request):
    """POST /predict/three_day — forecast the next 72 hours for every device.

    FIX: renamed from `predict` — six handlers in this module shared that
    name, each redefinition shadowing the previous one at module level.
    The route path is unchanged.
    """
    try:
        H = 24 * 3
        res = await get_prediction(request, H)
        return res
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
172 |
+
|
173 |
+
|
174 |
+
@app.post("/predict/week")
async def predict_week(request: Request):
    """POST /predict/week — forecast the next 168 hours for every device.

    FIX: renamed from `predict` — six handlers in this module shared that
    name, each redefinition shadowing the previous one at module level.
    The route path is unchanged.
    """
    try:
        H = 24 * 7
        res = await get_prediction(request, H)
        return res
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
182 |
+
|
183 |
+
|
184 |
+
@app.post("/plots/dark")
async def plots_dark(request: Request):
    """POST /plots/dark — ZIP of per-device plots on a dark background.

    FIX: renamed from `predict` — six handlers in this module shared that
    name, each redefinition shadowing the previous one at module level.
    The route path is unchanged.
    """
    try:
        zip_buf = await get_plots(request, 1)
        return zip_buf
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
191 |
+
|
192 |
+
@app.post("/plots/light")
async def plots_light(request: Request):
    """POST /plots/light — ZIP of per-device plots on a light background.

    FIX: renamed from `predict` — six handlers in this module shared that
    name, each redefinition shadowing the previous one at module level.
    The route path is unchanged.
    """
    try:
        zip_buf = await get_plots(request, 0)
        return zip_buf
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
data.py
ADDED
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pandas as pd
|
2 |
+
import numpy as np
|
3 |
+
|
4 |
+
|
5 |
+
def get_time_features(data):
    """Derive calendar features from the 'timestamp' column of *data*.

    Produces raw components (hour, day_of_week, day, month), a season label,
    sin/cos cyclical encodings of those components, and binary part-of-day /
    working-hours / weekend indicators. The input frame is not modified.
    """
    df = pd.DataFrame()
    df['timestamp'] = data['timestamp']
    df['hour'] = data['timestamp'].dt.hour
    df['day_of_week'] = data['timestamp'].dt.dayofweek
    df['day'] = data['timestamp'].dt.day
    df['month'] = data['timestamp'].dt.month

    # 0=spring (Mar-May), 1=summer (Jun-Aug), 2=autumn (Sep-Nov), 3=winter.
    season_label = lambda x: 0 if 3 <= x <= 5 else 1 if 6 <= x <= 8 else 2 if 9 <= x <= 11 else 3
    df['season'] = df['month'].apply(season_label)

    # Cyclical (sin/cos) encoding of every column after 'timestamp' so far:
    # hour, day_of_week, day, month, season.
    # NOTE(review): the period is the max seen in THIS batch rather than the
    # true cycle length (e.g. 24 for hours), so encodings are data-dependent —
    # confirm this matches how the models were trained before changing it.
    cols = df.columns[1::]
    for i in cols:
        max_values = df[i].max()
        df[f'{i}_sin'] = np.sin(2 * np.pi * df[i] / max_values)
        df[f'{i}_cos'] = np.cos(2 * np.pi * df[i] / max_values)

    # 0=morning (6-11), 1=afternoon (12-17), 2=evening (18-21), 3=night.
    time_label = lambda x: 0 if 6 <= x <= 11 else 1 if 12 <= x <= 17 else 2 if 18 <= x <= 21 else 3
    df['part_of_day'] = df['hour'].apply(time_label)
    # 1 during 09:00-16:59.
    working_hours_label = lambda x: 1 if 9 <= x < 17 else 0
    df['is_working_hours'] = df['hour'].apply(working_hours_label)

    # dayofweek >= 5 means Saturday or Sunday.
    is_weekend_label = lambda x: 1 if x >= 5 else 0
    df['is_weekend'] = df['day_of_week'].apply(is_weekend_label)

    return df
|
31 |
+
|
32 |
+
|
33 |
+
def get_ts_features(data, time, column):
    """Compute trailing rolling-window statistics of *column* over *time* hours.

    Returns a frame (indexed back to a 'timestamp' column) containing the raw
    value plus mean/median/std/min/max of the trailing window. Undefined
    statistics (e.g. std of a single sample) are replaced with -1.
    """
    series = data[['timestamp', column]].copy().set_index('timestamp')[column]

    # Time-based trailing window; both endpoints of the span are included.
    window = series.rolling(f'{time}H', closed='both')

    stats = pd.DataFrame(index=series.index)
    stats[f'exact_{time}'] = series
    stats[f'mean_{time}'] = window.mean()
    stats[f'median_{time}'] = window.median()
    stats[f'std_{time}'] = window.std()
    stats[f'min_{time}'] = window.min()
    stats[f'max_{time}'] = window.max()

    # -1 sentinel keeps the feature matrix dense, matching the training setup.
    return stats.fillna(-1).reset_index()
|
53 |
+
|
54 |
+
|
55 |
+
def get_all_ts_features(data, column):
    """Concatenate rolling statistics of *column* over several window sizes
    (1 hour up to 8 days), all aligned on 'timestamp'.
    """
    windows = [1, 3, 6, 12, 24, 24 * 2, 24 * 4, 24 * 8]
    res = pd.DataFrame({'timestamp': data['timestamp']})
    for hours in windows:
        res = res.merge(get_ts_features(data, hours, column),
                        on='timestamp', how='left')
    return res
|
64 |
+
|
65 |
+
|
66 |
+
def get_all_features(df, devices):
    """Build the per-device model input: calendar features joined with the
    device's rolling-consumption statistics.

    Parameters:
        df: history frame with 'timestamp' plus one column per device.
        devices: iterable of device column names.

    Returns:
        dict mapping device name -> feature frame (without 'timestamp').
    """
    # Calendar features are the same for every device; compute them once.
    f = get_time_features(df)

    res = dict()
    for k in devices:
        t = get_all_ts_features(df, k)
        combined = f.merge(t, on='timestamp', how='left')
        # Constant 'type' marker column (always 0 here).
        combined['type'] = 0
        # FIX: previously each frame was concatenated onto a pre-created
        # empty DataFrame — a no-op that also triggers pandas FutureWarnings.
        res[k] = combined.drop(['timestamp'], axis=1).reset_index(drop=True)

    return res
|
models/model_air_conditioning.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
models/model_circulation_pump.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
models/model_cooling_aggregate.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
models/model_dishwasher.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
models/model_facility.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
models/model_freezer.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
models/model_heat_pump.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
models/model_refrigerator.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
models/model_storage_charge.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
models/model_ventilation.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
models/model_washing_machine.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
requirements.txt
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
gradio
|
2 |
+
fastapi==0.111.0
|
3 |
+
uvicorn[standard]
|
4 |
+
pandas
|
5 |
+
numpy
|
6 |
+
lightgbm
|
7 |
+
matplotlib
|