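# Gradio Space: real-time congestion prediction for the Stockholm E4 highway.
# Live traffic flow is fetched from TomTom and weather from SMHI, assembled into a
# one-row feature frame, and passed to a pre-trained model from the Hugging Face Hub.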
import sklearn
import gradio as gr
import joblib
import pandas as pd
import datasets
import requests
import json
import dateutil.parser as dp
from huggingface_hub import hf_hub_url, cached_download
import time
def get_row():
    # Get real-time traffic flow data from TomTom for a point on the E4
    response_tomtom = requests.get(
        'https://api.tomtom.com/traffic/services/4/flowSegmentData/absolute/10/json?key=azGiX8jKKGxCxdsF1OzvbbWGPDuInWez&point=59.39575,17.98343')
    json_response_tomtom = json.loads(response_tomtom.text)  # parse JSON response
    currentSpeed = json_response_tomtom["flowSegmentData"]["currentSpeed"]
    freeFlowSpeed = json_response_tomtom["flowSegmentData"]["freeFlowSpeed"]
    # congestionLevel (currentSpeed / freeFlowSpeed) is the quantity the model predicts;
    # it is computed here only for reference and is not part of the feature row.
    congestionLevel = currentSpeed / freeFlowSpeed
    confidence = json_response_tomtom["flowSegmentData"]["confidence"]  # reliability of the traffic data, as a percentage

    # Get weather data from SMHI, updated hourly
    response_smhi = requests.get(
        'https://opendata-download-metanalys.smhi.se/api/category/mesan1g/version/2/geotype/point/lon/17.983/lat/59.3957/data.json')
    json_response_smhi = json.loads(response_smhi.text)
    # Weather parameter reference: https://opendata.smhi.se/apidocs/metanalys/parameters.html#parameter-wsymb
    referenceTime = dp.parse(json_response_smhi["referenceTime"]).timestamp()
    t = json_response_smhi["timeSeries"][0]["parameters"][0]["values"][0]       # temperature
    ws = json_response_smhi["timeSeries"][0]["parameters"][4]["values"][0]      # wind speed
    prec1h = json_response_smhi["timeSeries"][0]["parameters"][6]["values"][0]  # precipitation last hour
    fesn1h = json_response_smhi["timeSeries"][0]["parameters"][8]["values"][0]  # snow precipitation last hour
    vis = json_response_smhi["timeSeries"][0]["parameters"][9]["values"][0]     # visibility

    # Use the current time instead of the SMHI reference time
    referenceTime = time.time()

    # Feature names (including original spellings) are kept as-is, since they presumably
    # match the column names the model was trained on.
    row = {"referenceTime": referenceTime,
           "temperature": t,
           "wind speed": ws,
           "precipation last hour": prec1h,
           "snow precipation last hour": fesn1h,
           "visibility": vis,
           "confidence of data": confidence}
    row = pd.DataFrame([row], columns=row.keys())
    print(row)
    row.dropna(axis=0, inplace=True)
    return row
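# Load the pre-trained model (traffic_model.pkl) from the tilos/Traffic_Prediction repo on the Hub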
model = joblib.load(cached_download(
    hf_hub_url("tilos/Traffic_Prediction", "traffic_model.pkl")
))


def infer(input_dataframe):
    # Predict congestion level and clip to the valid [0, 1] range
    return pd.DataFrame(model.predict(input_dataframe)).clip(0, 1)
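# Assumed usage: infer(get_row()) returns a one-row DataFrame with the predicted
# congestion level, i.e. the current-to-free-flow speed ratio (1.0 ≈ free flow).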
title = "Stockholm Highway E4 Real Time Traffic Prediction"
description = "Stockholm E4 (59°23'44.7\" N, 17°59'00.4\" E) highway real-time traffic prediction"
# inputs = [gr.Dataframe(row_count = (1, "fixed"), col_count=(7,"fixed"),
# headers=["referenceTime", "t", "ws", "prec1h", "fesn1h", "vis", "confidence"],
# # datatype=["timestamp", "float", "float", "float", "float", "float"],
# label="Input Data", interactive=1)]
# outputs = [gr.Dataframe(row_count = (1, "fixed"), col_count=(1, "fixed"), label="Predictions", headers=["Congestion Level"])]
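# Blocks layout: live feature table on the left, predicted congestion level on the right,
# a Submit button to run inference, and an input row refreshed with live data every 10 seconds.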
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            inputs = gr.Dataframe(row_count=(1, "fixed"), col_count=(7, "fixed"),
                                  headers=["referenceTime", "t", "ws", "prec1h", "fesn1h", "vis", "confidence"],
                                  # datatype=["timestamp", "float", "float", "float", "float", "float"],
                                  label="Input Data", interactive=True)
        with gr.Column():
            outputs = gr.Dataframe(row_count=(1, "fixed"), col_count=(1, "fixed"), label="Predictions", headers=["Congestion Level"])
    with gr.Row():
        btn_sub = gr.Button(value="Submit")
        btn_sub.click(infer, inputs=inputs, outputs=outputs)
    # examples = gr.Examples(fn=infer, examples=[get_row()], inputs=inputs, outputs=outputs, cache_examples=True)
    examples = gr.Examples(fn=infer, examples=[get_row()], inputs=inputs, outputs=outputs, cache_examples=False)
    # Refresh the input table with live data every 10 seconds; gr.Examples is a helper,
    # not an output component, so the load event targets the input Dataframe instead.
    demo.load(get_row, inputs=None, outputs=inputs, every=10)
# interface = gr.Interface(fn = infer, inputs = inputs, outputs = outputs, title=title, description=description, examples=[get_row()], cache_examples=False)
# interface.launch()
if __name__ == "__main__":
    demo.queue().launch()