import sklearn  # needed so joblib can unpickle the scikit-learn model
import gradio as gr
import joblib
import pandas as pd
import datasets
import requests
import json
import dateutil.parser as dp
from huggingface_hub import hf_hub_url, cached_download
import time


def get_row():
    # Real-time traffic flow from TomTom for the point 59.39575 N, 17.98343 E
    response_tomtom = requests.get(
        'https://api.tomtom.com/traffic/services/4/flowSegmentData/absolute/10/json'
        '?key=azGiX8jKKGxCxdsF1OzvbbWGPDuInWez&point=59.39575,17.98343')
    json_response_tomtom = json.loads(response_tomtom.text)  # parse JSON response
    currentSpeed = json_response_tomtom["flowSegmentData"]["currentSpeed"]
    freeFlowSpeed = json_response_tomtom["flowSegmentData"]["freeFlowSpeed"]
    congestionLevel = currentSpeed / freeFlowSpeed  # ratio of current to free-flow speed; not used as a model input here
    confidence = json_response_tomtom["flowSegmentData"]["confidence"]  # reliability of the traffic data, as a percentage

    # Weather data from SMHI, updated hourly
    response_smhi = requests.get(
        'https://opendata-download-metanalys.smhi.se/api/category/mesan1g/version/2'
        '/geotype/point/lon/17.983/lat/59.3957/data.json')
    json_response_smhi = json.loads(response_smhi.text)
    # Parameter reference: https://opendata.smhi.se/apidocs/metanalys/parameters.html#parameter-wsymb
    referenceTime = dp.parse(json_response_smhi["referenceTime"]).timestamp()
    t = json_response_smhi["timeSeries"][0]["parameters"][0]["values"][0]       # Temperature
    ws = json_response_smhi["timeSeries"][0]["parameters"][4]["values"][0]      # Wind speed
    prec1h = json_response_smhi["timeSeries"][0]["parameters"][6]["values"][0]  # Precipitation last hour
    fesn1h = json_response_smhi["timeSeries"][0]["parameters"][8]["values"][0]  # Snow precipitation last hour
    vis = json_response_smhi["timeSeries"][0]["parameters"][9]["values"][0]     # Visibility

    # Use the current time instead of the analysis reference time
    referenceTime = time.time()

    # Column names must match those the model was trained on
    row = {"referenceTime": referenceTime, "temperature": t, "wind speed": ws,
           "precipation last hour": prec1h, "snow precipation last hour": fesn1h,
           "visibility": vis, "confidence of data": confidence}
    row = pd.DataFrame([row], columns=row.keys())
    print(row)
    row.dropna(axis=0, inplace=True)
    return row


# Note: cached_download is deprecated in newer huggingface_hub releases;
# hf_hub_download is its modern replacement.
model = joblib.load(cached_download(
    hf_hub_url("tilos/Traffic_Prediction", "traffic_model.pkl")
))


def infer(input_dataframe):
    # Predict the congestion level and clip it to the valid range [0, 1]
    return pd.DataFrame(model.predict(input_dataframe)).clip(0, 1)


title = "Stockholm Highway E4 Real Time Traffic Prediction"
description = "Stockholm E4 (59°23'44.7\" N 17°59'00.4\" E) highway real-time traffic prediction"

inputs = [gr.Dataframe(row_count=(1, "fixed"), col_count=(7, "fixed"),
                       headers=["referenceTime", "t", "ws", "prec1h", "fesn1h", "vis", "confidence"],
                       # datatype=["timestamp", "float", "float", "float", "float", "float"],
                       label="Input Data", interactive=1)]

outputs = [gr.Dataframe(row_count=(1, "fixed"), col_count=(1, "fixed"),
                        label="Predictions", headers=["Congestion Level"])]

# with gr.Blocks() as demo:
#     with gr.Row():
#         with gr.Column():
#             gr.Dataframe(row_count=(1, "fixed"), col_count=(7, "fixed"),
#                          headers=["referenceTime", "t", "ws", "prec1h", "fesn1h", "vis", "confidence"],
#                          # datatype=["timestamp", "float", "float", "float", "float", "float"],
#                          label="Input Data", interactive=1)
#         with gr.Column():
#             gr.Dataframe(row_count=(1, "fixed"), col_count=(1, "fixed"),
#                          label="Predictions", headers=["Congestion Level"])
#     btn = gr.Button(value="Refresh")
#     btn.click(interface.launch())

interface = gr.Interface(fn=infer, inputs=inputs, outputs=outputs,
                         title=title, description=description,
                         examples=[get_row()], cache_examples=False)

if __name__ == "__main__":
    interface.queue().launch()
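
# --- Optional sanity check (a minimal sketch, not part of the Gradio app itself) ---
# Assumes the TomTom/SMHI endpoints above are reachable and the model file downloads.
# In a Python shell, after importing get_row and infer from this module, one live
# feature row can be fetched and scored without launching the UI:
#
#     row = get_row()
#     print(infer(row))   # single-column DataFrame with the predicted congestion level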