# Singapore Traffic Image dataset loading script (Hugging Face `datasets` builder).
import csv
import json
import os
from typing import List
import datasets
import logging
from datetime import datetime, timedelta
import pandas as pd
import requests
# TODO: Add BibTeX citation
# Find for instance the citation on arxiv or on the dataset repo/website
# Placeholder BibTeX entry; no published paper exists for this dataset yet.
_CITATION = """\
@InProceedings{huggingface:dataset,
title = {Singapore Traffic Image Dataset},
author={huggingface, Inc.
},
year={2023}
}
"""
# Human-readable summary surfaced on the dataset hub page.
# NOTE(review): this says "1.5 minute interval", but the archived fetch code in
# _generate_examples used 240-second (4-minute) offsets — TODO reconcile.
_DESCRIPTION = """\
This dataset contains traffic images from traffic signal cameras of singapore. The images are captured at 1.5 minute interval from 6 pm to 7 pm everyday for the month of January 2024.
"""
# Upstream data source: Singapore government traffic-images collection.
_HOMEPAGE = "https://beta.data.gov.sg/collections/354/view"
# _URL = "https://raw.githubusercontent.com/Sayali-pingle/HuggingFace--Traffic-Image-Dataset/main/camera_data.csv"
class TrafficSignalImages(datasets.GeneratorBasedBuilder):
    """Traffic-camera images for Singapore, January 2024, 6-7 pm daily.

    The dataset is backed by a single CSV file hosted on GitHub; each row
    holds a capture timestamp, the camera's id and location, the image URL
    and its metadata string.  The CSV itself was produced by polling the
    data.gov.sg traffic-images API (see the NOTE in ``_generate_examples``).
    """

    VERSION = datasets.Version("1.1.0")

    # Raw CSV with one row per (timestamp, camera) observation.
    _CSV_URL = (
        "https://raw.githubusercontent.com/Sayali-pingle/"
        "HuggingFace--Traffic-Image-Dataset/main/camera_data.csv"
    )

    def _info(self):
        """Declare the feature schema, homepage and citation."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "timestamp": datasets.Value("string"),
                    "camera_id": datasets.Value("string"),
                    # "float32" is the Arrow type the bare "float" alias
                    # resolves to; spell it out explicitly.
                    "latitude": datasets.Value("float32"),
                    "longitude": datasets.Value("float32"),
                    # The CSV stores a URL string; the Image feature keeps it
                    # as a path/URL reference.
                    "image_url": datasets.Image(),
                    "image_metadata": datasets.Value("string"),
                }
            ),
            homepage=_HOMEPAGE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager: datasets.DownloadManager):
        """Download the backing CSV and expose it as a single TRAIN split."""
        csv_path = dl_manager.download_and_extract(self._CSV_URL)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"csv_file_path": csv_path},
            ),
        ]

    def _generate_examples(self, csv_file_path):
        """Yield ``(index, example)`` pairs, one per CSV row.

        NOTE: the CSV was originally built by querying
        https://api.data.gov.sg/v1/transport/traffic-images at 240-second
        offsets within the 6-7 pm window for each day of January 2024 and
        flattening every camera record of every response into one row with
        the columns read below.  That fetch code previously lived here as a
        commented-out block and has been removed; recover it from version
        control if the CSV ever needs to be regenerated.
        """
        camera_data = pd.read_csv(csv_file_path)
        for idx, row in camera_data.iterrows():
            yield idx, {
                "timestamp": row["timestamp"],
                "camera_id": row["camera_id"],
                "latitude": row["latitude"],
                "longitude": row["longitude"],
                "image_url": row["image_url"],
                "image_metadata": row["image_metadata"],
            }