TobiTob committed on
Commit
38d9370
1 Parent(s): 484ef92

Update CityLearn.py

Browse files
Files changed (1) hide show
  1. CityLearn.py +5 -54
CityLearn.py CHANGED
@@ -4,26 +4,15 @@ import datasets
4
  import numpy as np
5
 
6
  _DESCRIPTION = """\
7
- A subset of the D4RL dataset, used for training Decision Transformers
8
  """
9
 
10
- _HOMEPAGE = "https://github.com/rail-berkeley/d4rl"
11
-
12
- _LICENSE = "Apache-2.0"
13
-
14
  # The HuggingFace Datasets library doesn't host the datasets but only points to the original files.
15
  # This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
16
- _BASE_URL = "https://huggingface.co/datasets/edbeeching/decision_transformer_gym_replay/resolve/main/data"
17
  _URLS = {
18
- "halfcheetah-expert-v2": f"{_BASE_URL}/halfcheetah-expert-v2.pkl",
19
  "halfcheetah-medium-replay-v2": f"{_BASE_URL}/halfcheetah-medium-replay-v2.pkl",
20
- "halfcheetah-medium-v2": f"{_BASE_URL}/halfcheetah-medium-v2.pkl",
21
- "hopper-expert-v2": f"{_BASE_URL}/hopper-expert-v2.pkl",
22
- "hopper-medium-replay-v2": f"{_BASE_URL}/hopper-medium-replay-v2.pkl",
23
- "hopper-medium-v2": f"{_BASE_URL}/hopper-medium-v2.pkl",
24
- "walker2d-expert-v2": f"{_BASE_URL}/walker2d-expert-v2.pkl",
25
- "walker2d-medium-replay-v2": f"{_BASE_URL}/walker2d-medium-replay-v2.pkl",
26
- "walker2d-medium-v2": f"{_BASE_URL}/walker2d-medium-v2.pkl",
27
  }
28
 
29
 
@@ -46,50 +35,15 @@ class DecisionTransformerGymDataset(datasets.GeneratorBasedBuilder):
46
  # data = datasets.load_dataset('my_dataset', 'second_domain')
47
  BUILDER_CONFIGS = [
48
  datasets.BuilderConfig(
49
- name="halfcheetah-expert-v2",
50
  version=VERSION,
51
- description="Data sampled from an expert policy in the halfcheetah Mujoco environment",
52
  ),
53
  datasets.BuilderConfig(
54
  name="halfcheetah-medium-replay-v2",
55
  version=VERSION,
56
  description="Data sampled from an medium policy in the halfcheetah Mujoco environment",
57
  ),
58
- datasets.BuilderConfig(
59
- name="halfcheetah-medium-v2",
60
- version=VERSION,
61
- description="Data sampled from an medium policy in the halfcheetah Mujoco environment",
62
- ),
63
- datasets.BuilderConfig(
64
- name="hopper-expert-v2",
65
- version=VERSION,
66
- description="Data sampled from an expert policy in the hopper Mujoco environment",
67
- ),
68
- datasets.BuilderConfig(
69
- name="hopper-medium-replay-v2",
70
- version=VERSION,
71
- description="Data sampled from an medium policy in the hopper Mujoco environment",
72
- ),
73
- datasets.BuilderConfig(
74
- name="hopper-medium-v2",
75
- version=VERSION,
76
- description="Data sampled from an medium policy in the hopper Mujoco environment",
77
- ),
78
- datasets.BuilderConfig(
79
- name="walker2d-expert-v2",
80
- version=VERSION,
81
- description="Data sampled from an expert policy in the halfcheetah Mujoco environment",
82
- ),
83
- datasets.BuilderConfig(
84
- name="walker2d-medium-replay-v2",
85
- version=VERSION,
86
- description="Data sampled from an medium policy in the halfcheetah Mujoco environment",
87
- ),
88
- datasets.BuilderConfig(
89
- name="walker2d-medium-v2",
90
- version=VERSION,
91
- description="Data sampled from an medium policy in the halfcheetah Mujoco environment",
92
- ),
93
  ]
94
 
95
  def _info(self):
@@ -114,9 +68,6 @@ class DecisionTransformerGymDataset(datasets.GeneratorBasedBuilder):
114
  # specify them. They'll be used if as_supervised=True in builder.as_dataset.
115
  # supervised_keys=("sentence", "label"),
116
  # Homepage of the dataset for documentation
117
- homepage=_HOMEPAGE,
118
- # License for the dataset if available
119
- license=_LICENSE,
120
  )
121
 
122
  def _split_generators(self, dl_manager):
 
4
  import numpy as np
5
 
6
  _DESCRIPTION = """\
7
+ This dataset is used to train a decision Transformer for the CityLearn 2022 environment https://www.aicrowd.com/challenges/neurips-2022-citylearn-challenge
8
  """
9
 
 
 
 
 
10
  # The HuggingFace Datasets library doesn't host the datasets but only points to the original files.
11
  # This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
12
+ _BASE_URL = "https://huggingface.co/datasets/TobiTob/CityLearn/resolve/main/data"
13
  _URLS = {
14
+ "sequences": f"{_BASE_URL}/sequences.pkl",
15
  "halfcheetah-medium-replay-v2": f"{_BASE_URL}/halfcheetah-medium-replay-v2.pkl",
 
 
 
 
 
 
 
16
  }
17
 
18
 
 
35
  # data = datasets.load_dataset('my_dataset', 'second_domain')
36
  BUILDER_CONFIGS = [
37
  datasets.BuilderConfig(
38
+ name="sequences",
39
  version=VERSION,
40
+ description="Test Data sampled from an expert policy in CityLearn environment",
41
  ),
42
  datasets.BuilderConfig(
43
  name="halfcheetah-medium-replay-v2",
44
  version=VERSION,
45
  description="Data sampled from an medium policy in the halfcheetah Mujoco environment",
46
  ),
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
  ]
48
 
49
  def _info(self):
 
68
  # specify them. They'll be used if as_supervised=True in builder.as_dataset.
69
  # supervised_keys=("sentence", "label"),
70
  # Homepage of the dataset for documentation
 
 
 
71
  )
72
 
73
  def _split_generators(self, dl_manager):