marktrovinger commited on
Commit
65bebb2
1 Parent(s): 9282ec3

Update cartpole_gym_replay.py

Browse files
Files changed (1) hide show
  1. cartpole_gym_replay.py +65 -0
cartpole_gym_replay.py CHANGED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pickle
2
+
3
+ import datasets
4
+ import numpy as np
5
+
6
+ # The HuggingFace Datasets library doesn't host the datasets but only points to the original files.
7
+ # This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
8
+ _BASE_URL = "https://huggingface.co/datasets/marktrovinger/cartpole_gym_replay/resolve/main"
9
+ _DATA_URL = f"{_BASE_URL}/replay_buffer_npz.npz"
10
+
11
+
12
+
13
class DecisionTransformerGymDataset(datasets.GeneratorBasedBuilder):
    """Builder for the CartPole Gym replay-buffer dataset.

    Downloads a single ``.npz`` replay buffer from the Hub and exposes it as
    one ``train`` split with per-step observation/action/reward/done arrays.
    """

    def _info(self):
        """Return dataset metadata: feature schema, description, homepage, license."""
        features = datasets.Features(
            {
                # 2-D per-example sequences: (timestep, vector component).
                "observations": datasets.Sequence(datasets.Sequence(datasets.Value("float32"))),
                "actions": datasets.Sequence(datasets.Sequence(datasets.Value("float32"))),
                # 1-D per-example sequences: one scalar per timestep.
                "rewards": datasets.Sequence(datasets.Value("float32")),
                "dones": datasets.Sequence(datasets.Value("float32")),
            }
        )
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
        )

    def _split_generators(self, dl_manager):
        """Download the replay buffer and declare the single train split.

        Bug fix: the original indexed an undefined ``_URLS`` mapping
        (NameError). This builder has exactly one data file, so download
        the module-level ``_DATA_URL`` directly.
        """
        data_path = dl_manager.download_and_extract(_DATA_URL)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                # These kwargs are passed straight to `_generate_examples`.
                gen_kwargs={
                    "filepath": data_path,
                    "split": "train",
                },
            )
        ]

    # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
    def _generate_examples(self, filepath, split):
        """Yield ``(key, example)`` pairs from the downloaded ``.npz`` buffer.

        Bug fix: ``np.load`` on an ``.npz`` returns an ``NpzFile``; iterating
        it yields key *strings*, so the original ``traj["observations"]``
        raised ``TypeError``. The archive stores one array per field, so the
        whole buffer is emitted as a single example, matching the 2-D/1-D
        feature schema declared in ``_info``.
        """
        with open(filepath, "rb") as f:
            # NpzFile members are loaded lazily — read them while the file
            # handle is still open.
            buffer = np.load(f)
            # NOTE(review): assumes the archive contains exactly these four
            # arrays spanning the full replay buffer — confirm against the
            # uploaded replay_buffer_npz.npz.
            yield 0, {
                "observations": buffer["observations"],
                "actions": buffer["actions"],
                "rewards": buffer["rewards"],
                "dones": buffer["dones"],
            }
+ }