Commit: Upload nle_hf_dataset.py
Files changed: nle_hf_dataset.py (+2 −2)
Diff of nle_hf_dataset.py (unified form, reconstructed from the garbled side-by-side view):

@@ -108,7 +108,6 @@ class NleHfDataset(datasets.GeneratorBasedBuilder):

     def _split_generators(self, dl_manager):
         urls = _URLS[self.config.name]
-        print(urls)
         filepaths = [dl_manager.download(url) for url in urls]
         return [
             datasets.SplitGenerator(
@@ -119,7 +118,8 @@ class NleHfDataset(datasets.GeneratorBasedBuilder):
         for i, filepath in enumerate(filepaths):
             if self.config.name == "metadata":
                 with open(filepath, "r") as f:
-                    [removed line — content not legible in this capture]
+                    print(filepath)
+                    data = json.load(f)
                     yield i, data
             else:
                 with h5py.File(filepath, "r") as f:

Note (review): the change adds `data = json.load(f)`, which defines the `data` yielded on the next line, but it also removes one debug `print(urls)` while introducing a new debug `print(filepath)` — presumably leftover debugging; verify before release.