pbk0 committed on
Commit
b20a9a3
·
1 Parent(s): 44bf929

Refactor dataset loading logic to handle manual directory and remote downloads; allow loading with pickle support

Browse files
Files changed (2) hide show
  1. test.py +8 -6
  2. test_dataset.py +3 -1
test.py CHANGED
@@ -50,10 +50,12 @@ class TestDataset(datasets.GeneratorBasedBuilder):
50
  )
51
 
52
  def _split_generators(self, dl_manager):
53
- # Use the provided data_dir from load_dataset
54
- data_dir = dl_manager.manual_dir if dl_manager.manual_dir else dl_manager.data_dir
55
- traces_path = os.path.join(data_dir, "traces.npy")
56
- labels_path = os.path.join(data_dir, "labels.npy")
 
 
57
  return [
58
  datasets.SplitGenerator(
59
  name=datasets.Split.TRAIN,
@@ -65,8 +67,8 @@ class TestDataset(datasets.GeneratorBasedBuilder):
65
  ]
66
 
67
  def _generate_examples(self, traces_path, labels_path):
68
- traces = np.load(traces_path)
69
- labels = np.load(labels_path)
70
  for idx, (trace, label) in enumerate(zip(traces, labels)):
71
  yield idx, {
72
  "trace": trace.tolist(),
 
50
  )
51
 
52
  def _split_generators(self, dl_manager):
53
+ if dl_manager.manual_dir is not None:
54
+ traces_path = os.path.join(dl_manager.manual_dir, "traces.npy")
55
+ labels_path = os.path.join(dl_manager.manual_dir, "labels.npy")
56
+ else:
57
+ traces_path = dl_manager.download("https://huggingface.co/datasets/DLSCA/test/resolve/main/data/traces.npy")
58
+ labels_path = dl_manager.download("https://huggingface.co/datasets/DLSCA/test/resolve/main/data/labels.npy")
59
  return [
60
  datasets.SplitGenerator(
61
  name=datasets.Split.TRAIN,
 
67
  ]
68
 
69
  def _generate_examples(self, traces_path, labels_path):
70
+ traces = np.load(traces_path, allow_pickle=True)
71
+ labels = np.load(labels_path, allow_pickle=True)
72
  for idx, (trace, label) in enumerate(zip(traces, labels)):
73
  yield idx, {
74
  "trace": trace.tolist(),
test_dataset.py CHANGED
@@ -1,10 +1,12 @@
1
  from datasets import load_dataset
2
 
 
 
3
  def main():
4
  # Load the dataset from the local script
5
  ds = load_dataset(
6
  'test.py',
7
- data_dir='data',
8
  split='train',
9
  trust_remote_code=True,
10
  )
 
1
  from datasets import load_dataset
2
 
3
+ LOCAL = True
4
+
5
  def main():
6
  # Load the dataset from the local script
7
  ds = load_dataset(
8
  'test.py',
9
+ data_dir='data' if LOCAL else None,
10
  split='train',
11
  trust_remote_code=True,
12
  )