iamgroot42 committed on
Commit
9f3c65a
·
verified ·
1 Parent(s): 3ecfb9b

Update mimir.py

Browse files
Files changed (1) hide show
  1. mimir.py +50 -14
mimir.py CHANGED
@@ -3,7 +3,12 @@
3
  Processing data at HF end.
4
  """
5
 
6
- from datasets import GeneratorBasedBuilder, SplitGenerator, DownloadManager, BuilderConfig
 
 
 
 
 
7
  import json
8
  import os
9
 
@@ -20,12 +25,15 @@ We also cache neighbors (generated for the NE attack).
20
  _CITATION = """\
21
  @article{duan2024do,
22
  title={Do Membership Inference Attacks Work on Large Language Models?},
23
- author={Duan*, Michael and Suri*, Anshuman and Mireshghallah, Niloofar and Min, Sewon and Shi, Weijia and Zettlemoyer, Luke and Tsvetkov, Yulia and Choi, Yejin and Evans, David and Hajishirzi, Hannaneh},
24
  journal={arXiv preprint arXiv:???},
25
  year={2024}
26
  }
27
  """
28
 
 
 
 
29
  class MimirConfig(BuilderConfig):
30
  """BuilderConfig for Mimir dataset."""
31
 
@@ -45,7 +53,12 @@ class MimirDataset(GeneratorBasedBuilder):
45
  # Define the builder configs
46
  BUILDER_CONFIG_CLASS = MimirConfig
47
  BUILDER_CONFIGS = [
48
- MimirConfig(name="the_pile_arxiv", description="This split contains data from Arxiv, truncated with 7-gram overlap threshold < 0.2."),
 
 
 
 
 
49
  ]
50
 
51
  def _info(self):
@@ -54,9 +67,7 @@ class MimirDataset(GeneratorBasedBuilder):
54
  description=_DESCRIPTION,
55
  # This defines the different columns of the dataset and their types
56
  features=datasets.Features(
57
- {
58
- "text": datasets.Value("string"), # Each example is a piece of text
59
- }
60
  ),
61
  # If there's a common (input, target) tuple from the features,
62
  # specify them here. They'll be used if as_supervised=True in
@@ -72,27 +83,52 @@ class MimirDataset(GeneratorBasedBuilder):
72
  """Returns SplitGenerators."""
73
  # Path to the data files
74
  NEIGHBOR_SUFFIX = "_neighbors_25_bert_in_place_swap"
75
- parent_dir = "cache_100_200_1000_512"
 
 
 
 
76
 
77
  file_paths = {
78
  "member": os.path.join(parent_dir, "train", self.config.name + ".jsonl"),
79
- "nonmember": os.path.join(parent_dir, "test", self.config.name + ".jsonl")
80
  }
81
  # Load neighbor splits if they exist
82
- if os.path.exists(os.path.join(parent_dir, "train_neighbors", self.config.name + f"{NEIGHBOR_SUFFIX}.jsonl")):
 
 
 
 
 
 
 
83
  # Assume if train neighbors exist, test neighbors also exist
84
- file_paths["member_neighbors"] = os.path.join("cache_100_200_1000_512", "train_neighbors", self.config.name + f"{NEIGHBOR_SUFFIX}.jsonl"),
85
- file_paths["nonmember_neighbors"] = os.path.join("cache_100_200_1000_512", "test_neighbors", self.config.name + f"{NEIGHBOR_SUFFIX}.jsonl")
 
 
 
 
 
 
 
 
 
 
 
 
86
 
87
  splits = []
88
- for k, v in file_paths.items():
89
- splits.append(SplitGenerator(name=k, gen_kwargs={"file_path": v}))
90
  return splits
91
 
92
  def _generate_examples(self, file_path):
93
  """Yields examples."""
94
  # Open the specified .jsonl file and read each line
95
- with open(file_path, 'r') as f:
96
  for id, line in enumerate(f):
97
  data = json.loads(line)
 
 
98
  yield id, {"text": data}
 
3
  Processing data at HF end.
4
  """
5
 
6
+ from datasets import (
7
+ GeneratorBasedBuilder,
8
+ SplitGenerator,
9
+ DownloadManager,
10
+ BuilderConfig,
11
+ )
12
  import json
13
  import os
14
 
 
25
  _CITATION = """\
26
  @article{duan2024do,
27
  title={Do Membership Inference Attacks Work on Large Language Models?},
28
+ author={Duan*, Michael and Suri*, Anshuman and Mireshghallah, Niloofar and Min, Sewon and Shi, Weijia and Zettlemoyer, Luke and Tsvetkov, Yulia and Choi, Yejin and Evans, David and Hajishirzi, Hannaneh},
29
  journal={arXiv preprint arXiv:???},
30
  year={2024}
31
  }
32
  """
33
 
34
+ _DOWNLOAD_URL = "https://huggingface.co/datasets/iamgroot42/mimir/resolve/main/"
35
+
36
+
37
  class MimirConfig(BuilderConfig):
38
  """BuilderConfig for Mimir dataset."""
39
 
 
53
  # Define the builder configs
54
  BUILDER_CONFIG_CLASS = MimirConfig
55
  BUILDER_CONFIGS = [
56
+ MimirConfig(
57
+ name="the_pile_arxiv", description="This split contains data from Arxiv"
58
+ ),
59
+ MimirConfig(
60
+ name="the_pile_full_pile", description="This split contains data from multiple sources in the Pile",
61
+ ),
62
  ]
63
 
64
  def _info(self):
 
67
  description=_DESCRIPTION,
68
  # This defines the different columns of the dataset and their types
69
  features=datasets.Features(
70
+ {"text": datasets.Sequence(datasets.Value("string"))}
 
 
71
  ),
72
  # If there's a common (input, target) tuple from the features,
73
  # specify them here. They'll be used if as_supervised=True in
 
83
  """Returns SplitGenerators."""
84
  # Path to the data files
85
  NEIGHBOR_SUFFIX = "_neighbors_25_bert_in_place_swap"
86
+ parent_dir = (
87
+ "cache_100_200_10000_512"
88
+ if self.config.name == "the_pile_full_pile"
89
+ else "cache_100_200_1000_512"
90
+ )
91
 
92
  file_paths = {
93
  "member": os.path.join(parent_dir, "train", self.config.name + ".jsonl"),
94
+ "nonmember": os.path.join(parent_dir, "test", self.config.name + ".jsonl"),
95
  }
96
  # Load neighbor splits if they exist
97
+ # TODO: This is not correct (should be checking URL, not local file structure). Fix later
98
+ if os.path.exists(
99
+ os.path.join(
100
+ parent_dir,
101
+ "train_neighbors",
102
+ self.config.name + f"{NEIGHBOR_SUFFIX}.jsonl",
103
+ )
104
+ ):
105
  # Assume if train neighbors exist, test neighbors also exist
106
+ file_paths["member_neighbors"] = os.path.join(
107
+ parent_dir,
108
+ "train_neighbors",
109
+ self.config.name + f"{NEIGHBOR_SUFFIX}.jsonl",
110
+ )
111
+ file_paths["nonmember_neighbors"] = os.path.join(
112
+ parent_dir,
113
+ "test_neighbors",
114
+ self.config.name + f"{NEIGHBOR_SUFFIX}.jsonl",
115
+ )
116
+
117
+ # Now that we know which files to load, download them
118
+ download_paths = [_DOWNLOAD_URL + v for v in file_paths.values()]
119
+ data_dir = dl_manager.download_and_extract(download_paths)
120
 
121
  splits = []
122
+ for i, k in enumerate(file_paths.keys()):
123
+ splits.append(SplitGenerator(name=k, gen_kwargs={"file_path": data_dir[i]}))
124
  return splits
125
 
126
  def _generate_examples(self, file_path):
127
  """Yields examples."""
128
  # Open the specified .jsonl file and read each line
129
+ with open(file_path, "r") as f:
130
  for id, line in enumerate(f):
131
  data = json.loads(line)
132
+ if type(data) != list:
133
+ data = [data]
134
  yield id, {"text": data}