cyd0806 committed on
Commit
d708d3d
·
verified ·
1 Parent(s): 54b90c1

Upload src/partition_dataset.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. src/partition_dataset.py +51 -0
src/partition_dataset.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ import os.path
3
+
4
+ import ipdb
5
+ from datasets import load_dataset
6
+
7
+
8
def filter_test_dataset(example):
    """Return True if *example* belongs in the test split.

    An example qualifies when its ``quality_assessment`` dict exists and:
      * ``compositeStructure`` >= 3,
      * ``imageQuality`` == 5,
      * ``objectConsistency`` == 5,
      * but NOT every score is a perfect 5 — fully perfect examples are
        reserved for the train split (see ``filter_train_dataset``).

    Args:
        example: A dataset row; ``example["quality_assessment"]`` is either
            None or a dict of criterion-name -> integer score.

    Returns:
        bool: True when the example passes the test-split criteria.
    """
    qa = example["quality_assessment"]
    if qa is None:
        return False
    # Near-perfect but not perfect: perfect rows go to train instead.
    return (
        qa["compositeStructure"] >= 3
        and qa["imageQuality"] == 5
        and not all(score == 5 for score in qa.values())
        and qa["objectConsistency"] == 5
    )
18
def filter_train_dataset(example):
    """Return True if *example* belongs in the train split.

    Train examples are exactly those whose quality assessment exists and is
    a perfect score triple ``[5, 5, 5]`` (in the dict's insertion order).
    """
    assessment = example["quality_assessment"]
    if assessment is None:
        return False
    return list(assessment.values()) == [5, 5, 5]
24
def parse_args(argv=None):
    """Parse command-line arguments for partitioning a dataset.

    Args:
        argv: Optional list of argument strings. Defaults to None, in which
            case argparse reads ``sys.argv[1:]`` — CLI behavior is unchanged;
            passing a list enables programmatic use and testing.

    Returns:
        argparse.Namespace with ``dataset``, ``output_dir`` (suffixed with
        the partition name), ``partition``, ``num_shards``, ``num_proc``
        and ``cache``.
    """
    parser = argparse.ArgumentParser("partition dataset")
    # `default=None` is redundant on required options, so it is omitted.
    parser.add_argument("--dataset", type=str, required=True)
    parser.add_argument("--output_dir", type=str, required=True)
    parser.add_argument("--partition", type=str, required=True, choices=["train", "test"])
    parser.add_argument("--num_shards", type=int, default=None)
    parser.add_argument("--num_proc", type=int, default=32)
    parser.add_argument("--cache", type=str, default="cache")
    args = parser.parse_args(argv)
    if args.num_shards is None:
        # Default shard count: one shard per entry in the source dataset
        # directory for train, a single shard for test.
        args.num_shards = len(os.listdir(args.dataset)) if args.partition == "train" else 1
    # Each partition is written to its own subdirectory of output_dir.
    args.output_dir = os.path.join(args.output_dir, args.partition)
    return args
40
if __name__ == "__main__":
    # Partition the source dataset and write the result as parquet shards.
    args = parse_args()
    os.makedirs(args.output_dir, exist_ok=True)
    dataset = load_dataset(args.dataset, split="train", cache_dir=args.cache)

    # Pick the row filter that matches the requested partition.
    if args.partition == "train":
        keep = filter_train_dataset
    elif args.partition == "test":
        keep = filter_test_dataset
    filtered_dataset = dataset.filter(keep, num_proc=args.num_proc)

    # Write contiguous shards named data-00000-of-000NN.parquet.
    shard_name = "data-{index:05d}-of-{num_shards:05d}.parquet"
    for shard_index in range(args.num_shards):
        shard = filtered_dataset.shard(index=shard_index, num_shards=args.num_shards, contiguous=True)
        shard.to_parquet(
            os.path.join(args.output_dir, shard_name.format(index=shard_index, num_shards=args.num_shards))
        )