Legitking4pf committed on
Commit
1daee20
·
verified ·
1 Parent(s): 5073b65

Update dataset.py

Browse files
Files changed (1) hide show
  1. dataset.py +45 -39
dataset.py CHANGED
@@ -1,5 +1,5 @@
1
  import os
2
- from datasets import DatasetInfo, GeneratorBasedBuilder, Split, Features, Image, ClassLabel
3
  from huggingface_hub import hf_hub_download
4
 
5
  class UIR25MReference(GeneratorBasedBuilder):
@@ -7,11 +7,11 @@ class UIR25MReference(GeneratorBasedBuilder):
7
 
8
  VERSION = "1.0.0"
9
 
10
- # Directory to store pretrained models
11
  PRETRAINED_DIR = os.path.expanduser("./pretrained_models")
12
  os.makedirs(PRETRAINED_DIR, exist_ok=True)
13
 
14
- # List of pretrained model files
15
  PRETRAINED_MODELS = [
16
  "nafnet_maskdcpt_12d.pth",
17
  "nafnet_maskdcpt_5d.pth",
@@ -21,22 +21,22 @@ class UIR25MReference(GeneratorBasedBuilder):
21
  "swinir_maskdcpt_5d.pth"
22
  ]
23
 
24
- HF_REPO = "Jiakui/MaskDCPT"
25
 
26
- @staticmethod
27
- def download_pretrained_models():
28
  downloaded_files = []
29
- for filename in UIR25MReference.PRETRAINED_MODELS:
30
- target_path = os.path.join(UIR25MReference.PRETRAINED_DIR, filename)
31
  if not os.path.exists(target_path):
32
- print(f"Downloading {filename}...")
33
  hf_hub_download(
34
- repo_id=UIR25MReference.HF_REPO,
35
  filename=f"pretrained_models/{filename}",
36
- local_dir=UIR25MReference.PRETRAINED_DIR
37
  )
38
  else:
39
- print(f"{filename} already exists, skipping.")
40
  downloaded_files.append(target_path)
41
  return downloaded_files
42
 
@@ -51,10 +51,10 @@ class UIR25MReference(GeneratorBasedBuilder):
51
  "degradation_type": ClassLabel(names=[
52
  "noise", "blur", "compression", "haze", "low_light",
53
  "degradation_5", "degradation_6", "degradation_7",
54
- "degradation_8", "degradation_9", "degradation_10",
55
- "degradation_11", "degradation_12", "degradation_13",
56
- "degradation_14", "degradation_15", "degradation_16",
57
- "degradation_17", "degradation_18", "degradation_19"
58
  ])
59
  }),
60
  task_categories=["image-to-image"],
@@ -62,30 +62,36 @@ class UIR25MReference(GeneratorBasedBuilder):
62
  )
63
 
64
  def _split_generators(self, dl_manager):
65
- data_dir = os.path.expanduser(dl_manager.download_and_extract("https://github.com/MILab-PKU/MaskDCPT.git"))
66
- return [
67
- self.SplitGenerator(
68
- name=Split.TRAIN,
69
- gen_kwargs={"images_dir": os.path.join(data_dir, "train")}
70
- ),
71
- # Optionally add test split
72
- ]
 
 
 
 
 
 
 
 
73
 
74
  def _generate_examples(self, images_dir):
75
- for idx, fname in enumerate(os.listdir(images_dir)):
76
- if fname.endswith(".png") or fname.endswith(".jpg"):
77
- low_quality_path = os.path.join(images_dir, fname)
78
- high_quality_path = os.path.join(images_dir, "high_quality", fname)
79
- degradation_type = 0 # Replace with actual mapping from metadata
 
 
 
 
 
80
  yield idx, {
81
- "low_quality": low_quality_path,
82
- "high_quality": high_quality_path,
83
  "degradation_type": degradation_type
84
- }
85
-
86
- # Example: download pretrained models if needed
87
- if __name__ == "__main__":
88
- downloaded_models = UIR25MReference.download_pretrained_models()
89
- print("Downloaded pretrained models:")
90
- for path in downloaded_models:
91
- print(path)
 
1
  import os
2
+ from datasets import DatasetInfo, GeneratorBasedBuilder, Features, Image, ClassLabel, Split
3
  from huggingface_hub import hf_hub_download
4
 
5
  class UIR25MReference(GeneratorBasedBuilder):
 
7
 
8
  VERSION = "1.0.0"
9
 
10
+ # Directory where pretrained models will be downloaded
11
  PRETRAINED_DIR = os.path.expanduser("./pretrained_models")
12
  os.makedirs(PRETRAINED_DIR, exist_ok=True)
13
 
14
+ # List of pretrained model files with Hugging Face URLs
15
  PRETRAINED_MODELS = [
16
  "nafnet_maskdcpt_12d.pth",
17
  "nafnet_maskdcpt_5d.pth",
 
21
  "swinir_maskdcpt_5d.pth"
22
  ]
23
 
24
+ PRETRAINED_REPO = "Jiakui/MaskDCPT"
25
 
26
+ def download_pretrained_models(self):
27
+ """Download all pretrained models to local directory."""
28
  downloaded_files = []
29
+ for filename in self.PRETRAINED_MODELS:
30
+ target_path = os.path.join(self.PRETRAINED_DIR, filename)
31
  if not os.path.exists(target_path):
32
+ print(f"Downloading pretrained model: {filename}")
33
  hf_hub_download(
34
+ repo_id=self.PRETRAINED_REPO,
35
  filename=f"pretrained_models/{filename}",
36
+ local_dir=self.PRETRAINED_DIR
37
  )
38
  else:
39
+ print(f"{filename} already exists, skipping download.")
40
  downloaded_files.append(target_path)
41
  return downloaded_files
42
 
 
51
  "degradation_type": ClassLabel(names=[
52
  "noise", "blur", "compression", "haze", "low_light",
53
  "degradation_5", "degradation_6", "degradation_7",
54
+ "degradation_8", "degradation_9",
55
+ "degradation_10", "degradation_11", "degradation_12",
56
+ "degradation_13", "degradation_14", "degradation_15",
57
+ "degradation_16", "degradation_17", "degradation_18", "degradation_19"
58
  ])
59
  }),
60
  task_categories=["image-to-image"],
 
62
  )
63
 
64
  def _split_generators(self, dl_manager):
65
+ """
66
+ Leverage automatic split detection:
67
+ Expects directories named `train`, `test`, `validation` in external path
68
+ """
69
+ data_dir = dl_manager.download_and_extract("https://github.com/MILab-PKU/MaskDCPT.git")
70
+ splits = []
71
+ for split_name in ["train", "test", "validation"]:
72
+ split_path = os.path.join(data_dir, split_name)
73
+ if os.path.exists(split_path):
74
+ splits.append(
75
+ self.SplitGenerator(
76
+ name=getattr(Split, split_name.upper()),
77
+ gen_kwargs={"images_dir": split_path}
78
+ )
79
+ )
80
+ return splits
81
 
82
  def _generate_examples(self, images_dir):
83
+ """Yield examples for Hugging Face dataset."""
84
+ low_dir = os.path.join(images_dir, "low_quality")
85
+ high_dir = os.path.join(images_dir, "high_quality")
86
+
87
+ # Expect a mapping from low to high quality images and degradation type metadata
88
+ for idx, fname in enumerate(os.listdir(low_dir)):
89
+ if fname.lower().endswith((".png", ".jpg", ".jpeg")):
90
+ low_path = os.path.join(low_dir, fname)
91
+ high_path = os.path.join(high_dir, fname)
92
+ degradation_type = 0 # Replace with actual metadata mapping if available
93
  yield idx, {
94
+ "low_quality": low_path,
95
+ "high_quality": high_path,
96
  "degradation_type": degradation_type
97
+ }