evalstate (HF Staff) committed
Commit 4c0d540 · verified · 1 parent: 0234efd

Add fix_dataset.py utility script

Files changed (1):
  1. fix_dataset.py +96 -0
fix_dataset.py ADDED
@@ -0,0 +1,96 @@
#!/usr/bin/env -S uv run
# /// script
# requires-python = ">=3.10"
# dependencies = [
#     "datasets",
#     "huggingface_hub",
# ]
# ///
"""
Fix a dataset by deleting a split with a mismatched schema and re-pushing it.

Usage:
    uv run fix_dataset.py --repo evalstate/mcp-clients --split deduplicated
"""

import argparse
import sys

from datasets import load_dataset
from huggingface_hub import HfApi


def main():
    parser = argparse.ArgumentParser(description="Delete and recreate a dataset split")
    parser.add_argument("--repo", required=True, help="Repository ID")
    parser.add_argument("--split", required=True, help="Split name to fix")
    parser.add_argument("--token", default=None, help="HF token")
    args = parser.parse_args()

    api = HfApi(token=args.token)

    # Make sure the repo exists before doing anything destructive.
    try:
        api.repo_info(args.repo, repo_type="dataset")
        print(f"Found repo: {args.repo}", file=sys.stderr)
    except Exception as e:
        print(f"Repo not found or error: {e}", file=sys.stderr)
        sys.exit(1)

    # The datasets library has no direct "delete split" API, so load the
    # dataset to confirm the split exists, then delete its data files from
    # the repo through the Hub API.
    try:
        ds = load_dataset(args.repo)
        print(f"Existing splits: {list(ds.keys())}", file=sys.stderr)
    except Exception as e:
        print(f"Could not load dataset: {e}", file=sys.stderr)
        sys.exit(1)

    if args.split not in ds:
        print(f"Split '{args.split}' not found in dataset", file=sys.stderr)
        sys.exit(1)

    print(f"Deleting split '{args.split}'...", file=sys.stderr)

    try:
        # Find the data files for this split. push_to_hub writes parquet
        # shards under data/<split>-NNNNN-of-NNNNN.parquet, so match on the
        # split-name prefix (plus an alternate split_<name> layout, just in
        # case).
        print("Checking for split files to delete...", file=sys.stderr)

        repo_files = api.list_repo_files(args.repo, repo_type="dataset")
        split_files = [
            f
            for f in repo_files
            if f.startswith(f"data/{args.split}-") or f"split_{args.split}" in f
        ]

        print(f"Files to delete for split '{args.split}': {split_files}", file=sys.stderr)

        if split_files:
            print(f"Deleting {len(split_files)} files...", file=sys.stderr)
            for f in split_files:
                print(f"  Deleting: {f}", file=sys.stderr)
                # Each delete_file call creates its own commit on the Hub.
                api.delete_file(f, args.repo, repo_type="dataset")
            print("Files deleted. You'll need to re-push the split with the correct schema.", file=sys.stderr)
        else:
            print("No split files found to delete", file=sys.stderr)

    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)

    print("Done! Re-run the dedup script to create the split with the correct schema.", file=sys.stderr)


if __name__ == "__main__":
    main()
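
Each api.delete_file call in the script creates a separate commit, so a split with many parquet shards produces a noisy repo history. A minimal sketch of a batched variant, assuming the same args and split_files computed above; create_commit and CommitOperationDelete are public huggingface_hub APIs that apply all deletions atomically in a single commit:

    from huggingface_hub import CommitOperationDelete, HfApi

    api = HfApi()

    # One delete operation per shard, all applied in a single commit.
    operations = [CommitOperationDelete(path_in_repo=path) for path in split_files]
    api.create_commit(
        repo_id=args.repo,
        repo_type="dataset",
        operations=operations,
        commit_message=f"Remove '{args.split}' split with mismatched schema",
    )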
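
The closing message defers re-creating the split to the dedup script. For reference, a sketch of that re-push step, assuming a datasets version whose push_to_hub accepts a split argument; fixed_ds and its placeholder rows are hypothetical stand-ins for the dedup script's real output:

    from datasets import Dataset

    # Hypothetical rebuilt split; real rows would come from the dedup script.
    fixed_ds = Dataset.from_list([{"client": "example-client", "count": 1}])

    # Writes data/deduplicated-*.parquet alongside the untouched splits.
    fixed_ds.push_to_hub("evalstate/mcp-clients", split="deduplicated")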