| import json |
| import argparse |
| import random |
| from pathlib import Path |
| from tqdm import tqdm |
| import datasets |
| from huggingface_hub import HfApi, RepoCard |
| from transformers import HfArgumentParser |
|
|
# Seed the module-level RNG so the train/test splits are reproducible across runs.
random.seed(0)
|
|
def generate_unique_multiplication_data(a_max, b_max, n_train, n_test):
    """Generate disjoint train/test splits for each multiplication range.

    For every (a, b) with 1 <= a <= a_max and 1 <= b <= b_max, all factor
    pairs (x, y) with x <= a and y <= b are enumerated; the test split is
    sampled first and the train split is drawn only from the remaining
    pairs, so the two splits never overlap.

    Args:
        a_max: Largest upper bound for the first factor.
        b_max: Largest upper bound for the second factor.
        n_train: Desired train examples per range (capped by availability).
        n_test: Desired test examples per range (capped by availability).

    Returns:
        Dict mapping "{a}x{b}" to {"train": [...], "test": [...]} lists of
        (x, y) factor pairs.
    """
    # Renamed from `datasets` so the local dict does not shadow the imported
    # third-party `datasets` module used elsewhere in this file.
    splits = {}

    for a in range(1, a_max + 1):
        for b in range(1, b_max + 1):
            all_pairs = [(x, y) for x in range(1, a + 1) for y in range(1, b + 1)]

            # Sample the test split first; then sample train from the
            # remainder so the splits are guaranteed disjoint.
            test_data = set(random.sample(all_pairs, min(n_test, len(all_pairs))))
            remaining = list(set(all_pairs) - test_data)  # computed once, not twice
            train_data = set(random.sample(remaining, min(n_train, len(remaining))))

            splits[f"{a}x{b}"] = {"train": list(train_data), "test": list(test_data)}

    return splits
|
|
def save_to_jsonl(data, file_path):
    """Write (a, b) factor pairs to *file_path* as JSONL problem/answer records."""
    with open(file_path, "w") as out:
        for factor_a, factor_b in data:
            record = {
                "problem": f"What is {factor_a} times {factor_b}?",
                "answer": str(factor_a * factor_b),
            }
            out.write(json.dumps(record) + "\n")
|
|
def prepare_datasets(output_dir, a_max=15, b_max=15, n_train=1000, n_test=100):
    """Generate and save disjoint train/test JSONL datasets for every range.

    Covers all ranges from 1x1 up to ``a_max`` x ``b_max``. The defaults
    preserve the original hard-coded 15x15 / 1000-train / 100-test behavior,
    so existing callers are unaffected.

    Args:
        output_dir: Directory the JSONL files are written to (created if absent).
        a_max: Largest upper bound for the first factor.
        b_max: Largest upper bound for the second factor.
        n_train: Desired train examples per range.
        n_test: Desired test examples per range.

    Returns:
        Tuple ``(train_files, test_files)`` of written file Paths.
    """
    output_dir = Path(output_dir)
    output_dir.mkdir(parents=True, exist_ok=True)

    all_datasets = generate_unique_multiplication_data(
        a_max=a_max, b_max=b_max, n_train=n_train, n_test=n_test
    )

    train_files, test_files = [], []
    for name, data in all_datasets.items():
        train_file = output_dir / f"multiplication_train_{name}.jsonl"
        test_file = output_dir / f"multiplication_test_{name}.jsonl"

        save_to_jsonl(data["train"], train_file)
        save_to_jsonl(data["test"], test_file)

        train_files.append(train_file)
        test_files.append(test_file)

    print(f"\n✅ Datasets saved to {output_dir}")
    return train_files, test_files
|
|
def process_file(file_path):
    """Load a JSONL problem/answer file into a Hugging Face dataset."""
    with open(file_path, "r") as handle:
        records = [json.loads(raw.strip()) for raw in handle if raw.strip()]

    # One two-turn conversation (user question, assistant answer) per record.
    messages = [
        [
            {"role": "user", "content": rec["problem"]},
            {"role": "assistant", "content": rec["answer"]},
        ]
        for rec in records
    ]
    columns = {
        "messages": messages,
        "ground_truth": [rec["answer"] for rec in records],
        "dataset": ["multiplication"] * len(records),
    }
    return datasets.Dataset.from_dict(columns)
|
|
def push_to_huggingface(train_files, test_files, hf_entity):
    """Push datasets to Hugging Face Hub and print the dataset link.

    For each JSONL file: builds a `datasets.Dataset`, pushes it to its own
    dataset repo under *hf_entity*, uploads this script alongside the data
    for provenance, and writes a repo card linking back to the dataset page.

    Args:
        train_files: Paths of train JSONL files to upload.
        test_files: Paths of test JSONL files to upload.
        hf_entity: Hub namespace to push under; ``None`` uses the
            authenticated user returned by ``whoami()``.
    """
    api = HfApi()
    hf_entity = hf_entity or api.whoami()["name"]

    print("\n📤 Uploading datasets to Hugging Face...\n")

    for file in train_files + test_files:
        dataset = process_file(file)
        dataset_name = file.stem
        repo_id = f"{hf_entity}/{dataset_name}"
        hf_url = f"https://huggingface.co/datasets/{repo_id}"

        dataset.push_to_hub(repo_id)

        # Keep a copy of the generating script next to the data for provenance.
        api.upload_file(
            path_or_fileobj=__file__,
            path_in_repo="create_dataset.py",
            repo_type="dataset",
            repo_id=repo_id,
        )

        repo_card = RepoCard(
            content=f"""\
# Multiplication Dataset - {dataset_name}

This dataset contains multiplication problems for numbers up to 15x15.

## Dataset Format

- `messages`: User question and assistant answer.
- `ground_truth`: Correct multiplication result.
- `dataset`: "multiplication"

## Hugging Face Dataset Link
➡️ [View dataset on Hugging Face]({hf_url})
"""
        )
        repo_card.push_to_hub(repo_id, repo_type="dataset")

        # Fixed: report success only AFTER the dataset, script, and card have
        # all been pushed (previously printed before any upload happened).
        print(f"✅ Dataset uploaded: {dataset_name}")
|
|
def main():
    """Parse CLI arguments, build the datasets, and optionally upload them."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--output_dir", type=str, default="math_data", help="Output directory")
    arg_parser.add_argument("--push_to_hub", action="store_true", help="Upload to Hugging Face")
    arg_parser.add_argument("--hf_entity", type=str, default=None, help="Hugging Face entity")
    args = arg_parser.parse_args()

    train_files, test_files = prepare_datasets(args.output_dir)

    # Uploading is opt-in; bail out early unless --push_to_hub was given.
    if not args.push_to_hub:
        return
    push_to_huggingface(train_files, test_files, args.hf_entity)
|
|
# Allow the module to be imported (e.g. to reuse the helpers) without running.
if __name__ == "__main__":
    main()
|
|