File size: 1,409 Bytes
3a9b8fd |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 |
#!/usr/bin/env python3
"""
Script to load and verify the matrix_operations dataset
"""
import json
from pathlib import Path
# Name of the dataset this script verifies (was an undefined `config.name`,
# which raised NameError; the module docstring identifies the dataset).
DATASET_NAME = "matrix_operations"


def load_and_verify_dataset(dataset_path="."):
    """Load the train/test JSONL splits and print basic validation info.

    Args:
        dataset_path: Directory containing ``train.jsonl`` / ``test.jsonl``.
            Defaults to the current directory (the original behavior).

    Returns:
        A ``(train_data, test_data)`` tuple of lists of decoded JSON records.
        A missing split file yields an empty list rather than an error.
    """
    dataset_path = Path(dataset_path)
    print(f"Loading {DATASET_NAME} dataset...")

    def _load_split(split):
        # Read one JSONL split if present; blank lines are skipped so a
        # trailing newline in a hand-edited file does not crash json.loads.
        split_file = dataset_path / f"{split}.jsonl"
        records = []
        if split_file.exists():
            with open(split_file, "r", encoding="utf-8") as f:
                records = [json.loads(line) for line in f if line.strip()]
        print(f"Loaded {len(records)} {split} samples")
        return records

    train_data = _load_split("train")
    test_data = _load_split("test")

    # Basic validation: report totals and peek at the first train record.
    print("\nDataset Validation:")
    print(f"Total samples: {len(train_data) + len(test_data)}")
    if train_data:
        sample = train_data[0]
        print(f"Sample keys: {list(sample.keys())}")
        print(f"Matrix size: {sample.get('matrix_size')}")
        print(f"Operations count: {len(sample.get('operations', []))}")

    print("\nDataset ready for use!")
    print("To upload to Hugging Face Hub:")
    print(f"  git push https://huggingface.co/datasets/your-username/{DATASET_NAME}")
    return train_data, test_data


if __name__ == "__main__":
    load_and_verify_dataset()
|