import pyarrow.parquet as pq
class TatoebaLarge:
    """Lazily iterate a (potentially huge) Parquet dataset in record batches.

    Wraps :class:`pyarrow.parquet.ParquetFile` so the file is streamed
    batch-by-batch instead of being loaded into memory at once.

    NOTE(review): `__len__` reports the number of *rows*, while `__iter__`
    yields *batches* — so `len(ds)` will not equal the number of items
    produced by iterating. Callers should be aware of this asymmetry.
    """

    def __init__(self, file_path, batch_size=None):
        """Open the Parquet file at *file_path*.

        Args:
            file_path: Path to the ``.parquet`` file.
            batch_size: Optional number of rows per batch. ``None`` keeps
                pyarrow's default (preserves original behavior).
        """
        self.file_path = file_path
        self.batch_size = batch_size
        self.parquet_file = pq.ParquetFile(self.file_path)

    def __iter__(self):
        """Yield ``pyarrow.RecordBatch`` objects, streamed from disk."""
        if self.batch_size is None:
            # Original behavior: pyarrow's default batch size.
            return self.parquet_file.iter_batches()
        return self.parquet_file.iter_batches(batch_size=self.batch_size)

    def __len__(self):
        """Return the total number of rows (from file metadata; no full scan)."""
        return self.parquet_file.metadata.num_rows
# Usage example
# Bug fixes vs. original:
#  - The class defined above is `TatoebaLarge`; `ParquetDataset` was an
#    undefined name and raised NameError.
#  - `batch.to_pydict()` returns a dict of columns, so iterating it yields
#    column *names* (strings), not examples. `to_pylist()` yields one
#    row-dict per example, which is what the loop body expects.
dataset = TatoebaLarge("path/to/your/dataset.parquet")
for batch in dataset:
    for example in batch.to_pylist():
        # Do something with the example, e.g., preprocess the data
        print(example)