"""Batch-upload episode parquet files to a Hugging Face dataset repo.

Reads every ``episode_*.parquet`` file under ``parquet_dir``, stringifies
(and truncates) all non-image columns, and pushes the data to the Hub in
batches of ``batch_size`` files — one Hub split per batch.
"""

import glob
import os

import pandas as pd
from datasets import Dataset, Features, Image, Value
from tqdm import tqdm

# Columns holding encoded images; every other column is stringified below.
IMAGE_COLUMNS = ["observation.image.low", "observation.image.wrist"]

parquet_dir = "data/chunk-000"     # directory containing episode parquet files
hf_repo = "mjuarez4/duck_mano_v4"  # target Hugging Face dataset repo
batch_size = 50                    # number of files uploaded per batch/split

parquet_files = sorted(glob.glob(os.path.join(parquet_dir, "episode_*.parquet")))
print(f"📦 Found {len(parquet_files)} files in {parquet_dir}")

for i in range(0, len(parquet_files), batch_size):
    batch_files = parquet_files[i:i + batch_size]
    batch_idx = i // batch_size
    dfs = []

    print(f"\n🔄 Processing batch {batch_idx} ({len(batch_files)} files)")
    for file_path in tqdm(batch_files):
        try:
            df = pd.read_parquet(file_path)

            # Stringify and truncate every non-image column so the uniform
            # Value("string") Feature schema built below applies cleanly.
            for col in df.columns:
                if col not in IMAGE_COLUMNS:
                    df[col] = df[col].apply(lambda x: str(x)[:1000])

            dfs.append(df)
        except Exception as e:
            # Best-effort: skip unreadable/corrupt files but keep the batch going.
            print(f"❌ Failed to process {file_path}: {e}")

    if not dfs:
        print(f"⚠️ Skipping batch {batch_idx} — no valid data.")
        continue

    df_batch = pd.concat(dfs, ignore_index=True)

    # Image columns decode to PIL images on access; everything else is a string.
    features = Features({
        col: Image(decode=True) if col in IMAGE_COLUMNS else Value("string")
        for col in df_batch.columns
    })

    try:
        dataset = Dataset.from_pandas(df_batch, features=features)
        dataset.push_to_hub(hf_repo, split=f"batch_{batch_idx}")
        print(f"✅ Uploaded batch {batch_idx} to {hf_repo}")
    except Exception as e:
        # Don't abort the whole run if one batch fails to upload.
        print(f"❌ Push failed for batch {batch_idx}: {e}")