# duck_mano_v4 / push_to_hf.py
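"""Push episode parquet files to the Hugging Face Hub in batches.

Each batch is concatenated into a single DataFrame, cast to a schema with two
decoded image columns (everything else stored as strings), and uploaded to
mjuarez4/duck_mano_v4 as its own split (batch_0, batch_1, ...).
"""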
import os
import glob
import pandas as pd
from datasets import Dataset, Features, Value, Image
from tqdm import tqdm
# 🗂 Directory where your parquet files live
parquet_dir = "data/chunk-000"
hf_repo = "mjuarez4/duck_mano_v4"
batch_size = 50
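# Batching keeps each concatenated DataFrame (and each Hub upload) bounded;
# 50 episodes per push is a judgment call, not a hard requirement.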
# Collect all parquet files
parquet_files = sorted(glob.glob(os.path.join(parquet_dir, "episode_*.parquet")))
print(f"πŸ“¦ Found {len(parquet_files)} files in {parquet_dir}")
# Process in batches
for i in range(0, len(parquet_files), batch_size):
    batch_files = parquet_files[i:i + batch_size]
    dfs = []
    print(f"\n🚀 Processing batch {i // batch_size} ({len(batch_files)} files)")
    for file_path in tqdm(batch_files):
        try:
            df = pd.read_parquet(file_path)
            # Stringify non-image columns (truncated to 1000 chars) so they
            # fit the Value("string") schema defined below
            for col in df.columns:
                if col not in ["observation.image.low", "observation.image.wrist"]:
                    df[col] = df[col].apply(lambda x: str(x)[:1000])
            dfs.append(df)
        except Exception as e:
            print(f"❌ Failed to process {file_path}: {e}")
    if not dfs:
        print(f"⚠️ Skipping batch {i // batch_size}: no valid data.")
        continue
    df_batch = pd.concat(dfs, ignore_index=True)
    # Define schema: decode both image fields, store everything else as string
    features = Features({
        col: Image(decode=True)
        if col in ["observation.image.low", "observation.image.wrist"]
        else Value("string")
        for col in df_batch.columns
    })
    try:
        dataset = Dataset.from_pandas(df_batch, features=features)
        dataset.push_to_hub(hf_repo, split=f"batch_{i // batch_size}")
        print(f"✅ Uploaded batch {i // batch_size} to {hf_repo}")
    except Exception as e:
        print(f"❌ Push failed for batch {i // batch_size}: {e}")