Dataset Preview
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code: DatasetGenerationError
Exception: ArrowNotImplementedError
Message: Cannot write struct type 'rename_map' with no child field to Parquet. Consider adding a dummy child field.
Traceback: Traceback (most recent call last):
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 1887, in _prepare_split_single
writer.write_table(table)
File "/usr/local/lib/python3.12/site-packages/datasets/arrow_writer.py", line 672, in write_table
self._build_writer(inferred_schema=pa_table.schema)
File "/usr/local/lib/python3.12/site-packages/datasets/arrow_writer.py", line 713, in _build_writer
self.pa_writer = pq.ParquetWriter(
^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/pyarrow/parquet/core.py", line 1070, in __init__
self.writer = _parquet.ParquetWriter(
^^^^^^^^^^^^^^^^^^^^^^^
File "pyarrow/_parquet.pyx", line 2363, in pyarrow._parquet.ParquetWriter.__cinit__
File "pyarrow/error.pxi", line 155, in pyarrow.lib.pyarrow_internal_check_status
File "pyarrow/error.pxi", line 92, in pyarrow.lib.check_status
pyarrow.lib.ArrowNotImplementedError: Cannot write struct type 'rename_map' with no child field to Parquet. Consider adding a dummy child field.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 1908, in _prepare_split_single
num_examples, num_bytes = writer.finalize()
^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/datasets/arrow_writer.py", line 687, in finalize
self._build_writer(self.schema)
File "/usr/local/lib/python3.12/site-packages/datasets/arrow_writer.py", line 713, in _build_writer
self.pa_writer = pq.ParquetWriter(
^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/pyarrow/parquet/core.py", line 1070, in __init__
self.writer = _parquet.ParquetWriter(
^^^^^^^^^^^^^^^^^^^^^^^
File "pyarrow/_parquet.pyx", line 2363, in pyarrow._parquet.ParquetWriter.__cinit__
File "pyarrow/error.pxi", line 155, in pyarrow.lib.pyarrow_internal_check_status
File "pyarrow/error.pxi", line 92, in pyarrow.lib.check_status
pyarrow.lib.ArrowNotImplementedError: Cannot write struct type 'rename_map' with no child field to Parquet. Consider adding a dummy child field.
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1347, in compute_config_parquet_and_info_response
parquet_operations = convert_to_parquet(builder)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 980, in convert_to_parquet
builder.download_and_prepare(
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 884, in download_and_prepare
self._download_and_prepare(
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 947, in _download_and_prepare
self._prepare_split(split_generator, **prepare_split_kwargs)
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 1736, in _prepare_split
for job_id, done, content in self._prepare_split_single(
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/datasets/builder.py", line 1919, in _prepare_split_single
raise DatasetGenerationError("An error occurred while generating the dataset") from e
datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset
Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.
dataset dict | env null | policy dict | output_dir string | job_name string | resume bool | seed int64 | num_workers int64 | batch_size int64 | steps int64 | eval_freq int64 | log_freq int64 | tolerance_s float64 | save_checkpoint bool | save_freq int64 | use_policy_training_preset bool | optimizer dict | scheduler dict | eval dict | wandb dict | peft null | use_rabc bool | rabc_progress_path null | rabc_kappa float64 | rabc_epsilon float64 | rabc_head_mode string | rename_map dict | checkpoint_path null |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
{
"repo_id": "local/dataset",
"root": null,
"episodes": null,
"image_transforms": {
"enable": false,
"max_num_transforms": 3,
"random_order": false,
"tfs": {
"brightness": {
"weight": 1,
"type": "ColorJitter",
"kwargs": {
"brightness": [
0.8,
1.2
]
}
},
"contrast": {
"weight": 1,
"type": "ColorJitter",
"kwargs": {
"contrast": [
0.8,
1.2
]
}
},
"saturation": {
"weight": 1,
"type": "ColorJitter",
"kwargs": {
"saturation": [
0.5,
1.5
]
}
},
"hue": {
"weight": 1,
"type": "ColorJitter",
"kwargs": {
"hue": [
-0.05,
0.05
]
}
},
"sharpness": {
"weight": 1,
"type": "SharpnessJitter",
"kwargs": {
"sharpness": [
0.5,
1.5
]
}
},
"affine": {
"weight": 1,
"type": "RandomAffine",
"kwargs": {
"degrees": [
-5,
5
],
"translate": [
0.05,
0.05
]
}
}
}
},
"revision": null,
"use_imagenet_stats": true,
"video_backend": "pyav",
"streaming": false
} | null | {
"type": "smolvla",
"n_obs_steps": 1,
"input_features": {
"observation.state": {
"type": "STATE",
"shape": [
6
]
},
"observation.images.front": {
"type": "VISUAL",
"shape": [
3,
1080,
1920
]
},
"observation.images.side": {
"type": "VISUAL",
"shape": [
3,
720,
1280
]
}
},
"output_features": {
"action": {
"type": "ACTION",
"shape": [
6
]
}
},
"device": "cuda",
"use_amp": false,
"use_peft": false,
"push_to_hub": false,
"repo_id": "user/smolvla-local-test",
"private": null,
"tags": null,
"license": null,
"pretrained_path": null,
"chunk_size": 50,
"n_action_steps": 50,
"normalization_mapping": {
"VISUAL": "IDENTITY",
"STATE": "MEAN_STD",
"ACTION": "MEAN_STD"
},
"max_state_dim": 32,
"max_action_dim": 32,
"resize_imgs_with_padding": [
512,
512
],
"empty_cameras": 0,
"adapt_to_pi_aloha": false,
"use_delta_joint_actions_aloha": false,
"tokenizer_max_length": 48,
"num_steps": 10,
"use_cache": true,
"freeze_vision_encoder": true,
"train_expert_only": true,
"train_state_proj": true,
"optimizer_lr": 0.0001,
"optimizer_betas": [
0.9,
0.95
],
"optimizer_eps": 1e-8,
"optimizer_weight_decay": 1e-10,
"optimizer_grad_clip_norm": 10,
"scheduler_warmup_steps": 1000,
"scheduler_decay_steps": 30000,
"scheduler_decay_lr": 0.0000025,
"vlm_model_name": "HuggingFaceTB/SmolVLM2-500M-Video-Instruct",
"load_vlm_weights": false,
"add_image_special_tokens": false,
"attention_mode": "cross_attn",
"prefix_length": -1,
"pad_language_to": "longest",
"num_expert_layers": -1,
"num_vlm_layers": 16,
"self_attn_every_n_layers": 2,
"expert_width_multiplier": 0.75,
"min_period": 0.004,
"max_period": 4,
"rtc_config": null
} | outputs/train/2026-02-01/08-28-00_smolvla | smolvla | false | 1,000 | 4 | 8 | 20,000 | 20,000 | 200 | 0.0001 | true | 20,000 | true | {
"type": "adamw",
"lr": 0.0001,
"weight_decay": 1e-10,
"grad_clip_norm": 10,
"betas": [
0.9,
0.95
],
"eps": 1e-8
} | {
"type": "cosine_decay_with_warmup",
"num_warmup_steps": 1000,
"num_decay_steps": 30000,
"peak_lr": 0.0001,
"decay_lr": 0.0000025
} | {
"n_episodes": 50,
"batch_size": 50,
"use_async_envs": false
} | {
"enable": false,
"disable_artifact": false,
"project": "lerobot",
"entity": null,
"notes": null,
"run_id": null,
"mode": null
} | null | false | null | 0.01 | 0.000001 | sparse | {} | null |
No dataset card yet
- Downloads last month
- 6