Upload folder using huggingface_hub
This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
- .config/.last_opt_in_prompt.yaml +1 -0
- .config/.last_survey_prompt.yaml +1 -0
- .config/.last_update_check.json +1 -0
- .config/active_config +1 -0
- .config/config_sentinel +0 -0
- .config/configurations/config_default +6 -0
- .config/default_configs.db +0 -0
- .config/gce +1 -0
- .config/hidden_gcloud_config_universe_descriptor_data_cache_configs.db +0 -0
- .config/logs/2025.07.01/21.03.20.550425.log +765 -0
- .config/logs/2025.07.01/21.03.41.948871.log +5 -0
- .config/logs/2025.07.01/21.03.53.252709.log +153 -0
- .config/logs/2025.07.01/21.03.54.626322.log +5 -0
- .config/logs/2025.07.01/21.04.06.597104.log +8 -0
- .config/logs/2025.07.01/21.04.07.490711.log +8 -0
- .gitattributes +15 -0
- condacolab_install.log +318 -0
- lerobot/.dockerignore +160 -0
- lerobot/.gitattributes +21 -0
- lerobot/.github/ISSUE_TEMPLATE/bug-report.yml +68 -0
- lerobot/.github/PULL_REQUEST_TEMPLATE.md +34 -0
- lerobot/.github/workflows/build-docker-images.yml +135 -0
- lerobot/.github/workflows/build_documentation.yml +23 -0
- lerobot/.github/workflows/build_pr_documentation.yml +19 -0
- lerobot/.github/workflows/nightly-tests.yml +93 -0
- lerobot/.github/workflows/quality.yml +72 -0
- lerobot/.github/workflows/test-docker-build.yml +82 -0
- lerobot/.github/workflows/test.yml +150 -0
- lerobot/.github/workflows/trufflehog.yml +35 -0
- lerobot/.github/workflows/upload_pr_documentation.yml +16 -0
- lerobot/.gitignore +175 -0
- lerobot/.pre-commit-config.yaml +74 -0
- lerobot/CODE_OF_CONDUCT.md +133 -0
- lerobot/CONTRIBUTING.md +305 -0
- lerobot/LICENSE +507 -0
- lerobot/MANIFEST.in +2 -0
- lerobot/Makefile +180 -0
- lerobot/README.md +412 -0
- lerobot/benchmarks/video/README.md +271 -0
- lerobot/benchmarks/video/capture_camera_feed.py +102 -0
- lerobot/benchmarks/video/run_video_benchmark.py +490 -0
- lerobot/docker/lerobot-cpu/Dockerfile +29 -0
- lerobot/docker/lerobot-gpu-dev/Dockerfile +68 -0
- lerobot/docker/lerobot-gpu/Dockerfile +24 -0
- lerobot/docs/README.md +137 -0
- lerobot/docs/source/_toctree.yml +44 -0
- lerobot/docs/source/backwardcomp.mdx +82 -0
- lerobot/docs/source/cameras.mdx +173 -0
- lerobot/docs/source/contributing.md +305 -0
- lerobot/docs/source/hilserl.mdx +548 -0
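The commit title is the default message of huggingface_hub's folder-upload API, so the upload was presumably produced with something like the minimal sketch below (the local path and repo id are hypothetical; they are not named anywhere in this diff):

    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_folder(
        folder_path="./my_local_folder",   # hypothetical local directory to push
        repo_id="username/my-dataset",     # hypothetical target repo on the Hub
        repo_type="dataset",               # or "model" / "space"
        commit_message="Upload folder using huggingface_hub",
    )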
.config/.last_opt_in_prompt.yaml
ADDED
@@ -0,0 +1 @@
+{}
.config/.last_survey_prompt.yaml
ADDED
@@ -0,0 +1 @@
+last_prompt_time: 1751403832.4579225
.config/.last_update_check.json
ADDED
@@ -0,0 +1 @@
+{"last_update_check_time": 1751403834.040841, "last_update_check_revision": 20250627154417, "notifications": [], "last_nag_times": {}}
.config/active_config
ADDED
@@ -0,0 +1 @@
+default
.config/config_sentinel
ADDED
File without changes
.config/configurations/config_default
ADDED
@@ -0,0 +1,6 @@
+[component_manager]
+disable_update_check = true
+
+[compute]
+gce_metadata_read_timeout_sec = 0
+
.config/default_configs.db
ADDED
Binary file (12.3 kB)
.config/gce
ADDED
@@ -0,0 +1 @@
+False
.config/hidden_gcloud_config_universe_descriptor_data_cache_configs.db
ADDED
Binary file (12.3 kB)
.config/logs/2025.07.01/21.03.20.550425.log
ADDED
@@ -0,0 +1,765 @@
+2025-07-01 21:03:32,573 DEBUG root Loaded Command Group: ['gcloud', 'components']
+2025-07-01 21:03:32,577 DEBUG root Loaded Command Group: ['gcloud', 'components', 'update']
+2025-07-01 21:03:32,579 DEBUG root Running [gcloud.components.update] with arguments: [--compile-python: "True", --quiet: "True", COMPONENT-IDS:6: "['core', 'gcloud-deps', 'bq', 'gcloud', 'gcloud-crc32c', 'gsutil']"]
+2025-07-01 21:03:32,580 INFO ___FILE_ONLY___ Beginning update. This process may take several minutes.
+2025-07-01 21:03:32,614 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
+2025-07-01 21:03:32,626 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components-2.json HTTP/11" 200 239798
+Your current Google Cloud CLI version is: 529.0.0
+2025-07-01 21:03:32,639 INFO ___FILE_ONLY___ Installing components from version: 529.0.0
+These components will be installed:
+  BigQuery Command Line Tool                            2.1.19       1.8 MiB
+  BigQuery Command Line Tool (Platform Specific)        2.1.17       < 1 MiB
+  Bundled Python 3.12 (Platform Specific)               3.12.9       89.3 MiB
+  Cloud Storage Command Line Tool                       5.35         12.4 MiB
+  Cloud Storage Command Line Tool (Platform Specific)   5.34         < 1 MiB
+  Google Cloud CLI Core Libraries (Platform Specific)   2025.05.23   < 1 MiB
+  Google Cloud CRC32C Hash Tool (Platform Specific)     1.0.0        1.5 MiB
+  gcloud cli dependencies (Platform Specific)           2021.04.16   < 1 MiB
+2025-07-01 21:03:33,180 INFO ___FILE_ONLY___ For the latest full release notes, please visit:
+  https://cloud.google.com/sdk/release_notes
+2025-07-01 21:03:33,180 INFO ___FILE_ONLY___ Performing in place update...
+2025-07-01 21:03:33,200 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-bq-20250627154417.tar.gz HTTP/11" 200 1850167
+2025-07-01 21:03:33,240 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-bq-nix-20250523104322.tar.gz HTTP/11" 200 1935
+2025-07-01 21:03:33,266 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-bundled-python3-unix-linux-x86_64-20250502143716.tar.gz HTTP/11" 200 93610468
+2025-07-01 21:03:33,790 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gsutil-20250627154417.tar.gz HTTP/11" 200 12962791
+2025-07-01 21:03:33,883 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gsutil-nix-20250523104322.tar.gz HTTP/11" 200 1950
+2025-07-01 21:03:33,906 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-core-nix-20250523104322.tar.gz HTTP/11" 200 2325
+2025-07-01 21:03:33,968 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gcloud-crc32c-linux-x86_64-20250613150750.tar.gz HTTP/11" 200 1525557
+2025-07-01 21:03:34,012 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gcloud-deps-linux-x86_64-20210416153011.tar.gz HTTP/11" 200 104
+[Downloading/Installing progress banners and repeated '═' progress ticks for each component omitted]
+2025-07-01 21:03:41,393 DEBUG root Updating notification cache...
+2025-07-01 21:03:41,395 INFO ___FILE_ONLY___ Performing post processing steps...
+2025-07-01 21:03:41,396 DEBUG root Executing command: ['/tools/google-cloud-sdk/bin/gcloud', 'components', 'post-process']
+2025-07-01 21:03:52,453 INFO root descriptor_list: [{'universeDomain': 'googleapis.com', 'universeShortName': '', 'authenticationDomain': 'auth.cloud.google.com', 'projectPrefix': '', 'cloudWebDomain': 'cloud.google.com', 'documentationDomain': 'cloud.google.com', 'version': '1.0.0', 'state': 'primary', 'artifactRegistryDomain': 'pkg.dev'}]
+Update done!
+2025-07-01 21:03:52,456 DEBUG root Chosen display Format:none
+2025-07-01 21:03:52,457 INFO root Display format: "none"
.config/logs/2025.07.01/21.03.41.948871.log
ADDED
@@ -0,0 +1,5 @@
+2025-07-01 21:03:41,949 DEBUG root Loaded Command Group: ['gcloud', 'components']
+2025-07-01 21:03:41,951 DEBUG root Loaded Command Group: ['gcloud', 'components', 'post_process']
+2025-07-01 21:03:41,953 DEBUG root Running [gcloud.components.post-process] with arguments: []
+2025-07-01 21:03:52,197 DEBUG root Chosen display Format:none
+2025-07-01 21:03:52,198 INFO root Display format: "none"
.config/logs/2025.07.01/21.03.53.252709.log
ADDED
|
@@ -0,0 +1,153 @@
|
| 1 |
+
2025-07-01 21:03:53,253 DEBUG root Loaded Command Group: ['gcloud', 'components']
|
| 2 |
+
2025-07-01 21:03:53,255 DEBUG root Loaded Command Group: ['gcloud', 'components', 'update']
|
| 3 |
+
2025-07-01 21:03:53,257 DEBUG root Running [gcloud.components.update] with arguments: [--quiet: "True", COMPONENT-IDS:8: "['gcloud', 'core', 'bq', 'gsutil', 'compute', 'preview', 'alpha', 'beta']"]
|
| 4 |
+
2025-07-01 21:03:53,258 INFO ___FILE_ONLY___ Beginning update. This process may take several minutes.
|
| 5 |
+
|
| 6 |
+
2025-07-01 21:03:53,268 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
|
| 7 |
+
2025-07-01 21:03:53,281 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components-2.json HTTP/1.1" 200 239798
|
| 8 |
+
2025-07-01 21:03:53,295 WARNING root Component [compute] no longer exists.
|
| 9 |
+
2025-07-01 21:03:53,296 INFO ___FILE_ONLY___
|
| 10 |
+
|
| 11 |
+
2025-07-01 21:03:53,296 INFO ___FILE_ONLY___
|
| 12 |
+
Your current Google Cloud CLI version is: 529.0.0
|
| 13 |
+
|
| 14 |
+
2025-07-01 21:03:53,296 INFO ___FILE_ONLY___ Installing components from version: 529.0.0
|
| 15 |
+
|
| 16 |
+
2025-07-01 21:03:53,296 INFO ___FILE_ONLY___
|
| 17 |
+
|
| 18 |
+
2025-07-01 21:03:53,296 DEBUG root Chosen display Format:table[box,title="These components will be removed."](details.display_name:label=Name:align=left,version.version_string:label=Version:align=right,data.size.size(zero="",min=1048576):label=Size:align=right)
|
| 19 |
+
2025-07-01 21:03:53,297 DEBUG root Chosen display Format:table[box,title="These components will be updated."](details.display_name:label=Name:align=left,version.version_string:label=Version:align=right,data.size.size(zero="",min=1048576):label=Size:align=right)
|
| 20 |
+
2025-07-01 21:03:53,297 DEBUG root Chosen display Format:table[box,title="These components will be installed."](details.display_name:label=Name:align=left,version.version_string:label=Version:align=right,data.size.size(zero="",min=1048576):label=Size:align=right)
|
| 21 |
+
2025-07-01 21:03:53,312 INFO ___FILE_ONLY___ ┌────────────────────────────────────────────────┐
|
| 22 |
+
2025-07-01 21:03:53,312 INFO ___FILE_ONLY___
|
| 23 |
+
|
| 24 |
+
2025-07-01 21:03:53,312 INFO ___FILE_ONLY___ │ These components will be installed. │
|
| 25 |
+
2025-07-01 21:03:53,312 INFO ___FILE_ONLY___
|
| 26 |
+
|
| 27 |
+
2025-07-01 21:03:53,312 INFO ___FILE_ONLY___ ├─────────────────────────┬────────────┬─────────┤
|
| 28 |
+
2025-07-01 21:03:53,312 INFO ___FILE_ONLY___
|
| 29 |
+
|
| 30 |
+
2025-07-01 21:03:53,312 INFO ___FILE_ONLY___ │ Name │ Version │ Size │
|
| 31 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___
|
| 32 |
+
|
| 33 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ ├─────────────────────────┼────────────┼─────────┤
|
| 34 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___
|
| 35 |
+
|
| 36 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ │
|
| 37 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ gcloud Alpha Commands
|
| 38 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___
|
| 39 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ │
|
| 40 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ 2025.06.27
|
| 41 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___
|
| 42 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ │
|
| 43 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ < 1 MiB
|
| 44 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___
|
| 45 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ │
|
| 46 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___
|
| 47 |
+
|
| 48 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ │
|
| 49 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ gcloud Beta Commands
|
| 50 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___
|
| 51 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ │
|
| 52 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ 2025.06.27
|
| 53 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___
|
| 54 |
+
2025-07-01 21:03:53,313 INFO ___FILE_ONLY___ │
|
| 55 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___ < 1 MiB
|
| 56 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___
|
| 57 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___ │
|
| 58 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___
|
| 59 |
+
|
| 60 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___ │
|
| 61 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___ gcloud Preview Commands
|
| 62 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___
|
| 63 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___ │
|
| 64 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___
|
| 65 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___
|
| 66 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___ │
|
| 67 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___ < 1 MiB
|
| 68 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___
|
| 69 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___ │
|
| 70 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___
|
| 71 |
+
|
| 72 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___ └─────────────────────────┴────────────┴─────────┘
|
| 73 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___
|
| 74 |
+
|
| 75 |
+
2025-07-01 21:03:53,314 INFO ___FILE_ONLY___
|
| 76 |
+
|
| 77 |
+
2025-07-01 21:03:53,318 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
|
| 78 |
+
2025-07-01 21:03:53,332 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/RELEASE_NOTES HTTP/1.1" 200 1444035
|
| 79 |
+
2025-07-01 21:03:53,802 INFO ___FILE_ONLY___ For the latest full release notes, please visit:
|
| 80 |
+
https://cloud.google.com/sdk/release_notes
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
2025-07-01 21:03:53,803 INFO ___FILE_ONLY___ Performing in place update...
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
2025-07-01 21:03:53,805 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
|
| 87 |
+
|
| 88 |
+
2025-07-01 21:03:53,805 INFO ___FILE_ONLY___ ╠═ Downloading: gcloud Alpha Commands ═╣
|
| 89 |
+
|
| 90 |
+
2025-07-01 21:03:53,806 INFO ___FILE_ONLY___ ╚
|
| 91 |
+
2025-07-01 21:03:53,809 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
|
| 92 |
+
2025-07-01 21:03:53,916 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-alpha-20250627154417.tar.gz HTTP/1.1" 200 800
|
| 93 |
+
2025-07-01 21:03:53,917 INFO ___FILE_ONLY___ ════════════════════════════════════════════════════════════
|
| 94 |
+
2025-07-01 21:03:53,917 INFO ___FILE_ONLY___ ╝
|
| 95 |
+
|
| 96 |
+
2025-07-01 21:03:53,919 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
|
| 97 |
+
|
| 98 |
+
2025-07-01 21:03:53,919 INFO ___FILE_ONLY___ ╠═ Downloading: gcloud Beta Commands ═╣
|
| 99 |
+
|
| 100 |
+
2025-07-01 21:03:53,919 INFO ___FILE_ONLY___ ╚
|
| 101 |
+
2025-07-01 21:03:53,923 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
|
| 102 |
+
2025-07-01 21:03:53,974 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-beta-20250627154417.tar.gz HTTP/1.1" 200 797
|
| 103 |
+
2025-07-01 21:03:53,975 INFO ___FILE_ONLY___ ════════════════════════════════════════════════════════════
|
| 104 |
+
2025-07-01 21:03:53,975 INFO ___FILE_ONLY___ ╝
|
| 105 |
+
|
| 106 |
+
2025-07-01 21:03:53,977 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
|
| 107 |
+
|
| 108 |
+
2025-07-01 21:03:53,977 INFO ___FILE_ONLY___ ╠═ Downloading: gcloud Preview Commands ═╣
|
| 109 |
+
|
| 110 |
+
2025-07-01 21:03:53,977 INFO ___FILE_ONLY___ ╚
|
| 111 |
+
2025-07-01 21:03:53,981 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
|
| 112 |
+
2025-07-01 21:03:54,014 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-preview-20241115154308.tar.gz HTTP/1.1" 200 823
|
| 113 |
+
2025-07-01 21:03:54,015 INFO ___FILE_ONLY___ ════════════════════════════════════════════════════════════
|
| 114 |
+
2025-07-01 21:03:54,015 INFO ___FILE_ONLY___ ╝
|
| 115 |
+
|
| 116 |
+
2025-07-01 21:03:54,018 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
|
| 117 |
+
|
| 118 |
+
2025-07-01 21:03:54,018 INFO ___FILE_ONLY___ ╠═ Installing: gcloud Alpha Commands ═╣
|
| 119 |
+
|
| 120 |
+
2025-07-01 21:03:54,018 INFO ___FILE_ONLY___ ╚
|
| 121 |
+
2025-07-01 21:03:54,019 INFO ___FILE_ONLY___ ════════════════════════════════════════════════════════════
|
| 122 |
+
2025-07-01 21:03:54,019 INFO ___FILE_ONLY___ ╝
|
| 123 |
+
|
| 124 |
+
2025-07-01 21:03:54,026 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
|
| 125 |
+
|
| 126 |
+
2025-07-01 21:03:54,026 INFO ___FILE_ONLY___ ╠═ Installing: gcloud Beta Commands ═╣
|
| 127 |
+
|
| 128 |
+
2025-07-01 21:03:54,026 INFO ___FILE_ONLY___ ╚
|
| 129 |
+
2025-07-01 21:03:54,027 INFO ___FILE_ONLY___ ════════════════════════════════════════════════════════════
|
| 130 |
+
2025-07-01 21:03:54,027 INFO ___FILE_ONLY___ ╝
|
| 131 |
+
|
| 132 |
+
2025-07-01 21:03:54,033 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
|
| 133 |
+
|
| 134 |
+
2025-07-01 21:03:54,034 INFO ___FILE_ONLY___ ╠═ Installing: gcloud Preview Commands ═╣
|
| 135 |
+
|
| 136 |
+
2025-07-01 21:03:54,034 INFO ___FILE_ONLY___ ╚
|
| 137 |
+
2025-07-01 21:03:54,034 INFO ___FILE_ONLY___ ════════════════════════════════════════════════════════════
|
| 138 |
+
2025-07-01 21:03:54,035 INFO ___FILE_ONLY___ ╝
|
| 139 |
+
|
| 140 |
+
2025-07-01 21:03:54,040 DEBUG root Updating notification cache...
|
| 141 |
+
2025-07-01 21:03:54,041 INFO ___FILE_ONLY___
|
| 142 |
+
|
| 143 |
+
2025-07-01 21:03:54,043 INFO ___FILE_ONLY___ Performing post processing steps...
|
| 144 |
+
2025-07-01 21:03:54,043 DEBUG root Executing command: ['/tools/google-cloud-sdk/bin/gcloud', 'components', 'post-process']
|
| 145 |
+
2025-07-01 21:04:05,574 DEBUG ___FILE_ONLY___
|
| 146 |
+
2025-07-01 21:04:05,574 DEBUG ___FILE_ONLY___
|
| 147 |
+
2025-07-01 21:04:05,812 INFO root descriptor_list: [{'universeDomain': 'googleapis.com', 'universeShortName': '', 'authenticationDomain': 'auth.cloud.google.com', 'projectPrefix': '', 'cloudWebDomain': 'cloud.google.com', 'documentationDomain': 'cloud.google.com', 'version': '1.0.0', 'state': 'primary', 'artifactRegistryDomain': 'pkg.dev'}]
|
| 148 |
+
2025-07-01 21:04:05,813 INFO ___FILE_ONLY___
|
| 149 |
+
Update done!
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
2025-07-01 21:04:05,815 DEBUG root Chosen display Format:none
|
| 153 |
+
2025-07-01 21:04:05,815 INFO root Display format: "none"
|
.config/logs/2025.07.01/21.03.54.626322.log
ADDED
|
@@ -0,0 +1,5 @@
|
| 1 |
+
2025-07-01 21:03:54,627 DEBUG root Loaded Command Group: ['gcloud', 'components']
|
| 2 |
+
2025-07-01 21:03:54,628 DEBUG root Loaded Command Group: ['gcloud', 'components', 'post_process']
|
| 3 |
+
2025-07-01 21:03:54,630 DEBUG root Running [gcloud.components.post-process] with arguments: []
|
| 4 |
+
2025-07-01 21:04:05,377 DEBUG root Chosen display Format:none
|
| 5 |
+
2025-07-01 21:04:05,378 INFO root Display format: "none"
|
.config/logs/2025.07.01/21.04.06.597104.log
ADDED
|
@@ -0,0 +1,8 @@
|
| 1 |
+
2025-07-01 21:04:06,599 DEBUG root Loaded Command Group: ['gcloud', 'config']
|
| 2 |
+
2025-07-01 21:04:06,658 DEBUG root Loaded Command Group: ['gcloud', 'config', 'set']
|
| 3 |
+
2025-07-01 21:04:06,661 DEBUG root Running [gcloud.config.set] with arguments: [SECTION/PROPERTY: "component_manager/disable_update_check", VALUE: "true"]
|
| 4 |
+
2025-07-01 21:04:06,662 INFO ___FILE_ONLY___ Updated property [component_manager/disable_update_check].
|
| 5 |
+
|
| 6 |
+
2025-07-01 21:04:06,663 DEBUG root Chosen display Format:default
|
| 7 |
+
2025-07-01 21:04:06,663 INFO root Display format: "default"
|
| 8 |
+
2025-07-01 21:04:06,664 DEBUG root SDK update checks are disabled.
|
.config/logs/2025.07.01/21.04.07.490711.log
ADDED
|
@@ -0,0 +1,8 @@
|
| 1 |
+
2025-07-01 21:04:07,492 DEBUG root Loaded Command Group: ['gcloud', 'config']
|
| 2 |
+
2025-07-01 21:04:07,547 DEBUG root Loaded Command Group: ['gcloud', 'config', 'set']
|
| 3 |
+
2025-07-01 21:04:07,550 DEBUG root Running [gcloud.config.set] with arguments: [SECTION/PROPERTY: "compute/gce_metadata_read_timeout_sec", VALUE: "0"]
|
| 4 |
+
2025-07-01 21:04:07,551 INFO ___FILE_ONLY___ Updated property [compute/gce_metadata_read_timeout_sec].
|
| 5 |
+
|
| 6 |
+
2025-07-01 21:04:07,552 DEBUG root Chosen display Format:default
|
| 7 |
+
2025-07-01 21:04:07,553 INFO root Display format: "default"
|
| 8 |
+
2025-07-01 21:04:07,554 DEBUG root SDK update checks are disabled.
|
.gitattributes
CHANGED
|
@@ -33,3 +33,18 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
lerobot/media/gym/aloha_act.gif filter=lfs diff=lfs merge=lfs -text
|
| 37 |
+
lerobot/media/gym/pusht_diffusion.gif filter=lfs diff=lfs merge=lfs -text
|
| 38 |
+
lerobot/media/gym/simxarm_tdmpc.gif filter=lfs diff=lfs merge=lfs -text
|
| 39 |
+
lerobot/media/lekiwi/kiwi.webp filter=lfs diff=lfs merge=lfs -text
|
| 40 |
+
lerobot/media/lerobot-logo-light.png filter=lfs diff=lfs merge=lfs -text
|
| 41 |
+
lerobot/media/lerobot-logo-thumbnail.png filter=lfs diff=lfs merge=lfs -text
|
| 42 |
+
lerobot/media/so100/leader_follower.webp filter=lfs diff=lfs merge=lfs -text
|
| 43 |
+
lerobot/media/so101/so101-leader.webp filter=lfs diff=lfs merge=lfs -text
|
| 44 |
+
lerobot/media/so101/so101.webp filter=lfs diff=lfs merge=lfs -text
|
| 45 |
+
lerobot/media/wandb.png filter=lfs diff=lfs merge=lfs -text
|
| 46 |
+
lerobot/tests/artifacts/cameras/image_320x180.png filter=lfs diff=lfs merge=lfs -text
|
| 47 |
+
lerobot/tests/artifacts/cameras/image_480x270.png filter=lfs diff=lfs merge=lfs -text
|
| 48 |
+
lerobot/tests/artifacts/cameras/test_rs.bag filter=lfs diff=lfs merge=lfs -text
|
| 49 |
+
sample_data/mnist_test.csv filter=lfs diff=lfs merge=lfs -text
|
| 50 |
+
sample_data/mnist_train_small.csv filter=lfs diff=lfs merge=lfs -text
|
condacolab_install.log
ADDED
|
@@ -0,0 +1,318 @@
|
| 1 |
+
PREFIX=/usr/local
|
| 2 |
+
Unpacking payload ...
|
| 3 |
+
Extracting _libgcc_mutex-0.1-conda_forge.tar.bz2
|
| 4 |
+
Extracting ca-certificates-2024.12.14-hbcca054_0.conda
|
| 5 |
+
Extracting ld_impl_linux-64-2.43-h712a8e2_2.conda
|
| 6 |
+
Extracting pybind11-abi-4-hd8ed1ab_3.tar.bz2
|
| 7 |
+
Extracting python_abi-3.11-5_cp311.conda
|
| 8 |
+
Extracting tzdata-2024b-hc8b5060_0.conda
|
| 9 |
+
Extracting libgomp-14.2.0-h77fa898_1.conda
|
| 10 |
+
Extracting _openmp_mutex-4.5-2_gnu.tar.bz2
|
| 11 |
+
Extracting libgcc-14.2.0-h77fa898_1.conda
|
| 12 |
+
Extracting c-ares-1.34.4-hb9d3cd8_0.conda
|
| 13 |
+
Extracting libexpat-2.6.4-h5888daf_0.conda
|
| 14 |
+
Extracting libgcc-ng-14.2.0-h69a702a_1.conda
|
| 15 |
+
Extracting liblzma-5.6.3-hb9d3cd8_1.conda
|
| 16 |
+
Extracting libstdcxx-14.2.0-hc0a3c3a_1.conda
|
| 17 |
+
Extracting libzlib-1.3.1-hb9d3cd8_2.conda
|
| 18 |
+
Extracting ncurses-6.5-h2d0b736_2.conda
|
| 19 |
+
Extracting openssl-3.4.0-h7b32b05_1.conda
|
| 20 |
+
Extracting reproc-14.2.5.post0-hb9d3cd8_0.conda
|
| 21 |
+
Extracting bzip2-1.0.8-h4bc722e_7.conda
|
| 22 |
+
Extracting keyutils-1.6.1-h166bdaf_0.tar.bz2
|
| 23 |
+
Extracting libedit-3.1.20240808-pl5321h7949ede_0.conda
|
| 24 |
+
Extracting libev-4.33-hd590300_2.conda
|
| 25 |
+
Extracting libffi-3.4.2-h7f98852_5.tar.bz2
|
| 26 |
+
Extracting libiconv-1.17-hd590300_2.conda
|
| 27 |
+
Extracting libnsl-2.0.1-hd590300_0.conda
|
| 28 |
+
Extracting libsqlite-3.47.2-hee588c1_0.conda
|
| 29 |
+
Extracting libssh2-1.11.1-hf672d98_0.conda
|
| 30 |
+
Extracting libstdcxx-ng-14.2.0-h4852527_1.conda
|
| 31 |
+
Extracting libuuid-2.38.1-h0b41bf4_0.conda
|
| 32 |
+
Extracting libxcrypt-4.4.36-hd590300_1.conda
|
| 33 |
+
Extracting lz4-c-1.10.0-h5888daf_1.conda
|
| 34 |
+
Extracting lzo-2.10-hd590300_1001.conda
|
| 35 |
+
Extracting readline-8.2-h8228510_1.conda
|
| 36 |
+
Extracting reproc-cpp-14.2.5.post0-h5888daf_0.conda
|
| 37 |
+
Extracting tk-8.6.13-noxft_h4845f30_101.conda
|
| 38 |
+
Extracting fmt-11.0.2-h434a139_0.conda
|
| 39 |
+
Extracting krb5-1.21.3-h659f571_0.conda
|
| 40 |
+
Extracting libnghttp2-1.64.0-h161d5f1_0.conda
|
| 41 |
+
Extracting libsolv-0.7.30-h3509ff9_0.conda
|
| 42 |
+
Extracting libxml2-2.13.5-h0d44e9d_1.conda
|
| 43 |
+
Extracting python-3.11.11-h9e4cc4f_1_cpython.conda
|
| 44 |
+
Extracting yaml-cpp-0.8.0-h59595ed_0.conda
|
| 45 |
+
Extracting zstd-1.5.6-ha6fb4c9_0.conda
|
| 46 |
+
Extracting libarchive-3.7.7-h4585015_3.conda
|
| 47 |
+
Extracting libcurl-8.11.1-h332b0f4_0.conda
|
| 48 |
+
Extracting menuinst-2.2.0-py311h38be061_0.conda
|
| 49 |
+
Extracting archspec-0.2.3-pyhd8ed1ab_0.conda
|
| 50 |
+
Extracting boltons-24.0.0-pyhd8ed1ab_1.conda
|
| 51 |
+
Extracting brotli-python-1.1.0-py311hfdbb021_2.conda
|
| 52 |
+
Extracting certifi-2024.12.14-pyhd8ed1ab_0.conda
|
| 53 |
+
Extracting charset-normalizer-3.4.1-pyhd8ed1ab_0.conda
|
| 54 |
+
Extracting colorama-0.4.6-pyhd8ed1ab_1.conda
|
| 55 |
+
Extracting distro-1.9.0-pyhd8ed1ab_1.conda
|
| 56 |
+
Extracting frozendict-2.4.6-py311h9ecbd09_0.conda
|
| 57 |
+
Extracting hpack-4.0.0-pyhd8ed1ab_1.conda
|
| 58 |
+
Extracting hyperframe-6.0.1-pyhd8ed1ab_1.conda
|
| 59 |
+
Extracting idna-3.10-pyhd8ed1ab_1.conda
|
| 60 |
+
Extracting jsonpointer-3.0.0-py311h38be061_1.conda
|
| 61 |
+
Extracting libmamba-1.5.12-h49b8a8d_0.conda
|
| 62 |
+
Extracting packaging-24.2-pyhd8ed1ab_2.conda
|
| 63 |
+
Extracting platformdirs-4.3.6-pyhd8ed1ab_1.conda
|
| 64 |
+
Extracting pluggy-1.5.0-pyhd8ed1ab_1.conda
|
| 65 |
+
Extracting pycosat-0.6.6-py311h9ecbd09_2.conda
|
| 66 |
+
Extracting pycparser-2.22-pyh29332c3_1.conda
|
| 67 |
+
Extracting pysocks-1.7.1-pyha55dd90_7.conda
|
| 68 |
+
Extracting ruamel.yaml.clib-0.2.8-py311h9ecbd09_1.conda
|
| 69 |
+
Extracting setuptools-65.6.3-pyhd8ed1ab_0.conda
|
| 70 |
+
Extracting truststore-0.10.0-pyhd8ed1ab_0.conda
|
| 71 |
+
Extracting wheel-0.45.1-pyhd8ed1ab_1.conda
|
| 72 |
+
Extracting cffi-1.17.1-py311hf29c0ef_0.conda
|
| 73 |
+
Extracting h2-4.1.0-pyhd8ed1ab_1.conda
|
| 74 |
+
Extracting jsonpatch-1.33-pyhd8ed1ab_1.conda
|
| 75 |
+
Extracting libmambapy-1.5.12-py311hb3373dd_0.conda
|
| 76 |
+
Extracting pip-24.3.1-pyh8b19718_2.conda
|
| 77 |
+
Extracting ruamel.yaml-0.18.10-py311h9ecbd09_0.conda
|
| 78 |
+
Extracting tqdm-4.67.1-pyhd8ed1ab_1.conda
|
| 79 |
+
Extracting zstandard-0.23.0-py311hbc35293_1.conda
|
| 80 |
+
Extracting conda-package-streaming-0.11.0-pyhd8ed1ab_0.conda
|
| 81 |
+
Extracting urllib3-2.3.0-pyhd8ed1ab_0.conda
|
| 82 |
+
Extracting requests-2.32.3-pyhd8ed1ab_1.conda
|
| 83 |
+
Extracting conda-package-handling-2.4.0-pyh7900ff3_2.conda
|
| 84 |
+
Extracting conda-24.11.2-py311h38be061_1.conda
|
| 85 |
+
Extracting conda-libmamba-solver-24.9.0-pyhd8ed1ab_0.conda
|
| 86 |
+
Extracting mamba-1.5.12-py311h3072747_0.conda
|
| 87 |
+
|
| 88 |
+
Installing base environment...
|
| 89 |
+
|
| 90 |
+
Transaction
|
| 91 |
+
|
| 92 |
+
Prefix: /usr/local
|
| 93 |
+
|
| 94 |
+
Updating specs:
|
| 95 |
+
|
| 96 |
+
- conda-forge/linux-64::_libgcc_mutex==0.1=conda_forge[md5=d7c89558ba9fa0495403155b64376d81]
|
| 97 |
+
- conda-forge/linux-64::ca-certificates==2024.12.14=hbcca054_0[md5=720523eb0d6a9b0f6120c16b2aa4e7de]
|
| 98 |
+
- conda-forge/linux-64::ld_impl_linux-64==2.43=h712a8e2_2[md5=048b02e3962f066da18efe3a21b77672]
|
| 99 |
+
- conda-forge/noarch::pybind11-abi==4=hd8ed1ab_3[md5=878f923dd6acc8aeb47a75da6c4098be]
|
| 100 |
+
- conda-forge/linux-64::python_abi==3.11=5_cp311[md5=139a8d40c8a2f430df31048949e450de]
|
| 101 |
+
- conda-forge/noarch::tzdata==2024b=hc8b5060_0[md5=8ac3367aafb1cc0a068483c580af8015]
|
| 102 |
+
- conda-forge/linux-64::libgomp==14.2.0=h77fa898_1[md5=cc3573974587f12dda90d96e3e55a702]
|
| 103 |
+
- conda-forge/linux-64::_openmp_mutex==4.5=2_gnu[md5=73aaf86a425cc6e73fcf236a5a46396d]
|
| 104 |
+
- conda-forge/linux-64::libgcc==14.2.0=h77fa898_1[md5=3cb76c3f10d3bc7f1105b2fc9db984df]
|
| 105 |
+
- conda-forge/linux-64::c-ares==1.34.4=hb9d3cd8_0[md5=e2775acf57efd5af15b8e3d1d74d72d3]
|
| 106 |
+
- conda-forge/linux-64::libexpat==2.6.4=h5888daf_0[md5=db833e03127376d461e1e13e76f09b6c]
|
| 107 |
+
- conda-forge/linux-64::libgcc-ng==14.2.0=h69a702a_1[md5=e39480b9ca41323497b05492a63bc35b]
|
| 108 |
+
- conda-forge/linux-64::liblzma==5.6.3=hb9d3cd8_1[md5=2ecf2f1c7e4e21fcfe6423a51a992d84]
|
| 109 |
+
- conda-forge/linux-64::libstdcxx==14.2.0=hc0a3c3a_1[md5=234a5554c53625688d51062645337328]
|
| 110 |
+
- conda-forge/linux-64::libzlib==1.3.1=hb9d3cd8_2[md5=edb0dca6bc32e4f4789199455a1dbeb8]
|
| 111 |
+
- conda-forge/linux-64::ncurses==6.5=h2d0b736_2[md5=04b34b9a40cdc48cfdab261ab176ff74]
|
| 112 |
+
- conda-forge/linux-64::openssl==3.4.0=h7b32b05_1[md5=4ce6875f75469b2757a65e10a5d05e31]
|
| 113 |
+
- conda-forge/linux-64::reproc==14.2.5.post0=hb9d3cd8_0[md5=69fbc0a9e42eb5fe6733d2d60d818822]
|
| 114 |
+
- conda-forge/linux-64::bzip2==1.0.8=h4bc722e_7[md5=62ee74e96c5ebb0af99386de58cf9553]
|
| 115 |
+
- conda-forge/linux-64::keyutils==1.6.1=h166bdaf_0[md5=30186d27e2c9fa62b45fb1476b7200e3]
|
| 116 |
+
- conda-forge/linux-64::libedit==3.1.20240808=pl5321h7949ede_0[md5=8247f80f3dc464d9322e85007e307fe8]
|
| 117 |
+
- conda-forge/linux-64::libev==4.33=hd590300_2[md5=172bf1cd1ff8629f2b1179945ed45055]
|
| 118 |
+
- conda-forge/linux-64::libffi==3.4.2=h7f98852_5[md5=d645c6d2ac96843a2bfaccd2d62b3ac3]
|
| 119 |
+
- conda-forge/linux-64::libiconv==1.17=hd590300_2[md5=d66573916ffcf376178462f1b61c941e]
|
| 120 |
+
- conda-forge/linux-64::libnsl==2.0.1=hd590300_0[md5=30fd6e37fe21f86f4bd26d6ee73eeec7]
|
| 121 |
+
- conda-forge/linux-64::libsqlite==3.47.2=hee588c1_0[md5=b58da17db24b6e08bcbf8fed2fb8c915]
|
| 122 |
+
- conda-forge/linux-64::libssh2==1.11.1=hf672d98_0[md5=be2de152d8073ef1c01b7728475f2fe7]
|
| 123 |
+
- conda-forge/linux-64::libstdcxx-ng==14.2.0=h4852527_1[md5=8371ac6457591af2cf6159439c1fd051]
|
| 124 |
+
- conda-forge/linux-64::libuuid==2.38.1=h0b41bf4_0[md5=40b61aab5c7ba9ff276c41cfffe6b80b]
|
| 125 |
+
- conda-forge/linux-64::libxcrypt==4.4.36=hd590300_1[md5=5aa797f8787fe7a17d1b0821485b5adc]
|
| 126 |
+
- conda-forge/linux-64::lz4-c==1.10.0=h5888daf_1[md5=9de5350a85c4a20c685259b889aa6393]
|
| 127 |
+
- conda-forge/linux-64::lzo==2.10=hd590300_1001[md5=ec7398d21e2651e0dcb0044d03b9a339]
|
| 128 |
+
- conda-forge/linux-64::readline==8.2=h8228510_1[md5=47d31b792659ce70f470b5c82fdfb7a4]
|
| 129 |
+
- conda-forge/linux-64::reproc-cpp==14.2.5.post0=h5888daf_0[md5=828302fca535f9cfeb598d5f7c204323]
|
| 130 |
+
- conda-forge/linux-64::tk==8.6.13=noxft_h4845f30_101[md5=d453b98d9c83e71da0741bb0ff4d76bc]
|
| 131 |
+
- conda-forge/linux-64::fmt==11.0.2=h434a139_0[md5=995f7e13598497691c1dc476d889bc04]
|
| 132 |
+
- conda-forge/linux-64::krb5==1.21.3=h659f571_0[md5=3f43953b7d3fb3aaa1d0d0723d91e368]
|
| 133 |
+
- conda-forge/linux-64::libnghttp2==1.64.0=h161d5f1_0[md5=19e57602824042dfd0446292ef90488b]
|
| 134 |
+
- conda-forge/linux-64::libsolv==0.7.30=h3509ff9_0[md5=02539b77d25aa4f65b20246549e256c3]
|
| 135 |
+
- conda-forge/linux-64::libxml2==2.13.5=h0d44e9d_1[md5=f5b05674697ae7d2c5932766695945e1]
|
| 136 |
+
- conda-forge/linux-64::python==3.11.11=h9e4cc4f_1_cpython[md5=8387070aa413ce9a8cc35a509fae938b]
|
| 137 |
+
- conda-forge/linux-64::yaml-cpp==0.8.0=h59595ed_0[md5=965eaacd7c18eb8361fd12bb9e7a57d7]
|
| 138 |
+
- conda-forge/linux-64::zstd==1.5.6=ha6fb4c9_0[md5=4d056880988120e29d75bfff282e0f45]
|
| 139 |
+
- conda-forge/linux-64::libarchive==3.7.7=h4585015_3[md5=a28808eae584c7f519943719b2a2b386]
|
| 140 |
+
- conda-forge/linux-64::libcurl==8.11.1=h332b0f4_0[md5=2b3e0081006dc21e8bf53a91c83a055c]
|
| 141 |
+
- conda-forge/linux-64::menuinst==2.2.0=py311h38be061_0[md5=56b688f5333037364ddf9c5cbdd4d672]
|
| 142 |
+
- conda-forge/noarch::archspec==0.2.3=pyhd8ed1ab_0[md5=192278292e20704f663b9c766909d67b]
|
| 143 |
+
- conda-forge/noarch::boltons==24.0.0=pyhd8ed1ab_1[md5=d88c38e66d85ecc9c7e2c4110676bbf4]
|
| 144 |
+
- conda-forge/linux-64::brotli-python==1.1.0=py311hfdbb021_2[md5=d21daab070d76490cb39a8f1d1729d79]
|
| 145 |
+
- conda-forge/noarch::certifi==2024.12.14=pyhd8ed1ab_0[md5=6feb87357ecd66733be3279f16a8c400]
|
| 146 |
+
- conda-forge/noarch::charset-normalizer==3.4.1=pyhd8ed1ab_0[md5=e83a31202d1c0a000fce3e9cf3825875]
|
| 147 |
+
- conda-forge/noarch::colorama==0.4.6=pyhd8ed1ab_1[md5=962b9857ee8e7018c22f2776ffa0b2d7]
|
| 148 |
+
- conda-forge/noarch::distro==1.9.0=pyhd8ed1ab_1[md5=0a2014fd9860f8b1eaa0b1f3d3771a08]
|
| 149 |
+
- conda-forge/linux-64::frozendict==2.4.6=py311h9ecbd09_0[md5=5ed089614b88920f8cc673fe3bc34558]
|
| 150 |
+
- conda-forge/noarch::hpack==4.0.0=pyhd8ed1ab_1[md5=2aa5ff7fa34a81b9196532c84c10d865]
|
| 151 |
+
- conda-forge/noarch::hyperframe==6.0.1=pyhd8ed1ab_1[md5=566e75c90c1d0c8c459eb0ad9833dc7a]
|
| 152 |
+
- conda-forge/noarch::idna==3.10=pyhd8ed1ab_1[md5=39a4f67be3286c86d696df570b1201b7]
|
| 153 |
+
- conda-forge/linux-64::jsonpointer==3.0.0=py311h38be061_1[md5=5ca76f61b00a15a9be0612d4d883badc]
|
| 154 |
+
- conda-forge/linux-64::libmamba==1.5.12=h49b8a8d_0[md5=d6aec145c4f2473c09c2699d74f6dfc0]
|
| 155 |
+
- conda-forge/noarch::packaging==24.2=pyhd8ed1ab_2[md5=3bfed7e6228ebf2f7b9eaa47f1b4e2aa]
|
| 156 |
+
- conda-forge/noarch::platformdirs==4.3.6=pyhd8ed1ab_1[md5=577852c7e53901ddccc7e6a9959ddebe]
|
| 157 |
+
- conda-forge/noarch::pluggy==1.5.0=pyhd8ed1ab_1[md5=e9dcbce5f45f9ee500e728ae58b605b6]
|
| 158 |
+
- conda-forge/linux-64::pycosat==0.6.6=py311h9ecbd09_2[md5=65a6d37c5a2868d5605cf2df3ea0236e]
|
| 159 |
+
- conda-forge/noarch::pycparser==2.22=pyh29332c3_1[md5=12c566707c80111f9799308d9e265aef]
|
| 160 |
+
- conda-forge/noarch::pysocks==1.7.1=pyha55dd90_7[md5=461219d1a5bd61342293efa2c0c90eac]
|
| 161 |
+
- conda-forge/linux-64::ruamel.yaml.clib==0.2.8=py311h9ecbd09_1[md5=e56869fca385961323e43783b89bef66]
|
| 162 |
+
- conda-forge/noarch::setuptools==65.6.3=pyhd8ed1ab_0[md5=9600fc9524d3f821e6a6d58c52f5bf5a]
|
| 163 |
+
- conda-forge/noarch::truststore==0.10.0=pyhd8ed1ab_0[md5=ad1c20cd193e3044bcf17798c33b9d67]
|
| 164 |
+
- conda-forge/noarch::wheel==0.45.1=pyhd8ed1ab_1[md5=75cb7132eb58d97896e173ef12ac9986]
|
| 165 |
+
- conda-forge/linux-64::cffi==1.17.1=py311hf29c0ef_0[md5=55553ecd5328336368db611f350b7039]
|
| 166 |
+
- conda-forge/noarch::h2==4.1.0=pyhd8ed1ab_1[md5=825927dc7b0f287ef8d4d0011bb113b1]
|
| 167 |
+
- conda-forge/noarch::jsonpatch==1.33=pyhd8ed1ab_1[md5=cb60ae9cf02b9fcb8004dec4089e5691]
|
| 168 |
+
- conda-forge/linux-64::libmambapy==1.5.12=py311hb3373dd_0[md5=47fb74d72998e00565428ccdcafc97cb]
|
| 169 |
+
- conda-forge/noarch::pip==24.3.1=pyh8b19718_2[md5=04e691b9fadd93a8a9fad87a81d4fd8f]
|
| 170 |
+
- conda-forge/linux-64::ruamel.yaml==0.18.10=py311h9ecbd09_0[md5=a3188715e28c25f1404b84c702e6fdf4]
|
| 171 |
+
- conda-forge/noarch::tqdm==4.67.1=pyhd8ed1ab_1[md5=9efbfdc37242619130ea42b1cc4ed861]
|
| 172 |
+
- conda-forge/linux-64::zstandard==0.23.0=py311hbc35293_1[md5=aec590674ba365e50ae83aa2d6e1efae]
|
| 173 |
+
- conda-forge/noarch::conda-package-streaming==0.11.0=pyhd8ed1ab_0[md5=bc9533d8616a97551ed144789bf9c1cd]
|
| 174 |
+
- conda-forge/noarch::urllib3==2.3.0=pyhd8ed1ab_0[md5=32674f8dbfb7b26410ed580dd3c10a29]
|
| 175 |
+
- conda-forge/noarch::requests==2.32.3=pyhd8ed1ab_1[md5=a9b9368f3701a417eac9edbcae7cb737]
|
| 176 |
+
- conda-forge/noarch::conda-package-handling==2.4.0=pyh7900ff3_2[md5=32c158f481b4fd7630c565030f7bc482]
|
| 177 |
+
- conda-forge/linux-64::conda==24.11.2=py311h38be061_1[md5=665bae7596e3f4131355ef85e30b8106]
|
| 178 |
+
- conda-forge/noarch::conda-libmamba-solver==24.9.0=pyhd8ed1ab_0[md5=45378d089c5f72c9c0d63d58414c645d]
|
| 179 |
+
- conda-forge/linux-64::mamba==1.5.12=py311h3072747_0[md5=d681a3fbf59b09439d72955897d7fc76]
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
Package Version Build Channel Size
|
| 183 |
+
─────────────────────────────────────────────────────────────────────────────────────
|
| 184 |
+
Install:
|
| 185 |
+
─────────────────────────────────────────────────────────────────────────────────────
|
| 186 |
+
|
| 187 |
+
+ _libgcc_mutex 0.1 conda_forge conda-forge
|
| 188 |
+
+ ca-certificates 2024.12.14 hbcca054_0 conda-forge
|
| 189 |
+
+ ld_impl_linux-64 2.43 h712a8e2_2 conda-forge
|
| 190 |
+
+ pybind11-abi 4 hd8ed1ab_3 conda-forge
|
| 191 |
+
+ python_abi 3.11 5_cp311 conda-forge
|
| 192 |
+
+ tzdata 2024b hc8b5060_0 conda-forge
|
| 193 |
+
+ libgomp 14.2.0 h77fa898_1 conda-forge
|
| 194 |
+
+ _openmp_mutex 4.5 2_gnu conda-forge
|
| 195 |
+
+ libgcc 14.2.0 h77fa898_1 conda-forge
|
| 196 |
+
+ c-ares 1.34.4 hb9d3cd8_0 conda-forge
|
| 197 |
+
+ libexpat 2.6.4 h5888daf_0 conda-forge
|
| 198 |
+
+ libgcc-ng 14.2.0 h69a702a_1 conda-forge
|
| 199 |
+
+ liblzma 5.6.3 hb9d3cd8_1 conda-forge
|
| 200 |
+
+ libstdcxx 14.2.0 hc0a3c3a_1 conda-forge
|
| 201 |
+
+ libzlib 1.3.1 hb9d3cd8_2 conda-forge
|
| 202 |
+
+ ncurses 6.5 h2d0b736_2 conda-forge
|
| 203 |
+
+ openssl 3.4.0 h7b32b05_1 conda-forge
|
| 204 |
+
+ reproc 14.2.5.post0 hb9d3cd8_0 conda-forge
|
| 205 |
+
+ bzip2 1.0.8 h4bc722e_7 conda-forge
|
| 206 |
+
+ keyutils 1.6.1 h166bdaf_0 conda-forge
|
| 207 |
+
+ libedit 3.1.20240808 pl5321h7949ede_0 conda-forge
|
| 208 |
+
+ libev 4.33 hd590300_2 conda-forge
|
| 209 |
+
+ libffi 3.4.2 h7f98852_5 conda-forge
|
| 210 |
+
+ libiconv 1.17 hd590300_2 conda-forge
|
| 211 |
+
+ libnsl 2.0.1 hd590300_0 conda-forge
|
| 212 |
+
+ libsqlite 3.47.2 hee588c1_0 conda-forge
|
| 213 |
+
+ libssh2 1.11.1 hf672d98_0 conda-forge
|
| 214 |
+
+ libstdcxx-ng 14.2.0 h4852527_1 conda-forge
|
| 215 |
+
+ libuuid 2.38.1 h0b41bf4_0 conda-forge
|
| 216 |
+
+ libxcrypt 4.4.36 hd590300_1 conda-forge
|
| 217 |
+
+ lz4-c 1.10.0 h5888daf_1 conda-forge
|
| 218 |
+
+ lzo 2.10 hd590300_1001 conda-forge
|
| 219 |
+
+ readline 8.2 h8228510_1 conda-forge
|
| 220 |
+
+ reproc-cpp 14.2.5.post0 h5888daf_0 conda-forge
|
| 221 |
+
+ tk 8.6.13 noxft_h4845f30_101 conda-forge
|
| 222 |
+
+ fmt 11.0.2 h434a139_0 conda-forge
|
| 223 |
+
+ krb5 1.21.3 h659f571_0 conda-forge
|
| 224 |
+
+ libnghttp2 1.64.0 h161d5f1_0 conda-forge
|
| 225 |
+
+ libsolv 0.7.30 h3509ff9_0 conda-forge
|
| 226 |
+
+ libxml2 2.13.5 h0d44e9d_1 conda-forge
|
| 227 |
+
+ python 3.11.11 h9e4cc4f_1_cpython conda-forge
|
| 228 |
+
+ yaml-cpp 0.8.0 h59595ed_0 conda-forge
|
| 229 |
+
+ zstd 1.5.6 ha6fb4c9_0 conda-forge
|
| 230 |
+
+ libarchive 3.7.7 h4585015_3 conda-forge
|
| 231 |
+
+ libcurl 8.11.1 h332b0f4_0 conda-forge
|
| 232 |
+
+ menuinst 2.2.0 py311h38be061_0 conda-forge
|
| 233 |
+
+ archspec 0.2.3 pyhd8ed1ab_0 conda-forge
|
| 234 |
+
+ boltons 24.0.0 pyhd8ed1ab_1 conda-forge
|
| 235 |
+
+ brotli-python 1.1.0 py311hfdbb021_2 conda-forge
|
| 236 |
+
+ certifi 2024.12.14 pyhd8ed1ab_0 conda-forge
|
| 237 |
+
+ charset-normalizer 3.4.1 pyhd8ed1ab_0 conda-forge
|
| 238 |
+
+ colorama 0.4.6 pyhd8ed1ab_1 conda-forge
|
| 239 |
+
+ distro 1.9.0 pyhd8ed1ab_1 conda-forge
|
| 240 |
+
+ frozendict 2.4.6 py311h9ecbd09_0 conda-forge
|
| 241 |
+
+ hpack 4.0.0 pyhd8ed1ab_1 conda-forge
|
| 242 |
+
+ hyperframe 6.0.1 pyhd8ed1ab_1 conda-forge
|
| 243 |
+
+ idna 3.10 pyhd8ed1ab_1 conda-forge
|
| 244 |
+
+ jsonpointer 3.0.0 py311h38be061_1 conda-forge
|
| 245 |
+
+ libmamba 1.5.12 h49b8a8d_0 conda-forge
|
| 246 |
+
+ packaging 24.2 pyhd8ed1ab_2 conda-forge
|
| 247 |
+
+ platformdirs 4.3.6 pyhd8ed1ab_1 conda-forge
|
| 248 |
+
+ pluggy 1.5.0 pyhd8ed1ab_1 conda-forge
|
| 249 |
+
+ pycosat 0.6.6 py311h9ecbd09_2 conda-forge
|
| 250 |
+
+ pycparser 2.22 pyh29332c3_1 conda-forge
|
| 251 |
+
+ pysocks 1.7.1 pyha55dd90_7 conda-forge
|
| 252 |
+
+ ruamel.yaml.clib 0.2.8 py311h9ecbd09_1 conda-forge
|
| 253 |
+
+ setuptools 65.6.3 pyhd8ed1ab_0 conda-forge
|
| 254 |
+
+ truststore 0.10.0 pyhd8ed1ab_0 conda-forge
|
| 255 |
+
+ wheel 0.45.1 pyhd8ed1ab_1 conda-forge
|
| 256 |
+
+ cffi 1.17.1 py311hf29c0ef_0 conda-forge
|
| 257 |
+
+ h2 4.1.0 pyhd8ed1ab_1 conda-forge
|
| 258 |
+
+ jsonpatch 1.33 pyhd8ed1ab_1 conda-forge
|
| 259 |
+
+ libmambapy 1.5.12 py311hb3373dd_0 conda-forge
|
| 260 |
+
+ pip 24.3.1 pyh8b19718_2 conda-forge
|
| 261 |
+
+ ruamel.yaml 0.18.10 py311h9ecbd09_0 conda-forge
|
| 262 |
+
+ tqdm 4.67.1 pyhd8ed1ab_1 conda-forge
|
| 263 |
+
+ zstandard 0.23.0 py311hbc35293_1 conda-forge
|
| 264 |
+
+ conda-package-streaming 0.11.0 pyhd8ed1ab_0 conda-forge
|
| 265 |
+
+ urllib3 2.3.0 pyhd8ed1ab_0 conda-forge
|
| 266 |
+
+ requests 2.32.3 pyhd8ed1ab_1 conda-forge
|
| 267 |
+
+ conda-package-handling 2.4.0 pyh7900ff3_2 conda-forge
|
| 268 |
+
+ conda 24.11.2 py311h38be061_1 conda-forge
|
| 269 |
+
+ conda-libmamba-solver 24.9.0 pyhd8ed1ab_0 conda-forge
|
| 270 |
+
+ mamba 1.5.12 py311h3072747_0 conda-forge
|
| 271 |
+
|
| 272 |
+
Summary:
|
| 273 |
+
|
| 274 |
+
Install: 84 packages
|
| 275 |
+
|
| 276 |
+
Total download: 0 B
|
| 277 |
+
|
| 278 |
+
─────────────────────────────────────────────────────────────────────────────────────
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
Transaction starting
|
| 283 |
+
warning libmamba [python-3.11.11-h9e4cc4f_1_cpython] The following files were already present in the environment:
|
| 284 |
+
- bin/python
|
| 285 |
+
warning libmamba [charset-normalizer-3.4.1-pyhd8ed1ab_0] The following files were already present in the environment:
|
| 286 |
+
- bin/normalizer
|
| 287 |
+
warning libmamba [distro-1.9.0-pyhd8ed1ab_1] The following files were already present in the environment:
|
| 288 |
+
- bin/distro
|
| 289 |
+
warning libmamba [jsonpointer-3.0.0-py311h38be061_1] The following files were already present in the environment:
|
| 290 |
+
- bin/jsonpointer
|
| 291 |
+
warning libmamba [wheel-0.45.1-pyhd8ed1ab_1] The following files were already present in the environment:
|
| 292 |
+
- bin/wheel
|
| 293 |
+
warning libmamba [jsonpatch-1.33-pyhd8ed1ab_1] The following files were already present in the environment:
|
| 294 |
+
- bin/jsondiff
|
| 295 |
+
- bin/jsonpatch
|
| 296 |
+
warning libmamba [pip-24.3.1-pyh8b19718_2] The following files were already present in the environment:
|
| 297 |
+
- bin/pip
|
| 298 |
+
- bin/pip3
|
| 299 |
+
warning libmamba [tqdm-4.67.1-pyhd8ed1ab_1] The following files were already present in the environment:
|
| 300 |
+
- bin/tqdm
|
| 301 |
+
|
| 302 |
+
Transaction finished
|
| 303 |
+
|
| 304 |
+
To activate this environment, use:
|
| 305 |
+
|
| 306 |
+
micromamba activate /usr/local
|
| 307 |
+
|
| 308 |
+
Or to execute a single command in this environment, use:
|
| 309 |
+
|
| 310 |
+
micromamba run -p /usr/local mycommand
|
| 311 |
+
|
| 312 |
+
installation finished.
|
| 313 |
+
WARNING:
|
| 314 |
+
You currently have a PYTHONPATH environment variable set. This may cause
|
| 315 |
+
unexpected behavior when running the Python interpreter in Miniforge3-colab.
|
| 316 |
+
For best results, please verify that your PYTHONPATH only points to
|
| 317 |
+
directories of packages that are compatible with the Python interpreter
|
| 318 |
+
in Miniforge3-colab: /usr/local
|
lerobot/.dockerignore
ADDED
|
@@ -0,0 +1,160 @@
|
| 1 |
+
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
# Misc
|
| 16 |
+
.git
|
| 17 |
+
tmp
|
| 18 |
+
wandb
|
| 19 |
+
data
|
| 20 |
+
outputs
|
| 21 |
+
.vscode
|
| 22 |
+
rl
|
| 23 |
+
media
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
# Logging
|
| 27 |
+
logs
|
| 28 |
+
|
| 29 |
+
# HPC
|
| 30 |
+
nautilus/*.yaml
|
| 31 |
+
*.key
|
| 32 |
+
|
| 33 |
+
# Slurm
|
| 34 |
+
sbatch*.sh
|
| 35 |
+
|
| 36 |
+
# Byte-compiled / optimized / DLL files
|
| 37 |
+
__pycache__/
|
| 38 |
+
*.py[cod]
|
| 39 |
+
*$py.class
|
| 40 |
+
|
| 41 |
+
# C extensions
|
| 42 |
+
*.so
|
| 43 |
+
|
| 44 |
+
# Distribution / packaging
|
| 45 |
+
.Python
|
| 46 |
+
build/
|
| 47 |
+
develop-eggs/
|
| 48 |
+
dist/
|
| 49 |
+
downloads/
|
| 50 |
+
eggs/
|
| 51 |
+
.eggs/
|
| 52 |
+
lib/
|
| 53 |
+
lib64/
|
| 54 |
+
parts/
|
| 55 |
+
sdist/
|
| 56 |
+
var/
|
| 57 |
+
wheels/
|
| 58 |
+
pip-wheel-metadata/
|
| 59 |
+
share/python-wheels/
|
| 60 |
+
*.egg-info/
|
| 61 |
+
.installed.cfg
|
| 62 |
+
*.egg
|
| 63 |
+
MANIFEST
|
| 64 |
+
|
| 65 |
+
# PyInstaller
|
| 66 |
+
# Usually these files are written by a python script from a template
|
| 67 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
| 68 |
+
*.manifest
|
| 69 |
+
*.spec
|
| 70 |
+
|
| 71 |
+
# Installer logs
|
| 72 |
+
pip-log.txt
|
| 73 |
+
pip-delete-this-directory.txt
|
| 74 |
+
|
| 75 |
+
# Unit test / coverage reports
|
| 76 |
+
!tests/artifacts
|
| 77 |
+
htmlcov/
|
| 78 |
+
.tox/
|
| 79 |
+
.nox/
|
| 80 |
+
.coverage
|
| 81 |
+
.coverage.*
|
| 82 |
+
nosetests.xml
|
| 83 |
+
coverage.xml
|
| 84 |
+
*.cover
|
| 85 |
+
*.py,cover
|
| 86 |
+
.hypothesis/
|
| 87 |
+
.pytest_cache/
|
| 88 |
+
|
| 89 |
+
# Ignore .cache except calibration
|
| 90 |
+
.cache/*
|
| 91 |
+
!.cache/calibration/
|
| 92 |
+
!.cache/calibration/**
|
| 93 |
+
|
| 94 |
+
# Translations
|
| 95 |
+
*.mo
|
| 96 |
+
*.pot
|
| 97 |
+
|
| 98 |
+
# Django stuff:
|
| 99 |
+
*.log
|
| 100 |
+
local_settings.py
|
| 101 |
+
db.sqlite3
|
| 102 |
+
db.sqlite3-journal
|
| 103 |
+
|
| 104 |
+
# Flask stuff:
|
| 105 |
+
instance/
|
| 106 |
+
.webassets-cache
|
| 107 |
+
|
| 108 |
+
# Scrapy stuff:
|
| 109 |
+
.scrapy
|
| 110 |
+
|
| 111 |
+
# Sphinx documentation
|
| 112 |
+
docs/_build/
|
| 113 |
+
|
| 114 |
+
# PyBuilder
|
| 115 |
+
target/
|
| 116 |
+
|
| 117 |
+
# Jupyter Notebook
|
| 118 |
+
.ipynb_checkpoints
|
| 119 |
+
|
| 120 |
+
# IPython
|
| 121 |
+
profile_default/
|
| 122 |
+
ipython_config.py
|
| 123 |
+
|
| 124 |
+
# pyenv
|
| 125 |
+
.python-version
|
| 126 |
+
|
| 127 |
+
# pipenv
|
| 128 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
| 129 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
| 130 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
| 131 |
+
# install all needed dependencies.
|
| 132 |
+
#Pipfile.lock
|
| 133 |
+
|
| 134 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
| 135 |
+
__pypackages__/
|
| 136 |
+
|
| 137 |
+
# Celery stuff
|
| 138 |
+
celerybeat-schedule
|
| 139 |
+
celerybeat.pid
|
| 140 |
+
|
| 141 |
+
# SageMath parsed files
|
| 142 |
+
*.sage.py
|
| 143 |
+
|
| 144 |
+
# Spyder project settings
|
| 145 |
+
.spyderproject
|
| 146 |
+
.spyproject
|
| 147 |
+
|
| 148 |
+
# Rope project settings
|
| 149 |
+
.ropeproject
|
| 150 |
+
|
| 151 |
+
# mkdocs documentation
|
| 152 |
+
/site
|
| 153 |
+
|
| 154 |
+
# mypy
|
| 155 |
+
.mypy_cache/
|
| 156 |
+
.dmypy.json
|
| 157 |
+
dmypy.json
|
| 158 |
+
|
| 159 |
+
# Pyre type checker
|
| 160 |
+
.pyre/
|
lerobot/.gitattributes
ADDED
|
@@ -0,0 +1,21 @@
|
| 1 |
+
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
*.memmap filter=lfs diff=lfs merge=lfs -text
|
| 15 |
+
*.stl filter=lfs diff=lfs merge=lfs -text
|
| 16 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 17 |
+
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 18 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 19 |
+
*.json !text !filter !merge !diff
|
| 20 |
+
tests/artifacts/cameras/*.png filter=lfs diff=lfs merge=lfs -text
|
| 21 |
+
*.bag filter=lfs diff=lfs merge=lfs -text
|
lerobot/.github/ISSUE_TEMPLATE/bug-report.yml
ADDED
|
@@ -0,0 +1,68 @@
|
| 1 |
+
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
name: "\U0001F41B Bug Report"
|
| 16 |
+
description: Submit a bug report to help us improve LeRobot
|
| 17 |
+
body:
|
| 18 |
+
- type: markdown
|
| 19 |
+
attributes:
|
| 20 |
+
value: |
|
| 21 |
+
Thanks for taking the time to submit a bug report! 🐛
|
| 22 |
+
If this is not a bug related to the LeRobot library directly, but instead a general question about your code or the library specifically please use our [discord](https://discord.gg/s3KuuzsPFb).
|
| 23 |
+
|
| 24 |
+
- type: textarea
|
| 25 |
+
id: system-info
|
| 26 |
+
attributes:
|
| 27 |
+
label: System Info
|
| 28 |
+
description: If needed, you can share your lerobot configuration with us by running `python -m lerobot.scripts.display_sys_info` and copy-pasting its outputs below
|
| 29 |
+
render: Shell
|
| 30 |
+
placeholder: lerobot version, OS, python version, numpy version, torch version, and lerobot's configuration
|
| 31 |
+
validations:
|
| 32 |
+
required: true
|
| 33 |
+
|
| 34 |
+
- type: checkboxes
|
| 35 |
+
id: information-scripts-examples
|
| 36 |
+
attributes:
|
| 37 |
+
label: Information
|
| 38 |
+
description: 'The problem arises when using:'
|
| 39 |
+
options:
|
| 40 |
+
- label: "One of the scripts in the examples/ folder of LeRobot"
|
| 41 |
+
- label: "My own task or dataset (give details below)"
|
| 42 |
+
|
| 43 |
+
- type: textarea
|
| 44 |
+
id: reproduction
|
| 45 |
+
validations:
|
| 46 |
+
required: true
|
| 47 |
+
attributes:
|
| 48 |
+
label: Reproduction
|
| 49 |
+
description: |
|
| 50 |
+
If needed, provide a simple code sample that reproduces the problem you ran into. It can be a Colab link or just a code snippet.
|
| 51 |
+
Sharing error messages or stack traces could be useful as well!
|
| 52 |
+
Important! Use code tags to correctly format your code. See https://help.github.com/en/github/writing-on-github/creating-and-highlighting-code-blocks#syntax-highlighting
|
| 53 |
+
Try to avoid screenshots, as they are hard to read and don't allow copy-and-pasting.
|
| 54 |
+
|
| 55 |
+
placeholder: |
|
| 56 |
+
Steps to reproduce the behavior:
|
| 57 |
+
|
| 58 |
+
1.
|
| 59 |
+
2.
|
| 60 |
+
3.
|
| 61 |
+
|
| 62 |
+
- type: textarea
|
| 63 |
+
id: expected-behavior
|
| 64 |
+
validations:
|
| 65 |
+
required: true
|
| 66 |
+
attributes:
|
| 67 |
+
label: Expected behavior
|
| 68 |
+
description: "A clear and concise description of what you would expect to happen."
|
lerobot/.github/PULL_REQUEST_TEMPLATE.md
ADDED
|
@@ -0,0 +1,34 @@
|
| 1 |
+
## What this does
|
| 2 |
+
Explain what this PR does. Feel free to tag your PR with the appropriate label(s).
|
| 3 |
+
|
| 4 |
+
Examples:
|
| 5 |
+
| Title | Label |
|
| 6 |
+
|----------------------|-----------------|
|
| 7 |
+
| Fixes #[issue] | (🐛 Bug) |
|
| 8 |
+
| Adds new dataset | (🗃️ Dataset) |
|
| 9 |
+
| Optimizes something | (⚡️ Performance) |
|
| 10 |
+
|
| 11 |
+
## How it was tested
|
| 12 |
+
Explain/show how you tested your changes.
|
| 13 |
+
|
| 14 |
+
Examples:
|
| 15 |
+
- Added `test_something` in `tests/test_stuff.py`.
|
| 16 |
+
- Added `new_feature` and checked that training converges with policy X on dataset/environment Y.
|
| 17 |
+
- Optimized `some_function`, it now runs X times faster than previously.
|
| 18 |
+
|
| 19 |
+
## How to checkout & try? (for the reviewer)
|
| 20 |
+
Provide a simple way for the reviewer to try out your changes.
|
| 21 |
+
|
| 22 |
+
Examples:
|
| 23 |
+
```bash
|
| 24 |
+
pytest -sx tests/test_stuff.py::test_something
|
| 25 |
+
```
|
| 26 |
+
```bash
|
| 27 |
+
python -m lerobot.scripts.train --some.option=true
|
| 28 |
+
```
|
| 29 |
+
|
| 30 |
+
## SECTION TO REMOVE BEFORE SUBMITTING YOUR PR
|
| 31 |
+
**Note**: Anyone in the community is free to review the PR once the tests have passed. Feel free to tag
|
| 32 |
+
members/contributors who may be interested in your PR. Try to avoid tagging more than 3 people.
|
| 33 |
+
|
| 34 |
+
**Note**: Before submitting this PR, please read the [contributor guideline](https://github.com/huggingface/lerobot/blob/main/CONTRIBUTING.md#submitting-a-pull-request-pr).
|
lerobot/.github/workflows/build-docker-images.yml
ADDED
|
@@ -0,0 +1,135 @@
|
| 1 |
+
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
# Inspired by
|
| 16 |
+
# https://github.com/huggingface/peft/blob/main/.github/workflows/build_docker_images.yml
|
| 17 |
+
name: Builds
|
| 18 |
+
|
| 19 |
+
on:
|
| 20 |
+
workflow_dispatch:
|
| 21 |
+
workflow_call:
|
| 22 |
+
schedule:
|
| 23 |
+
- cron: "0 1 * * *"
|
| 24 |
+
|
| 25 |
+
permissions: {}
|
| 26 |
+
|
| 27 |
+
env:
|
| 28 |
+
PYTHON_VERSION: "3.10"
|
| 29 |
+
|
| 30 |
+
jobs:
|
| 31 |
+
latest-cpu:
|
| 32 |
+
name: CPU
|
| 33 |
+
runs-on:
|
| 34 |
+
group: aws-general-8-plus
|
| 35 |
+
steps:
|
| 36 |
+
- name: Install Git LFS
|
| 37 |
+
run: |
|
| 38 |
+
sudo apt-get update
|
| 39 |
+
sudo apt-get install git-lfs
|
| 40 |
+
git lfs install
|
| 41 |
+
|
| 42 |
+
- name: Set up Docker Buildx
|
| 43 |
+
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
| 44 |
+
with:
|
| 45 |
+
cache-binary: false
|
| 46 |
+
|
| 47 |
+
- name: Check out code
|
| 48 |
+
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
| 49 |
+
with:
|
| 50 |
+
lfs: true
|
| 51 |
+
persist-credentials: false
|
| 52 |
+
|
| 53 |
+
- name: Login to DockerHub
|
| 54 |
+
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
| 55 |
+
with:
|
| 56 |
+
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
| 57 |
+
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
| 58 |
+
|
| 59 |
+
- name: Build and Push CPU
|
| 60 |
+
uses: docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25 # v5.4.0
|
| 61 |
+
with:
|
| 62 |
+
context: .
|
| 63 |
+
file: ./docker/lerobot-cpu/Dockerfile
|
| 64 |
+
push: true
|
| 65 |
+
tags: huggingface/lerobot-cpu
|
| 66 |
+
build-args: PYTHON_VERSION=${{ env.PYTHON_VERSION }}
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
latest-cuda:
|
| 70 |
+
name: GPU
|
| 71 |
+
runs-on:
|
| 72 |
+
group: aws-general-8-plus
|
| 73 |
+
steps:
|
| 74 |
+
- name: Install Git LFS
|
| 75 |
+
run: |
|
| 76 |
+
sudo apt-get update
|
| 77 |
+
sudo apt-get install git-lfs
|
| 78 |
+
git lfs install
|
| 79 |
+
|
| 80 |
+
- name: Set up Docker Buildx
|
| 81 |
+
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
| 82 |
+
with:
|
| 83 |
+
cache-binary: false
|
| 84 |
+
|
| 85 |
+
- name: Check out code
|
| 86 |
+
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
| 87 |
+
with:
|
| 88 |
+
lfs: true
|
| 89 |
+
persist-credentials: false
|
| 90 |
+
|
| 91 |
+
- name: Login to DockerHub
|
| 92 |
+
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
| 93 |
+
with:
|
| 94 |
+
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
| 95 |
+
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
| 96 |
+
|
| 97 |
+
- name: Build and Push GPU
|
| 98 |
+
uses: docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25 # v5.4.0
|
| 99 |
+
with:
|
| 100 |
+
context: .
|
| 101 |
+
file: ./docker/lerobot-gpu/Dockerfile
|
| 102 |
+
push: true
|
| 103 |
+
tags: huggingface/lerobot-gpu
|
| 104 |
+
build-args: PYTHON_VERSION=${{ env.PYTHON_VERSION }}
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
latest-cuda-dev:
|
| 108 |
+
name: GPU Dev
|
| 109 |
+
runs-on:
|
| 110 |
+
group: aws-general-8-plus
|
| 111 |
+
steps:
|
| 112 |
+
- name: Set up Docker Buildx
|
| 113 |
+
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
| 114 |
+
with:
|
| 115 |
+
cache-binary: false
|
| 116 |
+
|
| 117 |
+
- name: Check out code
|
| 118 |
+
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
| 119 |
+
with:
|
| 120 |
+
persist-credentials: false
|
| 121 |
+
|
| 122 |
+
- name: Login to DockerHub
|
| 123 |
+
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
| 124 |
+
with:
|
| 125 |
+
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
| 126 |
+
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
| 127 |
+
|
| 128 |
+
- name: Build and Push GPU dev
|
| 129 |
+
uses: docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25 # v5.4.0
|
| 130 |
+
with:
|
| 131 |
+
context: .
|
| 132 |
+
file: ./docker/lerobot-gpu-dev/Dockerfile
|
| 133 |
+
push: true
|
| 134 |
+
tags: huggingface/lerobot-gpu:dev
|
| 135 |
+
build-args: PYTHON_VERSION=${{ env.PYTHON_VERSION }}
|
lerobot/.github/workflows/build_documentation.yml
ADDED
|
@@ -0,0 +1,23 @@
|
| 1 |
+
name: Build documentation
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
workflow_dispatch:
|
| 5 |
+
push:
|
| 6 |
+
paths:
|
| 7 |
+
- "docs/**"
|
| 8 |
+
branches:
|
| 9 |
+
- main
|
| 10 |
+
- doc-builder*
|
| 11 |
+
- v*-release
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
jobs:
|
| 15 |
+
build: # zizmor: ignore[excessive-permissions] We follow the same pattern as in Transformers
|
| 16 |
+
uses: huggingface/doc-builder/.github/workflows/build_main_documentation.yml@main
|
| 17 |
+
with:
|
| 18 |
+
commit_sha: ${{ github.sha }}
|
| 19 |
+
package: lerobot
|
| 20 |
+
additional_args: --not_python_module
|
| 21 |
+
secrets:
|
| 22 |
+
token: ${{ secrets.HUGGINGFACE_PUSH }}
|
| 23 |
+
hf_token: ${{ secrets.HF_DOC_BUILD_PUSH }}
|
lerobot/.github/workflows/build_pr_documentation.yml
ADDED
@@ -0,0 +1,19 @@
name: Build PR Documentation

on:
  pull_request:
    paths:
      - "docs/**"

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  build: # zizmor: ignore[excessive-permissions] We follow the same pattern as in Transformers
    uses: huggingface/doc-builder/.github/workflows/build_pr_documentation.yml@main
    with:
      commit_sha: ${{ github.event.pull_request.head.sha }}
      pr_number: ${{ github.event.number }}
      package: lerobot
      additional_args: --not_python_module
lerobot/.github/workflows/nightly-tests.yml
ADDED
@@ -0,0 +1,93 @@
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Inspired by
# https://github.com/huggingface/peft/blob/main/.github/workflows/nightly.yml
name: Nightly

on:
  workflow_dispatch:
  schedule:
    - cron: "0 2 * * *"

permissions: {}

# env:
#   SLACK_API_TOKEN: ${{ secrets.SLACK_API_TOKEN }}
jobs:
  run_all_tests_cpu:
    name: CPU
    strategy:
      fail-fast: false
    runs-on:
      group: aws-general-8-plus
    container:
      image: huggingface/lerobot-cpu:latest # zizmor: ignore[unpinned-images]
      options: --shm-size "16gb"
      credentials:
        username: ${{ secrets.DOCKERHUB_USERNAME }}
        password: ${{ secrets.DOCKERHUB_PASSWORD }}
    defaults:
      run:
        shell: bash
        working-directory: /lerobot
    steps:
      - name: Tests
        run: pytest -v --cov=./src/lerobot --disable-warnings tests

      - name: Tests end-to-end
        run: make test-end-to-end


  run_all_tests_single_gpu:
    name: GPU
    strategy:
      fail-fast: false
    runs-on:
      group: aws-g6-4xlarge-plus
    env:
      CUDA_VISIBLE_DEVICES: "0"
      TEST_TYPE: "single_gpu"
    container:
      image: huggingface/lerobot-gpu:latest # zizmor: ignore[unpinned-images]
      options: --gpus all --shm-size "16gb"
      credentials:
        username: ${{ secrets.DOCKERHUB_USERNAME }}
        password: ${{ secrets.DOCKERHUB_PASSWORD }}
    defaults:
      run:
        shell: bash
        working-directory: /lerobot
    steps:
      - name: Nvidia-smi
        run: nvidia-smi

      - name: Test
        run: pytest -v --cov=./src/lerobot --cov-report=xml --disable-warnings tests
      # TODO(aliberts): Link with HF Codecov account
      # - name: Upload coverage reports to Codecov with GitHub Action
      #   uses: codecov/codecov-action@v4
      #   with:
      #     files: ./coverage.xml
      #     verbose: true
      - name: Tests end-to-end
        env:
          DEVICE: cuda
        run: make test-end-to-end

      # - name: Generate Report
      #   if: always()
      #   run: |
      #     pip install slack_sdk tabulate
      #     python scripts/log_reports.py >> $GITHUB_STEP_SUMMARY
lerobot/.github/workflows/quality.yml
ADDED
@@ -0,0 +1,72 @@
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: Quality

on:
  workflow_dispatch:
  workflow_call:
  pull_request:
  push:
    branches:
      - main

permissions: {}

env:
  PYTHON_VERSION: "3.10"

jobs:
  style:
    name: Style
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - name: Set up Python
        uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c # v4.9.1
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Get Ruff Version from pre-commit-config.yaml
        id: get-ruff-version
        run: |
          RUFF_VERSION=$(awk '/repo: https:\/\/github.com\/astral-sh\/ruff-pre-commit/{flag=1;next}/rev:/{if(flag){print $2;exit}}' .pre-commit-config.yaml)
          echo "ruff_version=${RUFF_VERSION}" >> $GITHUB_OUTPUT

      - name: Install Ruff
        env:
          RUFF_VERSION: ${{ steps.get-ruff-version.outputs.ruff_version }}
        run: python -m pip install "ruff==${RUFF_VERSION}"

      - name: Ruff check
        run: ruff check --output-format=github

      - name: Ruff format
        run: ruff format --diff

  typos:
    name: Typos
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - name: typos-action
        uses: crate-ci/typos@db35ee91e80fbb447f33b0e5fbddb24d2a1a884f # v1.29.10
lerobot/.github/workflows/test-docker-build.yml
ADDED
@@ -0,0 +1,82 @@
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Inspired by
# https://github.com/huggingface/peft/blob/main/.github/workflows/test-docker-build.yml
name: Test Dockerfiles

on:
  pull_request:
    paths:
      # Run only when DockerFile files are modified
      - "docker/**"

permissions: {}

env:
  PYTHON_VERSION: "3.10"

jobs:
  get_changed_files:
    name: Detect modified Dockerfiles
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - name: Check out code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@3f54ebb830831fc121d3263c1857cfbdc310cdb9 #v42
        with:
          files: docker/**
          json: "true"

      - name: Run step if only the files listed above change # zizmor: ignore[template-injection]
        if: steps.changed-files.outputs.any_changed == 'true'
        id: set-matrix
        run: |
          echo "matrix=${{ steps.changed-files.outputs.all_changed_files}}" >> $GITHUB_OUTPUT

  build_modified_dockerfiles:
    name: Build modified Docker images
    needs: get_changed_files
    runs-on:
      group: aws-general-8-plus
    if: needs.get_changed_files.outputs.matrix != ''
    strategy:
      fail-fast: false
      matrix:
        docker-file: ${{ fromJson(needs.get_changed_files.outputs.matrix) }}
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
        with:
          cache-binary: false

      - name: Check out code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - name: Build Docker image
        uses: docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25 # v5.4.0
        with:
          file: ${{ matrix.docker-file }}
          context: .
          push: False
          build-args: PYTHON_VERSION=${{ env.PYTHON_VERSION }}
lerobot/.github/workflows/test.yml
ADDED
@@ -0,0 +1,150 @@
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: Tests

on:
  pull_request:
    paths:
      - "src/**"
      - "tests/**"
      - "examples/**"
      - ".github/**"
      - "pyproject.toml"
      - ".pre-commit-config.yaml"
      - "Makefile"
      - ".cache/**"
  push:
    branches:
      - main
    paths:
      - "src/**"
      - "tests/**"
      - "examples/**"
      - ".github/**"
      - "pyproject.toml"
      - ".pre-commit-config.yaml"
      - "Makefile"
      - ".cache/**"

permissions: {}

env:
  UV_VERSION: "0.6.0"

jobs:
  pytest:
    name: Pytest
    runs-on: ubuntu-latest
    env:
      MUJOCO_GL: egl
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          lfs: true # Ensure LFS files are pulled
          persist-credentials: false

      - name: Install apt dependencies
        # portaudio19-dev is needed to install pyaudio
        run: |
          sudo apt-get update && \
          sudo apt-get install -y libegl1-mesa-dev ffmpeg portaudio19-dev

      - name: Install uv and python
        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
        with:
          enable-cache: true
          version: ${{ env.UV_VERSION }}
          python-version: "3.10"

      - name: Install lerobot (all extras)
        run: uv sync --all-extras

      - name: Test with pytest
        run: |
          uv run pytest tests -v --cov=./src/lerobot --durations=0 \
            -W ignore::DeprecationWarning:imageio_ffmpeg._utils:7 \
            -W ignore::UserWarning:torch.utils.data.dataloader:558 \
            -W ignore::UserWarning:gymnasium.utils.env_checker:247 \
            && rm -rf tests/outputs outputs

  pytest-minimal:
    name: Pytest (minimal install)
    runs-on: ubuntu-latest
    env:
      MUJOCO_GL: egl
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          lfs: true # Ensure LFS files are pulled
          persist-credentials: false

      - name: Install apt dependencies
        run: sudo apt-get update && sudo apt-get install -y ffmpeg

      - name: Install uv and python
        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
        with:
          enable-cache: true
          version: ${{ env.UV_VERSION }}
          python-version: "3.10"

      - name: Install lerobot
        run: uv sync --extra "test"

      - name: Test with pytest
        run: |
          uv run pytest tests -v --cov=./src/lerobot --durations=0 \
            -W ignore::DeprecationWarning:imageio_ffmpeg._utils:7 \
            -W ignore::UserWarning:torch.utils.data.dataloader:558 \
            -W ignore::UserWarning:gymnasium.utils.env_checker:247 \
            && rm -rf tests/outputs outputs

  end-to-end:
    name: End-to-end
    runs-on: ubuntu-latest
    env:
      MUJOCO_GL: egl
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          lfs: true # Ensure LFS files are pulled
          persist-credentials: false

      - name: Install apt dependencies
        # portaudio19-dev is needed to install pyaudio
        run: |
          sudo apt-get update && \
          sudo apt-get install -y libegl1-mesa-dev ffmpeg portaudio19-dev

      - name: Install uv and python
        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
        with:
          enable-cache: true
          version: ${{ env.UV_VERSION }}
          python-version: "3.10"

      - name: Install lerobot (all extras)
        run: |
          uv venv
          uv sync --all-extras

      - name: venv
        run: |
          echo "PYTHON_PATH=${{ github.workspace }}/.venv/bin/python" >> $GITHUB_ENV

      - name: Test end-to-end
        run: |
          make test-end-to-end \
            && rm -rf outputs
lerobot/.github/workflows/trufflehog.yml
ADDED
@@ -0,0 +1,35 @@
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

on:
  push:

name: Secret Leaks

permissions: {}

jobs:
  trufflehog:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
          persist-credentials: false

      - name: Secret Scanning
        uses: trufflesecurity/trufflehog@90694bf9af66e7536abc5824e7a87246dbf933cb # v3.88.35
        with:
          extra_args: --only-verified
lerobot/.github/workflows/upload_pr_documentation.yml
ADDED
@@ -0,0 +1,16 @@
name: Upload PR Documentation

on: # zizmor: ignore[dangerous-triggers] We follow the same pattern as in Transformers
  workflow_run:
    workflows: [ "Build PR Documentation" ]
    types:
      - completed

jobs:
  build: # zizmor: ignore[excessive-permissions] We follow the same pattern as in Transformers
    uses: huggingface/doc-builder/.github/workflows/upload_pr_documentation.yml@main
    with:
      package_name: lerobot
    secrets:
      hf_token: ${{ secrets.HF_DOC_BUILD_PUSH }}
      comment_bot_token: ${{ secrets.COMMENT_BOT_TOKEN }}
lerobot/.gitignore
ADDED
@@ -0,0 +1,175 @@
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Dev scripts
.dev

# Logging
logs
tmp
wandb

# Data
data
outputs

# Apple
.DS_Store

# VS Code
.vscode
.devcontainer

# HPC
nautilus/*.yaml
*.key

# Slurm
sbatch*.sh

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# uv/poetry lock files
poetry.lock
uv.lock

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
!tests/artifacts
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Ignore .cache
.cache/*

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/
lerobot/.pre-commit-config.yaml
ADDED
@@ -0,0 +1,74 @@
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

exclude: "tests/artifacts/.*\\.safetensors$"
default_language_version:
  python: python3.10
repos:
  ##### Meta #####
  - repo: meta
    hooks:
      - id: check-useless-excludes
      - id: check-hooks-apply


  ##### Style / Misc. #####
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: check-added-large-files
      - id: debug-statements
      - id: check-merge-conflict
      - id: check-case-conflict
      - id: check-yaml
      - id: check-toml
      - id: end-of-file-fixer
      - id: trailing-whitespace

  - repo: https://github.com/adhtruong/mirrors-typos
    rev: v1.33.1
    hooks:
      - id: typos
        args: [--force-exclude]

  - repo: https://github.com/asottile/pyupgrade
    rev: v3.20.0
    hooks:
      - id: pyupgrade

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.11.13
    hooks:
      - id: ruff
        args: [--fix]
      - id: ruff-format


  ##### Security #####
  - repo: https://github.com/gitleaks/gitleaks
    rev: v8.27.2
    hooks:
      - id: gitleaks

  - repo: https://github.com/woodruffw/zizmor-pre-commit
    rev: v1.9.0
    hooks:
      - id: zizmor

  - repo: https://github.com/PyCQA/bandit
    rev: 1.8.3
    hooks:
      - id: bandit
        args: ["-c", "pyproject.toml"]
        additional_dependencies: ["bandit[toml]"]
lerobot/CODE_OF_CONDUCT.md
ADDED
@@ -0,0 +1,133 @@

# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual
identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall
  community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or advances of
  any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address,
  without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official email address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
[feedback@huggingface.co](mailto:feedback@huggingface.co).
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of
actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or permanent
ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the
community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.1, available at
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].

Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].

For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
[https://www.contributor-covenant.org/translations][translations].

[homepage]: https://www.contributor-covenant.org
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations
lerobot/CONTRIBUTING.md
ADDED
@@ -0,0 +1,305 @@
# How to contribute to 🤗 LeRobot?

Everyone is welcome to contribute, and we value everybody's contribution. Code
is thus not the only way to help the community. Answering questions, helping
others, reaching out and improving the documentations are immensely valuable to
the community.

It also helps us if you spread the word: reference the library from blog posts
on the awesome projects it made possible, shout out on Twitter when it has
helped you, or simply ⭐️ the repo to say "thank you".

Whichever way you choose to contribute, please be mindful to respect our
[code of conduct](https://github.com/huggingface/lerobot/blob/main/CODE_OF_CONDUCT.md).

## You can contribute in so many ways!

Some of the ways you can contribute to 🤗 LeRobot:
* Fixing outstanding issues with the existing code.
* Implementing new models, datasets or simulation environments.
* Contributing to the examples or to the documentation.
* Submitting issues related to bugs or desired new features.

Following the guides below, feel free to open issues and PRs and to coordinate your efforts with the community on our [Discord Channel](https://discord.gg/VjFz58wn3R). For specific inquiries, reach out to [Remi Cadene](mailto:remi.cadene@huggingface.co).

If you are not sure how to contribute or want to know the next features we working on, look on this project page: [LeRobot TODO](https://github.com/orgs/huggingface/projects/46)

## Submitting a new issue or feature request

Do your best to follow these guidelines when submitting an issue or a feature
request. It will make it easier for us to come back to you quickly and with good
feedback.

### Did you find a bug?

The 🤗 LeRobot library is robust and reliable thanks to the users who notify us of
the problems they encounter. So thank you for reporting an issue.

First, we would really appreciate it if you could **make sure the bug was not
already reported** (use the search bar on Github under Issues).

Did not find it? :( So we can act quickly on it, please follow these steps:

* Include your **OS type and version**, the versions of **Python** and **PyTorch**.
* A short, self-contained, code snippet that allows us to reproduce the bug in
  less than 30s.
* The full traceback if an exception is raised.
* Attach any other additional information, like screenshots, you think may help.

### Do you want a new feature?

A good feature request addresses the following points:

1. Motivation first:
   * Is it related to a problem/frustration with the library? If so, please explain
     why. Providing a code snippet that demonstrates the problem is best.
   * Is it related to something you would need for a project? We'd love to hear
     about it!
   * Is it something you worked on and think could benefit the community?
     Awesome! Tell us what problem it solved for you.
2. Write a *paragraph* describing the feature.
3. Provide a **code snippet** that demonstrates its future use.
4. In case this is related to a paper, please attach a link.
5. Attach any additional information (drawings, screenshots, etc.) you think may help.

If your issue is well written we're already 80% of the way there by the time you
post it.

## Adding new policies, datasets or environments

Look at our implementations for [datasets](./src/lerobot/datasets/), [policies](./src/lerobot/policies/),
environments ([aloha](https://github.com/huggingface/gym-aloha),
[xarm](https://github.com/huggingface/gym-xarm),
[pusht](https://github.com/huggingface/gym-pusht))
and follow the same api design.

When implementing a new dataset loadable with LeRobotDataset follow these steps:
- Update `available_datasets_per_env` in `lerobot/__init__.py`

When implementing a new environment (e.g. `gym_aloha`), follow these steps:
- Update `available_tasks_per_env` and `available_datasets_per_env` in `lerobot/__init__.py`

When implementing a new policy class (e.g. `DiffusionPolicy`) follow these steps:
- Update `available_policies` and `available_policies_per_env`, in `lerobot/__init__.py`
- Set the required `name` class attribute.
- Update variables in `tests/test_available.py` by importing your new Policy class

## Submitting a pull request (PR)

Before writing code, we strongly advise you to search through the existing PRs or
issues to make sure that nobody is already working on the same thing. If you are
unsure, it is always a good idea to open an issue to get some feedback.

You will need basic `git` proficiency to be able to contribute to
🤗 LeRobot. `git` is not the easiest tool to use but it has the greatest
manual. Type `git --help` in a shell and enjoy. If you prefer books, [Pro
Git](https://git-scm.com/book/en/v2) is a very good reference.

Follow these steps to start contributing:

1. Fork the [repository](https://github.com/huggingface/lerobot) by
   clicking on the 'Fork' button on the repository's page. This creates a copy of the code
   under your GitHub user account.

2. Clone your fork to your local disk, and add the base repository as a remote. The following command
   assumes you have your public SSH key uploaded to GitHub. See the following guide for more
   [information](https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository).

   ```bash
   git clone git@github.com:<your Github handle>/lerobot.git
   cd lerobot
   git remote add upstream https://github.com/huggingface/lerobot.git
   ```

3. Create a new branch to hold your development changes, and do this for every new PR you work on.

   Start by synchronizing your `main` branch with the `upstream/main` branch (more details in the [GitHub Docs](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/syncing-a-fork)):

   ```bash
   git checkout main
   git fetch upstream
   git rebase upstream/main
   ```

   Once your `main` branch is synchronized, create a new branch from it:

   ```bash
   git checkout -b a-descriptive-name-for-my-changes
   ```

   🚨 **Do not** work on the `main` branch.

4. for development, we advise to use a tool like `poetry` or `uv` instead of just `pip` to easily track our dependencies.
   Follow the instructions to [install poetry](https://python-poetry.org/docs/#installation) (use a version >=2.1.0) or to [install uv](https://docs.astral.sh/uv/getting-started/installation/#installation-methods) if you don't have one of them already.

   Set up a development environment with conda or miniconda:
   ```bash
   conda create -y -n lerobot-dev python=3.10 && conda activate lerobot-dev
   ```

   If you're using `uv`, it can manage python versions so you can instead do:
   ```bash
   uv venv --python 3.10 && source .venv/bin/activate
   ```

   To develop on 🤗 LeRobot, you will at least need to install the `dev` and `test` extras dependencies along with the core library:

   using `poetry`
   ```bash
   poetry sync --extras "dev test"
   ```

   using `uv`
   ```bash
   uv sync --extra dev --extra test
   ```

   You can also install the project with all its dependencies (including environments):

   using `poetry`
   ```bash
   poetry sync --all-extras
   ```

   using `uv`
   ```bash
   uv sync --all-extras
   ```

   > **Note:** If you don't install simulation environments with `--all-extras`, the tests that require them will be skipped when running the pytest suite locally. However, they *will* be tested in the CI. In general, we advise you to install everything and test locally before pushing.

   Whichever command you chose to install the project (e.g. `poetry sync --all-extras`), you should run it again when pulling code with an updated version of `pyproject.toml` and `poetry.lock` in order to synchronize your virtual environment with the new dependencies.

   The equivalent of `pip install some-package`, would just be:

   using `poetry`
   ```bash
   poetry add some-package
   ```

   using `uv`
   ```bash
   uv add some-package
   ```

   When making changes to the poetry sections of the `pyproject.toml`, you should run the following command to lock dependencies.
   using `poetry`
   ```bash
   poetry lock
   ```

   using `uv`
   ```bash
   uv lock
   ```


5. Develop the features on your branch.

   As you work on the features, you should make sure that the test suite
   passes. You should run the tests impacted by your changes like this (see
   below an explanation regarding the environment variable):

   ```bash
   pytest tests/<TEST_TO_RUN>.py
   ```

6. Follow our style.

   `lerobot` relies on `ruff` to format its source code
   consistently. Set up [`pre-commit`](https://pre-commit.com/) to run these checks
   automatically as Git commit hooks.

   Install `pre-commit` hooks:
   ```bash
   pre-commit install
   ```

   You can run these hooks whenever you need on staged files with:
   ```bash
   pre-commit
   ```

   Once you're happy with your changes, add changed files using `git add` and
   make a commit with `git commit` to record your changes locally:

   ```bash
   git add modified_file.py
   git commit
   ```

   Note, if you already committed some changes that have a wrong formatting, you can use:
   ```bash
   pre-commit run --all-files
   ```

   Please write [good commit messages](https://chris.beams.io/posts/git-commit/).

   It is a good idea to sync your copy of the code with the original
   repository regularly. This way you can quickly account for changes:

   ```bash
   git fetch upstream
   git rebase upstream/main
   ```

   Push the changes to your account using:

   ```bash
   git push -u origin a-descriptive-name-for-my-changes
   ```

6. Once you are satisfied (**and the checklist below is happy too**), go to the
   webpage of your fork on GitHub. Click on 'Pull request' to send your changes
   to the project maintainers for review.

7. It's ok if maintainers ask you for changes. It happens to core contributors
   too! So everyone can see the changes in the Pull request, work in your local
   branch and push the changes to your fork. They will automatically appear in
   the pull request.


### Checklist

1. The title of your pull request should be a summary of its contribution;
2. If your pull request addresses an issue, please mention the issue number in
   the pull request description to make sure they are linked (and people
   consulting the issue know you are working on it);
3. To indicate a work in progress please prefix the title with `[WIP]`, or preferably mark
   the PR as a draft PR. These are useful to avoid duplicated work, and to differentiate
   it from PRs ready to be merged;
4. Make sure existing tests pass;

### Tests

An extensive test suite is included to test the library behavior and several examples. Library tests can be found in the [tests folder](https://github.com/huggingface/lerobot/tree/main/tests).

Install [git lfs](https://git-lfs.com/) to retrieve test artifacts (if you don't have it already).

On Mac:
```bash
brew install git-lfs
git lfs install
```

On Ubuntu:
```bash
sudo apt-get install git-lfs
git lfs install
```

Pull artifacts if they're not in [tests/artifacts](tests/artifacts)
```bash
git lfs pull
```

We use `pytest` in order to run the tests. From the root of the
repository, here's how to run tests with `pytest` for the library:

```bash
python -m pytest -sv ./tests
```


You can specify a smaller set of tests in order to test only the feature
you're working on.
lerobot/LICENSE
ADDED
@@ -0,0 +1,507 @@
Copyright 2024 The Hugging Face team. All rights reserved.

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."
|
| 63 |
+
|
| 64 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 65 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 66 |
+
subsequently incorporated within the Work.
|
| 67 |
+
|
| 68 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 69 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 70 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 71 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 72 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 73 |
+
Work and such Derivative Works in Source or Object form.
|
| 74 |
+
|
| 75 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 76 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 77 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 78 |
+
(except as stated in this section) patent license to make, have made,
|
| 79 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 80 |
+
where such license applies only to those patent claims licensable
|
| 81 |
+
by such Contributor that are necessarily infringed by their
|
| 82 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 83 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 84 |
+
institute patent litigation against any entity (including a
|
| 85 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 86 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 87 |
+
or contributory patent infringement, then any patent licenses
|
| 88 |
+
granted to You under this License for that Work shall terminate
|
| 89 |
+
as of the date such litigation is filed.
|
| 90 |
+
|
| 91 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 92 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 93 |
+
modifications, and in Source or Object form, provided that You
|
| 94 |
+
meet the following conditions:
|
| 95 |
+
|
| 96 |
+
(a) You must give any other recipients of the Work or
|
| 97 |
+
Derivative Works a copy of this License; and
|
| 98 |
+
|
| 99 |
+
(b) You must cause any modified files to carry prominent notices
|
| 100 |
+
stating that You changed the files; and
|
| 101 |
+
|
| 102 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 103 |
+
that You distribute, all copyright, patent, trademark, and
|
| 104 |
+
attribution notices from the Source form of the Work,
|
| 105 |
+
excluding those notices that do not pertain to any part of
|
| 106 |
+
the Derivative Works; and
|
| 107 |
+
|
| 108 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 109 |
+
distribution, then any Derivative Works that You distribute must
|
| 110 |
+
include a readable copy of the attribution notices contained
|
| 111 |
+
within such NOTICE file, excluding those notices that do not
|
| 112 |
+
pertain to any part of the Derivative Works, in at least one
|
| 113 |
+
of the following places: within a NOTICE text file distributed
|
| 114 |
+
as part of the Derivative Works; within the Source form or
|
| 115 |
+
documentation, if provided along with the Derivative Works; or,
|
| 116 |
+
within a display generated by the Derivative Works, if and
|
| 117 |
+
wherever such third-party notices normally appear. The contents
|
| 118 |
+
of the NOTICE file are for informational purposes only and
|
| 119 |
+
do not modify the License. You may add Your own attribution
|
| 120 |
+
notices within Derivative Works that You distribute, alongside
|
| 121 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 122 |
+
that such additional attribution notices cannot be construed
|
| 123 |
+
as modifying the License.
|
| 124 |
+
|
| 125 |
+
You may add Your own copyright statement to Your modifications and
|
| 126 |
+
may provide additional or different license terms and conditions
|
| 127 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 128 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 129 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 130 |
+
the conditions stated in this License.
|
| 131 |
+
|
| 132 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 133 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 134 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 135 |
+
this License, without any additional terms or conditions.
|
| 136 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 137 |
+
the terms of any separate license agreement you may have executed
|
| 138 |
+
with Licensor regarding such Contributions.
|
| 139 |
+
|
| 140 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 141 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 142 |
+
except as required for reasonable and customary use in describing the
|
| 143 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 144 |
+
|
| 145 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 146 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 147 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 148 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 149 |
+
implied, including, without limitation, any warranties or conditions
|
| 150 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 151 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 152 |
+
appropriateness of using or redistributing the Work and assume any
|
| 153 |
+
risks associated with Your exercise of permissions under this License.
|
| 154 |
+
|
| 155 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 156 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 157 |
+
unless required by applicable law (such as deliberate and grossly
|
| 158 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 159 |
+
liable to You for damages, including any direct, indirect, special,
|
| 160 |
+
incidental, or consequential damages of any character arising as a
|
| 161 |
+
result of this License or out of the use or inability to use the
|
| 162 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 163 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 164 |
+
other commercial damages or losses), even if such Contributor
|
| 165 |
+
has been advised of the possibility of such damages.
|
| 166 |
+
|
| 167 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 168 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 169 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 170 |
+
or other liability obligations and/or rights consistent with this
|
| 171 |
+
License. However, in accepting such obligations, You may act only
|
| 172 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 173 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 174 |
+
defend, and hold each Contributor harmless for any liability
|
| 175 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 176 |
+
of your accepting any such warranty or additional liability.
|
| 177 |
+
|
| 178 |
+
END OF TERMS AND CONDITIONS
|
| 179 |
+
|
| 180 |
+
APPENDIX: How to apply the Apache License to your work.
|
| 181 |
+
|
| 182 |
+
To apply the Apache License to your work, attach the following
|
| 183 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
| 184 |
+
replaced with your own identifying information. (Don't include
|
| 185 |
+
the brackets!) The text should be enclosed in the appropriate
|
| 186 |
+
comment syntax for the file format. We also recommend that a
|
| 187 |
+
file or class name and description of purpose be included on the
|
| 188 |
+
same "printed page" as the copyright notice for easier
|
| 189 |
+
identification within third-party archives.
|
| 190 |
+
|
| 191 |
+
Copyright [yyyy] [name of copyright owner]
|
| 192 |
+
|
| 193 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 194 |
+
you may not use this file except in compliance with the License.
|
| 195 |
+
You may obtain a copy of the License at
|
| 196 |
+
|
| 197 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 198 |
+
|
| 199 |
+
Unless required by applicable law or agreed to in writing, software
|
| 200 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 201 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 202 |
+
See the License for the specific language governing permissions and
|
| 203 |
+
limitations under the License.
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
## Some of lerobot's code is derived from Diffusion Policy, which is subject to the following copyright notice:
|
| 207 |
+
|
| 208 |
+
MIT License
|
| 209 |
+
|
| 210 |
+
Copyright (c) 2023 Columbia Artificial Intelligence and Robotics Lab
|
| 211 |
+
|
| 212 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 213 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 214 |
+
in the Software without restriction, including without limitation the rights
|
| 215 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 216 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 217 |
+
furnished to do so, subject to the following conditions:
|
| 218 |
+
|
| 219 |
+
The above copyright notice and this permission notice shall be included in all
|
| 220 |
+
copies or substantial portions of the Software.
|
| 221 |
+
|
| 222 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 223 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 224 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 225 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 226 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 227 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 228 |
+
SOFTWARE.
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
## Some of lerobot's code is derived from FOWM, which is subject to the following copyright notice:
|
| 232 |
+
|
| 233 |
+
MIT License
|
| 234 |
+
|
| 235 |
+
Copyright (c) 2023 Yunhai Feng
|
| 236 |
+
|
| 237 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 238 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 239 |
+
in the Software without restriction, including without limitation the rights
|
| 240 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 241 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 242 |
+
furnished to do so, subject to the following conditions:
|
| 243 |
+
|
| 244 |
+
The above copyright notice and this permission notice shall be included in all
|
| 245 |
+
copies or substantial portions of the Software.
|
| 246 |
+
|
| 247 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 248 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 249 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 250 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 251 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 252 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 253 |
+
SOFTWARE.
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
## Some of lerobot's code is derived from simxarm, which is subject to the following copyright notice:
|
| 257 |
+
|
| 258 |
+
MIT License
|
| 259 |
+
|
| 260 |
+
Copyright (c) 2023 Nicklas Hansen & Yanjie Ze
|
| 261 |
+
|
| 262 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 263 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 264 |
+
in the Software without restriction, including without limitation the rights
|
| 265 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 266 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 267 |
+
furnished to do so, subject to the following conditions:
|
| 268 |
+
|
| 269 |
+
The above copyright notice and this permission notice shall be included in all
|
| 270 |
+
copies or substantial portions of the Software.
|
| 271 |
+
|
| 272 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 273 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 274 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 275 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 276 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 277 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 278 |
+
SOFTWARE.
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
## Some of lerobot's code is derived from ALOHA, which is subject to the following copyright notice:
|
| 282 |
+
|
| 283 |
+
MIT License
|
| 284 |
+
|
| 285 |
+
Copyright (c) 2023 Tony Z. Zhao
|
| 286 |
+
|
| 287 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 288 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 289 |
+
in the Software without restriction, including without limitation the rights
|
| 290 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 291 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 292 |
+
furnished to do so, subject to the following conditions:
|
| 293 |
+
|
| 294 |
+
The above copyright notice and this permission notice shall be included in all
|
| 295 |
+
copies or substantial portions of the Software.
|
| 296 |
+
|
| 297 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 298 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 299 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 300 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 301 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 302 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 303 |
+
SOFTWARE.
|
| 304 |
+
|
| 305 |
+
## Some of lerobot's code is derived from DETR, which is subject to the following copyright notice:
|
| 306 |
+
|
| 307 |
+
Apache License
|
| 308 |
+
Version 2.0, January 2004
|
| 309 |
+
http://www.apache.org/licenses/
|
| 310 |
+
|
| 311 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 312 |
+
|
| 313 |
+
1. Definitions.
|
| 314 |
+
|
| 315 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 316 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 317 |
+
|
| 318 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 319 |
+
the copyright owner that is granting the License.
|
| 320 |
+
|
| 321 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 322 |
+
other entities that control, are controlled by, or are under common
|
| 323 |
+
control with that entity. For the purposes of this definition,
|
| 324 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 325 |
+
direction or management of such entity, whether by contract or
|
| 326 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 327 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 328 |
+
|
| 329 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 330 |
+
exercising permissions granted by this License.
|
| 331 |
+
|
| 332 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 333 |
+
including but not limited to software source code, documentation
|
| 334 |
+
source, and configuration files.
|
| 335 |
+
|
| 336 |
+
"Object" form shall mean any form resulting from mechanical
|
| 337 |
+
transformation or translation of a Source form, including but
|
| 338 |
+
not limited to compiled object code, generated documentation,
|
| 339 |
+
and conversions to other media types.
|
| 340 |
+
|
| 341 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 342 |
+
Object form, made available under the License, as indicated by a
|
| 343 |
+
copyright notice that is included in or attached to the work
|
| 344 |
+
(an example is provided in the Appendix below).
|
| 345 |
+
|
| 346 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 347 |
+
form, that is based on (or derived from) the Work and for which the
|
| 348 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 349 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 350 |
+
of this License, Derivative Works shall not include works that remain
|
| 351 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 352 |
+
the Work and Derivative Works thereof.
|
| 353 |
+
|
| 354 |
+
"Contribution" shall mean any work of authorship, including
|
| 355 |
+
the original version of the Work and any modifications or additions
|
| 356 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 357 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 358 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 359 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 360 |
+
means any form of electronic, verbal, or written communication sent
|
| 361 |
+
to the Licensor or its representatives, including but not limited to
|
| 362 |
+
communication on electronic mailing lists, source code control systems,
|
| 363 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 364 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 365 |
+
excluding communication that is conspicuously marked or otherwise
|
| 366 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 367 |
+
|
| 368 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 369 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 370 |
+
subsequently incorporated within the Work.
|
| 371 |
+
|
| 372 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 373 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 374 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 375 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 376 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 377 |
+
Work and such Derivative Works in Source or Object form.
|
| 378 |
+
|
| 379 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 380 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 381 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 382 |
+
(except as stated in this section) patent license to make, have made,
|
| 383 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 384 |
+
where such license applies only to those patent claims licensable
|
| 385 |
+
by such Contributor that are necessarily infringed by their
|
| 386 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 387 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 388 |
+
institute patent litigation against any entity (including a
|
| 389 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 390 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 391 |
+
or contributory patent infringement, then any patent licenses
|
| 392 |
+
granted to You under this License for that Work shall terminate
|
| 393 |
+
as of the date such litigation is filed.
|
| 394 |
+
|
| 395 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 396 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 397 |
+
modifications, and in Source or Object form, provided that You
|
| 398 |
+
meet the following conditions:
|
| 399 |
+
|
| 400 |
+
(a) You must give any other recipients of the Work or
|
| 401 |
+
Derivative Works a copy of this License; and
|
| 402 |
+
|
| 403 |
+
(b) You must cause any modified files to carry prominent notices
|
| 404 |
+
stating that You changed the files; and
|
| 405 |
+
|
| 406 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 407 |
+
that You distribute, all copyright, patent, trademark, and
|
| 408 |
+
attribution notices from the Source form of the Work,
|
| 409 |
+
excluding those notices that do not pertain to any part of
|
| 410 |
+
the Derivative Works; and
|
| 411 |
+
|
| 412 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 413 |
+
distribution, then any Derivative Works that You distribute must
|
| 414 |
+
include a readable copy of the attribution notices contained
|
| 415 |
+
within such NOTICE file, excluding those notices that do not
|
| 416 |
+
pertain to any part of the Derivative Works, in at least one
|
| 417 |
+
of the following places: within a NOTICE text file distributed
|
| 418 |
+
as part of the Derivative Works; within the Source form or
|
| 419 |
+
documentation, if provided along with the Derivative Works; or,
|
| 420 |
+
within a display generated by the Derivative Works, if and
|
| 421 |
+
wherever such third-party notices normally appear. The contents
|
| 422 |
+
of the NOTICE file are for informational purposes only and
|
| 423 |
+
do not modify the License. You may add Your own attribution
|
| 424 |
+
notices within Derivative Works that You distribute, alongside
|
| 425 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 426 |
+
that such additional attribution notices cannot be construed
|
| 427 |
+
as modifying the License.
|
| 428 |
+
|
| 429 |
+
You may add Your own copyright statement to Your modifications and
|
| 430 |
+
may provide additional or different license terms and conditions
|
| 431 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 432 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 433 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 434 |
+
the conditions stated in this License.
|
| 435 |
+
|
| 436 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 437 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 438 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 439 |
+
this License, without any additional terms or conditions.
|
| 440 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 441 |
+
the terms of any separate license agreement you may have executed
|
| 442 |
+
with Licensor regarding such Contributions.
|
| 443 |
+
|
| 444 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 445 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 446 |
+
except as required for reasonable and customary use in describing the
|
| 447 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 448 |
+
|
| 449 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 450 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 451 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 452 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 453 |
+
implied, including, without limitation, any warranties or conditions
|
| 454 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 455 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 456 |
+
appropriateness of using or redistributing the Work and assume any
|
| 457 |
+
risks associated with Your exercise of permissions under this License.
|
| 458 |
+
|
| 459 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 460 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 461 |
+
unless required by applicable law (such as deliberate and grossly
|
| 462 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 463 |
+
liable to You for damages, including any direct, indirect, special,
|
| 464 |
+
incidental, or consequential damages of any character arising as a
|
| 465 |
+
result of this License or out of the use or inability to use the
|
| 466 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 467 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 468 |
+
other commercial damages or losses), even if such Contributor
|
| 469 |
+
has been advised of the possibility of such damages.
|
| 470 |
+
|
| 471 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 472 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 473 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 474 |
+
or other liability obligations and/or rights consistent with this
|
| 475 |
+
License. However, in accepting such obligations, You may act only
|
| 476 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 477 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 478 |
+
defend, and hold each Contributor harmless for any liability
|
| 479 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 480 |
+
of your accepting any such warranty or additional liability.
|
| 481 |
+
|
| 482 |
+
END OF TERMS AND CONDITIONS
|
| 483 |
+
|
| 484 |
+
APPENDIX: How to apply the Apache License to your work.
|
| 485 |
+
|
| 486 |
+
To apply the Apache License to your work, attach the following
|
| 487 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
| 488 |
+
replaced with your own identifying information. (Don't include
|
| 489 |
+
the brackets!) The text should be enclosed in the appropriate
|
| 490 |
+
comment syntax for the file format. We also recommend that a
|
| 491 |
+
file or class name and description of purpose be included on the
|
| 492 |
+
same "printed page" as the copyright notice for easier
|
| 493 |
+
identification within third-party archives.
|
| 494 |
+
|
| 495 |
+
Copyright 2020 - present, Facebook, Inc
|
| 496 |
+
|
| 497 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 498 |
+
you may not use this file except in compliance with the License.
|
| 499 |
+
You may obtain a copy of the License at
|
| 500 |
+
|
| 501 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 502 |
+
|
| 503 |
+
Unless required by applicable law or agreed to in writing, software
|
| 504 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 505 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 506 |
+
See the License for the specific language governing permissions and
|
| 507 |
+
limitations under the License.
|
lerobot/MANIFEST.in
ADDED
@@ -0,0 +1,2 @@
include src/lerobot/templates/lerobot_modelcard_template.md
include src/lerobot/datasets/card_template.md
lerobot/Makefile
ADDED
@@ -0,0 +1,180 @@
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

.PHONY: tests

PYTHON_PATH := $(shell which python)

# If uv is installed and a virtual environment exists, use it
UV_CHECK := $(shell command -v uv)
ifneq ($(UV_CHECK),)
  PYTHON_PATH := $(shell .venv/bin/python)
endif

export PATH := $(dir $(PYTHON_PATH)):$(PATH)

DEVICE ?= cpu

build-cpu:
	docker build -t lerobot:latest -f docker/lerobot-cpu/Dockerfile .

build-gpu:
	docker build -t lerobot:latest -f docker/lerobot-gpu/Dockerfile .

test-end-to-end:
	${MAKE} DEVICE=$(DEVICE) test-act-ete-train
	${MAKE} DEVICE=$(DEVICE) test-act-ete-train-resume
	${MAKE} DEVICE=$(DEVICE) test-act-ete-eval
	${MAKE} DEVICE=$(DEVICE) test-diffusion-ete-train
	${MAKE} DEVICE=$(DEVICE) test-diffusion-ete-eval
	${MAKE} DEVICE=$(DEVICE) test-tdmpc-ete-train
	${MAKE} DEVICE=$(DEVICE) test-tdmpc-ete-eval
	${MAKE} DEVICE=$(DEVICE) test-smolvla-ete-train
	${MAKE} DEVICE=$(DEVICE) test-smolvla-ete-eval

test-act-ete-train:
	python -m lerobot.scripts.train \
		--policy.type=act \
		--policy.dim_model=64 \
		--policy.n_action_steps=20 \
		--policy.chunk_size=20 \
		--policy.device=$(DEVICE) \
		--policy.push_to_hub=false \
		--env.type=aloha \
		--env.episode_length=5 \
		--dataset.repo_id=lerobot/aloha_sim_transfer_cube_human \
		--dataset.image_transforms.enable=true \
		--dataset.episodes="[0]" \
		--batch_size=2 \
		--steps=4 \
		--eval_freq=2 \
		--eval.n_episodes=1 \
		--eval.batch_size=1 \
		--save_freq=2 \
		--save_checkpoint=true \
		--log_freq=1 \
		--wandb.enable=false \
		--output_dir=tests/outputs/act/

test-act-ete-train-resume:
	python -m lerobot.scripts.train \
		--config_path=tests/outputs/act/checkpoints/000002/pretrained_model/train_config.json \
		--resume=true

test-act-ete-eval:
	python -m lerobot.scripts.eval \
		--policy.path=tests/outputs/act/checkpoints/000004/pretrained_model \
		--policy.device=$(DEVICE) \
		--env.type=aloha \
		--env.episode_length=5 \
		--eval.n_episodes=1 \
		--eval.batch_size=1

test-diffusion-ete-train:
	python -m lerobot.scripts.train \
		--policy.type=diffusion \
		--policy.down_dims='[64,128,256]' \
		--policy.diffusion_step_embed_dim=32 \
		--policy.num_inference_steps=10 \
		--policy.device=$(DEVICE) \
		--policy.push_to_hub=false \
		--env.type=pusht \
		--env.episode_length=5 \
		--dataset.repo_id=lerobot/pusht \
		--dataset.image_transforms.enable=true \
		--dataset.episodes="[0]" \
		--batch_size=2 \
		--steps=2 \
		--eval_freq=2 \
		--eval.n_episodes=1 \
		--eval.batch_size=1 \
		--save_checkpoint=true \
		--save_freq=2 \
		--log_freq=1 \
		--wandb.enable=false \
		--output_dir=tests/outputs/diffusion/

test-diffusion-ete-eval:
	python -m lerobot.scripts.eval \
		--policy.path=tests/outputs/diffusion/checkpoints/000002/pretrained_model \
		--policy.device=$(DEVICE) \
		--env.type=pusht \
		--env.episode_length=5 \
		--eval.n_episodes=1 \
		--eval.batch_size=1

test-tdmpc-ete-train:
	python -m lerobot.scripts.train \
		--policy.type=tdmpc \
		--policy.device=$(DEVICE) \
		--policy.push_to_hub=false \
		--env.type=xarm \
		--env.task=XarmLift-v0 \
		--env.episode_length=5 \
		--dataset.repo_id=lerobot/xarm_lift_medium \
		--dataset.image_transforms.enable=true \
		--dataset.episodes="[0]" \
		--batch_size=2 \
		--steps=2 \
		--eval_freq=2 \
		--eval.n_episodes=1 \
		--eval.batch_size=1 \
		--save_checkpoint=true \
		--save_freq=2 \
		--log_freq=1 \
		--wandb.enable=false \
		--output_dir=tests/outputs/tdmpc/

test-tdmpc-ete-eval:
	python -m lerobot.scripts.eval \
		--policy.path=tests/outputs/tdmpc/checkpoints/000002/pretrained_model \
		--policy.device=$(DEVICE) \
		--env.type=xarm \
		--env.episode_length=5 \
		--env.task=XarmLift-v0 \
		--eval.n_episodes=1 \
		--eval.batch_size=1


test-smolvla-ete-train:
	python -m lerobot.scripts.train \
		--policy.type=smolvla \
		--policy.n_action_steps=20 \
		--policy.chunk_size=20 \
		--policy.device=$(DEVICE) \
		--policy.push_to_hub=false \
		--env.type=aloha \
		--env.episode_length=5 \
		--dataset.repo_id=lerobot/aloha_sim_transfer_cube_human \
		--dataset.image_transforms.enable=true \
		--dataset.episodes="[0]" \
		--batch_size=2 \
		--steps=4 \
		--eval_freq=2 \
		--eval.n_episodes=1 \
		--eval.batch_size=1 \
		--save_freq=2 \
		--save_checkpoint=true \
		--log_freq=1 \
		--wandb.enable=false \
		--output_dir=tests/outputs/smolvla/

test-smolvla-ete-eval:
	python -m lerobot.scripts.eval \
		--policy.path=tests/outputs/smolvla/checkpoints/000004/pretrained_model \
		--policy.device=$(DEVICE) \
		--env.type=aloha \
		--env.episode_length=5 \
		--eval.n_episodes=1 \
		--eval.batch_size=1
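Assuming `make` is available, the aggregate target defined above can be used as a quick smoke test; `DEVICE` defaults to `cpu` and can be overridden on the command line:

```bash
# Run the full end-to-end test matrix on CPU (the default), or on GPU
make test-end-to-end
make test-end-to-end DEVICE=cuda
```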
lerobot/README.md
ADDED
@@ -0,0 +1,412 @@
<p align="center">
  <picture>
    <source media="(prefers-color-scheme: dark)" srcset="media/lerobot-logo-thumbnail.png">
    <source media="(prefers-color-scheme: light)" srcset="media/lerobot-logo-thumbnail.png">
    <img alt="LeRobot, Hugging Face Robotics Library" src="media/lerobot-logo-thumbnail.png" style="max-width: 100%;">
  </picture>
  <br/>
  <br/>
</p>

<div align="center">

[](https://github.com/huggingface/lerobot/actions/workflows/nightly-tests.yml?query=branch%3Amain)
[](https://codecov.io/gh/huggingface/lerobot)
[](https://www.python.org/downloads/)
[](https://github.com/huggingface/lerobot/blob/main/LICENSE)
[](https://pypi.org/project/lerobot/)
[](https://pypi.org/project/lerobot/)
[](https://github.com/huggingface/lerobot/tree/main/examples)
[](https://github.com/huggingface/lerobot/blob/main/CODE_OF_CONDUCT.md)
[](https://discord.gg/s3KuuzsPFb)

</div>

<h2 align="center">
  <p><a href="https://huggingface.co/docs/lerobot/so101">
  Build Your Own SO-101 Robot!</a></p>
</h2>

<div align="center">
<div style="display: flex; gap: 1rem; justify-content: center; align-items: center;" >
  <img
    src="media/so101/so101.webp?raw=true"
    alt="SO-101 follower arm"
    title="SO-101 follower arm"
    style="width: 40%;"
  />
  <img
    src="media/so101/so101-leader.webp?raw=true"
    alt="SO-101 leader arm"
    title="SO-101 leader arm"
    style="width: 40%;"
  />
</div>

<p><strong>Meet the updated SO100, the SO-101 – Just €114 per arm!</strong></p>
<p>Train it in minutes with a few simple moves on your laptop.</p>
<p>Then sit back and watch your creation act autonomously! 🤯</p>

<p><a href="https://huggingface.co/docs/lerobot/so101">
See the full SO-101 tutorial here.</a></p>

<p>Want to take it to the next level? Make your SO-101 mobile by building LeKiwi!</p>
<p>Check out the <a href="https://huggingface.co/docs/lerobot/lekiwi">LeKiwi tutorial</a> and bring your robot to life on wheels.</p>

<img src="media/lekiwi/kiwi.webp?raw=true" alt="LeKiwi mobile robot" title="LeKiwi mobile robot" width="50%">
</div>

<br/>

<h3 align="center">
  <p>LeRobot: State-of-the-art AI for real-world robotics</p>
</h3>

---

🤗 LeRobot aims to provide models, datasets, and tools for real-world robotics in PyTorch. The goal is to lower the barrier to entry to robotics so that everyone can contribute and benefit from sharing datasets and pretrained models.

🤗 LeRobot contains state-of-the-art approaches that have been shown to transfer to the real-world with a focus on imitation learning and reinforcement learning.

🤗 LeRobot already provides a set of pretrained models, datasets with human collected demonstrations, and simulation environments to get started without assembling a robot. In the coming weeks, the plan is to add more and more support for real-world robotics on the most affordable and capable robots out there.

🤗 LeRobot hosts pretrained models and datasets on this Hugging Face community page: [huggingface.co/lerobot](https://huggingface.co/lerobot)

#### Examples of pretrained models on simulation environments

<table>
  <tr>
    <td><img src="media/gym/aloha_act.gif" width="100%" alt="ACT policy on ALOHA env"/></td>
    <td><img src="media/gym/simxarm_tdmpc.gif" width="100%" alt="TDMPC policy on SimXArm env"/></td>
    <td><img src="media/gym/pusht_diffusion.gif" width="100%" alt="Diffusion policy on PushT env"/></td>
  </tr>
  <tr>
    <td align="center">ACT policy on ALOHA env</td>
    <td align="center">TDMPC policy on SimXArm env</td>
    <td align="center">Diffusion policy on PushT env</td>
  </tr>
</table>

### Acknowledgment

- The LeRobot team 🤗 for building SmolVLA [Paper](https://arxiv.org/abs/2506.01844), [Blog](https://huggingface.co/blog/smolvla).
- Thanks to Tony Zhao, Zipeng Fu and colleagues for open sourcing ACT policy, ALOHA environments and datasets. Ours are adapted from [ALOHA](https://tonyzhaozh.github.io/aloha) and [Mobile ALOHA](https://mobile-aloha.github.io).
- Thanks to Cheng Chi, Zhenjia Xu and colleagues for open sourcing Diffusion policy, Pusht environment and datasets, as well as UMI datasets. Ours are adapted from [Diffusion Policy](https://diffusion-policy.cs.columbia.edu) and [UMI Gripper](https://umi-gripper.github.io).
- Thanks to Nicklas Hansen, Yunhai Feng and colleagues for open sourcing TDMPC policy, Simxarm environments and datasets. Ours are adapted from [TDMPC](https://github.com/nicklashansen/tdmpc) and [FOWM](https://www.yunhaifeng.com/FOWM).
- Thanks to Antonio Loquercio and Ashish Kumar for their early support.
- Thanks to [Seungjae (Jay) Lee](https://sjlee.cc/), [Mahi Shafiullah](https://mahis.life/) and colleagues for open sourcing [VQ-BeT](https://sjlee.cc/vq-bet/) policy and helping us adapt the codebase to our repository. The policy is adapted from [VQ-BeT repo](https://github.com/jayLEE0301/vq_bet_official).

## Installation

Download our source code:
```bash
git clone https://github.com/huggingface/lerobot.git
cd lerobot
```

Create a virtual environment with Python 3.10 and activate it, e.g. with [`miniconda`](https://docs.anaconda.com/free/miniconda/index.html):
```bash
conda create -y -n lerobot python=3.10
conda activate lerobot
```

When using `miniconda`, install `ffmpeg` in your environment:
```bash
conda install ffmpeg -c conda-forge
```

> **NOTE:** This usually installs `ffmpeg 7.X` for your platform compiled with the `libsvtav1` encoder. If `libsvtav1` is not supported (check supported encoders with `ffmpeg -encoders`), you can:
> - _[On any platform]_ Explicitly install `ffmpeg 7.X` using:
> ```bash
> conda install ffmpeg=7.1.1 -c conda-forge
> ```
> - _[On Linux only]_ Install [ffmpeg build dependencies](https://trac.ffmpeg.org/wiki/CompilationGuide/Ubuntu#GettheDependencies) and [compile ffmpeg from source with libsvtav1](https://trac.ffmpeg.org/wiki/CompilationGuide/Ubuntu#libsvtav1), and make sure you use the corresponding ffmpeg binary to your install with `which ffmpeg`.

Install 🤗 LeRobot:
```bash
pip install -e .
```

> **NOTE:** If you encounter build errors, you may need to install additional dependencies (`cmake`, `build-essential`, and `ffmpeg libs`). On Linux, run:
`sudo apt-get install cmake build-essential python3-dev pkg-config libavformat-dev libavcodec-dev libavdevice-dev libavutil-dev libswscale-dev libswresample-dev libavfilter-dev`. For other systems, see: [Compiling PyAV](https://pyav.org/docs/develop/overview/installation.html#bring-your-own-ffmpeg)

For simulations, 🤗 LeRobot comes with gymnasium environments that can be installed as extras:
- [aloha](https://github.com/huggingface/gym-aloha)
- [xarm](https://github.com/huggingface/gym-xarm)
- [pusht](https://github.com/huggingface/gym-pusht)

For instance, to install 🤗 LeRobot with aloha and pusht, use:
```bash
pip install -e ".[aloha, pusht]"
```

To use [Weights and Biases](https://docs.wandb.ai/quickstart) for experiment tracking, log in with
```bash
wandb login
```

(note: you will also need to enable WandB in the configuration. See below.)

### Visualize datasets

Check out [example 1](./examples/1_load_lerobot_dataset.py) that illustrates how to use our dataset class which automatically downloads data from the Hugging Face hub.

You can also locally visualize episodes from a dataset on the hub by executing our script from the command line:
```bash
python -m lerobot.scripts.visualize_dataset \
    --repo-id lerobot/pusht \
    --episode-index 0
```

or from a dataset in a local folder with the `root` option and the `--local-files-only` flag (in the following case the dataset will be searched for in `./my_local_data_dir/lerobot/pusht`):
```bash
python -m lerobot.scripts.visualize_dataset \
    --repo-id lerobot/pusht \
    --root ./my_local_data_dir \
    --local-files-only 1 \
    --episode-index 0
```

It will open `rerun.io` and display the camera streams, robot states and actions, like this:

https://github-production-user-asset-6210df.s3.amazonaws.com/4681518/328035972-fd46b787-b532-47e2-bb6f-fd536a55a7ed.mov?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240505%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240505T172924Z&X-Amz-Expires=300&X-Amz-Signature=d680b26c532eeaf80740f08af3320d22ad0b8a4e4da1bcc4f33142c15b509eda&X-Amz-SignedHeaders=host&actor_id=24889239&key_id=0&repo_id=748713144

Our script can also visualize datasets stored on a distant server. See `python -m lerobot.scripts.visualize_dataset --help` for more instructions.

### The `LeRobotDataset` format

A dataset in `LeRobotDataset` format is very simple to use. It can be loaded from a repository on the Hugging Face hub or a local folder simply with e.g. `dataset = LeRobotDataset("lerobot/aloha_static_coffee")` and can be indexed into like any Hugging Face and PyTorch dataset. For instance `dataset[0]` will retrieve a single temporal frame from the dataset containing observation(s) and an action as PyTorch tensors ready to be fed to a model.

A specificity of `LeRobotDataset` is that, rather than retrieving a single frame by its index, we can retrieve several frames based on their temporal relationship with the indexed frame, by setting `delta_timestamps` to a list of relative times with respect to the indexed frame. For example, with `delta_timestamps = {"observation.image": [-1, -0.5, -0.2, 0]}` one can retrieve, for a given index, 4 frames: 3 "previous" frames 1 second, 0.5 seconds, and 0.2 seconds before the indexed frame, and the indexed frame itself (corresponding to the 0 entry). See example [1_load_lerobot_dataset.py](examples/1_load_lerobot_dataset.py) for more details on `delta_timestamps`.
|
| 186 |
+
Under the hood, the `LeRobotDataset` format makes use of several ways to serialize data which can be useful to understand if you plan to work more closely with this format. We tried to make a flexible yet simple dataset format that would cover most type of features and specificities present in reinforcement learning and robotics, in simulation and in real-world, with a focus on cameras and robot states but easily extended to other types of sensory inputs as long as they can be represented by a tensor.
|
| 187 |
+
|
| 188 |
+
Here are the important details and internal structure organization of a typical `LeRobotDataset` instantiated with `dataset = LeRobotDataset("lerobot/aloha_static_coffee")`. The exact features will change from dataset to dataset but not the main aspects:
|
| 189 |
+
|
| 190 |
+
```
|
| 191 |
+
dataset attributes:
|
| 192 |
+
├ hf_dataset: a Hugging Face dataset (backed by Arrow/parquet). Typical features example:
|
| 193 |
+
│ ├ observation.images.cam_high (VideoFrame):
|
| 194 |
+
│ │ VideoFrame = {'path': path to a mp4 video, 'timestamp' (float32): timestamp in the video}
|
| 195 |
+
│ ├ observation.state (list of float32): position of an arm joints (for instance)
|
| 196 |
+
│ ... (more observations)
|
| 197 |
+
│ ├ action (list of float32): goal position of an arm joints (for instance)
|
| 198 |
+
│ ├ episode_index (int64): index of the episode for this sample
|
| 199 |
+
│ ├ frame_index (int64): index of the frame for this sample in the episode ; starts at 0 for each episode
|
| 200 |
+
│ ├ timestamp (float32): timestamp in the episode
|
| 201 |
+
│ ├ next.done (bool): indicates the end of an episode ; True for the last frame in each episode
|
| 202 |
+
│ └ index (int64): general index in the whole dataset
|
| 203 |
+
├ episode_data_index: contains 2 tensors with the start and end indices of each episode
|
| 204 |
+
│ ├ from (1D int64 tensor): first frame index for each episode — shape (num episodes,) starts with 0
|
| 205 |
+
│ └ to: (1D int64 tensor): last frame index for each episode — shape (num episodes,)
|
| 206 |
+
├ stats: a dictionary of statistics (max, mean, min, std) for each feature in the dataset, for instance
|
| 207 |
+
│ ├ observation.images.cam_high: {'max': tensor with same number of dimensions (e.g. `(c, 1, 1)` for images, `(c,)` for states), etc.}
|
| 208 |
+
│ ...
|
| 209 |
+
├ info: a dictionary of metadata on the dataset
|
| 210 |
+
│ ├ codebase_version (str): this is to keep track of the codebase version the dataset was created with
|
| 211 |
+
│ ├ fps (float): frame per second the dataset is recorded/synchronized to
|
| 212 |
+
│ ├ video (bool): indicates if frames are encoded in mp4 video files to save space or stored as png files
|
| 213 |
+
│ └ encoding (dict): if video, this documents the main options that were used with ffmpeg to encode the videos
|
| 214 |
+
├ videos_dir (Path): where the mp4 videos or png images are stored/accessed
|
| 215 |
+
└ camera_keys (list of string): the keys to access camera features in the item returned by the dataset (e.g. `["observation.images.cam_high", ...]`)
|
| 216 |
+
```
|
| 217 |
+
|
| 218 |
+
A `LeRobotDataset` is serialised using several widespread file formats for each of its parts, namely:
|
| 219 |
+
- hf_dataset stored using Hugging Face datasets library serialization to parquet
|
| 220 |
+
- videos are stored in mp4 format to save space
|
| 221 |
+
- metadata are stored in plain json/jsonl files
|
| 222 |
+
|
| 223 |
+
Datasets can be uploaded to and downloaded from the Hugging Face Hub seamlessly. To work on a local dataset, you can specify its location with the `root` argument if it's not in the default `~/.cache/huggingface/lerobot` location.
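For instance, a small sketch of loading a local copy (the path is illustrative):
```python
from lerobot.datasets.lerobot_dataset import LeRobotDataset

# Point `root` at the local location of the dataset instead of the default
# ~/.cache/huggingface/lerobot cache.
dataset = LeRobotDataset("lerobot/aloha_static_coffee", root="/data/lerobot/aloha_static_coffee")
print(dataset.fps, len(dataset))
```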
|
| 224 |
+
|
| 225 |
+
### Evaluate a pretrained policy
|
| 226 |
+
|
| 227 |
+
Check out [example 2](./examples/2_evaluate_pretrained_policy.py) that illustrates how to download a pretrained policy from Hugging Face hub, and run an evaluation on its corresponding environment.
|
| 228 |
+
|
| 229 |
+
We also provide a more capable script to parallelize the evaluation over multiple environments during the same rollout. Here is an example with a pretrained model hosted on [lerobot/diffusion_pusht](https://huggingface.co/lerobot/diffusion_pusht):
|
| 230 |
+
```bash
|
| 231 |
+
python -m lerobot.scripts.eval \
|
| 232 |
+
--policy.path=lerobot/diffusion_pusht \
|
| 233 |
+
--env.type=pusht \
|
| 234 |
+
--eval.batch_size=10 \
|
| 235 |
+
--eval.n_episodes=10 \
|
| 236 |
+
--policy.use_amp=false \
|
| 237 |
+
--policy.device=cuda
|
| 238 |
+
```
|
| 239 |
+
|
| 240 |
+
Note: After training your own policy, you can re-evaluate the checkpoints with:
|
| 241 |
+
|
| 242 |
+
```bash
|
| 243 |
+
python -m lerobot.scripts.eval --policy.path={OUTPUT_DIR}/checkpoints/last/pretrained_model
|
| 244 |
+
```
|
| 245 |
+
|
| 246 |
+
See `python -m lerobot.scripts.eval --help` for more instructions.
|
| 247 |
+
|
| 248 |
+
### Train your own policy
|
| 249 |
+
|
| 250 |
+
Check out [example 3](./examples/3_train_policy.py) that illustrates how to train a model using our core library in python, and [example 4](./examples/4_train_policy_with_script.md) that shows how to use our training script from command line.
|
| 251 |
+
|
| 252 |
+
To use wandb for logging training and evaluation curves, make sure you've run `wandb login` as a one-time setup step. Then, when running the training command above, enable WandB in the configuration by adding `--wandb.enable=true`.
|
| 253 |
+
|
| 254 |
+
A link to the wandb logs for the run will also show up in yellow in your terminal. Here is an example of what they look like in your browser. Please also check [here](./examples/4_train_policy_with_script.md#typical-logs-and-metrics) for the explanation of some commonly used metrics in logs.
|
| 255 |
+
|
| 256 |
+

|
| 257 |
+
|
| 258 |
+
Note: For efficiency, during training every checkpoint is evaluated on a low number of episodes. You may use `--eval.n_episodes=500` to evaluate on more episodes than the default. Or, after training, you may want to re-evaluate your best checkpoints on more episodes or change the evaluation settings. See `python -m lerobot.scripts.eval --help` for more instructions.
|
| 259 |
+
|
| 260 |
+
#### Reproduce state-of-the-art (SOTA)
|
| 261 |
+
|
| 262 |
+
We provide some pretrained policies on our [hub page](https://huggingface.co/lerobot) that can achieve state-of-the-art performance.
|
| 263 |
+
You can reproduce their training by loading the config from their run. Simply running:
|
| 264 |
+
```bash
|
| 265 |
+
python -m lerobot.scripts.train --config_path=lerobot/diffusion_pusht
|
| 266 |
+
```
|
| 267 |
+
reproduces SOTA results for Diffusion Policy on the PushT task.
|
| 268 |
+
|
| 269 |
+
## Contribute
|
| 270 |
+
|
| 271 |
+
If you would like to contribute to 🤗 LeRobot, please check out our [contribution guide](https://github.com/huggingface/lerobot/blob/main/CONTRIBUTING.md).
|
| 272 |
+
|
| 273 |
+
<!-- ### Add a new dataset
|
| 274 |
+
|
| 275 |
+
To add a dataset to the hub, you need to login using a write-access token, which can be generated from the [Hugging Face settings](https://huggingface.co/settings/tokens):
|
| 276 |
+
```bash
|
| 277 |
+
huggingface-cli login --token ${HUGGINGFACE_TOKEN} --add-to-git-credential
|
| 278 |
+
```
|
| 279 |
+
|
| 280 |
+
Then point to your raw dataset folder (e.g. `data/aloha_static_pingpong_test_raw`), and push your dataset to the hub with:
|
| 281 |
+
```bash
|
| 282 |
+
python lerobot/scripts/push_dataset_to_hub.py \
|
| 283 |
+
--raw-dir data/aloha_static_pingpong_test_raw \
|
| 284 |
+
--out-dir data \
|
| 285 |
+
--repo-id lerobot/aloha_static_pingpong_test \
|
| 286 |
+
--raw-format aloha_hdf5
|
| 287 |
+
```
|
| 288 |
+
|
| 289 |
+
See `python lerobot/scripts/push_dataset_to_hub.py --help` for more instructions.
|
| 290 |
+
|
| 291 |
+
If your dataset format is not supported, implement your own in `lerobot/datasets/push_dataset_to_hub/${raw_format}_format.py` by copying examples like [pusht_zarr](https://github.com/huggingface/lerobot/blob/main/lerobot/datasets/push_dataset_to_hub/pusht_zarr_format.py), [umi_zarr](https://github.com/huggingface/lerobot/blob/main/lerobot/datasets/push_dataset_to_hub/umi_zarr_format.py), [aloha_hdf5](https://github.com/huggingface/lerobot/blob/main/lerobot/datasets/push_dataset_to_hub/aloha_hdf5_format.py), or [xarm_pkl](https://github.com/huggingface/lerobot/blob/main/lerobot/datasets/push_dataset_to_hub/xarm_pkl_format.py). -->
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
### Add a pretrained policy
|
| 295 |
+
|
| 296 |
+
Once you have trained a policy you may upload it to the Hugging Face hub using a hub id that looks like `${hf_user}/${repo_name}` (e.g. [lerobot/diffusion_pusht](https://huggingface.co/lerobot/diffusion_pusht)).
|
| 297 |
+
|
| 298 |
+
You first need to find the checkpoint folder located inside your experiment directory (e.g. `outputs/train/2024-05-05/20-21-12_aloha_act_default/checkpoints/002500`). Within that there is a `pretrained_model` directory which should contain:
|
| 299 |
+
- `config.json`: A serialized version of the policy configuration (following the policy's dataclass config).
|
| 300 |
+
- `model.safetensors`: A set of `torch.nn.Module` parameters, saved in [Hugging Face Safetensors](https://huggingface.co/docs/safetensors/index) format.
|
| 301 |
+
- `train_config.json`: A consolidated configuration containing all parameters used for training. The policy configuration should match `config.json` exactly. This is useful for anyone who wants to evaluate your policy or for reproducibility.
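As a quick sanity check before uploading, you can inspect these files, for instance with the following sketch (the checkpoint path is the example one above; it assumes the `safetensors` package is installed):
```python
import json
from pathlib import Path

from safetensors.torch import load_file

pretrained_dir = Path(
    "outputs/train/2024-05-05/20-21-12_aloha_act_default/checkpoints/002500/pretrained_model"
)

# Policy configuration used to instantiate the model.
config = json.loads((pretrained_dir / "config.json").read_text())
print(sorted(config)[:5])

# Model parameters saved in safetensors format.
state_dict = load_file(pretrained_dir / "model.safetensors")
print(f"{len(state_dict)} tensors, first key: {next(iter(state_dict))}")
```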
|
| 302 |
+
|
| 303 |
+
To upload these to the hub, run the following:
|
| 304 |
+
```bash
|
| 305 |
+
huggingface-cli upload ${hf_user}/${repo_name} path/to/pretrained_model
|
| 306 |
+
```
|
| 307 |
+
|
| 308 |
+
See [eval.py](https://github.com/huggingface/lerobot/blob/main/lerobot/scripts/eval.py) for an example of how other people may use your policy.
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
### Improve your code with profiling
|
| 312 |
+
|
| 313 |
+
An example of a code snippet to profile the evaluation of a policy:
|
| 314 |
+
```python
|
| 315 |
+
import torch
from torch.profiler import profile, record_function, ProfilerActivity
|
| 316 |
+
|
| 317 |
+
def trace_handler(prof):
|
| 318 |
+
prof.export_chrome_trace(f"tmp/trace_schedule_{prof.step_num}.json")
|
| 319 |
+
|
| 320 |
+
with profile(
|
| 321 |
+
activities=[ProfilerActivity.CPU, ProfilerActivity.CUDA],
|
| 322 |
+
schedule=torch.profiler.schedule(
|
| 323 |
+
wait=2,
|
| 324 |
+
warmup=2,
|
| 325 |
+
active=3,
|
| 326 |
+
),
|
| 327 |
+
on_trace_ready=trace_handler
|
| 328 |
+
) as prof:
|
| 329 |
+
with record_function("eval_policy"):
|
| 330 |
+
for i in range(num_episodes):
|
| 331 |
+
prof.step()
|
| 332 |
+
# insert code to profile, potentially whole body of eval_policy function
|
| 333 |
+
```
|
| 334 |
+
|
| 335 |
+
## Citation
|
| 336 |
+
|
| 337 |
+
If you want, you can cite this work with:
|
| 338 |
+
```bibtex
|
| 339 |
+
@misc{cadene2024lerobot,
|
| 340 |
+
author = {Cadene, Remi and Alibert, Simon and Soare, Alexander and Gallouedec, Quentin and Zouitine, Adil and Palma, Steven and Kooijmans, Pepijn and Aractingi, Michel and Shukor, Mustafa and Aubakirova, Dana and Russi, Martino and Capuano, Francesco and Pascale, Caroline and Choghari, Jade and Moss, Jess and Wolf, Thomas},
|
| 341 |
+
title = {LeRobot: State-of-the-art Machine Learning for Real-World Robotics in Pytorch},
|
| 342 |
+
howpublished = "\url{https://github.com/huggingface/lerobot}",
|
| 343 |
+
year = {2024}
|
| 344 |
+
}
|
| 345 |
+
```
|
| 346 |
+
|
| 347 |
+
Additionally, if you are using any of the particular policy architecture, pretrained models, or datasets, it is recommended to cite the original authors of the work as they appear below:
|
| 348 |
+
- [SmolVLA](https://arxiv.org/abs/2506.01844)
|
| 349 |
+
```bibtex
|
| 350 |
+
@article{shukor2025smolvla,
|
| 351 |
+
title={SmolVLA: A Vision-Language-Action Model for Affordable and Efficient Robotics},
|
| 352 |
+
author={Shukor, Mustafa and Aubakirova, Dana and Capuano, Francesco and Kooijmans, Pepijn and Palma, Steven and Zouitine, Adil and Aractingi, Michel and Pascal, Caroline and Russi, Martino and Marafioti, Andres and Alibert, Simon and Cord, Matthieu and Wolf, Thomas and Cadene, Remi},
|
| 353 |
+
journal={arXiv preprint arXiv:2506.01844},
|
| 354 |
+
year={2025}
|
| 355 |
+
}
|
| 356 |
+
```
|
| 357 |
+
|
| 358 |
+
- [Diffusion Policy](https://diffusion-policy.cs.columbia.edu)
|
| 359 |
+
```bibtex
|
| 360 |
+
@article{chi2024diffusionpolicy,
|
| 361 |
+
author = {Cheng Chi and Zhenjia Xu and Siyuan Feng and Eric Cousineau and Yilun Du and Benjamin Burchfiel and Russ Tedrake and Shuran Song},
|
| 362 |
+
title = {Diffusion Policy: Visuomotor Policy Learning via Action Diffusion},
|
| 363 |
+
journal = {The International Journal of Robotics Research},
|
| 364 |
+
year = {2024},
|
| 365 |
+
}
|
| 366 |
+
```
|
| 367 |
+
- [ACT or ALOHA](https://tonyzhaozh.github.io/aloha)
|
| 368 |
+
```bibtex
|
| 369 |
+
@article{zhao2023learning,
|
| 370 |
+
title={Learning fine-grained bimanual manipulation with low-cost hardware},
|
| 371 |
+
author={Zhao, Tony Z and Kumar, Vikash and Levine, Sergey and Finn, Chelsea},
|
| 372 |
+
journal={arXiv preprint arXiv:2304.13705},
|
| 373 |
+
year={2023}
|
| 374 |
+
}
|
| 375 |
+
```
|
| 376 |
+
|
| 377 |
+
- [TDMPC](https://www.nicklashansen.com/td-mpc/)
|
| 378 |
+
|
| 379 |
+
```bibtex
|
| 380 |
+
@inproceedings{Hansen2022tdmpc,
|
| 381 |
+
title={Temporal Difference Learning for Model Predictive Control},
|
| 382 |
+
author={Nicklas Hansen and Xiaolong Wang and Hao Su},
|
| 383 |
+
booktitle={ICML},
|
| 384 |
+
year={2022}
|
| 385 |
+
}
|
| 386 |
+
```
|
| 387 |
+
|
| 388 |
+
- [VQ-BeT](https://sjlee.cc/vq-bet/)
|
| 389 |
+
```bibtex
|
| 390 |
+
@article{lee2024behavior,
|
| 391 |
+
title={Behavior generation with latent actions},
|
| 392 |
+
author={Lee, Seungjae and Wang, Yibin and Etukuru, Haritheja and Kim, H Jin and Shafiullah, Nur Muhammad Mahi and Pinto, Lerrel},
|
| 393 |
+
journal={arXiv preprint arXiv:2403.03181},
|
| 394 |
+
year={2024}
|
| 395 |
+
}
|
| 396 |
+
```
|
| 397 |
+
|
| 398 |
+
|
| 399 |
+
- [HIL-SERL](https://hil-serl.github.io/)
|
| 400 |
+
```bibtex
|
| 401 |
+
@Article{luo2024hilserl,
|
| 402 |
+
title={Precise and Dexterous Robotic Manipulation via Human-in-the-Loop Reinforcement Learning},
|
| 403 |
+
author={Jianlan Luo and Charles Xu and Jeffrey Wu and Sergey Levine},
|
| 404 |
+
year={2024},
|
| 405 |
+
eprint={2410.21845},
|
| 406 |
+
archivePrefix={arXiv},
|
| 407 |
+
primaryClass={cs.RO}
|
| 408 |
+
}
|
| 409 |
+
```
|
| 410 |
+
## Star History
|
| 411 |
+
|
| 412 |
+
[](https://star-history.com/#huggingface/lerobot&Timeline)
|
lerobot/benchmarks/video/README.md
ADDED
|
@@ -0,0 +1,271 @@
| 1 |
+
# Video benchmark
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
## Questions
|
| 5 |
+
What is the optimal trade-off between:
|
| 6 |
+
- minimizing loading time with random access,
|
| 7 |
+
- minimizing memory space on disk,
|
| 8 |
+
- maximizing success rate of policies,
|
| 9 |
+
- compatibility across devices/platforms for decoding videos (e.g. video players, web browsers).
|
| 10 |
+
|
| 11 |
+
How to encode videos?
|
| 12 |
+
- Which video codec (`-vcodec`) to use? h264, h265, AV1?
|
| 13 |
+
- What pixel format to use (`-pix_fmt`)? `yuv444p` or `yuv420p`?
|
| 14 |
+
- How much compression (`-crf`)? No compression with `0`, intermediate compression with `25` or extreme with `50+`?
|
| 15 |
+
- Which frequency to choose for key frames (`-g`)? A key frame every `10` frames?
|
| 16 |
+
|
| 17 |
+
How to decode videos?
|
| 18 |
+
- Which `decoder`? `torchvision`, `torchaudio`, `ffmpegio`, `decord`, or `nvc`?
|
| 19 |
+
- What scenarios to use for requesting timestamps during the benchmark? (`timestamps_mode`)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
## Variables
|
| 23 |
+
**Image content & size**
|
| 24 |
+
We don't expect the same optimal settings for a dataset of images from a simulation, or from the real world in an apartment, in a factory, or outdoors, or with lots of moving objects in the scene, etc. Similarly, loading times might not vary linearly with the image size (resolution).
|
| 25 |
+
For these reasons, we run this benchmark on four representative datasets:
|
| 26 |
+
- `lerobot/pusht_image`: (96 x 96 pixels) simulation with simple geometric shapes, fixed camera.
|
| 27 |
+
- `aliberts/aloha_mobile_shrimp_image`: (480 x 640 pixels) real-world indoor, moving camera.
|
| 28 |
+
- `aliberts/paris_street`: (720 x 1280 pixels) real-world outdoor, moving camera.
|
| 29 |
+
- `aliberts/kitchen`: (1080 x 1920 pixels) real-world indoor, fixed camera.
|
| 30 |
+
|
| 31 |
+
Note: The datasets used for this benchmark need to be image datasets, not video datasets.
|
| 32 |
+
|
| 33 |
+
**Data augmentations**
|
| 34 |
+
We might revisit this benchmark and find better settings if we train our policies with various data augmentations to make them more robust (e.g. robust to color changes, compression, etc.).
|
| 35 |
+
|
| 36 |
+
### Encoding parameters
|
| 37 |
+
| parameter | values |
|
| 38 |
+
|-------------|--------------------------------------------------------------|
|
| 39 |
+
| **vcodec** | `libx264`, `libx265`, `libsvtav1` |
|
| 40 |
+
| **pix_fmt** | `yuv444p`, `yuv420p` |
|
| 41 |
+
| **g** | `1`, `2`, `3`, `4`, `5`, `6`, `10`, `15`, `20`, `40`, `None` |
|
| 42 |
+
| **crf** | `0`, `5`, `10`, `15`, `20`, `25`, `30`, `40`, `50`, `None` |
|
| 43 |
+
|
| 44 |
+
Note that the `crf` value might be interpreted differently by different video codecs. In other words, the same value used with one codec doesn't necessarily translate into the same compression level with another codec. In fact, the default value (`None`) isn't the same across video codecs. Importantly, the same is true for many other ffmpeg arguments, like `g`, which specifies the frequency of key frames.
|
| 45 |
+
|
| 46 |
+
For a comprehensive list and documentation of these parameters, see the ffmpeg documentation depending on the video codec used:
|
| 47 |
+
- h264: https://trac.ffmpeg.org/wiki/Encode/H.264
|
| 48 |
+
- h265: https://trac.ffmpeg.org/wiki/Encode/H.265
|
| 49 |
+
- AV1: https://trac.ffmpeg.org/wiki/Encode/AV1
|
| 50 |
+
|
| 51 |
+
### Decoding parameters
|
| 52 |
+
**Decoder**
|
| 53 |
+
We tested two video decoding backends from torchvision:
|
| 54 |
+
- `pyav`
|
| 55 |
+
- `video_reader` (requires building torchvision from source)
|
| 56 |
+
|
| 57 |
+
**Requested timestamps**
|
| 58 |
+
Given the way video decoding works, once a keyframe has been loaded, the decoding of subsequent frames is fast.
|
| 59 |
+
This of course is affected by the `-g` parameter during encoding, which specifies the frequency of the keyframes. Given our typical use cases in robotics policies which might request a few timestamps in different random places, we want to replicate these use cases with the following scenarios:
|
| 60 |
+
- `1_frame`: 1 frame,
|
| 61 |
+
- `2_frames`: 2 consecutive frames (e.g. `[t, t + 1 / fps]`),
|
| 62 |
+
- `6_frames`: 6 consecutive frames (e.g. `[t + i / fps for i in range(6)]`)
|
| 63 |
+
|
| 64 |
+
Note that this differs significantly from a typical use case like watching a movie, in which every frame is loaded sequentially from the beginning to the end and it's acceptable to have big values for `-g`.
|
| 65 |
+
|
| 66 |
+
Additionally, because some policies might request single timestamps that are a few frames apart, we also have the following scenario:
|
| 67 |
+
- `2_frames_4_space`: 2 frames with 4 consecutive frames of spacing in between (e.g. `[t, t + 5 / fps]`),
|
| 68 |
+
|
| 69 |
+
However, due to how video decoding is implemented with `pyav`, we don't have access to an accurate seek so in practice this scenario is essentially the same as `6_frames` since all 6 frames between `t` and `t + 5 / fps` will be decoded.
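For reference, the benchmark script builds the requested timestamps for these scenarios from a randomly picked frame index, essentially as follows (this mirrors the `sample_timestamps` helper in `run_video_benchmark.py`):
```python
import random

def sample_timestamps(timestamps_mode: str, ep_num_images: int, fps: int) -> list[float]:
    # Start at 5 so that 2_frames_4_space and 6_frames can look back far enough.
    idx = random.randint(5, ep_num_images - 1)
    frame_indexes = {
        "1_frame": [idx],
        "2_frames": [idx - 1, idx],
        "2_frames_4_space": [idx - 5, idx],
        "6_frames": [idx - i for i in range(6)][::-1],
    }[timestamps_mode]
    return [i / fps for i in frame_indexes]
```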
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
## Metrics
|
| 73 |
+
**Data compression ratio (lower is better)**
|
| 74 |
+
`video_images_size_ratio` is the ratio of the memory space on disk taken by the encoded video over the memory space taken by the original images. For instance, `video_images_size_ratio=25%` means that the video takes 4 times less memory space on disk compared to the original images.
|
| 75 |
+
|
| 76 |
+
**Loading time ratio (lower is better)**
|
| 77 |
+
`video_images_load_time_ratio` is the ratio of the time it takes to decode frames from the video at a given timestamps over the time it takes to load the exact same original images. Lower is better. For instance, `video_images_load_time_ratio=200%` means that decoding from video is 2 times slower than loading the original images.
|
| 78 |
+
|
| 79 |
+
**Average Mean Square Error (lower is better)**
|
| 80 |
+
`avg_mse` is the average mean square error between each decoded frame and its corresponding original image over all requested timestamps, and also divided by the number of pixels in the image to be comparable when switching to different image sizes.
|
| 81 |
+
|
| 82 |
+
**Average Peak Signal to Noise Ratio (higher is better)**
|
| 83 |
+
`avg_psnr` measures the ratio between the maximum possible power of a signal and the power of corrupting noise that affects the fidelity of its representation. Higher PSNR indicates better quality.
|
| 84 |
+
|
| 85 |
+
**Average Structural Similarity Index Measure (higher is better)**
|
| 86 |
+
`avg_ssim` evaluates the perceived quality of images by comparing luminance, contrast, and structure. SSIM values range from -1 to 1, where 1 indicates perfect similarity.
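In practice, these three metrics are computed per decoded frame against its original image with `skimage`, roughly like this (frames are channel-first float arrays scaled to `[0, 1]`, as in the benchmark script):
```python
import numpy as np
from skimage.metrics import mean_squared_error, peak_signal_noise_ratio, structural_similarity

def frame_quality(original: np.ndarray, decoded: np.ndarray) -> dict:
    # original and decoded are (c, h, w) float32 arrays in [0, 1]
    return {
        "mse": mean_squared_error(original, decoded),
        "psnr": peak_signal_noise_ratio(original, decoded, data_range=1.0),
        "ssim": structural_similarity(original, decoded, data_range=1.0, channel_axis=0),
    }
```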
|
| 87 |
+
|
| 88 |
+
One aspect that can't be measured here with those metrics is the compatibility of the encoding across platforms, in particular on web browser, for visualization purposes.
|
| 89 |
+
h264, h265 and AV1 are all commonly used codecs and should not pose an issue. However, the chroma subsampling (`pix_fmt`) format might affect compatibility:
|
| 90 |
+
- `yuv420p` is more widely supported across various platforms, including web browsers.
|
| 91 |
+
- `yuv444p` offers higher color fidelity but might not be supported as broadly.
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
<!-- **Loss of a pretrained policy (higher is better)** (not available)
|
| 95 |
+
`loss_pretrained` is the result of evaluating with the selected encoding/decoding settings a policy pretrained on original images. It is easier to understand than `avg_l2_error`.
|
| 96 |
+
|
| 97 |
+
**Success rate after retraining (higher is better)** (not available)
|
| 98 |
+
`success_rate` is the result of training and evaluating a policy with the selected encoding/decoding settings. It is the most difficult metric to get but also the very best. -->
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
## How the benchmark works
|
| 102 |
+
The benchmark evaluates both encoding and decoding of video frames on the first episode of each dataset.
|
| 103 |
+
|
| 104 |
+
**Encoding:** for each `vcodec` and `pix_fmt` pair, we use a default value for `g` and `crf` upon which we change a single value (either `g` or `crf`) to one of the specified values (we don't test every combination of those as this would be computationally too heavy).
|
| 105 |
+
This gives a unique set of encoding parameters which is used to encode the episode.
|
| 106 |
+
|
| 107 |
+
**Decoding:** Then, for each of those unique encodings, we iterate through every combination of the decoding parameters `backend` and `timestamps_mode`. For each of them, we record the metrics of a number of samples (given by `--num-samples`). This is parallelized for efficiency and the number of processes can be controlled with `--num-workers`. Ideally, it's best to have a `--num-samples` that is divisible by `--num-workers`.
|
| 108 |
+
|
| 109 |
+
Intermediate results are saved in CSV tables for each `vcodec` and `pix_fmt` combination.
|
| 110 |
+
These are then all concatenated to a single table ready for analysis.
|
| 111 |
+
|
| 112 |
+
## Caveats
|
| 113 |
+
We tried to measure the most impactful parameters for both encoding and decoding. However, for computational reasons we can't test out every combination.
|
| 114 |
+
|
| 115 |
+
Additional encoding parameters exist that are not included in this benchmark. In particular:
|
| 116 |
+
- `-preset` which allows for selecting encoding presets. This represents a collection of options that will provide a certain encoding speed to compression ratio. By leaving this parameter unspecified, it is considered to be `medium` for libx264 and libx265 and `8` for libsvtav1.
|
| 117 |
+
- `-tune`, which allows optimizing the encoding for certain aspects (e.g. film quality, fast decoding, etc.).
|
| 118 |
+
|
| 119 |
+
See the documentation mentioned above for more detailed info on these settings and for a more comprehensive list of other parameters.
|
| 120 |
+
|
| 121 |
+
Similarly on the decoding side, other decoders exist but are not implemented in our current benchmark. To name a few:
|
| 122 |
+
- `torchaudio`
|
| 123 |
+
- `ffmpegio`
|
| 124 |
+
- `decord`
|
| 125 |
+
- `nvc`
|
| 126 |
+
|
| 127 |
+
Note as well that since we are mostly interested in the performance at decoding time (also because encoding is done only once before uploading a dataset), we did not measure encoding times nor have any metrics regarding encoding.
|
| 128 |
+
However, besides the necessity to build ffmpeg from source, encoding did not pose any issue and it didn't take a significant amount of time during this benchmark.
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
## Install
|
| 132 |
+
Building ffmpeg from source is required to include libx265 and libaom/libsvtav1 (av1) video codecs ([compilation guide](https://trac.ffmpeg.org/wiki/CompilationGuide/Ubuntu)).
|
| 133 |
+
|
| 134 |
+
**Note:** While you still need to build torchvision with a conda-installed `ffmpeg<4.3` to use the `video_reader` decoder (as described in [#220](https://github.com/huggingface/lerobot/pull/220)), you also need another ffmpeg version custom-built with all the video codecs for encoding. For the benchmark script to use that version, you can prepend the benchmark command with `PATH="$HOME/bin:$PATH"`, which is where ffmpeg should be built.
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
## Adding a video decoder
|
| 138 |
+
Right now, we're only benchmarking the two video decoders available with torchvision: `pyav` and `video_reader`.
|
| 139 |
+
You can easily add a new decoder to benchmark by adding it to this function in the script:
|
| 140 |
+
```diff
|
| 141 |
+
def decode_video_frames(
|
| 142 |
+
video_path: str,
|
| 143 |
+
timestamps: list[float],
|
| 144 |
+
tolerance_s: float,
|
| 145 |
+
backend: str,
|
| 146 |
+
) -> torch.Tensor:
|
| 147 |
+
if backend in ["pyav", "video_reader"]:
|
| 148 |
+
return decode_video_frames_torchvision(
|
| 149 |
+
video_path, timestamps, tolerance_s, backend
|
| 150 |
+
)
|
| 151 |
+
+ elif backend == "your_decoder":
|
| 152 |
+
+ return your_decoder_function(
|
| 153 |
+
+ video_path, timestamps, tolerance_s, backend
|
| 154 |
+
+ )
|
| 155 |
+
else:
|
| 156 |
+
raise NotImplementedError(backend)
|
| 157 |
+
```
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
## Example
|
| 161 |
+
For a quick run, you can try these parameters:
|
| 162 |
+
```bash
|
| 163 |
+
python benchmark/video/run_video_benchmark.py \
|
| 164 |
+
--output-dir outputs/video_benchmark \
|
| 165 |
+
--repo-ids \
|
| 166 |
+
lerobot/pusht_image \
|
| 167 |
+
aliberts/aloha_mobile_shrimp_image \
|
| 168 |
+
--vcodec libx264 libx265 \
|
| 169 |
+
--pix-fmt yuv444p yuv420p \
|
| 170 |
+
--g 2 20 None \
|
| 171 |
+
--crf 10 40 None \
|
| 172 |
+
--timestamps-modes 1_frame 2_frames \
|
| 173 |
+
--backends pyav video_reader \
|
| 174 |
+
--num-samples 5 \
|
| 175 |
+
--num-workers 5 \
|
| 176 |
+
--save-frames 0
|
| 177 |
+
```
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
## Results
|
| 181 |
+
|
| 182 |
+
### Reproduce
|
| 183 |
+
We ran the benchmark with the following parameters:
|
| 184 |
+
```bash
|
| 185 |
+
# h264 and h265 encodings
|
| 186 |
+
python benchmark/video/run_video_benchmark.py \
|
| 187 |
+
--output-dir outputs/video_benchmark \
|
| 188 |
+
--repo-ids \
|
| 189 |
+
lerobot/pusht_image \
|
| 190 |
+
aliberts/aloha_mobile_shrimp_image \
|
| 191 |
+
aliberts/paris_street \
|
| 192 |
+
aliberts/kitchen \
|
| 193 |
+
--vcodec libx264 libx265 \
|
| 194 |
+
--pix-fmt yuv444p yuv420p \
|
| 195 |
+
--g 1 2 3 4 5 6 10 15 20 40 None \
|
| 196 |
+
--crf 0 5 10 15 20 25 30 40 50 None \
|
| 197 |
+
--timestamps-modes 1_frame 2_frames 6_frames \
|
| 198 |
+
--backends pyav video_reader \
|
| 199 |
+
--num-samples 50 \
|
| 200 |
+
--num-workers 5 \
|
| 201 |
+
--save-frames 1
|
| 202 |
+
|
| 203 |
+
# av1 encoding (only compatible with yuv420p and pyav decoder)
|
| 204 |
+
python benchmark/video/run_video_benchmark.py \
|
| 205 |
+
--output-dir outputs/video_benchmark \
|
| 206 |
+
--repo-ids \
|
| 207 |
+
lerobot/pusht_image \
|
| 208 |
+
aliberts/aloha_mobile_shrimp_image \
|
| 209 |
+
aliberts/paris_street \
|
| 210 |
+
aliberts/kitchen \
|
| 211 |
+
--vcodec libsvtav1 \
|
| 212 |
+
--pix-fmt yuv420p \
|
| 213 |
+
--g 1 2 3 4 5 6 10 15 20 40 None \
|
| 214 |
+
--crf 0 5 10 15 20 25 30 40 50 None \
|
| 215 |
+
--timestamps-modes 1_frame 2_frames 6_frames \
|
| 216 |
+
--backends pyav \
|
| 217 |
+
--num-samples 50 \
|
| 218 |
+
--num-workers 5 \
|
| 219 |
+
--save-frames 1
|
| 220 |
+
```
|
| 221 |
+
|
| 222 |
+
The full results are available [here](https://docs.google.com/spreadsheets/d/1OYJB43Qu8fC26k_OyoMFgGBBKfQRCi4BIuYitQnq3sw/edit?usp=sharing)
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
### Parameters selected for LeRobotDataset
|
| 226 |
+
Considering these results, we chose what we think is the best set of encoding parameters:
|
| 227 |
+
- vcodec: `libsvtav1`
|
| 228 |
+
- pix-fmt: `yuv420p`
|
| 229 |
+
- g: `2`
|
| 230 |
+
- crf: `30`
|
| 231 |
+
|
| 232 |
+
Since we're using av1 encoding, we're choosing the `pyav` decoder as `video_reader` does not support it (and `pyav` doesn't require a custom build of `torchvision`).
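As a sketch of what these settings correspond to in code (paths and fps are illustrative), this is how an episode of images is encoded with the `encode_video_frames` helper used by the benchmark script:
```python
from pathlib import Path

from lerobot.datasets.video_utils import encode_video_frames

# Encode a folder of frame_XXXXXX.png images with the selected settings.
encode_video_frames(
    imgs_dir=Path("outputs/images/episode_000000"),        # illustrative input folder
    video_path=Path("outputs/videos/episode_000000.mp4"),  # illustrative output path
    fps=30,
    vcodec="libsvtav1",
    pix_fmt="yuv420p",
    g=2,
    crf=30,
    overwrite=True,
)
```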
|
| 233 |
+
|
| 234 |
+
### Summary
|
| 235 |
+
|
| 236 |
+
These tables show the results for `g=2` and `crf=30`, using `timestamps-modes=6_frames` and `backend=pyav`.
|
| 237 |
+
|
| 238 |
+
| video_images_size_ratio | vcodec | pix_fmt | | | |
|
| 239 |
+
|------------------------------------|------------|---------|-----------|-----------|-----------|
|
| 240 |
+
| | libx264 | | libx265 | | libsvtav1 |
|
| 241 |
+
| repo_id | yuv420p | yuv444p | yuv420p | yuv444p | yuv420p |
|
| 242 |
+
| lerobot/pusht_image | **16.97%** | 17.58% | 18.57% | 18.86% | 22.06% |
|
| 243 |
+
| aliberts/aloha_mobile_shrimp_image | 2.14% | 2.11% | 1.38% | **1.37%** | 5.59% |
|
| 244 |
+
| aliberts/paris_street | 2.12% | 2.13% | **1.54%** | **1.54%** | 4.43% |
|
| 245 |
+
| aliberts/kitchen | 1.40% | 1.39% | **1.00%** | **1.00%** | 2.52% |
|
| 246 |
+
|
| 247 |
+
| video_images_load_time_ratio | vcodec | pix_fmt | | | |
|
| 248 |
+
|------------------------------------|---------|---------|----------|---------|-----------|
|
| 249 |
+
| | libx264 | | libx265 | | libsvtav1 |
|
| 250 |
+
| repo_id | yuv420p | yuv444p | yuv420p | yuv444p | yuv420p |
|
| 251 |
+
| lerobot/pusht_image | 6.45 | 5.19 | **1.90** | 2.12 | 2.47 |
|
| 252 |
+
| aliberts/aloha_mobile_shrimp_image | 11.80 | 7.92 | 0.71 | 0.85 | **0.48** |
|
| 253 |
+
| aliberts/paris_street | 2.21 | 2.05 | 0.36 | 0.49 | **0.30** |
|
| 254 |
+
| aliberts/kitchen | 1.46 | 1.46 | 0.28 | 0.51 | **0.26** |
|
| 255 |
+
|
| 256 |
+
| | | vcodec | pix_fmt | | | |
|
| 257 |
+
|------------------------------------|----------|----------|--------------|----------|-----------|--------------|
|
| 258 |
+
| | | libx264 | | libx265 | | libsvtav1 |
|
| 259 |
+
| repo_id | metric | yuv420p | yuv444p | yuv420p | yuv444p | yuv420p |
|
| 260 |
+
| lerobot/pusht_image | avg_mse | 2.90E-04 | **2.03E-04** | 3.13E-04 | 2.29E-04 | 2.19E-04 |
|
| 261 |
+
| | avg_psnr | 35.44 | 37.07 | 35.49 | **37.30** | 37.20 |
|
| 262 |
+
| | avg_ssim | 98.28% | **98.85%** | 98.31% | 98.84% | 98.72% |
|
| 263 |
+
| aliberts/aloha_mobile_shrimp_image | avg_mse | 2.76E-04 | 2.59E-04 | 3.17E-04 | 3.06E-04 | **1.30E-04** |
|
| 264 |
+
| | avg_psnr | 35.91 | 36.21 | 35.88 | 36.09 | **40.17** |
|
| 265 |
+
| | avg_ssim | 95.19% | 95.18% | 95.00% | 95.05% | **97.73%** |
|
| 266 |
+
| aliberts/paris_street | avg_mse | 6.89E-04 | 6.70E-04 | 4.03E-03 | 4.02E-03 | **3.09E-04** |
|
| 267 |
+
| | avg_psnr | 33.48 | 33.68 | 32.05 | 32.15 | **35.40** |
|
| 268 |
+
| | avg_ssim | 93.76% | 93.75% | 89.46% | 89.46% | **95.46%** |
|
| 269 |
+
| aliberts/kitchen | avg_mse | 2.50E-04 | 2.24E-04 | 4.28E-04 | 4.18E-04 | **1.53E-04** |
|
| 270 |
+
| | avg_psnr | 36.73 | 37.33 | 36.56 | 36.75 | **39.12** |
|
| 271 |
+
| | avg_ssim | 95.47% | 95.58% | 95.52% | 95.53% | **96.82%** |
|
lerobot/benchmarks/video/capture_camera_feed.py
ADDED
|
@@ -0,0 +1,102 @@
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
|
| 3 |
+
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
|
| 4 |
+
#
|
| 5 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 6 |
+
# you may not use this file except in compliance with the License.
|
| 7 |
+
# You may obtain a copy of the License at
|
| 8 |
+
#
|
| 9 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 10 |
+
#
|
| 11 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 12 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 13 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 14 |
+
# See the License for the specific language governing permissions and
|
| 15 |
+
# limitations under the License.
|
| 16 |
+
"""Capture video feed from a camera as raw images."""
|
| 17 |
+
|
| 18 |
+
import argparse
|
| 19 |
+
import datetime as dt
|
| 20 |
+
import os
|
| 21 |
+
import time
|
| 22 |
+
from pathlib import Path
|
| 23 |
+
|
| 24 |
+
import cv2
|
| 25 |
+
import rerun as rr
|
| 26 |
+
|
| 27 |
+
# see https://rerun.io/docs/howto/visualization/limit-ram
|
| 28 |
+
RERUN_MEMORY_LIMIT = os.getenv("LEROBOT_RERUN_MEMORY_LIMIT", "5%")
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def display_and_save_video_stream(output_dir: Path, fps: int, width: int, height: int, duration: int):
|
| 32 |
+
rr.init("lerobot_capture_camera_feed")
|
| 33 |
+
rr.spawn(memory_limit=RERUN_MEMORY_LIMIT)
|
| 34 |
+
|
| 35 |
+
now = dt.datetime.now()
|
| 36 |
+
capture_dir = output_dir / f"{now:%Y-%m-%d}" / f"{now:%H-%M-%S}"
|
| 37 |
+
if not capture_dir.exists():
|
| 38 |
+
capture_dir.mkdir(parents=True, exist_ok=True)
|
| 39 |
+
|
| 40 |
+
# Opens the default webcam
|
| 41 |
+
cap = cv2.VideoCapture(0)
|
| 42 |
+
if not cap.isOpened():
|
| 43 |
+
print("Error: Could not open video stream.")
|
| 44 |
+
return
|
| 45 |
+
|
| 46 |
+
cap.set(cv2.CAP_PROP_FPS, fps)
|
| 47 |
+
cap.set(cv2.CAP_PROP_FRAME_WIDTH, width)
|
| 48 |
+
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, height)
|
| 49 |
+
|
| 50 |
+
frame_index = 0
|
| 51 |
+
start_time = time.time()
|
| 52 |
+
while time.time() - start_time < duration:
|
| 53 |
+
ret, frame = cap.read()
|
| 54 |
+
|
| 55 |
+
if not ret:
|
| 56 |
+
print("Error: Could not read frame.")
|
| 57 |
+
break
|
| 58 |
+
rr.log("video/stream", rr.Image(frame), static=True)
|
| 59 |
+
cv2.imwrite(str(capture_dir / f"frame_{frame_index:06d}.png"), frame)
|
| 60 |
+
frame_index += 1
|
| 61 |
+
|
| 62 |
+
# Release the capture
|
| 63 |
+
cap.release()
|
| 64 |
+
|
| 65 |
+
# TODO(Steven): Add a graceful shutdown via a close() method for the Viewer context, though not currently supported in the Rerun API.
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
if __name__ == "__main__":
|
| 69 |
+
parser = argparse.ArgumentParser()
|
| 70 |
+
|
| 71 |
+
parser.add_argument(
|
| 72 |
+
"--output-dir",
|
| 73 |
+
type=Path,
|
| 74 |
+
default=Path("outputs/cam_capture/"),
|
| 75 |
+
help="Directory where the capture images are written. A subfolder named with the current date & time will be created inside it for each capture.",
|
| 76 |
+
)
|
| 77 |
+
parser.add_argument(
|
| 78 |
+
"--fps",
|
| 79 |
+
type=int,
|
| 80 |
+
default=30,
|
| 81 |
+
help="Frames Per Second of the capture.",
|
| 82 |
+
)
|
| 83 |
+
parser.add_argument(
|
| 84 |
+
"--width",
|
| 85 |
+
type=int,
|
| 86 |
+
default=1280,
|
| 87 |
+
help="Width of the captured images.",
|
| 88 |
+
)
|
| 89 |
+
parser.add_argument(
|
| 90 |
+
"--height",
|
| 91 |
+
type=int,
|
| 92 |
+
default=720,
|
| 93 |
+
help="Height of the captured images.",
|
| 94 |
+
)
|
| 95 |
+
parser.add_argument(
|
| 96 |
+
"--duration",
|
| 97 |
+
type=int,
|
| 98 |
+
default=20,
|
| 99 |
+
help="Duration in seconds for which the video stream should be captured.",
|
| 100 |
+
)
|
| 101 |
+
args = parser.parse_args()
|
| 102 |
+
display_and_save_video_stream(**vars(args))
|
lerobot/benchmarks/video/run_video_benchmark.py
ADDED
|
@@ -0,0 +1,490 @@
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
|
| 3 |
+
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
|
| 4 |
+
#
|
| 5 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 6 |
+
# you may not use this file except in compliance with the License.
|
| 7 |
+
# You may obtain a copy of the License at
|
| 8 |
+
#
|
| 9 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 10 |
+
#
|
| 11 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 12 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 13 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 14 |
+
# See the License for the specific language governing permissions and
|
| 15 |
+
# limitations under the License.
|
| 16 |
+
"""Assess the performance of video decoding in various configurations.
|
| 17 |
+
|
| 18 |
+
This script will benchmark different video encoding and decoding parameters.
|
| 19 |
+
See the provided README.md or run `python benchmark/video/run_video_benchmark.py --help` for usage info.
|
| 20 |
+
"""
|
| 21 |
+
|
| 22 |
+
import argparse
|
| 23 |
+
import datetime as dt
|
| 24 |
+
import random
|
| 25 |
+
import shutil
|
| 26 |
+
from collections import OrderedDict
|
| 27 |
+
from concurrent.futures import ThreadPoolExecutor, as_completed
|
| 28 |
+
from pathlib import Path
|
| 29 |
+
|
| 30 |
+
import einops
|
| 31 |
+
import numpy as np
|
| 32 |
+
import pandas as pd
|
| 33 |
+
import PIL
|
| 34 |
+
import torch
|
| 35 |
+
from skimage.metrics import mean_squared_error, peak_signal_noise_ratio, structural_similarity
|
| 36 |
+
from tqdm import tqdm
|
| 37 |
+
|
| 38 |
+
from lerobot.datasets.lerobot_dataset import LeRobotDataset
|
| 39 |
+
from lerobot.datasets.video_utils import (
|
| 40 |
+
decode_video_frames_torchvision,
|
| 41 |
+
encode_video_frames,
|
| 42 |
+
)
|
| 43 |
+
from lerobot.utils.benchmark import TimeBenchmark
|
| 44 |
+
|
| 45 |
+
BASE_ENCODING = OrderedDict(
|
| 46 |
+
[
|
| 47 |
+
("vcodec", "libx264"),
|
| 48 |
+
("pix_fmt", "yuv444p"),
|
| 49 |
+
("g", 2),
|
| 50 |
+
("crf", None),
|
| 51 |
+
# TODO(aliberts): Add fastdecode
|
| 52 |
+
# ("fastdecode", 0),
|
| 53 |
+
]
|
| 54 |
+
)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
# TODO(rcadene, aliberts): move to `utils.py` folder when we want to refactor
|
| 58 |
+
def parse_int_or_none(value) -> int | None:
|
| 59 |
+
if value.lower() == "none":
|
| 60 |
+
return None
|
| 61 |
+
try:
|
| 62 |
+
return int(value)
|
| 63 |
+
except ValueError as e:
|
| 64 |
+
raise argparse.ArgumentTypeError(f"Invalid int or None: {value}") from e
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def check_datasets_formats(repo_ids: list) -> None:
|
| 68 |
+
for repo_id in repo_ids:
|
| 69 |
+
dataset = LeRobotDataset(repo_id)
|
| 70 |
+
if len(dataset.meta.video_keys) > 0:
|
| 71 |
+
raise ValueError(
|
| 72 |
+
f"Use only image dataset for running this benchmark. Video dataset provided: {repo_id}"
|
| 73 |
+
)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def get_directory_size(directory: Path) -> int:
|
| 77 |
+
total_size = 0
|
| 78 |
+
for item in directory.rglob("*"):
|
| 79 |
+
if item.is_file():
|
| 80 |
+
total_size += item.stat().st_size
|
| 81 |
+
return total_size
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def load_original_frames(imgs_dir: Path, timestamps: list[float], fps: int) -> torch.Tensor:
|
| 85 |
+
frames = []
|
| 86 |
+
for ts in timestamps:
|
| 87 |
+
idx = int(ts * fps)
|
| 88 |
+
frame = PIL.Image.open(imgs_dir / f"frame_{idx:06d}.png")
|
| 89 |
+
frame = torch.from_numpy(np.array(frame))
|
| 90 |
+
frame = frame.type(torch.float32) / 255
|
| 91 |
+
frame = einops.rearrange(frame, "h w c -> c h w")
|
| 92 |
+
frames.append(frame)
|
| 93 |
+
return torch.stack(frames)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def save_decoded_frames(
|
| 97 |
+
imgs_dir: Path, save_dir: Path, frames: torch.Tensor, timestamps: list[float], fps: int
|
| 98 |
+
) -> None:
|
| 99 |
+
if save_dir.exists() and len(list(save_dir.glob("frame_*.png"))) == len(timestamps):
|
| 100 |
+
return
|
| 101 |
+
|
| 102 |
+
save_dir.mkdir(parents=True, exist_ok=True)
|
| 103 |
+
for i, ts in enumerate(timestamps):
|
| 104 |
+
idx = int(ts * fps)
|
| 105 |
+
frame_hwc = (frames[i].permute((1, 2, 0)) * 255).type(torch.uint8).cpu().numpy()
|
| 106 |
+
PIL.Image.fromarray(frame_hwc).save(save_dir / f"frame_{idx:06d}_decoded.png")
|
| 107 |
+
shutil.copyfile(imgs_dir / f"frame_{idx:06d}.png", save_dir / f"frame_{idx:06d}_original.png")
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def save_first_episode(imgs_dir: Path, dataset: LeRobotDataset) -> None:
|
| 111 |
+
ep_num_images = dataset.episode_data_index["to"][0].item()
|
| 112 |
+
if imgs_dir.exists() and len(list(imgs_dir.glob("frame_*.png"))) == ep_num_images:
|
| 113 |
+
return
|
| 114 |
+
|
| 115 |
+
imgs_dir.mkdir(parents=True, exist_ok=True)
|
| 116 |
+
hf_dataset = dataset.hf_dataset.with_format(None)
|
| 117 |
+
|
| 118 |
+
# We only save images from the first camera
|
| 119 |
+
img_keys = [key for key in hf_dataset.features if key.startswith("observation.image")]
|
| 120 |
+
imgs_dataset = hf_dataset.select_columns(img_keys[0])
|
| 121 |
+
|
| 122 |
+
for i, item in enumerate(
|
| 123 |
+
tqdm(imgs_dataset, desc=f"saving {dataset.repo_id} first episode images", leave=False)
|
| 124 |
+
):
|
| 125 |
+
img = item[img_keys[0]]
|
| 126 |
+
img.save(str(imgs_dir / f"frame_{i:06d}.png"), quality=100)
|
| 127 |
+
|
| 128 |
+
if i >= ep_num_images - 1:
|
| 129 |
+
break
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def sample_timestamps(timestamps_mode: str, ep_num_images: int, fps: int) -> list[float]:
|
| 133 |
+
# Start at 5 to allow for 2_frames_4_space and 6_frames
|
| 134 |
+
idx = random.randint(5, ep_num_images - 1)
|
| 135 |
+
match timestamps_mode:
|
| 136 |
+
case "1_frame":
|
| 137 |
+
frame_indexes = [idx]
|
| 138 |
+
case "2_frames":
|
| 139 |
+
frame_indexes = [idx - 1, idx]
|
| 140 |
+
case "2_frames_4_space":
|
| 141 |
+
frame_indexes = [idx - 5, idx]
|
| 142 |
+
case "6_frames":
|
| 143 |
+
frame_indexes = [idx - i for i in range(6)][::-1]
|
| 144 |
+
case _:
|
| 145 |
+
raise ValueError(timestamps_mode)
|
| 146 |
+
|
| 147 |
+
return [idx / fps for idx in frame_indexes]
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def decode_video_frames(
|
| 151 |
+
video_path: str,
|
| 152 |
+
timestamps: list[float],
|
| 153 |
+
tolerance_s: float,
|
| 154 |
+
backend: str,
|
| 155 |
+
) -> torch.Tensor:
|
| 156 |
+
if backend in ["pyav", "video_reader"]:
|
| 157 |
+
return decode_video_frames_torchvision(video_path, timestamps, tolerance_s, backend)
|
| 158 |
+
else:
|
| 159 |
+
raise NotImplementedError(backend)
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
def benchmark_decoding(
|
| 163 |
+
imgs_dir: Path,
|
| 164 |
+
video_path: Path,
|
| 165 |
+
timestamps_mode: str,
|
| 166 |
+
backend: str,
|
| 167 |
+
ep_num_images: int,
|
| 168 |
+
fps: int,
|
| 169 |
+
num_samples: int = 50,
|
| 170 |
+
num_workers: int = 4,
|
| 171 |
+
save_frames: bool = False,
|
| 172 |
+
) -> dict:
|
| 173 |
+
def process_sample(sample: int):
|
| 174 |
+
time_benchmark = TimeBenchmark()
|
| 175 |
+
timestamps = sample_timestamps(timestamps_mode, ep_num_images, fps)
|
| 176 |
+
num_frames = len(timestamps)
|
| 177 |
+
result = {
|
| 178 |
+
"psnr_values": [],
|
| 179 |
+
"ssim_values": [],
|
| 180 |
+
"mse_values": [],
|
| 181 |
+
}
|
| 182 |
+
|
| 183 |
+
with time_benchmark:
|
| 184 |
+
frames = decode_video_frames(video_path, timestamps=timestamps, tolerance_s=5e-1, backend=backend)
|
| 185 |
+
result["load_time_video_ms"] = time_benchmark.result_ms / num_frames
|
| 186 |
+
|
| 187 |
+
with time_benchmark:
|
| 188 |
+
original_frames = load_original_frames(imgs_dir, timestamps, fps)
|
| 189 |
+
result["load_time_images_ms"] = time_benchmark.result_ms / num_frames
|
| 190 |
+
|
| 191 |
+
frames_np, original_frames_np = frames.numpy(), original_frames.numpy()
|
| 192 |
+
for i in range(num_frames):
|
| 193 |
+
result["mse_values"].append(mean_squared_error(original_frames_np[i], frames_np[i]))
|
| 194 |
+
result["psnr_values"].append(
|
| 195 |
+
peak_signal_noise_ratio(original_frames_np[i], frames_np[i], data_range=1.0)
|
| 196 |
+
)
|
| 197 |
+
result["ssim_values"].append(
|
| 198 |
+
structural_similarity(original_frames_np[i], frames_np[i], data_range=1.0, channel_axis=0)
|
| 199 |
+
)
|
| 200 |
+
|
| 201 |
+
if save_frames and sample == 0:
|
| 202 |
+
save_dir = video_path.with_suffix("") / f"{timestamps_mode}_{backend}"
|
| 203 |
+
save_decoded_frames(imgs_dir, save_dir, frames, timestamps, fps)
|
| 204 |
+
|
| 205 |
+
return result
|
| 206 |
+
|
| 207 |
+
load_times_video_ms = []
|
| 208 |
+
load_times_images_ms = []
|
| 209 |
+
mse_values = []
|
| 210 |
+
psnr_values = []
|
| 211 |
+
ssim_values = []
|
| 212 |
+
|
| 213 |
+
# A sample is a single set of decoded frames specified by timestamps_mode (e.g. a single frame, 2 frames, etc.).
|
| 214 |
+
# For each sample, we record metrics (loading time and quality metrics) which are then averaged over all samples.
|
| 215 |
+
# As these samples are independent, we run them in parallel threads to speed up the benchmark.
|
| 216 |
+
with ThreadPoolExecutor(max_workers=num_workers) as executor:
|
| 217 |
+
futures = [executor.submit(process_sample, i) for i in range(num_samples)]
|
| 218 |
+
for future in tqdm(as_completed(futures), total=num_samples, desc="samples", leave=False):
|
| 219 |
+
result = future.result()
|
| 220 |
+
load_times_video_ms.append(result["load_time_video_ms"])
|
| 221 |
+
load_times_images_ms.append(result["load_time_images_ms"])
|
| 222 |
+
psnr_values.extend(result["psnr_values"])
|
| 223 |
+
ssim_values.extend(result["ssim_values"])
|
| 224 |
+
mse_values.extend(result["mse_values"])
|
| 225 |
+
|
| 226 |
+
avg_load_time_video_ms = float(np.array(load_times_video_ms).mean())
|
| 227 |
+
avg_load_time_images_ms = float(np.array(load_times_images_ms).mean())
|
| 228 |
+
video_images_load_time_ratio = avg_load_time_video_ms / avg_load_time_images_ms
|
| 229 |
+
|
| 230 |
+
return {
|
| 231 |
+
"avg_load_time_video_ms": avg_load_time_video_ms,
|
| 232 |
+
"avg_load_time_images_ms": avg_load_time_images_ms,
|
| 233 |
+
"video_images_load_time_ratio": video_images_load_time_ratio,
|
| 234 |
+
"avg_mse": float(np.mean(mse_values)),
|
| 235 |
+
"avg_psnr": float(np.mean(psnr_values)),
|
| 236 |
+
"avg_ssim": float(np.mean(ssim_values)),
|
| 237 |
+
}
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
def benchmark_encoding_decoding(
|
| 241 |
+
dataset: LeRobotDataset,
|
| 242 |
+
video_path: Path,
|
| 243 |
+
imgs_dir: Path,
|
| 244 |
+
encoding_cfg: dict,
|
| 245 |
+
decoding_cfg: dict,
|
| 246 |
+
num_samples: int,
|
| 247 |
+
num_workers: int,
|
| 248 |
+
save_frames: bool,
|
| 249 |
+
overwrite: bool = False,
|
| 250 |
+
seed: int = 1337,
|
| 251 |
+
) -> list[dict]:
|
| 252 |
+
fps = dataset.fps
|
| 253 |
+
|
| 254 |
+
if overwrite or not video_path.is_file():
|
| 255 |
+
tqdm.write(f"encoding {video_path}")
|
| 256 |
+
encode_video_frames(
|
| 257 |
+
imgs_dir=imgs_dir,
|
| 258 |
+
video_path=video_path,
|
| 259 |
+
fps=fps,
|
| 260 |
+
vcodec=encoding_cfg["vcodec"],
|
| 261 |
+
pix_fmt=encoding_cfg["pix_fmt"],
|
| 262 |
+
g=encoding_cfg.get("g"),
|
| 263 |
+
crf=encoding_cfg.get("crf"),
|
| 264 |
+
# fast_decode=encoding_cfg.get("fastdecode"),
|
| 265 |
+
overwrite=True,
|
| 266 |
+
)
|
| 267 |
+
|
| 268 |
+
ep_num_images = dataset.episode_data_index["to"][0].item()
|
| 269 |
+
width, height = tuple(dataset[0][dataset.meta.camera_keys[0]].shape[-2:])
|
| 270 |
+
num_pixels = width * height
|
| 271 |
+
video_size_bytes = video_path.stat().st_size
|
| 272 |
+
images_size_bytes = get_directory_size(imgs_dir)
|
| 273 |
+
video_images_size_ratio = video_size_bytes / images_size_bytes
|
| 274 |
+
|
| 275 |
+
random.seed(seed)
|
| 276 |
+
benchmark_table = []
|
| 277 |
+
for timestamps_mode in tqdm(
|
| 278 |
+
decoding_cfg["timestamps_modes"], desc="decodings (timestamps_modes)", leave=False
|
| 279 |
+
):
|
| 280 |
+
for backend in tqdm(decoding_cfg["backends"], desc="decodings (backends)", leave=False):
|
| 281 |
+
benchmark_row = benchmark_decoding(
|
| 282 |
+
imgs_dir,
|
| 283 |
+
video_path,
|
| 284 |
+
timestamps_mode,
|
| 285 |
+
backend,
|
| 286 |
+
ep_num_images,
|
| 287 |
+
fps,
|
| 288 |
+
num_samples,
|
| 289 |
+
num_workers,
|
| 290 |
+
save_frames,
|
| 291 |
+
)
|
| 292 |
+
benchmark_row.update(
|
| 293 |
+
**{
|
| 294 |
+
"repo_id": dataset.repo_id,
|
| 295 |
+
"resolution": f"{width} x {height}",
|
| 296 |
+
"num_pixels": num_pixels,
|
| 297 |
+
"video_size_bytes": video_size_bytes,
|
| 298 |
+
"images_size_bytes": images_size_bytes,
|
| 299 |
+
"video_images_size_ratio": video_images_size_ratio,
|
| 300 |
+
"timestamps_mode": timestamps_mode,
|
| 301 |
+
"backend": backend,
|
| 302 |
+
},
|
| 303 |
+
**encoding_cfg,
|
| 304 |
+
)
|
| 305 |
+
benchmark_table.append(benchmark_row)
|
| 306 |
+
|
| 307 |
+
return benchmark_table
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def main(
|
| 311 |
+
output_dir: Path,
|
| 312 |
+
repo_ids: list[str],
|
| 313 |
+
vcodec: list[str],
|
| 314 |
+
pix_fmt: list[str],
|
| 315 |
+
g: list[int],
|
| 316 |
+
crf: list[int],
|
| 317 |
+
# fastdecode: list[int],
|
| 318 |
+
timestamps_modes: list[str],
|
| 319 |
+
backends: list[str],
|
| 320 |
+
num_samples: int,
|
| 321 |
+
num_workers: int,
|
| 322 |
+
save_frames: bool,
|
| 323 |
+
):
|
| 324 |
+
check_datasets_formats(repo_ids)
|
| 325 |
+
encoding_benchmarks = {
|
| 326 |
+
"g": g,
|
| 327 |
+
"crf": crf,
|
| 328 |
+
# "fastdecode": fastdecode,
|
| 329 |
+
}
|
| 330 |
+
decoding_benchmarks = {
|
| 331 |
+
"timestamps_modes": timestamps_modes,
|
| 332 |
+
"backends": backends,
|
| 333 |
+
}
|
| 334 |
+
headers = ["repo_id", "resolution", "num_pixels"]
|
| 335 |
+
headers += list(BASE_ENCODING.keys())
|
| 336 |
+
headers += [
|
| 337 |
+
"timestamps_mode",
|
| 338 |
+
"backend",
|
| 339 |
+
"video_size_bytes",
|
| 340 |
+
"images_size_bytes",
|
| 341 |
+
"video_images_size_ratio",
|
| 342 |
+
"avg_load_time_video_ms",
|
| 343 |
+
"avg_load_time_images_ms",
|
| 344 |
+
"video_images_load_time_ratio",
|
| 345 |
+
"avg_mse",
|
| 346 |
+
"avg_psnr",
|
| 347 |
+
"avg_ssim",
|
| 348 |
+
]
|
| 349 |
+
file_paths = []
|
| 350 |
+
for video_codec in tqdm(vcodec, desc="encodings (vcodec)"):
|
| 351 |
+
for pixel_format in tqdm(pix_fmt, desc="encodings (pix_fmt)", leave=False):
|
| 352 |
+
benchmark_table = []
|
| 353 |
+
for repo_id in tqdm(repo_ids, desc="encodings (datasets)", leave=False):
|
| 354 |
+
dataset = LeRobotDataset(repo_id)
|
| 355 |
+
                imgs_dir = output_dir / "images" / dataset.repo_id.replace("/", "_")
                # We only use the first episode
                save_first_episode(imgs_dir, dataset)
                for key, values in tqdm(encoding_benchmarks.items(), desc="encodings (g, crf)", leave=False):
                    for value in tqdm(values, desc=f"encodings ({key})", leave=False):
                        encoding_cfg = BASE_ENCODING.copy()
                        encoding_cfg["vcodec"] = video_codec
                        encoding_cfg["pix_fmt"] = pixel_format
                        encoding_cfg[key] = value
                        args_path = Path("_".join(str(value) for value in encoding_cfg.values()))
                        video_path = output_dir / "videos" / args_path / f"{repo_id.replace('/', '_')}.mp4"
                        benchmark_table += benchmark_encoding_decoding(
                            dataset,
                            video_path,
                            imgs_dir,
                            encoding_cfg,
                            decoding_benchmarks,
                            num_samples,
                            num_workers,
                            save_frames,
                        )

            # Save intermediate results
            benchmark_df = pd.DataFrame(benchmark_table, columns=headers)
            now = dt.datetime.now()
            csv_path = (
                output_dir
                / f"{now:%Y-%m-%d}_{now:%H-%M-%S}_{video_codec}_{pixel_format}_{num_samples}-samples.csv"
            )
            benchmark_df.to_csv(csv_path, header=True, index=False)
            file_paths.append(csv_path)
            del benchmark_df

    # Concatenate all results
    df_list = [pd.read_csv(csv_path) for csv_path in file_paths]
    concatenated_df = pd.concat(df_list, ignore_index=True)
    concatenated_path = output_dir / f"{now:%Y-%m-%d}_{now:%H-%M-%S}_all_{num_samples}-samples.csv"
    concatenated_df.to_csv(concatenated_path, header=True, index=False)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--output-dir",
        type=Path,
        default=Path("outputs/video_benchmark"),
        help="Directory where the video benchmark outputs are written.",
    )
    parser.add_argument(
        "--repo-ids",
        type=str,
        nargs="*",
        default=[
            "lerobot/pusht_image",
            "aliberts/aloha_mobile_shrimp_image",
            "aliberts/paris_street",
            "aliberts/kitchen",
        ],
        help="Datasets repo-ids to test against. First episodes only are used. Must be images.",
    )
    parser.add_argument(
        "--vcodec",
        type=str,
        nargs="*",
        default=["libx264", "hevc", "libsvtav1"],
        help="Video codecs to be tested",
    )
    parser.add_argument(
        "--pix-fmt",
        type=str,
        nargs="*",
        default=["yuv444p", "yuv420p"],
        help="Pixel formats (chroma subsampling) to be tested",
    )
    parser.add_argument(
        "--g",
        type=parse_int_or_none,
        nargs="*",
        default=[1, 2, 3, 4, 5, 6, 10, 15, 20, 40, 100, None],
        help="Group of pictures sizes to be tested.",
    )
    parser.add_argument(
        "--crf",
        type=parse_int_or_none,
        nargs="*",
        default=[0, 5, 10, 15, 20, 25, 30, 40, 50, None],
        help="Constant rate factors to be tested.",
    )
    # parser.add_argument(
    #     "--fastdecode",
    #     type=int,
    #     nargs="*",
    #     default=[0, 1],
    #     help="Use the fastdecode tuning option. 0 disables it. "
    #     "For libx264 and libx265/hevc, only 1 is possible. "
    #     "For libsvtav1, 1, 2 or 3 are possible values with a higher number meaning a faster decoding optimization",
    # )
    parser.add_argument(
        "--timestamps-modes",
        type=str,
        nargs="*",
        default=[
            "1_frame",
            "2_frames",
            "2_frames_4_space",
            "6_frames",
        ],
        help="Timestamps scenarios to be tested.",
    )
    parser.add_argument(
        "--backends",
        type=str,
        nargs="*",
        default=["pyav", "video_reader"],
        help="Torchvision decoding backend to be tested.",
    )
    parser.add_argument(
        "--num-samples",
        type=int,
        default=50,
        help="Number of samples for each encoding x decoding config.",
    )
    parser.add_argument(
        "--num-workers",
        type=int,
        default=10,
        help="Number of processes for parallelized sample processing.",
    )
    parser.add_argument(
        "--save-frames",
        type=int,
        default=0,
        help="Whether to save decoded frames or not. Enter a non-zero number for true.",
    )
    args = parser.parse_args()
    main(**vars(args))
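For reference, a minimal sketch of how this benchmark could be invoked from the repository root, sweeping a reduced set of codecs and CRF values (the flag values below are illustrative, not prescriptive):

```bash
python benchmarks/video/run_video_benchmark.py \
    --output-dir outputs/video_benchmark \
    --repo-ids lerobot/pusht_image \
    --vcodec libx264 libsvtav1 \
    --crf 20 30 \
    --num-samples 10
```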
lerobot/docker/lerobot-cpu/Dockerfile
ADDED
@@ -0,0 +1,29 @@
# Configure image
ARG PYTHON_VERSION=3.10
FROM python:${PYTHON_VERSION}-slim

# Configure environment variables
ARG PYTHON_VERSION
ENV DEBIAN_FRONTEND=noninteractive
ENV MUJOCO_GL="egl"
ENV PATH="/opt/venv/bin:$PATH"

# Install dependencies and set up Python in a single layer
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential cmake git \
    libglib2.0-0 libgl1-mesa-glx libegl1-mesa ffmpeg \
    speech-dispatcher libgeos-dev \
    && ln -s /usr/bin/python${PYTHON_VERSION} /usr/bin/python \
    && python -m venv /opt/venv \
    && apt-get clean && rm -rf /var/lib/apt/lists/* \
    && echo "source /opt/venv/bin/activate" >> /root/.bashrc

# Clone repository and install LeRobot in a single layer
COPY . /lerobot
WORKDIR /lerobot
RUN /opt/venv/bin/pip install --upgrade --no-cache-dir pip \
    && /opt/venv/bin/pip install --no-cache-dir ".[test, aloha, xarm, pusht, smolvla]" \
    --extra-index-url https://download.pytorch.org/whl/cpu

# Execute in bash shell rather than python
CMD ["/bin/bash"]
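Since this Dockerfile copies the current repository into the image (`COPY . /lerobot`), it is meant to be built with the repository root as the build context. A minimal sketch, with an arbitrary image tag:

```bash
docker build -t lerobot:cpu -f docker/lerobot-cpu/Dockerfile .
docker run -it --rm lerobot:cpu
```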
lerobot/docker/lerobot-gpu-dev/Dockerfile
ADDED
@@ -0,0 +1,68 @@
FROM nvidia/cuda:12.2.2-devel-ubuntu22.04

# Configure image
ARG PYTHON_VERSION=3.10
ARG DEBIAN_FRONTEND=noninteractive

# Install apt dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential cmake \
    git git-lfs openssh-client \
    nano vim less util-linux tree \
    htop atop nvtop \
    sed gawk grep curl wget zip unzip \
    tcpdump sysstat screen tmux \
    libglib2.0-0 libgl1-mesa-glx libegl1-mesa \
    speech-dispatcher portaudio19-dev libgeos-dev \
    python${PYTHON_VERSION} python${PYTHON_VERSION}-venv python${PYTHON_VERSION}-dev \
    && apt-get clean && rm -rf /var/lib/apt/lists/*

# Install ffmpeg build dependencies. See:
# https://trac.ffmpeg.org/wiki/CompilationGuide/Ubuntu
# TODO(aliberts): create image to build dependencies from source instead
RUN apt-get update && apt-get install -y --no-install-recommends \
    autoconf automake yasm \
    libass-dev \
    libfreetype6-dev \
    libgnutls28-dev \
    libunistring-dev \
    libmp3lame-dev \
    libtool \
    libvorbis-dev \
    meson \
    ninja-build \
    pkg-config \
    texinfo \
    yasm \
    zlib1g-dev \
    nasm \
    libx264-dev \
    libx265-dev libnuma-dev \
    libvpx-dev \
    libfdk-aac-dev \
    libopus-dev \
    libsvtav1-dev libsvtav1enc-dev libsvtav1dec-dev \
    libdav1d-dev

# Install gh cli tool
RUN (type -p wget >/dev/null || (apt update && apt-get install wget -y)) \
    && mkdir -p -m 755 /etc/apt/keyrings \
    && wget -qO- https://cli.github.com/packages/githubcli-archive-keyring.gpg | tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
    && chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
    && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
    && apt update \
    && apt install gh -y \
    && apt clean && rm -rf /var/lib/apt/lists/*

# Setup `python`
RUN ln -s /usr/bin/python3 /usr/bin/python

# Install poetry
RUN curl -sSL https://install.python-poetry.org | python -
ENV PATH="/root/.local/bin:$PATH"
RUN echo 'if [ "$HOME" != "/root" ]; then ln -sf /root/.local/bin/poetry $HOME/.local/bin/poetry; fi' >> /root/.bashrc
RUN poetry config virtualenvs.create false
RUN poetry config virtualenvs.in-project true

# Set EGL as the rendering backend for MuJoCo
ENV MUJOCO_GL="egl"
lerobot/docker/lerobot-gpu/Dockerfile
ADDED
@@ -0,0 +1,24 @@
FROM nvidia/cuda:12.4.1-base-ubuntu22.04

# Configure environment variables
ARG PYTHON_VERSION=3.10
ENV DEBIAN_FRONTEND=noninteractive
ENV MUJOCO_GL="egl"
ENV PATH="/opt/venv/bin:$PATH"

# Install dependencies and set up Python in a single layer
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential cmake git \
    libglib2.0-0 libgl1-mesa-glx libegl1-mesa ffmpeg \
    speech-dispatcher libgeos-dev \
    python${PYTHON_VERSION}-dev python${PYTHON_VERSION}-venv \
    && ln -s /usr/bin/python${PYTHON_VERSION} /usr/bin/python \
    && python -m venv /opt/venv \
    && apt-get clean && rm -rf /var/lib/apt/lists/* \
    && echo "source /opt/venv/bin/activate" >> /root/.bashrc

# Clone repository and install LeRobot in a single layer
COPY . /lerobot
WORKDIR /lerobot
RUN /opt/venv/bin/pip install --upgrade --no-cache-dir pip \
    && /opt/venv/bin/pip install --no-cache-dir ".[test, aloha, xarm, pusht, dynamixel, smolvla]"
lerobot/docs/README.md
ADDED
@@ -0,0 +1,137 @@
<!---
Copyright 2020 The HuggingFace Team. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->

# Generating the documentation

To generate the documentation, you first have to build it. Several packages are necessary to build the docs;
you can install them with the following command, at the root of the code repository:

```bash
pip install -e ".[docs]"
```

You will also need `nodejs`. Please refer to their [installation page](https://nodejs.org/en/download).

---
**NOTE**

You only need to generate the documentation to inspect it locally (if you're planning changes and want to
check how they look before committing, for instance). You don't have to `git commit` the built documentation.

---

## Building the documentation

Once you have set up the `doc-builder` and additional packages, you can generate the documentation by
typing the following command:

```bash
doc-builder build lerobot docs/source/ --build_dir ~/tmp/test-build
```

You can adapt the `--build_dir` to set any temporary folder that you prefer. This command will create it and generate
the MDX files that will be rendered as the documentation on the main website. You can inspect them in your favorite
Markdown editor.

## Previewing the documentation

To preview the docs, first install the `watchdog` module with:

```bash
pip install watchdog
```

Then run the following command:

```bash
doc-builder preview lerobot docs/source/
```

The docs will be viewable at [http://localhost:3000](http://localhost:3000). You can also preview the docs once you have opened a PR. You will see a bot add a comment with a link where the documentation with your changes lives.

---
**NOTE**

The `preview` command only works with existing doc files. When you add a completely new file, you need to update `_toctree.yml` and restart the `preview` command (`ctrl-c` to stop it & call `doc-builder preview ...` again).

---

## Adding a new element to the navigation bar

Accepted files are Markdown (.md).

Create a file with its extension and put it in the source directory. You can then link it to the toc-tree by putting
the filename without the extension in the [`_toctree.yml`](https://github.com/huggingface/lerobot/blob/main/docs/source/_toctree.yml) file.
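For instance, a hypothetical page saved as `docs/source/my_new_page.md` could be linked with an entry along these lines (the file name and title are placeholders):

```yaml
- sections:
  - local: my_new_page
    title: My New Page
  title: "Tutorials"
```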

## Renaming section headers and moving sections

It helps to keep the old links working when renaming the section header and/or moving sections from one document to another. This is because the old links are likely to be used in Issues, Forums, and Social media, and it would make for a much better user experience if users reading those months later could still easily navigate to the originally intended information.

Therefore, we simply keep a little map of moved sections at the end of the document where the original section was. The key is to preserve the original anchor.

So if you renamed a section from "Section A" to "Section B", then you can add at the end of the file:

```
Sections that were moved:

[ <a href="#section-b">Section A</a><a id="section-a"></a> ]
```
and of course, if you moved it to another file, then:

```
Sections that were moved:

[ <a href="../new-file#section-b">Section A</a><a id="section-a"></a> ]
```

Use the relative style to link to the new file so that the versioned docs continue to work.

For an example of a rich moved-sections set, please see the very end of [the transformers Trainer doc](https://github.com/huggingface/transformers/blob/main/docs/source/en/main_classes/trainer.md).

### Adding a new tutorial

Adding a new tutorial or section is done in two steps:

- Add a new file under `./source`. This file can either be ReStructuredText (.rst) or Markdown (.md).
- Link that file in `./source/_toctree.yml` on the correct toc-tree.

Make sure to put your new file under the proper section. If you are in doubt, feel free to ask in a GitHub Issue or PR.

### Writing source documentation

Values that should be put in `code` should be surrounded by backticks: \`like so\`. Note that argument names
and objects like True, None or any strings should usually be put in `code`.

#### Writing a multi-line code block

Multi-line code blocks can be useful for displaying examples. They are done between two lines of three backticks as usual in Markdown:


````
```
# first line of code
# second line
# etc
```
````

#### Adding an image

Due to the rapidly growing repository, it is important to make sure that no files that would significantly weigh down the repository are added. This includes images, videos, and other non-text files. We prefer to leverage a hf.co hosted `dataset` like
the ones hosted on [`hf-internal-testing`](https://huggingface.co/hf-internal-testing) in which to place these files and reference
them by URL. We recommend putting them in the following dataset: [huggingface/documentation-images](https://huggingface.co/datasets/huggingface/documentation-images).
If it is an external contribution, feel free to add the images to your PR and ask a Hugging Face member to migrate your images
to this dataset.
lerobot/docs/source/_toctree.yml
ADDED
@@ -0,0 +1,44 @@
- sections:
  - local: index
    title: LeRobot
  - local: installation
    title: Installation
  title: Get started
- sections:
  - local: il_robots
    title: Imitation Learning for Robots
  - local: il_sim
    title: Imitation Learning in Sim
  - local: cameras
    title: Cameras
  - local: integrate_hardware
    title: Bring Your Own Hardware
  - local: hilserl
    title: Train a Robot with RL
  - local: hilserl_sim
    title: Train RL in Simulation
  title: "Tutorials"
- sections:
  - local: smolvla
    title: Finetune SmolVLA
  title: "Policies"
- sections:
  - local: so101
    title: SO-101
  - local: so100
    title: SO-100
  - local: koch
    title: Koch v1.1
  - local: lekiwi
    title: LeKiwi
  title: "Robots"
- sections:
  - local: notebooks
    title: Notebooks
  title: "Resources"
- sections:
  - local: contributing
    title: Contribute to LeRobot
  - local: backwardcomp
    title: Backward compatibility
  title: "About"
lerobot/docs/source/backwardcomp.mdx
ADDED
@@ -0,0 +1,82 @@
# Backward compatibility

## Hardware API redesign

PR [#777](https://github.com/huggingface/lerobot/pull/777) improves the LeRobot calibration but is **not backward-compatible**. Below is an overview of what changed and how you can continue to work with datasets created before this pull request.

### What changed?

|                                   | Before PR #777                                    | After PR #777                                                   |
| --------------------------------- | ------------------------------------------------- | --------------------------------------------------------------- |
| **Joint range**                   | Degrees `-180...180°`                              | **Normalised range** Joints: `-100...100` Gripper: `0...100`    |
| **Zero position (SO100 / SO101)** | Arm fully extended horizontally                    | **In middle of the range for each joint**                       |
| **Boundary handling**             | Software safeguards to detect ±180° wrap-arounds   | No wrap-around logic needed due to mid-range zero                |

---

### Impact on existing datasets

* Recorded trajectories created **before** PR #777 will replay incorrectly if loaded directly:
  * Joint angles are offset and incorrectly normalized.
* Any models directly finetuned or trained on the old data will need their inputs and outputs converted.

### Using datasets made with the previous calibration system
We provide a migration example script for replaying an episode recorded with the previous calibration here: `examples/backward_compatibility/replay.py`.
Below we walk through the modifications made in the example script so that datasets from the previous calibration still work.

```diff
+ key = f"{name.removeprefix('main_')}.pos"
action[key] = action_array[i].item()
+ action["shoulder_lift.pos"] = -(action["shoulder_lift.pos"] - 90)
+ action["elbow_flex.pos"] -= 90
```

Let's break this down.
The new codebase uses the `.pos` suffix for position observations, and the `main_` prefix has been removed:
```python
key = f"{name.removeprefix('main_')}.pos"
```

For `"shoulder_lift"` (id = 2), the 0 position is changed by -90 degrees and the direction is reversed compared to the old calibration/code.
```python
action["shoulder_lift.pos"] = -(action["shoulder_lift.pos"] - 90)
```
For `"elbow_flex"` (id = 3), the 0 position is changed by -90 degrees compared to the old calibration/code.
```python
action["elbow_flex.pos"] -= 90
```

To use degrees normalization, we then set the `--robot.use_degrees` option to `true`.
```diff
python examples/backward_compatibility/replay.py \
    --robot.type=so101_follower \
    --robot.port=/dev/tty.usbmodem5A460814411 \
    --robot.id=blue \
+   --robot.use_degrees=true \
    --dataset.repo_id=my_dataset_id \
    --dataset.episode=0
```

### Using policies trained with the previous calibration system

Policies output actions in the same format as the datasets (`torch.Tensors`). Therefore, the same transformations should be applied.

To find these transformations, we recommend first replaying an episode of the dataset your policy was trained on, following the section above.
Then, add the same transformations to your inference script (shown here in the `record.py` script):
```diff
action_values = predict_action(
    observation_frame,
    policy,
    get_safe_torch_device(policy.config.device),
    policy.config.use_amp,
    task=single_task,
    robot_type=robot.robot_type,
)
action = {key: action_values[i].item() for i, key in enumerate(robot.action_features)}

+ action["shoulder_lift.pos"] = -(action["shoulder_lift.pos"] - 90)
+ action["elbow_flex.pos"] -= 90
robot.send_action(action)
```

If you have questions or run into migration issues, feel free to ask them on [Discord](https://discord.gg/s3KuuzsPFb).
lerobot/docs/source/cameras.mdx
ADDED
@@ -0,0 +1,173 @@
# Cameras

LeRobot offers multiple options for video capture, including phone cameras, built-in laptop cameras, external webcams, and Intel RealSense cameras. To efficiently record frames from most cameras, you can use either the `OpenCVCamera` or `RealSenseCamera` class. For additional compatibility details on the `OpenCVCamera` class, refer to the [Video I/O with OpenCV Overview](https://docs.opencv.org/4.x/d0/da7/videoio_overview.html).

### Finding your camera

To instantiate a camera, you need a camera identifier. This identifier might change if you reboot your computer or re-plug your camera, a behavior mostly dependent on your operating system.

To find the camera indices of the cameras plugged into your system, run the following script:
```bash
python -m lerobot.find_cameras opencv # or realsense for Intel Realsense cameras
```

The output will look something like this if you have two cameras connected:
```
--- Detected Cameras ---
Camera #0:
  Name: OpenCV Camera @ 0
  Type: OpenCV
  Id: 0
  Backend api: AVFOUNDATION
  Default stream profile:
    Format: 16.0
    Width: 1920
    Height: 1080
    Fps: 15.0
--------------------
(more cameras ...)
```

> [!WARNING]
> When using Intel RealSense cameras on `macOS`, you could get this [error](https://github.com/IntelRealSense/librealsense/issues/12307): `Error finding RealSense cameras: failed to set power state`. This can be solved by running the same command with `sudo` permissions. Note that using RealSense cameras on `macOS` is unstable.


## Use Cameras

Below are two examples demonstrating how to work with the API.

- **Asynchronous frame capture** using an OpenCV-based camera
- **Color and depth capture** using an Intel RealSense camera


<hfoptions id="shell_restart">
<hfoption id="Open CV Camera">

```python
from lerobot.cameras.opencv.configuration_opencv import OpenCVCameraConfig
from lerobot.cameras.opencv.camera_opencv import OpenCVCamera
from lerobot.cameras.configs import ColorMode, Cv2Rotation

# Construct an `OpenCVCameraConfig` with your desired FPS, resolution, color mode, and rotation.
config = OpenCVCameraConfig(
    index_or_path=0,
    fps=15,
    width=1920,
    height=1080,
    color_mode=ColorMode.RGB,
    rotation=Cv2Rotation.NO_ROTATION
)

# Instantiate and connect an `OpenCVCamera`, performing a warm-up read (default).
camera = OpenCVCamera(config)
camera.connect()

# Read frames asynchronously in a loop via `async_read(timeout_ms)`
try:
    for i in range(10):
        frame = camera.async_read(timeout_ms=200)
        print(f"Async frame {i} shape:", frame.shape)
finally:
    camera.disconnect()
```

</hfoption>
<hfoption id="Intel Realsense Camera">

```python
from lerobot.cameras.realsense.configuration_realsense import RealSenseCameraConfig
from lerobot.cameras.realsense.camera_realsense import RealSenseCamera
from lerobot.cameras.configs import ColorMode, Cv2Rotation

# Create a `RealSenseCameraConfig` specifying your camera's serial number and enabling depth.
config = RealSenseCameraConfig(
    serial_number_or_name="233522074606",
    fps=15,
    width=640,
    height=480,
    color_mode=ColorMode.RGB,
    use_depth=True,
    rotation=Cv2Rotation.NO_ROTATION
)

# Instantiate and connect a `RealSenseCamera` with warm-up read (default).
camera = RealSenseCamera(config)
camera.connect()

# Capture a color frame via `read()` and a depth map via `read_depth()`.
try:
    color_frame = camera.read()
    depth_map = camera.read_depth()
    print("Color frame shape:", color_frame.shape)
    print("Depth map shape:", depth_map.shape)
finally:
    camera.disconnect()
```
</hfoption>
</hfoptions>


## Use your phone
<hfoptions id="use phone">
<hfoption id="Mac">

To use your iPhone as a camera on macOS, enable the Continuity Camera feature:
- Ensure your Mac is running macOS 13 or later, and your iPhone is on iOS 16 or later.
- Sign in both devices with the same Apple ID.
- Connect your devices with a USB cable or turn on Wi-Fi and Bluetooth for a wireless connection.

For more details, visit [Apple support](https://support.apple.com/en-gb/guide/mac-help/mchl77879b8a/mac).

Your iPhone should be detected automatically when running the camera setup script in the next section.

</hfoption>
<hfoption id="Linux">

If you want to use your phone as a camera on Linux, follow these steps to set up a virtual camera:

1. *Install `v4l2loopback-dkms` and `v4l-utils`*. Those packages are required to create virtual camera devices (`v4l2loopback`) and verify their settings with the `v4l2-ctl` utility from `v4l-utils`. Install them using:
   ```bash
   sudo apt install v4l2loopback-dkms v4l-utils
   ```
2. *Install [DroidCam](https://droidcam.app) on your phone*. This app is available for both iOS and Android.
3. *Install [OBS Studio](https://obsproject.com)*. This software will help you manage the camera feed. Install it using [Flatpak](https://flatpak.org):
   ```bash
   flatpak install flathub com.obsproject.Studio
   ```
4. *Install the DroidCam OBS plugin*. This plugin integrates DroidCam with OBS Studio. Install it with:
   ```bash
   flatpak install flathub com.obsproject.Studio.Plugin.DroidCam
   ```
5. *Start OBS Studio*. Launch with:
   ```bash
   flatpak run com.obsproject.Studio
   ```
6. *Add your phone as a source*. Follow the instructions [here](https://droidcam.app/obs/usage). Be sure to set the resolution to `640x480`.
7. *Adjust resolution settings*. In OBS Studio, go to `File > Settings > Video`. Change the `Base(Canvas) Resolution` and the `Output(Scaled) Resolution` to `640x480` by manually typing it in.
8. *Start virtual camera*. In OBS Studio, follow the instructions [here](https://obsproject.com/kb/virtual-camera-guide).
9. *Verify the virtual camera setup*. Use `v4l2-ctl` to list the devices:
   ```bash
   v4l2-ctl --list-devices
   ```
   You should see an entry like:
   ```
   VirtualCam (platform:v4l2loopback-000):
   /dev/video1
   ```
10. *Check the camera resolution*. Use `v4l2-ctl` to ensure that the virtual camera output resolution is `640x480`. Change `/dev/video1` to the port of your virtual camera from the output of `v4l2-ctl --list-devices`.
    ```bash
    v4l2-ctl -d /dev/video1 --get-fmt-video
    ```
    You should see an entry like:
    ```
    >>> Format Video Capture:
    >>> Width/Height : 640/480
    >>> Pixel Format : 'YUYV' (YUYV 4:2:2)
    ```

Troubleshooting: If the resolution is not correct, you will have to delete the virtual camera port and try again, as it cannot be changed.

If everything is set up correctly, you can proceed with the rest of the tutorial.

</hfoption>
</hfoptions>
lerobot/docs/source/contributing.md
ADDED
@@ -0,0 +1,305 @@
# How to contribute to 🤗 LeRobot?

Everyone is welcome to contribute, and we value everybody's contribution. Code
is thus not the only way to help the community. Answering questions, helping
others, reaching out and improving the documentation are immensely valuable to
the community.

It also helps us if you spread the word: reference the library from blog posts
on the awesome projects it made possible, shout out on Twitter when it has
helped you, or simply ⭐️ the repo to say "thank you".

Whichever way you choose to contribute, please be mindful to respect our
[code of conduct](https://github.com/huggingface/lerobot/blob/main/CODE_OF_CONDUCT.md).

## You can contribute in so many ways!

Some of the ways you can contribute to 🤗 LeRobot:
* Fixing outstanding issues with the existing code.
* Implementing new models, datasets or simulation environments.
* Contributing to the examples or to the documentation.
* Submitting issues related to bugs or desired new features.

Following the guides below, feel free to open issues and PRs and to coordinate your efforts with the community on our [Discord Channel](https://discord.gg/VjFz58wn3R). For specific inquiries, reach out to [Remi Cadene](mailto:remi.cadene@huggingface.co).

If you are not sure how to contribute or want to know the next features we are working on, look at this project page: [LeRobot TODO](https://github.com/orgs/huggingface/projects/46)

## Submitting a new issue or feature request

Do your best to follow these guidelines when submitting an issue or a feature
request. It will make it easier for us to come back to you quickly and with good
feedback.

### Did you find a bug?

The 🤗 LeRobot library is robust and reliable thanks to the users who notify us of
the problems they encounter. So thank you for reporting an issue.

First, we would really appreciate it if you could **make sure the bug was not
already reported** (use the search bar on GitHub under Issues).

Did not find it? :( So we can act quickly on it, please follow these steps:

* Include your **OS type and version**, the versions of **Python** and **PyTorch**.
* A short, self-contained, code snippet that allows us to reproduce the bug in
  less than 30s.
* The full traceback if an exception is raised.
* Attach any other additional information, like screenshots, you think may help.

### Do you want a new feature?

A good feature request addresses the following points:

1. Motivation first:
   * Is it related to a problem/frustration with the library? If so, please explain
     why. Providing a code snippet that demonstrates the problem is best.
   * Is it related to something you would need for a project? We'd love to hear
     about it!
   * Is it something you worked on and think could benefit the community?
     Awesome! Tell us what problem it solved for you.
2. Write a *paragraph* describing the feature.
3. Provide a **code snippet** that demonstrates its future use.
4. In case this is related to a paper, please attach a link.
5. Attach any additional information (drawings, screenshots, etc.) you think may help.

If your issue is well written we're already 80% of the way there by the time you
post it.

## Adding new policies, datasets or environments

Look at our implementations for [datasets](./src/lerobot/datasets/), [policies](./src/lerobot/policies/),
environments ([aloha](https://github.com/huggingface/gym-aloha),
[xarm](https://github.com/huggingface/gym-xarm),
[pusht](https://github.com/huggingface/gym-pusht))
and follow the same API design.

When implementing a new dataset loadable with LeRobotDataset, follow these steps:
- Update `available_datasets_per_env` in `lerobot/__init__.py`

When implementing a new environment (e.g. `gym_aloha`), follow these steps:
- Update `available_tasks_per_env` and `available_datasets_per_env` in `lerobot/__init__.py`

When implementing a new policy class (e.g. `DiffusionPolicy`), follow these steps (a sketch follows this list):
- Update `available_policies` and `available_policies_per_env` in `lerobot/__init__.py`
- Set the required `name` class attribute.
- Update variables in `tests/test_available.py` by importing your new Policy class
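As a rough, hypothetical sketch of that first step, registering a new policy named `my_new_policy` might look like the following (the exact variable layout in `lerobot/__init__.py` may differ):

```python
# lerobot/__init__.py -- illustrative sketch only, names are placeholders
available_policies = [
    "act",
    "diffusion",
    "my_new_policy",  # matches the new policy class's `name` attribute
]
available_policies_per_env = {
    "pusht": ["diffusion", "my_new_policy"],  # expose it for the envs it supports
}
```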

## Submitting a pull request (PR)

Before writing code, we strongly advise you to search through the existing PRs or
issues to make sure that nobody is already working on the same thing. If you are
unsure, it is always a good idea to open an issue to get some feedback.

You will need basic `git` proficiency to be able to contribute to
🤗 LeRobot. `git` is not the easiest tool to use but it has the greatest
manual. Type `git --help` in a shell and enjoy. If you prefer books, [Pro
Git](https://git-scm.com/book/en/v2) is a very good reference.

Follow these steps to start contributing:

1. Fork the [repository](https://github.com/huggingface/lerobot) by
   clicking on the 'Fork' button on the repository's page. This creates a copy of the code
   under your GitHub user account.

2. Clone your fork to your local disk, and add the base repository as a remote. The following command
   assumes you have your public SSH key uploaded to GitHub. See the following guide for more
   [information](https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository).

   ```bash
   git clone git@github.com:<your Github handle>/lerobot.git
   cd lerobot
   git remote add upstream https://github.com/huggingface/lerobot.git
   ```

3. Create a new branch to hold your development changes, and do this for every new PR you work on.

   Start by synchronizing your `main` branch with the `upstream/main` branch (more details in the [GitHub Docs](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/syncing-a-fork)):

   ```bash
   git checkout main
   git fetch upstream
   git rebase upstream/main
   ```

   Once your `main` branch is synchronized, create a new branch from it:

   ```bash
   git checkout -b a-descriptive-name-for-my-changes
   ```

   🚨 **Do not** work on the `main` branch.

4. For development, we advise using a tool like `poetry` or `uv` instead of just `pip` to easily track dependencies.
   Follow the instructions to [install poetry](https://python-poetry.org/docs/#installation) (use a version >=2.1.0) or to [install uv](https://docs.astral.sh/uv/getting-started/installation/#installation-methods) if you don't have one of them already.

   Set up a development environment with conda or miniconda:
   ```bash
   conda create -y -n lerobot-dev python=3.10 && conda activate lerobot-dev
   ```

   If you're using `uv`, it can manage python versions so you can instead do:
   ```bash
   uv venv --python 3.10 && source .venv/bin/activate
   ```

   To develop on 🤗 LeRobot, you will at least need to install the `dev` and `test` extras dependencies along with the core library:

   using `poetry`
   ```bash
   poetry sync --extras "dev test"
   ```

   using `uv`
   ```bash
   uv sync --extra dev --extra test
   ```

   You can also install the project with all its dependencies (including environments):

   using `poetry`
   ```bash
   poetry sync --all-extras
   ```

   using `uv`
   ```bash
   uv sync --all-extras
   ```

   > **Note:** If you don't install simulation environments with `--all-extras`, the tests that require them will be skipped when running the pytest suite locally. However, they *will* be tested in the CI. In general, we advise you to install everything and test locally before pushing.

   Whichever command you chose to install the project (e.g. `poetry sync --all-extras`), you should run it again when pulling code with an updated version of `pyproject.toml` and `poetry.lock` in order to synchronize your virtual environment with the new dependencies.

   The equivalent of `pip install some-package` would just be:

   using `poetry`
   ```bash
   poetry add some-package
   ```

   using `uv`
   ```bash
   uv add some-package
   ```

   When making changes to the poetry sections of the `pyproject.toml`, you should run the following command to lock dependencies.

   using `poetry`
   ```bash
   poetry lock
   ```

   using `uv`
   ```bash
   uv lock
   ```

5. Develop the features on your branch.

   As you work on the features, you should make sure that the test suite
   passes. You should run the tests impacted by your changes like this (see
   below an explanation regarding the environment variable):

   ```bash
   pytest tests/<TEST_TO_RUN>.py
   ```

6. Follow our style.

   `lerobot` relies on `ruff` to format its source code
   consistently. Set up [`pre-commit`](https://pre-commit.com/) to run these checks
   automatically as Git commit hooks.

   Install `pre-commit` hooks:
   ```bash
   pre-commit install
   ```

   You can run these hooks whenever you need on staged files with:
   ```bash
   pre-commit
   ```

   Once you're happy with your changes, add changed files using `git add` and
   make a commit with `git commit` to record your changes locally:

   ```bash
   git add modified_file.py
   git commit
   ```

   Note, if you already committed some changes that have a wrong formatting, you can use:
   ```bash
   pre-commit run --all-files
   ```

   Please write [good commit messages](https://chris.beams.io/posts/git-commit/).

   It is a good idea to sync your copy of the code with the original
   repository regularly. This way you can quickly account for changes:

   ```bash
   git fetch upstream
   git rebase upstream/main
   ```

   Push the changes to your account using:

   ```bash
   git push -u origin a-descriptive-name-for-my-changes
   ```

7. Once you are satisfied (**and the checklist below is happy too**), go to the
   webpage of your fork on GitHub. Click on 'Pull request' to send your changes
   to the project maintainers for review.

8. It's ok if maintainers ask you for changes. It happens to core contributors
   too! So everyone can see the changes in the Pull request, work in your local
   branch and push the changes to your fork. They will automatically appear in
   the pull request.


### Checklist

1. The title of your pull request should be a summary of its contribution;
2. If your pull request addresses an issue, please mention the issue number in
   the pull request description to make sure they are linked (and people
   consulting the issue know you are working on it);
3. To indicate a work in progress please prefix the title with `[WIP]`, or preferably mark
   the PR as a draft PR. These are useful to avoid duplicated work, and to differentiate
   it from PRs ready to be merged;
4. Make sure existing tests pass;

### Tests

An extensive test suite is included to test the library behavior and several examples. Library tests can be found in the [tests folder](https://github.com/huggingface/lerobot/tree/main/tests).

Install [git lfs](https://git-lfs.com/) to retrieve test artifacts (if you don't have it already).

On Mac:
```bash
brew install git-lfs
git lfs install
```

On Ubuntu:
```bash
sudo apt-get install git-lfs
git lfs install
```

Pull artifacts if they're not in [tests/artifacts](tests/artifacts):
```bash
git lfs pull
```

We use `pytest` in order to run the tests. From the root of the
repository, here's how to run tests with `pytest` for the library:

```bash
python -m pytest -sv ./tests
```

You can specify a smaller set of tests in order to test only the feature
you're working on.
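For example, you can point `pytest` at a single file or filter by test name with `-k` (the path and name pattern below are placeholders):

```bash
python -m pytest -sv tests/<TEST_TO_RUN>.py
python -m pytest -sv ./tests -k "a_test_name_pattern"
```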
lerobot/docs/source/hilserl.mdx
ADDED
|
@@ -0,0 +1,548 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# HIL-SERL Real Robot Training Workflow Guide
|
| 2 |
+
|
| 3 |
+
In this tutorial you will go through the full Human-in-the-Loop Sample-Efficient Reinforcement Learning (HIL-SERL) workflow using LeRobot. You will master training a policy with RL on a real robot in just a few hours.
|
| 4 |
+
|
| 5 |
+
HIL-SERL is a sample-efficient reinforcement learning algorithm that combines human demonstrations with online learning and human interventions. The approach starts from a small set of human demonstrations, uses them to train a reward classifier, and then employs an actor-learner architecture where humans can intervene during policy execution to guide exploration and correct unsafe behaviors. In this tutorial, you'll use a gamepad to provide interventions and control the robot during the learning process.
|
| 6 |
+
|
| 7 |
+
It combines three key ingredients:
|
| 8 |
+
1. **Offline demonstrations & reward classifier:** a handful of human-teleop episodes plus a vision-based success detector give the policy a shaped starting point.
|
| 9 |
+
2. **On-robot actor / learner loop with human interventions:** a distributed Soft Actor Critic (SAC) learner updates the policy while an actor explores on the physical robot; the human can jump in at any time to correct dangerous or unproductive behaviour.
|
| 10 |
+
3. **Safety & efficiency tools:** joint/end-effector (EE) bounds, crop region of interest (ROI) preprocessing and WandB monitoring keep the data useful and the hardware safe.
|
| 11 |
+
|
| 12 |
+
Together these elements let HIL-SERL reach near-perfect task success and faster cycle times than imitation-only baselines.
|
| 13 |
+
|
| 14 |
+
<p align="center">
|
| 15 |
+
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/lerobot/hilserl-main-figure.png" alt="HIL-SERL workflow" title="HIL-SERL workflow" width="100%"></img>
|
| 16 |
+
</p>
|
| 17 |
+
|
| 18 |
+
<p align="center"><i>HIL-SERL workflow, Luo et al. 2024</i></p>
|
| 19 |
+
|
| 20 |
+
This guide provides step-by-step instructions for training a robot policy using LeRobot's HilSerl implementation to train on a real robot.
|
| 21 |
+
|
| 22 |
+
## What do I need?
|
| 23 |
+
|
| 24 |
+
- A gamepad (recommended) or keyboard to control the robot
|
| 25 |
+
- A Nvidia GPU
|
| 26 |
+
- A real robot with a follower and leader arm (optional if you use the keyboard or the gamepad)
|
| 27 |
+
- A URDF file for the robot for the kinematics package (check `lerobot/common/model/kinematics.py`)
|
| 28 |
+
|
| 29 |
+
## What kind of tasks can I train?
|
| 30 |
+
|
| 31 |
+
One can use HIL-SERL to train on a variety of manipulation tasks. Some recommendations:
|
| 32 |
+
- Start with a simple task to understand how the system works.
|
| 33 |
+
- Push cube to a goal region
|
| 34 |
+
- Pick and lift cube with the gripper
|
| 35 |
+
- Avoid extremely long horizon tasks. Focus on tasks that can be completed in 5-10 seconds.
|
| 36 |
+
- Once you have a good idea of how the system works, you can try more complex tasks and longer horizons.
|
| 37 |
+
- Pick and place cube
|
| 38 |
+
- Bimanual tasks to pick objects with two arms
|
| 39 |
+
- Hand-over tasks to transfer objects from one arm to another
|
| 40 |
+
- Go crazy!
|
| 41 |
+
|
| 42 |
+
## Install LeRobot with HIL-SERL
|
| 43 |
+
|
| 44 |
+
To install LeRobot with HIL-SERL, you need to install the `hilserl` extra.
|
| 45 |
+
|
| 46 |
+
```bash
|
| 47 |
+
pip install -e ".[hilserl]"
|
| 48 |
+
```
|
| 49 |
+
|
| 50 |
+
## Real Robot Training Workflow
|
| 51 |
+
|
| 52 |
+
### Understanding Configuration
|
| 53 |
+
|
| 54 |
+
The training process begins with proper configuration for the HIL-SERL environment. The configuration class of interest is `HILSerlRobotEnvConfig` in `lerobot/envs/configs.py`, which is defined as:

```python
class HILSerlRobotEnvConfig(EnvConfig):
    robot: RobotConfig | None = None  # Main robot agent (defined in `lerobot/robots`)
    teleop: TeleoperatorConfig | None = None  # Teleoperator agent, e.g., gamepad or leader arm (defined in `lerobot/teleoperators`)
    wrapper: EnvTransformConfig | None = None  # Environment wrapper settings; check `lerobot/scripts/server/gym_manipulator.py`
    fps: int = 10  # Control frequency
    name: str = "real_robot"  # Environment name
    mode: str = None  # "record", "replay", or None (for training)
    repo_id: str | None = None  # LeRobot dataset repository ID
    dataset_root: str | None = None  # Local dataset root (optional)
    task: str = ""  # Task identifier
    num_episodes: int = 10  # Number of episodes for recording
    episode: int = 0  # Episode index for replay
    device: str = "cuda"  # Compute device
    push_to_hub: bool = True  # Whether to push the recorded datasets to the Hub
    pretrained_policy_name_or_path: str | None = None  # For policy loading
    reward_classifier_pretrained_path: str | None = None  # For the reward model
    number_of_steps_after_success: int = 0  # For the reward classifier: collect extra positive examples after a success
```
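In the rest of this guide, these fields are set through JSON config files. As a rough illustration of how the dataclass maps onto JSON, here is a minimal sketch; the `robot` type string and port are placeholders, and the exact schema should be taken from the example config files linked later in this guide.

```json
{
  "robot": {
    "type": "so100_follower_end_effector",
    "port": "/dev/tty.usbmodem58760431541"
  },
  "teleop": {
    "type": "gamepad",
    "use_gripper": true
  },
  "fps": 10,
  "mode": "record",
  "repo_id": "username/my_task",
  "task": "my_task",
  "num_episodes": 10,
  "device": "cuda",
  "push_to_hub": true
}
```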
### Finding Robot Workspace Bounds

Before collecting demonstrations, you need to determine the appropriate operational bounds for your robot.

This helps simplify the problem of learning on the real robot in two ways: 1) by limiting the robot's operational space to a specific region that solves the task and avoids unnecessary or unsafe exploration, and 2) by allowing training in end-effector space rather than joint space. Empirically, learning in joint space for reinforcement learning in manipulation is often a harder problem: some tasks are nearly impossible to learn in joint space but become learnable when the action space is transformed to end-effector coordinates.
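To make the effect of bounding concrete, here is a minimal sketch (not LeRobot's implementation) of how a commanded end-effector target can be clipped to a workspace box; the bound values are placeholders that you would replace with the output of the script described below.

```python
import numpy as np

# Placeholder bounds; use the values printed by find_joint_limits.py for your setup.
EE_MIN = np.array([0.16, -0.08, 0.03])  # min x, y, z in meters
EE_MAX = np.array([0.24, 0.20, 0.10])   # max x, y, z in meters

def bounded_target(current_ee_pos: np.ndarray, delta: np.ndarray) -> np.ndarray:
    """Apply an x, y, z displacement and keep the commanded target inside the workspace box."""
    return np.clip(current_ee_pos + delta, EE_MIN, EE_MAX)

# A large commanded step is truncated at the workspace boundary.
print(bounded_target(np.array([0.23, 0.18, 0.09]), np.array([0.05, 0.05, 0.05])))
```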
**Using find_joint_limits.py**

This script helps you find the safe operational bounds for your robot's end-effector. Given that you have a follower and a leader arm, you can use the script to find the bounds for the follower arm that will be applied during training.
Bounding the action space reduces redundant exploration by the agent and guarantees safety.

```bash
python -m lerobot.scripts.find_joint_limits \
    --robot.type=so100_follower \
    --robot.port=/dev/tty.usbmodem58760431541 \
    --robot.id=black \
    --teleop.type=so100_leader \
    --teleop.port=/dev/tty.usbmodem58760431551 \
    --teleop.id=blue
```

**Workflow**

1. Run the script and move the robot through the space that solves the task
2. The script records the minimum and maximum end-effector positions and joint angles and prints them to the console, for example:
   ```
   Max ee position [0.2417 0.2012 0.1027]
   Min ee position [0.1663 -0.0823 0.0336]
   Max joint positions [-20.0, -20.0, -20.0, -20.0, -20.0, -20.0]
   Min joint positions [50.0, 50.0, 50.0, 50.0, 50.0, 50.0]
   ```
3. Use these values in the configuration of your teleoperation device (`TeleoperatorConfig`) under the `end_effector_bounds` field

**Example Configuration**

```json
"end_effector_bounds": {
    "max": [0.24, 0.20, 0.10],
    "min": [0.16, -0.08, 0.03]
}
```
### Collecting Demonstrations

With the bounds defined, you can safely collect demonstrations for training. Training RL with an off-policy algorithm allows us to use offline datasets collected beforehand to improve the efficiency of the learning process.

**Setting Up Record Mode**

Create a configuration file for recording demonstrations (or edit an existing one like [env_config_so100.json](https://huggingface.co/datasets/aractingi/lerobot-example-config-files/blob/main/env_config_so100.json)):

1. Set `mode` to `"record"`
2. Specify a unique `repo_id` for your dataset (e.g., "username/task_name")
3. Set `num_episodes` to the number of demonstrations you want to collect
4. Set `crop_params_dict` to `null` initially (we'll determine crops later)
5. Configure `robot`, `cameras`, and other hardware settings

Example configuration section:

```json
"mode": "record",
"repo_id": "username/pick_lift_cube",
"dataset_root": null,
"task": "pick_and_lift",
"num_episodes": 15,
"episode": 0,
"push_to_hub": true
```
### Using a Teleoperation Device

Along with your robot, you will need a teleoperation device to control it, both to collect datasets of your task and to perform interventions during online training.
We support using a gamepad, a keyboard, or the leader arm of the robot.

HIL-SERL learns actions in the end-effector space of the robot. Therefore, the teleoperation controls the end-effector's x, y, z displacements.

For that we need a version of the robot that takes actions in the end-effector space. Check the robot class `SO100FollowerEndEffector` and its configuration `SO100FollowerEndEffectorConfig` for the default parameters related to the end-effector space.

```python
class SO100FollowerEndEffectorConfig(SO100FollowerConfig):
    """Configuration for the SO100FollowerEndEffector robot."""

    # Default bounds for the end-effector position (in meters)
    end_effector_bounds: dict[str, list[float]] = field(  # bounds for the end-effector in the x, y, z directions
        default_factory=lambda: {
            "min": [-1.0, -1.0, -1.0],  # min x, y, z
            "max": [1.0, 1.0, 1.0],  # max x, y, z
        }
    )

    max_gripper_pos: float = 50  # maximum position the gripper will open to

    end_effector_step_sizes: dict[str, float] = field(  # maximum step size for the end-effector in the x, y, z directions
        default_factory=lambda: {
            "x": 0.02,
            "y": 0.02,
            "z": 0.02,
        }
    )
```
The `Teleoperator` defines the teleoperation device. You can check the list of available teleoperators in `lerobot/teleoperators`.

**Setting up the Gamepad**

The gamepad provides a very convenient way to control the robot and the episode state.

To set up the gamepad, set the `control_mode` to `"gamepad"` and define the `teleop` section in the configuration file.

```json
"teleop": {
    "type": "gamepad",
    "use_gripper": true
},
```
<p align="center">
  <img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/lerobot/gamepad_guide.jpg?raw=true" alt="Figure shows the control mappings on a Logitech gamepad." title="Gamepad Control Mapping" width="100%"></img>
</p>
<p align="center"><i>Gamepad button mapping for robot control and episode management</i></p>

**Setting up the SO101 leader**

The SO101 leader arm has reduced gearing, which allows it to move along with and track the follower arm during exploration. Taking over is therefore much smoother than with the gearless SO100.

To set up the SO101 leader, set the `control_mode` to `"leader"` and define the `teleop` section in the configuration file.

```json
"teleop": {
    "type": "so101_leader",
    "port": "/dev/tty.usbmodem585A0077921", # check your port number
    "use_degrees": true
},
```
To annotate the success/failure of the episode, **you will need** a keyboard: press `s` for success and `esc` for failure.
During online training, press `space` to take over from the policy and `space` again to hand control back to the policy.

<details>
<summary><strong>Video: SO101 leader teleoperation</strong></summary>

<div class="video-container">
    <video controls width="600">
        <source src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/lerobot/so101_leader_tutorial.mp4" type="video/mp4" />
    </video>
</div>

<p align="center"><i>SO101 leader teleoperation example: the leader tracks the follower, press `space` to intervene</i></p>
</details>

**Recording Demonstrations**

Start the recording process; an example config file can be found [here](https://huggingface.co/datasets/aractingi/lerobot-example-config-files/blob/main/env_config_so100.json):

```bash
python -m lerobot.scripts.rl.gym_manipulator --config_path src/lerobot/configs/env_config_so100.json
```

During recording:
1. The robot resets to the initial position defined by `fixed_reset_joint_positions` in the configuration file
2. Complete the task successfully
3. The episode ends with a reward of 1 when you press the "success" button
4. If the time limit is reached, or the fail button is pressed, the episode ends with a reward of 0
5. You can re-record an episode by pressing the "rerecord" button
6. The process automatically continues to the next episode
7. After recording all episodes, the dataset is pushed to the Hugging Face Hub (optional) and saved locally
### Processing the Dataset

After collecting demonstrations, process them to determine optimal camera crops.
Reinforcement learning is sensitive to background distractions, so it is important to crop the images to the relevant workspace area.

Visual RL algorithms learn directly from pixel inputs, making them vulnerable to irrelevant visual information. Background elements like changing lighting, shadows, people moving, or objects outside the workspace can confuse the learning process. Good ROI selection should:
- Include only the essential workspace where the task happens
- Capture the robot's end-effector and all objects involved in the task
- Exclude unnecessary background elements and distractions

Note: If you already know the crop parameters, you can skip this step and just set the `crop_params_dict` in the configuration file during recording.

**Determining Crop Parameters**

Use the `crop_dataset_roi.py` script to interactively select regions of interest in your camera images:

```bash
python -m lerobot.scripts.rl.crop_dataset_roi --repo-id username/pick_lift_cube
```

1. For each camera view, the script displays the first frame
2. Draw a rectangle around the relevant workspace area
3. Press `c` to confirm the selection
4. Repeat for all camera views
5. The script outputs the cropping parameters and creates a new cropped dataset

Example output:
```
Selected Rectangular Regions of Interest (top, left, height, width):
observation.images.side: [180, 207, 180, 200]
observation.images.front: [180, 250, 120, 150]
```
<p align="center">
  <img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/lerobot/crop_dataset.gif" width="600"/>
</p>

<p align="center"><i>Interactive cropping tool for selecting regions of interest</i></p>

**Updating Configuration**

Add these crop parameters to your training configuration:

```json
"crop_params_dict": {
    "observation.images.side": [180, 207, 180, 200],
    "observation.images.front": [180, 250, 120, 150]
},
"resize_size": [128, 128]
```

**Recommended image resolution**

Most vision-based policies have been validated on square inputs of either **128×128** (the default) or **64×64** pixels. We therefore advise setting the `resize_size` parameter to `[128, 128]`, or `[64, 64]` if you need to save GPU memory and bandwidth. Other resolutions are possible but have not been extensively tested.
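For intuition, here is a minimal sketch of the crop-then-resize preprocessing these two settings describe, written with torchvision; it is illustrative only and is not the code path used by the training scripts.

```python
import torch
from torchvision.transforms.functional import crop, resize

# Illustrative only: apply (top, left, height, width) crops, then resize to the training resolution.
crop_params_dict = {
    "observation.images.side": [180, 207, 180, 200],
    "observation.images.front": [180, 250, 120, 150],
}
resize_size = [128, 128]

def preprocess(images: dict[str, torch.Tensor]) -> dict[str, torch.Tensor]:
    """Crop each camera image to its region of interest, then resize it."""
    out = {}
    for key, img in images.items():  # img is a (C, H, W) tensor
        top, left, height, width = crop_params_dict[key]
        out[key] = resize(crop(img, top, left, height, width), resize_size)
    return out

dummy = {key: torch.rand(3, 480, 640) for key in crop_params_dict}
print({key: img.shape for key, img in preprocess(dummy).items()})  # -> (3, 128, 128) per camera
```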
### Training a Reward Classifier

The reward classifier plays an important role in the HIL-SERL workflow by automating reward assignment and automatically detecting episode success. Instead of manually defining reward functions or relying on human feedback for every timestep, the reward classifier learns to predict success/failure from visual observations. This enables the RL algorithm to learn efficiently by providing consistent and automated reward signals based on the robot's camera inputs.

This section explains how to train a reward classifier for LeRobot's human-in-the-loop reinforcement learning implementation. Reward classifiers learn to predict the reward value of a given state, which can be used in an RL setup to train a policy.

**Note**: Training a reward classifier is optional. You can start the first round of RL experiments by annotating success manually with your gamepad or keyboard device.

The reward classifier implementation in `modeling_classifier.py` uses a pretrained vision model to process the images. It can output either a single value for binary rewards to predict success/fail cases, or multiple values for multi-class settings.
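Conceptually, the binary case looks like the following sketch: a CNN backbone encodes each camera image, the features are concatenated, and a small head outputs one success logit. This is an illustration of the idea, not the actual `modeling_classifier.py` code; the backbone here is an untrained torchvision ResNet-18 standing in for the pretrained encoder.

```python
import torch
import torch.nn as nn
from torchvision.models import resnet18

class TinyRewardClassifier(nn.Module):
    """Sketch of a binary reward classifier over multiple camera views."""

    def __init__(self, num_cameras: int = 2, hidden_dim: int = 256):
        super().__init__()
        backbone = resnet18(weights=None)  # a pretrained encoder would be loaded in practice
        self.encoder = nn.Sequential(*list(backbone.children())[:-1])  # drop the final fc layer
        self.head = nn.Sequential(
            nn.Linear(512 * num_cameras, hidden_dim),
            nn.ReLU(),
            nn.Linear(hidden_dim, 1),  # single logit: probability of success after a sigmoid
        )

    def forward(self, images: list[torch.Tensor]) -> torch.Tensor:
        feats = [self.encoder(img).flatten(1) for img in images]  # one (B, 512) vector per camera
        return self.head(torch.cat(feats, dim=1)).squeeze(-1)

clf = TinyRewardClassifier()
logit = clf([torch.rand(1, 3, 128, 128), torch.rand(1, 3, 128, 128)])
reward = (torch.sigmoid(logit) > 0.5).float()  # thresholded into a 0/1 reward
```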
**Collecting a Dataset for the reward classifier**

Before training, you need to collect a dataset with labeled examples. The `record_dataset` function in `gym_manipulator.py` enables collecting a dataset of observations, actions, and rewards.

To collect a dataset, you need to modify some parameters in the environment configuration based on `HILSerlRobotEnvConfig`.

```bash
python -m lerobot.scripts.rl.gym_manipulator --config_path src/lerobot/configs/reward_classifier_train_config.json
```

**Key Parameters for Data Collection**

- **mode**: set it to `"record"` to collect a dataset
- **repo_id**: `"hf_username/dataset_name"`, name of the dataset and repo on the Hub
- **num_episodes**: Number of episodes to record
- **number_of_steps_after_success**: Number of additional frames to record after a success (reward=1) is detected
- **fps**: Number of frames per second to record
- **push_to_hub**: Whether to push the dataset to the Hub

The `number_of_steps_after_success` parameter is crucial as it allows you to collect more positive examples. When a success is detected, the system continues recording for the specified number of steps while maintaining the reward=1 label. Otherwise, there won't be enough states labeled with reward 1 in the dataset to train a good classifier.

Example configuration section for data collection:

```json
{
    "mode": "record",
    "repo_id": "hf_username/dataset_name",
    "dataset_root": "data/your_dataset",
    "num_episodes": 20,
    "push_to_hub": true,
    "fps": 10,
    "number_of_steps_after_success": 15
}
```
**Reward Classifier Configuration**

The reward classifier is configured using `configuration_classifier.py`. Here are the key parameters:

- **model_name**: Base model architecture (e.g., we mainly use `"helper2424/resnet10"`)
- **model_type**: `"cnn"` or `"transformer"`
- **num_cameras**: Number of camera inputs
- **num_classes**: Number of output classes (typically 2 for binary success/failure)
- **hidden_dim**: Size of hidden representation
- **dropout_rate**: Regularization parameter
- **learning_rate**: Learning rate for optimizer

Example configuration for training the [reward classifier](https://huggingface.co/datasets/aractingi/lerobot-example-config-files/blob/main/reward_classifier_train_config.json):

```json
{
    "policy": {
        "type": "reward_classifier",
        "model_name": "helper2424/resnet10",
        "model_type": "cnn",
        "num_cameras": 2,
        "num_classes": 2,
        "hidden_dim": 256,
        "dropout_rate": 0.1,
        "learning_rate": 1e-4,
        "device": "cuda",
        "use_amp": true,
        "input_features": {
            "observation.images.front": {
                "type": "VISUAL",
                "shape": [3, 128, 128]
            },
            "observation.images.side": {
                "type": "VISUAL",
                "shape": [3, 128, 128]
            }
        }
    }
}
```
**Training the Classifier**

To train the classifier, use the `train.py` script with your configuration:

```bash
python -m lerobot.scripts.train --config_path path/to/reward_classifier_train_config.json
```

**Deploying and Testing the Model**

To use your trained reward classifier, configure the `HILSerlRobotEnvConfig` to use your model:

```python
env_config = HILSerlRobotEnvConfig(
    reward_classifier_pretrained_path="path_to_your_pretrained_model",
    # Other environment parameters
)
```

Or set the argument in the JSON config file:

```json
{
    "reward_classifier_pretrained_path": "path_to_your_pretrained_model"
}
```
Run `gym_manipulator.py` to test the model:

```bash
python -m lerobot.scripts.rl.gym_manipulator --config_path path/to/env_config.json
```

The reward classifier will automatically provide rewards based on the visual input from the robot's cameras.

**Example Workflow for training the reward classifier**

1. **Create the configuration files**:
   Create the necessary JSON configuration files for the reward classifier and the environment. Check the examples [here](https://huggingface.co/datasets/aractingi/lerobot-example-config-files/tree/main).

2. **Collect a dataset**:
   ```bash
   python -m lerobot.scripts.rl.gym_manipulator --config_path src/lerobot/configs/env_config.json
   ```

3. **Train the classifier**:
   ```bash
   python -m lerobot.scripts.train --config_path src/lerobot/configs/reward_classifier_train_config.json
   ```

4. **Test the classifier**:
   ```bash
   python -m lerobot.scripts.rl.gym_manipulator --config_path src/lerobot/configs/env_config.json
   ```
### Training with Actor-Learner

The LeRobot system uses a distributed actor-learner architecture for training. This architecture decouples robot interactions from the learning process, allowing them to run concurrently without blocking each other. The actor server handles robot observations and actions, sending interaction data to the learner server. The learner server performs gradient descent and periodically updates the actor's policy weights. You will need to start two processes: a learner and an actor.
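The sketch below illustrates this decoupling only; the real system runs the actor and learner as separate processes that exchange transitions and parameters over gRPC, whereas here two threads and in-memory queues stand in for that machinery.

```python
import queue
import threading
import time

transitions: queue.Queue = queue.Queue()    # actor -> learner: interaction data
param_updates: queue.Queue = queue.Queue()  # learner -> actor: fresh policy weights

def actor(num_steps: int = 50) -> None:
    policy_version = 0
    for step in range(num_steps):
        transitions.put({"step": step, "policy_version": policy_version})  # rollout data
        try:
            policy_version = param_updates.get_nowait()  # pick up newer weights if available
        except queue.Empty:
            pass  # keep acting with the current weights; never block on the learner
        time.sleep(0.01)  # stands in for one environment step

def learner(num_updates: int = 10) -> None:
    for version in range(1, num_updates + 1):
        while not transitions.empty():
            transitions.get()       # would go into the replay buffer
        time.sleep(0.05)            # stands in for several gradient steps
        param_updates.put(version)  # push updated parameters to the actor

threads = [threading.Thread(target=actor), threading.Thread(target=learner)]
for t in threads:
    t.start()
for t in threads:
    t.join()
```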
**Configuration Setup**

Create a training configuration file (an example is available [here](https://huggingface.co/datasets/aractingi/lerobot-example-config-files/blob/main/train_config_hilserl_so100.json)). The training config is based on the main `TrainRLServerPipelineConfig` class in `lerobot/configs/train.py`.

1. Configure the policy settings (`type="sac"`, `device`, etc.)
2. Set `dataset` to your cropped dataset
3. Configure environment settings with crop parameters
4. Check the other parameters related to SAC in [configuration_sac.py](https://github.com/huggingface/lerobot/blob/19bb621a7d0a31c20cd3cc08b1dbab68d3031454/lerobot/policies/sac/configuration_sac.py#L79)
5. Verify that the `policy` config is correct, with the right `input_features` and `output_features` for your task

**Starting the Learner**

First, start the learner server process:

```bash
python -m lerobot.scripts.rl.learner --config_path src/lerobot/configs/train_config_hilserl_so100.json
```

The learner:
- Initializes the policy network
- Prepares replay buffers
- Opens a `gRPC` server to communicate with actors
- Processes transitions and updates the policy

**Starting the Actor**

In a separate terminal, start the actor process with the same configuration:

```bash
python -m lerobot.scripts.rl.actor --config_path src/lerobot/configs/train_config_hilserl_so100.json
```

The actor:
- Connects to the learner via `gRPC`
- Initializes the environment
- Executes rollouts of the policy to collect experience
- Sends transitions to the learner
- Receives updated policy parameters

**Training Flow**

The training proceeds automatically:

1. The actor executes the policy in the environment
2. Transitions are collected and sent to the learner
3. The learner updates the policy based on these transitions
4. Updated policy parameters are sent back to the actor
5. The process continues until the specified step limit is reached
**Human in the Loop**

- The key to learning efficiently is human intervention: providing corrective feedback and completing the task for the robot aids policy learning and exploration.
- To perform a human intervention, press the upper right trigger button on the gamepad (or the `space` key on the keyboard). This pauses the policy actions and allows you to take over.
- A successful experiment is one where the human intervenes often at the start and then gradually reduces the amount of intervention as the policy improves. You can monitor the intervention rate in the `wandb` dashboard.

<p align="center">
  <img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/lerobot/hil_effect.png?raw=true" alt="Plot of episodic reward showing the effect of human interventions on policy learning." title="Effect of human interventions" width="100%"></img>
</p>

<p align="center"><i>Example showing how human interventions help guide policy learning over time</i></p>

- The figure plots the episodic reward against interaction steps and shows the effect of human interventions on policy learning.
- The orange curve is an experiment without any human interventions, while the pink and blue curves are experiments with human interventions.
- We can observe that the number of steps the policy needs to start achieving the maximum reward is cut by a quarter when human interventions are present.

**Monitoring and Debugging**

If you have `wandb.enable` set to `true` in your configuration, you can monitor training progress in real time through the [Weights & Biases](https://wandb.ai/site/) dashboard.
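For reference, the relevant section of the training config might look like the following sketch; `enable` follows the `wandb.enable` flag mentioned above, while the `project` name is a placeholder and the full set of supported fields should be checked in the config class.

```json
"wandb": {
    "enable": true,
    "project": "hil-serl-so100"
}
```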
### Guide to Human Interventions

The learning process is very sensitive to the intervention strategy, and it takes a few runs to learn how to intervene effectively. Some tips and hints:
- Allow the policy to explore for a few episodes at the start of training.
- Avoid intervening for long periods of time. Try to intervene briefly to correct the robot's behaviour when it goes off track.
- Once the policy starts achieving the task, even if it's not perfect, limit your interventions to quick actions like a simple grasping command.

The ideal behaviour is that your intervention rate drops gradually during training, as shown in the figure below.

<p align="center">
  <img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/lerobot/intervention_rate_tutorial_rl.png?raw=true" alt="Intervention rate" title="Intervention rate during training" width="100%"></img>
</p>

<p align="center"><i>Plot of the intervention rate during a training run on a pick-and-lift cube task</i></p>

### Key hyperparameters to tune

Some configuration values have a disproportionate impact on training stability and speed; a sketch of where they sit in the training config follows the list:

- **`temperature_init`** (`policy.temperature_init`) – initial entropy temperature in SAC. Higher values encourage more exploration; lower values make the policy more deterministic early on. A good starting point is `1e-2`. We observed that setting it too high can make human interventions ineffective and slow down learning.
- **`policy_parameters_push_frequency`** (`policy.actor_learner_config.policy_parameters_push_frequency`) – interval in *seconds* between two weight pushes from the learner to the actor. The default is `4 s`. Decrease to **1-2 s** to provide fresher weights (at the cost of more network traffic); increase only if your connection is slow, as this will reduce sample efficiency.
- **`storage_device`** (`policy.storage_device`) – device on which the learner keeps the policy parameters. If you have spare GPU memory, set this to `"cuda"` (instead of the default `"cpu"`). Keeping the weights on-GPU removes CPU→GPU transfer overhead and can significantly increase the number of learner updates per second.
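Following the dotted paths above, a minimal sketch of where these three values sit in the training config (all other required fields omitted):

```json
"policy": {
    "type": "sac",
    "temperature_init": 1e-2,
    "storage_device": "cuda",
    "actor_learner_config": {
        "policy_parameters_push_frequency": 2
    }
}
```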
Congrats 🎉, you have finished this tutorial!

> [!TIP]
> If you have any questions or need help, please reach out on [Discord](https://discord.com/invite/s3KuuzsPFb).

Paper citation:

```
@article{luo2024precise,
    title={Precise and Dexterous Robotic Manipulation via Human-in-the-Loop Reinforcement Learning},
    author={Luo, Jianlan and Xu, Charles and Wu, Jeffrey and Levine, Sergey},
    journal={arXiv preprint arXiv:2410.21845},
    year={2024}
}
```