{
  "action_dim": 14,
  "action_expert_variant": "gemma_300m",
  "action_horizon": 16,
  "chunk_size": 16,
  "discrete_state_input": false,
  "freeze_vision_tower": false,
  "lora_alpha": 256,
  "lora_module": [
    "gemma_expert",
    "language_model",
    "vision_tower"
  ],
  "lora_r": 256,
  "max_action_dim": 32,
  "max_token_len": 180,
  "model_type": "openpi_policy",
  "paligemma_variant": "gemma_2b",
  "pi05": false,
  "pytorch_training_precision": "bfloat16",
  "pytorch_weight_path": "/root/.cache/openpi/openpi-assets/checkpoints/pi0_base_torch",
  "state_dim": 14,
  "transformers_version": "4.53.2"
}