{
"featuresDict": {
"features": {
"steps": {
"pythonClassName": "tensorflow_datasets.core.features.dataset_feature.Dataset",
"sequence": {
"feature": {
"featuresDict": {
"features": {
"action": {
"featuresDict": {
"features": {
"gripper_closedness_action": {
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "float32",
"encoding": "none",
"shape": {}
}
},
"rotation_delta": {
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "float32",
"encoding": "none",
"shape": {
"dimensions": [
"3"
]
}
}
},
"terminate_episode": {
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "float32",
"encoding": "none",
"shape": {}
}
},
"world_vector": {
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "float32",
"encoding": "none",
"shape": {
"dimensions": [
"3"
]
}
}
}
}
},
"pythonClassName": "tensorflow_datasets.core.features.features_dict.FeaturesDict"
},
"is_first": {
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "bool",
"encoding": "none",
"shape": {}
}
},
"is_last": {
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "bool",
"encoding": "none",
"shape": {}
}
},
"is_terminal": {
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "bool",
"encoding": "none",
"shape": {}
}
},
"observation": {
"featuresDict": {
"features": {
"agentview_rgb": {
"description": "RGB captured by workspace camera",
"image": {
"dtype": "uint8",
"shape": {
"dimensions": [
"224",
"224",
"3"
]
}
},
"pythonClassName": "tensorflow_datasets.core.features.image_feature.Image"
},
"ee_states": {
                                    "description": "Pose of the end effector specified as a homogeneous matrix.",
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "float32",
"encoding": "none",
"shape": {
"dimensions": [
"16"
]
}
}
},
"eye_in_hand_rgb": {
"description": "RGB captured by in hand camera",
"image": {
"dtype": "uint8",
"shape": {
"dimensions": [
"224",
"224",
"3"
]
}
},
"pythonClassName": "tensorflow_datasets.core.features.image_feature.Image"
},
"gripper_states": {
"description": "gripper_states = 0 means the gripper is fully closed. The value represents the gripper width of Franka Panda Gripper.",
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "float32",
"encoding": "none",
"shape": {
"dimensions": [
"1"
]
}
}
},
"joint_states": {
"description": "joint values",
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "float32",
"encoding": "none",
"shape": {
"dimensions": [
"7"
]
}
}
},
"natural_language_embedding": {
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "float32",
"encoding": "none",
"shape": {
"dimensions": [
"512"
]
}
}
},
"natural_language_instruction": {
"pythonClassName": "tensorflow_datasets.core.features.tensor_feature.Tensor",
"tensor": {
"dtype": "string",
"encoding": "none",
"shape": {}
}
}
}
},
"pythonClassName": "tensorflow_datasets.core.features.features_dict.FeaturesDict"
},
"reward": {
"pythonClassName": "tensorflow_datasets.core.features.scalar.Scalar",
"tensor": {
"dtype": "float32",
"encoding": "none",
"shape": {}
}
}
}
},
"pythonClassName": "tensorflow_datasets.core.features.features_dict.FeaturesDict"
},
"length": "-1"
}
}
}
},
"pythonClassName": "tensorflow_datasets.core.features.features_dict.FeaturesDict"
}