jdvakil commited on
Commit
5393031
·
1 Parent(s): 4742d6d

Create test_data.py

Browse files
Files changed (1) hide show
  1. test_data.py +60 -0
test_data.py ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import pickle
7
+ import cv2
8
+ from enum import Enum
9
+ from typing import Any, Dict, List, Optional
10
+
11
+ import matplotlib.pyplot as plt
12
+ import numpy as np
13
+ import open3d
14
+ import torch
15
+ from atomicwrites import atomic_write
16
+ from loguru import logger
17
+
18
+ import home_robot.utils.planar as nc
19
+ from examples.demo_utils.mock_agent import MockSpotDemoAgent
20
+
21
+ # Simple IO tool for robot agents
22
+ from home_robot.agent.multitask.robot_agent import RobotAgent, publish_obs
23
+ from home_robot.mapping.voxel import SparseVoxelMap # Aggregate 3d information
24
+ from home_robot.mapping.voxel import ( # Sample positions in free space for our robot to move to
25
+ SparseVoxelMapNavigationSpace,
26
+ )
27
+ from home_robot.motion import ConfigurationSpace, Planner, PlanResult
28
+ from home_robot.motion.rrt_connect import RRTConnect
29
+ from home_robot.motion.shortcut import Shortcut
30
+ from home_robot.motion.spot import ( # Just saves the Spot robot footprint for kinematic planning
31
+ SimpleSpotKinematics,
32
+ )
33
+ from home_robot.perception import create_semantic_sensor
34
+ from home_robot.perception.encoders import ClipEncoder
35
+ from home_robot.utils.config import Config, get_config, load_config
36
+ from home_robot.utils.demo_chat import (
37
+ DemoChat,
38
+ start_demo_ui_server,
39
+ stop_demo_ui_server,
40
+ )
41
+ from PIL import Image
42
+ from home_robot.utils.geometry import xyt_global_to_base
43
+ from home_robot.utils.point_cloud import numpy_to_pcd
44
+ from home_robot.utils.rpc import (
45
+ get_obj_centric_world_representation,
46
+ get_output_from_world_representation,
47
+ get_vlm_rpc_stub,
48
+ parse_pick_and_place_plan,
49
+ )
50
+ from home_robot.utils.threading import Interval
51
+ from home_robot.utils.visualization import get_x_and_y_from_path
52
+ from home_robot_spot import SpotClient, VoxelMapSubscriber
53
+ from home_robot_spot.grasp_env import GraspController
54
+
55
+
56
# Quick inspection script: load a pickled observation dump recorded by the
# Spot demo agent and print its top-level keys.
p = "/home/jaydv/code/home-robot/data/hw_exps/spot/2023-12-13-16-19-13/spot_output_2023-12-13-16-19-13.pkl"

# Open read-only ("rb", not "rb+" — we never write back to the dump) and let
# the context manager close the handle; the original leaked the open file.
# NOTE(review): pickle.load is only safe on trusted, locally produced files —
# never point this at untrusted input.
with open(p, "rb") as ob:
    obj = pickle.load(ob)

# obj should be a dict of recorded streams, e.g.:
# ['camera_poses', 'base_poses', 'xyz', 'rgb', 'depth', 'feats', 'obs',
#  'prompt', 'combined_xyz', 'combined_feats', 'combined_weights',
#  'combined_rgb']
print(obj.keys())