finish pipeline
@@ -1,23 +1,35 @@
import requests
import numpy as np
import cv2


class CommunicateUtil:
-    VIEW_HOST = "127.0.0.1:5000"
-    INFERENCE_HOST = "127.0.0.1:5000"
+    VIEW_HOST = "192.168.1.2:7999" #"10.7.250.52:7999" ##
+    INFERENCE_HOST = "127.0.0.1"
+    INFERENCE_PORT = 5000

    def get_view_data(init = False) -> dict:
        params = {}
        params["create_scanner"] = init
        response = requests.get(f"http://{CommunicateUtil.VIEW_HOST}/api/data", json=params)
        data = response.json()

        if not data["success"]:
            print(f"Failed to get view data")
            return None
-        return data
+        image_id = data["image_id"]
+        depth_image = np.array(data["depth_image"], dtype=np.uint16)
+        depth_intrinsics = data["depth_intrinsics"]
+        depth_extrinsics = np.array(data["depth_extrinsics"])
+        view_data = {
+            "image_id": image_id,
+            "depth_image": depth_image,
+            "depth_intrinsics": depth_intrinsics,
+            "depth_extrinsics": depth_extrinsics
+        }
+        return view_data

    def get_inference_data(view_data:dict) -> dict:
        data = {}
        return data
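The class above only fetches view data; get_inference_data is still a stub that returns an empty dict. Below is a minimal driver sketch, assuming the view service is reachable at VIEW_HOST, that CommunicateUtil lives at a path like utils/communicate_util.py, and that the inference service will eventually accept a POST at a hypothetical /api/inference endpoint on INFERENCE_HOST:INFERENCE_PORT; none of those assumptions are confirmed by the commit itself.

# Hypothetical driver script; assumes CommunicateUtil and ViewUtil from this commit
# are importable and that the view service at VIEW_HOST is running.
import requests

from utils.view_util import ViewUtil
# The import path for CommunicateUtil is an assumption; adjust to wherever the class lives.
from utils.communicate_util import CommunicateUtil

def capture_world_points(init=False):
    # Ask the view service for one depth frame plus intrinsics/extrinsics.
    view_data = CommunicateUtil.get_view_data(init=init)
    if view_data is None:
        return None
    # Back-project the depth image into world-frame points (meters).
    return ViewUtil.get_pts(view_data)

def request_inference(view_data):
    # Sketch only: the real get_inference_data() currently returns {}.
    url = f"http://{CommunicateUtil.INFERENCE_HOST}:{CommunicateUtil.INFERENCE_PORT}/api/inference"
    response = requests.post(url, json={"image_id": view_data["image_id"]})  # endpoint name is assumed
    return response.json()

if __name__ == "__main__":
    pts = capture_world_points(init=True)
    if pts is not None:
        print("world points:", pts.shape)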
@@ -1,41 +1,49 @@
import numpy as np
from frankapy import FrankaArm
from autolab_core import RigidTransform
import serial
import time


class ControlUtil:

    #__fa = FrankaArm(robot_num=2)
    __fa:FrankaArm = None
    __ser: serial.Serial = None
+    cnt_rotation = 0

    BASE_TO_WORLD:np.ndarray = np.asarray([
-        [1, 0, 0, -0.5],
-        [0, 1, 0, 0],
-        [0, 0, 1, -0.2],
+        [1, 0, 0, -0.7323],
+        [0, 1, 0, 0.05926],
+        [0, 0, 1, -0.21],
        [0, 0, 0, 1]
    ])

    CAMERA_TO_GRIPPER:np.ndarray = np.asarray([
-        [1, 0, 0, 0],
-        [0, 1, 0, 0],
-        [0, 0, 1, 0],
+        [0, -1, 0, 0.01],
+        [1, 0, 0, -0.05],
+        [0, 0, 1, 0.075],
        [0, 0, 0, 1]
    ])
    theta = np.radians(25)
    INIT_POSE:np.ndarray = np.asarray([
        [np.cos(theta), 0, -np.sin(theta), 0],
        [0, -1, 0, 0],
        [-np.sin(theta), 0, -np.cos(theta), 0.35],
        [0, 0, 0, 1]
    ])
+    INIT_GRIPPER_POSE:np.ndarray = np.asarray([
+        [ 0.44808722 , 0.61103352 , 0.65256787 , 0.36428118],
+        [ 0.51676868 , -0.77267257 , 0.36866054, -0.26519364],
+        [ 0.72948524 , 0.17203456 ,-0.66200043 , 0.60938969],
+        [ 0. , 0. , 0. , 1. ]
+    ])

    AXIS_THRESHOLD = (-(np.pi+5e-2)/2, (np.pi+5e-2)/2)

    @staticmethod
    def connect_robot():
        ControlUtil.__ser = serial.Serial(port="/dev/ttyUSB0", baudrate=115200)
        ControlUtil.__fa = FrankaArm(robot_num=2)

    @staticmethod
    def franka_reset() -> None:
        ControlUtil.__fa.reset_joints()

    @staticmethod
    def init() -> None:
-        ControlUtil.set_pose(ControlUtil.INIT_POSE)
+        ControlUtil.franka_reset()
+        ControlUtil.set_gripper_pose(ControlUtil.INIT_GRIPPER_POSE)

    @staticmethod
    def get_pose() -> np.ndarray:
@@ -46,17 +54,35 @@ class ControlUtil:
    @staticmethod
    def set_pose(cam_to_world: np.ndarray) -> None:
        gripper_to_base = ControlUtil.solve_gripper_to_base(cam_to_world)
-        gripper_to_base = RigidTransform(rotation=gripper_to_base[:3, :3], translation=gripper_to_base[:3, 3], from_frame="franka_tool", to_frame="world")
-        ControlUtil.__fa.goto_pose(gripper_to_base, use_impedance=False, ignore_errors=False)
+        ControlUtil.set_gripper_pose(gripper_to_base)

    @staticmethod
    def rotate_display_table(degree):
-        pass
+        turn_directions = {
+            "left": 0,
+            "right": 1
+        }
+        ControlUtil.cnt_rotation += degree
+        print("total rot:", ControlUtil.cnt_rotation)
+        if degree >= 0:
+            turn_angle = degree
+            turn_direction = turn_directions["right"]
+        else:
+            turn_angle = -degree
+            turn_direction = turn_directions["left"]
+        write_len = ControlUtil.__ser.write(f"CT+TRUNSINGLE({turn_direction},{turn_angle});".encode('utf-8'))

    @staticmethod
    def get_curr_gripper_to_base_pose() -> np.ndarray:
        return ControlUtil.__fa.get_pose().matrix

+    @staticmethod
+    def set_gripper_pose(gripper_to_base: np.ndarray) -> None:
+        gripper_to_base = RigidTransform(rotation=gripper_to_base[:3, :3], translation=gripper_to_base[:3, 3], from_frame="franka_tool", to_frame="world")
+        ControlUtil.__fa.goto_pose(gripper_to_base, duration=5, use_impedance=False, ignore_errors=False)

    @staticmethod
    def solve_gripper_to_base(cam_to_world: np.ndarray) -> np.ndarray:
        return np.linalg.inv(ControlUtil.BASE_TO_WORLD) @ cam_to_world @ np.linalg.inv(ControlUtil.CAMERA_TO_GRIPPER)
@@ -66,24 +92,38 @@ class ControlUtil:
        return ControlUtil.BASE_TO_WORLD @ gripper_to_base @ ControlUtil.CAMERA_TO_GRIPPER

    @staticmethod
-    def solve_display_table_rot_and_cam_to_world(cam_to_world: np.ndarray) -> tuple:
-        gripper_to_base = ControlUtil.solve_gripper_to_base(cam_to_world)
-        gripper_to_base_axis_angle = ControlUtil.get_gripper_to_base_axis_angle(gripper_to_base)
-        if ControlUtil.AXIS_THRESHOLD[0] <= gripper_to_base_axis_angle <= ControlUtil.AXIS_THRESHOLD[1]:
+    def check_limit(new_cam_to_world):
+        if new_cam_to_world[0,3] > 0 or new_cam_to_world[1,3] > 0:
+            return False
+        x = abs(new_cam_to_world[0,3])
+        y = abs(new_cam_to_world[1,3])
+        tan_y_x = y/x
+        if tan_y_x < np.sqrt(3)/3 or tan_y_x > np.sqrt(3):
+            return False
+        return True
+
+    @staticmethod
+    def solve_display_table_rot_and_cam_to_world(cam_to_world: np.ndarray) -> tuple:
+        if ControlUtil.check_limit(cam_to_world):
            return 0, cam_to_world
        else:
-            for display_table_rot in np.linspace(0.1,180, 1800):
+            min_display_table_rot = 180
+            min_new_cam_to_world = None
+            for display_table_rot in np.linspace(0.1,360, 1800):
                display_table_rot_z_pose = ControlUtil.get_z_axis_rot_mat(display_table_rot)
                new_cam_to_world = display_table_rot_z_pose @ cam_to_world
-                if ControlUtil.AXIS_THRESHOLD[0] <= ControlUtil.get_gripper_to_base_axis_angle(new_cam_to_world) <= ControlUtil.AXIS_THRESHOLD[1]:
-                    return -display_table_rot, new_cam_to_world
-                display_table_rot = -display_table_rot
-                display_table_rot_z_pose = ControlUtil.get_z_axis_rot_mat(display_table_rot)
-                new_cam_to_world = display_table_rot_z_pose @ cam_to_world
-                if ControlUtil.AXIS_THRESHOLD[0] <= ControlUtil.get_gripper_to_base_axis_angle(new_cam_to_world) <= ControlUtil.AXIS_THRESHOLD[1]:
-                    return -display_table_rot, new_cam_to_world
+                if ControlUtil.check_limit(new_cam_to_world):
+                    if display_table_rot < min_display_table_rot:
+                        min_display_table_rot, min_new_cam_to_world = display_table_rot, new_cam_to_world
+                    if abs(display_table_rot - 360) < min_display_table_rot:
+                        min_display_table_rot, min_new_cam_to_world = display_table_rot - 360, new_cam_to_world
+
+            if min_new_cam_to_world is None:
+                raise ValueError("No valid display table rotation found")
+
+            return min_display_table_rot, min_new_cam_to_world

    @staticmethod
    def get_z_axis_rot_mat(degree):
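The two solvers above are algebraic inverses of each other: solve_cam_to_world composes BASE_TO_WORLD @ gripper_to_base @ CAMERA_TO_GRIPPER, and solve_gripper_to_base undoes it by multiplying with the two inverses. A standalone round-trip check follows, with the constants copied out of the class so it runs without frankapy or the serial port attached; the test pose reuses the INIT_POSE values from this commit.

# Standalone round-trip check of the hand-eye transform chain used above:
#   cam_to_world     = BASE_TO_WORLD @ gripper_to_base @ CAMERA_TO_GRIPPER
#   gripper_to_base  = inv(BASE_TO_WORLD) @ cam_to_world @ inv(CAMERA_TO_GRIPPER)
import numpy as np

BASE_TO_WORLD = np.asarray([
    [1, 0, 0, -0.7323],
    [0, 1, 0, 0.05926],
    [0, 0, 1, -0.21],
    [0, 0, 0, 1],
])
CAMERA_TO_GRIPPER = np.asarray([
    [0, -1, 0, 0.01],
    [1, 0, 0, -0.05],
    [0, 0, 1, 0.075],
    [0, 0, 0, 1],
])

def solve_gripper_to_base(cam_to_world):
    return np.linalg.inv(BASE_TO_WORLD) @ cam_to_world @ np.linalg.inv(CAMERA_TO_GRIPPER)

def solve_cam_to_world(gripper_to_base):
    return BASE_TO_WORLD @ gripper_to_base @ CAMERA_TO_GRIPPER

theta = np.radians(25)
cam_to_world = np.asarray([
    [np.cos(theta), 0, -np.sin(theta), 0],
    [0, -1, 0, 0],
    [-np.sin(theta), 0, -np.cos(theta), 0.35],
    [0, 0, 0, 1],
])
gripper_to_base = solve_gripper_to_base(cam_to_world)
# The two solvers invert each other, so the round trip reproduces the input pose.
assert np.allclose(solve_cam_to_world(gripper_to_base), cam_to_world)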
@@ -107,35 +147,32 @@ class ControlUtil:
    def move_to(pose: np.ndarray):
        rot_degree, cam_to_world = ControlUtil.solve_display_table_rot_and_cam_to_world(pose)
        print("table rot degree:", rot_degree)
        exec_time = rot_degree/9
        start_time = time.time()
        ControlUtil.rotate_display_table(rot_degree)
        ControlUtil.set_pose(cam_to_world)

        end_time = time.time()
        if end_time - start_time < exec_time:
            time.sleep(exec_time - (end_time - start_time))

# ----------- Debug Test -------------

if __name__ == "__main__":
    ControlUtil.connect_robot()
    print(ControlUtil.get_curr_gripper_to_base_pose())
    ControlUtil.init()
    import time
    start = time.time()
    rot_degree, cam_to_world = ControlUtil.solve_display_table_rot_and_cam_to_world(ControlUtil.INIT_POSE)
    end = time.time()
    print(f"Time: {end-start}")
    print(rot_degree, cam_to_world)
    # test_pose = np.asarray([
    #     [1, 0, 0, 0.4],
    #     [0, -1, 0, 0],
    #     [0, 0, -1, 0.6],
    #     [0, 0, 0, 1]
    # ])
    # ControlUtil.set_pose(test_pose)
    # print(ControlUtil.get_pose())
    # ControlUtil.reset()
    # print(ControlUtil.get_pose())
    # import time
    # start = time.time()
    # test_pose = np.asarray([[ 0.06023737 ,-0.81328565, 0.57872715, -0.23602393],
    #                         [-0.99107872 ,-0.11773834, -0.06230158, 0.24174289],
    #                         [ 0.11880735 ,-0.56981127 ,-0.813138  , 0.15199069],
    #                         [ 0.        ,  0.        ,  0.        , 1.        ]])
    # print(test_pose)
    # rot_degree, cam_to_world = ControlUtil.solve_display_table_rot_and_cam_to_world(test_pose)
    # print(rot_degree, cam_to_world)
    # end = time.time()
    # print(f"Time: {end-start}")

    angle = ControlUtil.get_gripper_to_base_axis_angle(ControlUtil.solve_gripper_to_base(cam_to_world))
    threshold = ControlUtil.AXIS_THRESHOLD

    angle_degree = np.degrees(angle)
    threshold_degree = np.degrees(threshold[0]), np.degrees(threshold[1])
    print(f"Angle: {angle_degree}, range: {threshold_degree}")
    ControlUtil.set_pose(cam_to_world)
    # ControlUtil.set_pose(test_pose)
    # import ipdb; ipdb.set_trace()
    # ControlUtil.move_to(test_pose)
    # print("End!")
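For reference, the turntable command written by rotate_display_table is a plain ASCII string of the form CT+TRUNSINGLE(direction,angle);, where direction 1 means a rightward (positive degree) turn and 0 a leftward turn. A small sketch of the same formatting logic, without opening the serial port:

# Mirrors the bytes that rotate_display_table() writes to /dev/ttyUSB0.
def format_turn_command(degree: float) -> bytes:
    turn_directions = {"left": 0, "right": 1}
    if degree >= 0:
        turn_angle, turn_direction = degree, turn_directions["right"]
    else:
        turn_angle, turn_direction = -degree, turn_directions["left"]
    return f"CT+TRUNSINGLE({turn_direction},{turn_angle});".encode("utf-8")

print(format_turn_command(30))    # b'CT+TRUNSINGLE(1,30);'
print(format_turn_command(-45))   # b'CT+TRUNSINGLE(0,45);'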
@@ -8,6 +8,7 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils.reconstruction_util import ReconstructionUtil
from utils.data_load import DataLoadUtil
from utils.pts_util import PtsUtil
+from PytorchBoot.utils.log_util import Log

def save_np_pts(path, pts: np.ndarray, file_type="txt"):
    if file_type == "txt":
@@ -75,7 +76,7 @@ def save_scene_data(root, scene, scene_idx=0, scene_total=1,file_type="txt"):
    ''' read frame data(depth|mask|normal) '''
    frame_num = DataLoadUtil.get_scene_seq_length(root, scene)
    for frame_id in range(frame_num):
-        print(f"[scene({scene_idx}/{scene_total})|frame({frame_id}/{frame_num})]Processing {scene} frame {frame_id}")
+        Log.info(f"[frame({frame_id}/{frame_num})]Processing {scene} frame {frame_id}")
        path = DataLoadUtil.get_path(root, scene, frame_id)
        cam_info = DataLoadUtil.load_cam_info(path, binocular=True)
        depth_L, depth_R = DataLoadUtil.load_depth(
utils/view_util.py (new file, 127 lines)
@@ -0,0 +1,127 @@
import numpy as np
from scipy.spatial.transform import Rotation as R
from dataclasses import dataclass


@dataclass
class CameraIntrinsics:
    width: int
    height: int
    fx: float
    fy: float
    cx: float
    cy: float

    @property
    def intrinsic_matrix(self):
        return np.array([[self.fx, 0, self.cx], [0, self.fy, self.cy], [0, 0, 1]])


@dataclass
class CameraExtrinsics:
    def __init__(self, rotation: np.ndarray, translation: np.ndarray, rot_type: str):
        """
        rotation: 3x3 rotation matrix or 1x3 euler angles or 1x4 quaternion
        translation: 1x3 or 3x1 translation vector
        rot_type: "mat", "euler_xyz", "quat_xyzw"
        """
        assert rot_type in ["mat", "euler_xyz", "quat_xyzw"]
        if rot_type == "mat":
            self._rot = R.from_matrix(rotation)
        elif rot_type == "euler_xyz":
            self._rot = R.from_euler('xyz', rotation, degrees=True)
        elif rot_type == "quat_xyzw":
            self._rot = R.from_quat(rotation)
        self._translation = translation

    @property
    def extrinsic_matrix(self):
        return np.vstack([np.hstack([self._rot.as_matrix(), self._translation.reshape(3, 1)]), [0, 0, 0, 1]])

    @property
    def rotation_euler_xyz(self):
        return self._rot.as_euler('xyz', degrees=True)

    @property
    def rotation_quat_xyzw(self):
        return self._rot.as_quat()

    @property
    def rotation_matrix(self):
        return self._rot.as_matrix()

    @property
    def translation(self):
        return self._translation


@dataclass
class CameraData:
    def __init__(self, depth_image: np.ndarray, image_id: int, intrinsics: CameraIntrinsics, extrinsics: CameraExtrinsics):
        self._depth_image = depth_image
        self._image_id = image_id
        self._intrinsics = intrinsics
        self._extrinsics = extrinsics

    @property
    def depth_image(self):
        return self._depth_image

    @property
    def image_id(self):
        return self._image_id

    @property
    def intrinsics(self):
        return self._intrinsics.intrinsic_matrix

    @property
    def extrinsics(self):
        return self._extrinsics.extrinsic_matrix

    @property
    def projection_matrix(self):
        return self.intrinsics @ self.extrinsics[:3, :4]

    @property
    def pts_camera(self):
        height, width = self.depth_image.shape
        v, u = np.indices((height, width))
        points = np.vstack([u.flatten(), v.flatten(), np.ones_like(u.flatten())]) # 3xN
        points = np.linalg.inv(self.intrinsics) @ points # 3xN
        points = points.T # Nx3
        points = points * self.depth_image.flatten()[:, np.newaxis] # Nx3
        return points

    @property
    def pts_world(self):
        homogeneous_pts = np.hstack([self.pts_camera, np.ones((self.pts_camera.shape[0], 1))]) # Nx4
        pts_world = self.extrinsics @ homogeneous_pts.T # 4xN
        return pts_world[:3, :].T


class ViewUtil:
    def get_pts(view_data):
        image_id = view_data["image_id"]
        depth_intrinsics = view_data["depth_intrinsics"]
        depth_extrinsics = view_data["depth_extrinsics"]
        depth_image = np.array(view_data["depth_image"], dtype=np.uint16)
        if image_id is None:
            return None
        else:
            print(f"Image ID: {image_id}")
            camera_intrinsics = CameraIntrinsics(
                depth_intrinsics['width'],
                depth_intrinsics['height'],
                depth_intrinsics['fx'],
                depth_intrinsics['fy'],
                depth_intrinsics['cx'],
                depth_intrinsics['cy']
            )
            camera_extrinsics = CameraExtrinsics(
                depth_extrinsics[:3, :3],
                depth_extrinsics[:3, 3],
                rot_type="mat"
            )
            camera_data = CameraData(depth_image, image_id, camera_intrinsics, camera_extrinsics)
            pts = camera_data.pts_world
            return pts/1000
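A quick self-contained check of the back-projection in utils/view_util.py, using a synthetic constant depth image, identity extrinsics, and made-up pinhole intrinsics; it assumes the project root is on sys.path so that utils.view_util imports.

import numpy as np
from utils.view_util import ViewUtil

# Synthetic frame: 4x4 depth image at a constant 1000 (interpreted in mm, since
# get_pts divides the world points by 1000), identity extrinsics, made-up intrinsics.
view_data = {
    "image_id": 0,
    "depth_image": np.full((4, 4), 1000, dtype=np.uint16),
    "depth_intrinsics": {"width": 4, "height": 4,
                         "fx": 2.0, "fy": 2.0, "cx": 2.0, "cy": 2.0},
    "depth_extrinsics": np.eye(4),
}

pts = ViewUtil.get_pts(view_data)   # Nx3 world points in meters
print(pts.shape)                    # (16, 3)
# Every pixel lies 1 m in front of the camera along +z after the /1000 scaling.
assert np.allclose(pts[:, 2], 1.0)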