finish register
@@ -4,6 +4,7 @@ import torch
+import trimesh
from scipy.spatial import cKDTree


class PtsUtil:

    @staticmethod
@@ -12,7 +13,7 @@ class PtsUtil:
        o3d_pc.points = o3d.utility.Vector3dVector(point_cloud)
        downsampled_pc = o3d_pc.voxel_down_sample(voxel_size)
        return np.asarray(downsampled_pc.points)

    @staticmethod
    def random_downsample_point_cloud(point_cloud, num_points, require_idx=False):
        if point_cloud.shape[0] == 0:
@@ -23,7 +24,7 @@
        if require_idx:
            return point_cloud[idx], idx
        return point_cloud[idx]

    @staticmethod
    def fps_downsample_point_cloud(point_cloud, num_points, require_idx=False):
        N = point_cloud.shape[0]
@@ -31,25 +32,29 @@

        sampled_indices = np.zeros(num_points, dtype=int)
        sampled_indices[0] = np.random.randint(0, N)
-        distances = np.linalg.norm(point_cloud - point_cloud[sampled_indices[0]], axis=1)
+        distances = np.linalg.norm(
+            point_cloud - point_cloud[sampled_indices[0]], axis=1
+        )
        for i in range(1, num_points):
            farthest_index = np.argmax(distances)
            sampled_indices[i] = farthest_index
            mask[farthest_index] = True

-            new_distances = np.linalg.norm(point_cloud - point_cloud[farthest_index], axis=1)
+            new_distances = np.linalg.norm(
+                point_cloud - point_cloud[farthest_index], axis=1
+            )
            distances = np.minimum(distances, new_distances)

        sampled_points = point_cloud[sampled_indices]
        if require_idx:
            return sampled_points, sampled_indices
        return sampled_points

    @staticmethod
    def random_downsample_point_cloud_tensor(point_cloud, num_points):
        idx = torch.randint(0, len(point_cloud), (num_points,))
        return point_cloud[idx]

    @staticmethod
    def voxelize_points(points, voxel_size):
        voxel_indices = np.floor(points / voxel_size).astype(np.int32)
@@ -61,9 +66,11 @@
        points_h = np.concatenate([points, np.ones((points.shape[0], 1))], axis=1)
        points_h = np.dot(pose_mat, points_h.T).T
        return points_h[:, :3]

    @staticmethod
-    def get_overlapping_points(point_cloud_L, point_cloud_R, voxel_size=0.005, require_idx=False):
+    def get_overlapping_points(
+        point_cloud_L, point_cloud_R, voxel_size=0.005, require_idx=False
+    ):
        voxels_L, indices_L = PtsUtil.voxelize_points(point_cloud_L, voxel_size)
        voxels_R, _ = PtsUtil.voxelize_points(point_cloud_R, voxel_size)
@@ -77,48 +84,88 @@ class PtsUtil:
        if require_idx:
            return overlapping_points, mask_L
        return overlapping_points

    @staticmethod
-    def filter_points(points, points_normals, cam_pose, voxel_size=0.002, theta=45, z_range=(0.2, 0.45)):
-        """ filter with z range """
+    def filter_points(
+        points,
+        points_normals,
+        cam_pose,
+        voxel_size=0.002,
+        theta=45,
+        z_range=(0.2, 0.45),
+    ):
+        """filter with z range"""
        points_cam = PtsUtil.transform_point_cloud(points, np.linalg.inv(cam_pose))
        idx = (points_cam[:, 2] > z_range[0]) & (points_cam[:, 2] < z_range[1])
        z_filtered_points = points[idx]

        """ filter with normal """
-        sampled_points = PtsUtil.voxel_downsample_point_cloud(z_filtered_points, voxel_size)
-        kdtree = cKDTree(points_normals[:,:3])
+        sampled_points = PtsUtil.voxel_downsample_point_cloud(
+            z_filtered_points, voxel_size
+        )
+        kdtree = cKDTree(points_normals[:, :3])
        _, indices = kdtree.query(sampled_points)
        nearest_points = points_normals[indices]

        normals = nearest_points[:, 3:]
        camera_axis = -cam_pose[:3, 2]
        normals_normalized = normals / np.linalg.norm(normals, axis=1, keepdims=True)
        cos_theta = np.dot(normals_normalized, camera_axis)
        theta_rad = np.deg2rad(theta)
        idx = cos_theta > np.cos(theta_rad)
-        filtered_sampled_points= sampled_points[idx]
+        filtered_sampled_points = sampled_points[idx]
        return filtered_sampled_points[:, :3]
    @staticmethod
-    def register_icp(pcl: np.ndarray, model: trimesh.Trimesh, threshold=0.5) -> np.ndarray:
-        mesh_points = np.asarray(model.vertices)
-        downsampled_mesh_points = PtsUtil.random_downsample_point_cloud(mesh_points, pcl.shape[0])
-        mesh_point_cloud = o3d.geometry.PointCloud()
-        mesh_point_cloud.points = o3d.utility.Vector3dVector(downsampled_mesh_points)
-        np.savetxt("mesh_point_cloud.txt", downsampled_mesh_points)
-        pcl_point_cloud = o3d.geometry.PointCloud()
-        pcl_point_cloud.points = o3d.utility.Vector3dVector(pcl)
-        initial_transform = np.eye(4)
-        reg_icp = o3d.pipelines.registration.registration_icp(
-            pcl_point_cloud, mesh_point_cloud, threshold,
-            initial_transform,
-            o3d.pipelines.registration.TransformationEstimationPointToPoint()
+    def register(pcl: np.ndarray, model: trimesh.Trimesh, voxel_size=0.01):
+        radius_normal = voxel_size * 2
+        pipreg = o3d.pipelines.registration
+        model_pcd = o3d.geometry.PointCloud()
+        model_pcd.points = o3d.utility.Vector3dVector(model.vertices)
+        model_downsampled = model_pcd.voxel_down_sample(voxel_size)
+        model_downsampled.estimate_normals(
+            search_param=o3d.geometry.KDTreeSearchParamHybrid(
+                radius=radius_normal, max_nn=30
+            )
        )
-        if np.allclose(reg_icp.transformation, np.eye(4)):
-            print("Registration failed. Check your initial alignment and point cloud quality.")
-        else:
-            print("Registration successful.")
-            print(reg_icp.transformation)
-        return reg_icp.transformation
+        model_fpfh = pipreg.compute_fpfh_feature(
+            model_downsampled,
+            o3d.geometry.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=100),
+        )
+
+        source_pcd = o3d.geometry.PointCloud()
+        source_pcd.points = o3d.utility.Vector3dVector(pcl)
+        source_downsampled = source_pcd.voxel_down_sample(voxel_size)
+        source_downsampled.estimate_normals(
+            search_param=o3d.geometry.KDTreeSearchParamHybrid(
+                radius=radius_normal, max_nn=30
+            )
+        )
+        source_fpfh = pipreg.compute_fpfh_feature(
+            source_downsampled,
+            o3d.geometry.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=100),
+        )
+
+        reg_ransac = pipreg.registration_ransac_based_on_feature_matching(
+            source_downsampled,
+            model_downsampled,
+            source_fpfh,
+            model_fpfh,
+            mutual_filter=True,
+            max_correspondence_distance=voxel_size * 1.5,
+            estimation_method=pipreg.TransformationEstimationPointToPoint(False),
+            ransac_n=4,
+            checkers=[pipreg.CorrespondenceCheckerBasedOnEdgeLength(0.9)],
+            criteria=pipreg.RANSACConvergenceCriteria(4000000, 500),
+        )
+
+        reg_icp = pipreg.registration_icp(
+            source_downsampled,
+            model_downsampled,
+            voxel_size * 2,
+            reg_ransac.transformation,
+            pipreg.TransformationEstimationPointToPlane(),
+            pipreg.ICPConvergenceCriteria(max_iteration=200),
+        )
+
+        return reg_icp.transformation
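The replacement register method goes coarse-to-fine: it voxel-downsamples the model vertices and the input cloud, estimates normals, computes FPFH features on both, obtains an initial pose from RANSAC feature matching, and refines it with point-to-plane ICP, returning a 4x4 transform that maps pcl into the model's frame. Below is a minimal usage sketch, not part of this commit; the module path, file names, and loading code are illustrative assumptions.

import numpy as np
import trimesh

from pts_util import PtsUtil  # hypothetical module path; use the file this commit edits

# Placeholders: a reference mesh and a captured scan in the same metric scale.
model = trimesh.load("model.ply", force="mesh")
pcl = np.loadtxt("scan.txt")  # (N, 3) array of points

# Coarse FPFH/RANSAC alignment followed by point-to-plane ICP refinement.
T = PtsUtil.register(pcl, model, voxel_size=0.01)

# T maps the scan into the model frame; apply it with the existing helper
# to inspect the aligned cloud.
aligned = PtsUtil.transform_point_cloud(pcl, T)
print(T)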