diff --git a/runners/strategy_generator.py b/runners/strategy_generator.py
index 399b2ae..ca40f99 100644
--- a/runners/strategy_generator.py
+++ b/runners/strategy_generator.py
@@ -73,7 +73,7 @@ class StrategyGenerator(Runner):
     def generate_sequence(self, root, scene_name, voxel_threshold, soft_overlap_threshold, hard_overlap_threshold):
         status_manager.set_status("generate_strategy", "strategy_generator", "scene", scene_name)
         frame_num = DataLoadUtil.get_scene_seq_length(root, scene_name)
-        import ipdb; ipdb.set_trace()
+
         model_points_normals = DataLoadUtil.load_points_normals(root, scene_name)
         model_pts = model_points_normals[:,:3]
         down_sampled_model_pts = PtsUtil.voxel_downsample_point_cloud(model_pts, voxel_threshold)
diff --git a/utils/pts.py b/utils/pts.py
index 0bdc26c..30f8860 100644
--- a/utils/pts.py
+++ b/utils/pts.py
@@ -1,16 +1,14 @@
 import numpy as np
 import open3d as o3d
 import torch
-from scipy.spatial import cKDTree

 class PtsUtil:

     @staticmethod
     def voxel_downsample_point_cloud(point_cloud, voxel_size=0.005):
-        o3d_pc = o3d.geometry.PointCloud()
-        o3d_pc.points = o3d.utility.Vector3dVector(point_cloud)
-        downsampled_pc = o3d_pc.voxel_down_sample(voxel_size)
-        return np.asarray(downsampled_pc.points)
+        voxel_indices = np.floor(point_cloud / voxel_size).astype(np.int32)
+        unique_voxels = np.unique(voxel_indices, axis=0, return_inverse=True)
+        return unique_voxels[0]*voxel_size

     @staticmethod
     def random_downsample_point_cloud(point_cloud, num_points, require_idx=False):
@@ -94,5 +92,8 @@ class PtsUtil:
             z_filtered_points = filtered_sampled_points[idx]
             return z_filtered_points[:, :3]
-
-
\ No newline at end of file
+
+    @staticmethod
+    def point_to_hash(point, voxel_size):
+        return tuple(np.floor(point / voxel_size).astype(int))
+
\ No newline at end of file
diff --git a/utils/reconstruction.py b/utils/reconstruction.py
index 3d48dca..fed95e1 100644
--- a/utils/reconstruction.py
+++ b/utils/reconstruction.py
@@ -3,7 +3,7 @@ from scipy.spatial import cKDTree
 from utils.pts import PtsUtil

 class ReconstructionUtil:
-
+
     @staticmethod
     def compute_coverage_rate(target_point_cloud, combined_point_cloud, threshold=0.01):
         kdtree = cKDTree(combined_point_cloud)
@@ -12,6 +12,19 @@ class ReconstructionUtil:
         coverage_rate = covered_points_num / target_point_cloud.shape[0]
         return coverage_rate, covered_points_num

+    @staticmethod
+    def compute_coverage_rate_with_normal(target_point_cloud, combined_point_cloud, target_normal, combined_normal, threshold=0.01, normal_threshold=0.1):
+        kdtree = cKDTree(combined_point_cloud)
+        distances, indices = kdtree.query(target_point_cloud)
+        is_covered_by_distance = distances < threshold
+        normal_dots = np.einsum('ij,ij->i', target_normal, combined_normal[indices])
+        is_covered_by_normal = normal_dots > normal_threshold
+        covered_points_num = np.sum(is_covered_by_distance & is_covered_by_normal)
+        coverage_rate = covered_points_num / target_point_cloud.shape[0]
+
+        return coverage_rate, covered_points_num
+
+
     @staticmethod
     def compute_overlap_rate(new_point_cloud, combined_point_cloud, threshold=0.01):
         kdtree = cKDTree(combined_point_cloud)
@@ -145,18 +158,6 @@ class ReconstructionUtil:
             attempts += 1
         return points

-    @staticmethod
-    def compute_covered_scan_points(scan_points, point_cloud, threshold=0.01):
-
-        tree = cKDTree(point_cloud)
-        covered_points = []
-        indices = []
-        for i, scan_point in enumerate(scan_points):
-            if tree.query_ball_point(scan_point, threshold):
-                covered_points.append(scan_point)
-                indices.append(i)
-        return covered_points, indices
-
     @staticmethod
     def check_scan_points_overlap(history_indices, indices2, threshold=5):
         for indices1 in history_indices:
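
For reference, below is a minimal usage sketch of the NumPy-based voxel downsampling, the point_to_hash helper, and the normal-aware coverage check introduced above. It is an illustrative snippet, not code from the repository: the synthetic input arrays, variable names, and the final print are made up, and it assumes the utils package (including its open3d and torch imports) is importable from the project root.

import numpy as np

from utils.pts import PtsUtil
from utils.reconstruction import ReconstructionUtil

# Synthetic stand-ins for a target model cloud and a combined scan cloud.
rng = np.random.default_rng(0)
target_pts = rng.random((2000, 3))
target_normals = rng.normal(size=(2000, 3))
target_normals /= np.linalg.norm(target_normals, axis=1, keepdims=True)
combined_pts = target_pts + rng.normal(scale=0.002, size=target_pts.shape)
combined_normals = target_normals.copy()

# Grid-based downsampling: points are binned by floor(point / voxel_size),
# duplicate bins are collapsed, and each occupied voxel is represented by its
# corner coordinate (index * voxel_size), so the output lies on the voxel grid.
down_pts = PtsUtil.voxel_downsample_point_cloud(target_pts, voxel_size=0.005)

# Normal-aware coverage: a target point counts as covered only if its nearest
# neighbour in the combined cloud is within `threshold` and the corresponding
# normals' dot product exceeds `normal_threshold`.
coverage_rate, covered_num = ReconstructionUtil.compute_coverage_rate_with_normal(
    target_pts, combined_pts, target_normals, combined_normals,
    threshold=0.01, normal_threshold=0.1)

# Voxel hash of a single point, usable as a dictionary key for overlap bookkeeping.
voxel_key = PtsUtil.point_to_hash(target_pts[0], voxel_size=0.005)
print(down_pts.shape[0], covered_num, round(coverage_rate, 3), voxel_key)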