Compare commits

2 Commits

new_partia...ad7a1c9cdf

| Author | SHA1 | Date |
|---|---|---|
| | ad7a1c9cdf | |
| | be835aded4 | |
@@ -70,7 +70,7 @@ module:
       global_feat: True
       feature_transform: False
     transformer_seq_encoder:
-      embed_dim: 256
+      embed_dim: 320
       num_heads: 4
       ffn_dim: 256
       num_layers: 3
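The only functional change in this hunk raises the sequence encoder's embedding width from 256 to 320. As a rough illustration of what these four keys typically control, below is a minimal sketch mapping them onto a standard PyTorch transformer encoder; the repo's actual `transformer_seq_encoder` implementation is not part of this compare, so the mapping is an assumption.

```python
import torch
import torch.nn as nn

# Hypothetical stand-in for the configured transformer_seq_encoder;
# the real module in the repo may wire these values up differently.
encoder_layer = nn.TransformerEncoderLayer(
    d_model=320,          # embed_dim: 320 (was 256)
    nhead=4,              # num_heads: 4
    dim_feedforward=256,  # ffn_dim: 256
    batch_first=True,
)
seq_encoder = nn.TransformerEncoder(encoder_layer, num_layers=3)  # num_layers: 3

# A toy batch: B=2 sequences of S=5 per-view embeddings, each 320-dimensional.
x = torch.randn(2, 5, 320)
out = seq_encoder(x)  # shape stays (2, 5, 320)
print(out.shape)
```

Whatever the concrete module looks like, `embed_dim` must remain divisible by `num_heads`, which 320 / 4 = 80 still satisfies.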
@@ -90,6 +90,7 @@ class NBVReconstructionPipeline(nn.Module):
         scanned_n_to_world_pose_9d_batch = data[
             "scanned_n_to_world_pose_9d"
         ] # List(B): Tensor(S x 9)
+        scanned_pts_mask_batch = data["scanned_pts_mask"] # List(B): Tensor(S x N)

         scanned_pts_mask_batch = data["scanned_pts_mask"] # List(B): Tensor(N)

@@ -136,4 +137,4 @@ class NBVReconstructionPipeline(nn.Module):
             ipdb.set_trace()
             Log.error("nan in main_feat", True)

         return main_feat
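The first pipeline hunk fetches the per-point mask from the batch dict next to the pose tensors; note that the newly added line annotates the mask as `Tensor(S x N)` while the existing line two rows below says `Tensor(N)`. A small illustrative sketch of the batch layout the shape comments describe; only the dictionary keys and variable names come from the diff, the sizes and values are made up.

```python
import torch

B, S, N = 2, 4, 1024  # scenes per batch, scanned views per scene, points in the combined cloud

# Layout implied by the shape comments in the hunk above (illustrative only).
data = {
    "scanned_n_to_world_pose_9d": [torch.randn(S, 9) for _ in range(B)],       # List(B): Tensor(S x 9)
    "scanned_pts_mask": [torch.zeros(N, dtype=torch.bool) for _ in range(B)],  # List(B): Tensor(N)
}

scanned_n_to_world_pose_9d_batch = data["scanned_n_to_world_pose_9d"]
scanned_pts_mask_batch = data["scanned_pts_mask"]
print(scanned_n_to_world_pose_9d_batch[0].shape)  # torch.Size([4, 9])
print(scanned_pts_mask_batch[0].shape)            # torch.Size([1024])
```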
@@ -92,7 +92,8 @@ class Inferencer(Runner):
                 output = self.predict_sequence(data)
                 self.save_inference_result(test_set_name, data["scene_name"], output)
             except Exception as e:
-                Log.error(f"Error in scene {scene_name}, {e}")
+                print(e)
+                Log.error(f"Error, {e}")
                 continue

         status_manager.set_progress("inference", "inferencer", f"dataset", len(self.test_set_list), len(self.test_set_list))
@@ -116,7 +117,9 @@ class Inferencer(Runner):

         ''' data for inference '''
         input_data = {}
+
         input_data["combined_scanned_pts"] = torch.tensor(data["first_scanned_pts"][0], dtype=torch.float32).to(self.device).unsqueeze(0)
+        input_data["scanned_pts_mask"] = [torch.zeros(input_data["combined_scanned_pts"].shape[1], dtype=torch.bool).to(self.device).unsqueeze(0)]
         input_data["scanned_n_to_world_pose_9d"] = [torch.tensor(data["first_scanned_n_to_world_pose_9d"], dtype=torch.float32).to(self.device)]
         input_data["mode"] = namespace.Mode.TEST
         input_pts_N = input_data["combined_scanned_pts"].shape[1]
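The new `scanned_pts_mask` entry is an all-zeros boolean mask with one slot per point of the combined scanned cloud, wrapped in a one-element list so it matches the `List(B)` batching convention used by the pipeline. A quick shape check under that reading; only the dictionary keys and tensor expressions come from the diff, the device and point count are placeholders.

```python
import numpy as np
import torch

device = "cpu"  # the Inferencer uses self.device; CPU is enough for a shape check
first_scanned_pts = np.random.rand(1, 2048, 3).astype(np.float32)  # stand-in for data["first_scanned_pts"]

input_data = {}
input_data["combined_scanned_pts"] = (
    torch.tensor(first_scanned_pts[0], dtype=torch.float32).to(device).unsqueeze(0)
)  # (1, N, 3)
input_data["scanned_pts_mask"] = [
    torch.zeros(input_data["combined_scanned_pts"].shape[1], dtype=torch.bool).to(device).unsqueeze(0)
]  # list of one (1, N) mask, all False before any views are integrated

print(input_data["combined_scanned_pts"].shape)  # torch.Size([1, 2048, 3])
print(input_data["scanned_pts_mask"][0].shape)   # torch.Size([1, 2048])
```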
@@ -254,6 +257,14 @@ class Inferencer(Runner):

         return result

+    def voxel_downsample_with_mapping(self, point_cloud, voxel_size=0.003):
+        voxel_indices = np.floor(point_cloud / voxel_size).astype(np.int32)
+        unique_voxels, inverse, counts = np.unique(voxel_indices, axis=0, return_inverse=True, return_counts=True)
+        idx_sort = np.argsort(inverse)
+        idx_unique = idx_sort[np.cumsum(counts)-counts]
+        downsampled_points = point_cloud[idx_unique]
+        return downsampled_points, inverse
+
     def compute_coverage_rate(self, scanned_view_pts, new_pts, model_pts, threshold=0.005):
         if new_pts is not None:
             new_scanned_view_pts = scanned_view_pts + [new_pts]
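The new `voxel_downsample_with_mapping` helper quantizes points to a voxel grid, keeps one representative point per occupied voxel, and additionally returns `np.unique`'s `inverse` array, which maps every original point to the row of its representative in the downsampled cloud. A standalone usage sketch; the function body is copied from the diff, while the toy cloud and the checks around it are illustrative.

```python
import numpy as np

def voxel_downsample_with_mapping(point_cloud, voxel_size=0.003):
    # Quantize each point to integer voxel coordinates.
    voxel_indices = np.floor(point_cloud / voxel_size).astype(np.int32)
    # One row per occupied voxel; `inverse` maps each point to its voxel, `counts` is points per voxel.
    unique_voxels, inverse, counts = np.unique(voxel_indices, axis=0, return_inverse=True, return_counts=True)
    # Group point indices by voxel and pick one representative per group (the first after sorting).
    idx_sort = np.argsort(inverse)
    idx_unique = idx_sort[np.cumsum(counts) - counts]  # start offset of every voxel group
    downsampled_points = point_cloud[idx_unique]
    return downsampled_points, inverse

pts = np.random.rand(10000, 3).astype(np.float32) * 0.05   # toy cloud inside a 5 cm cube
down, inverse = voxel_downsample_with_mapping(pts, voxel_size=0.003)
print(pts.shape, down.shape, inverse.shape)                 # (10000, 3) (#occupied voxels, 3) (10000,)
assert np.all(np.asarray(inverse).ravel() < len(down))      # every point maps to a valid downsampled row
```

Because the representative is an actual input point rather than a per-voxel average, the downsampled cloud is a subset of the input, and `inverse` can later be used for per-point bookkeeping, for example scattering per-voxel labels back onto the full cloud.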