diff --git a/configs/server/server_train_config.yaml b/configs/server/server_train_config.yaml
index 2083121..2bf558d 100644
--- a/configs/server/server_train_config.yaml
+++ b/configs/server/server_train_config.yaml
@@ -7,11 +7,7 @@ runner:
   parallel: False
 
 experiment:
-<<<<<<< HEAD
-  name: test_new_pipeline_train_overfit
-=======
   name: debug
->>>>>>> 63a246c0c87d42f04076a459adcfdc88c954b09c
   root_dir: "experiments"
   use_checkpoint: False
   epoch: -1 # -1 stands for last epoch
@@ -43,9 +39,9 @@ dataset:
     type: train
     cache: True
    ratio: 1
-    batch_size: 16
+    batch_size: 160
     num_workers: 16
-    pts_num: 4096
+    pts_num: 8192
     load_from_preprocess: True
 
   OmniObject3d_test:
@@ -61,7 +57,7 @@ dataset:
     ratio: 0.05
     batch_size: 160
     num_workers: 12
-    pts_num: 4096
+    pts_num: 8192
     load_from_preprocess: True
 
   OmniObject3d_val:
@@ -77,7 +73,7 @@ dataset:
     ratio: 0.005
     batch_size: 160
     num_workers: 12
-    pts_num: 4096
+    pts_num: 8192
     load_from_preprocess: True
 
 
diff --git a/core/nbv_dataset.py b/core/nbv_dataset.py
index 5dafad3..628f031 100644
--- a/core/nbv_dataset.py
+++ b/core/nbv_dataset.py
@@ -114,6 +114,10 @@ class NBVReconstructionDataset(BaseDataset):
         except Exception as e:
             Log.error(f"Save cache failed: {e}")
 
+    def voxel_downsample_with_mask(self, pts, voxel_size):
+        pass
+
+
     def __getitem__(self, index):
         data_item_info = self.datalist[index]
         scanned_views = data_item_info["scanned_views"]
@@ -162,9 +166,8 @@
             )
 
         combined_scanned_views_pts = np.concatenate(scanned_views_pts, axis=0)
-        fps_downsampled_combined_scanned_pts, fps_idx = PtsUtil.fps_downsample_point_cloud(
-            combined_scanned_views_pts, self.pts_num, require_idx=True
-        )
+        voxel_downsampled_combined_scanned_pts_np = PtsUtil.voxel_downsample_point_cloud(combined_scanned_views_pts, 0.002)
+        random_downsampled_combined_scanned_pts_np = PtsUtil.random_downsample_point_cloud(voxel_downsampled_combined_scanned_pts_np, self.pts_num)
        combined_scanned_views_pts_mask = np.zeros(len(combined_scanned_views_pts), dtype=np.uint8)