This commit is contained in:
hofee 2024-10-31 11:13:37 +00:00
parent 96fa40cc35
commit 5e8684d149
2 changed files with 11 additions and 18 deletions

View File

@@ -7,7 +7,7 @@ runner:
parallel: False
experiment:
-name: overfit_ab_global_and_partial_global
+name: train_ab_global_and_partial_global
root_dir: "experiments"
use_checkpoint: False
epoch: -1 # -1 stands for last epoch
@@ -32,10 +32,10 @@ runner:
dataset:
OmniObject3d_train:
-root_dir: "/data/hofee/nbv_rec_part2_preprocessed"
+root_dir: "/data/hofee/data/new_full_data"
model_dir: "../data/scaled_object_meshes"
source: nbv_reconstruction_dataset
-split_file: "/data/hofee/data/sample.txt"
+split_file: "/data/hofee/data/new_full_data_list/OmniObject3d_train.txt"
type: train
cache: True
ratio: 1
@@ -45,32 +45,32 @@ dataset:
load_from_preprocess: True
OmniObject3d_test:
-root_dir: "/data/hofee/nbv_rec_part2_preprocessed"
+root_dir: "/data/hofee/data/new_full_data"
model_dir: "../data/scaled_object_meshes"
source: nbv_reconstruction_dataset
-split_file: "/data/hofee/data/sample.txt"
+split_file: "/data/hofee/data/new_full_data_list/OmniObject3d_test.txt"
type: test
cache: True
filter_degree: 75
eval_list:
- pose_diff
-ratio: 0.05
+ratio: 1
batch_size: 80
num_workers: 12
pts_num: 8192
load_from_preprocess: True
OmniObject3d_val:
-root_dir: "/data/hofee/nbv_rec_part2_preprocessed"
+root_dir: "/data/hofee/data/new_full_data"
model_dir: "../data/scaled_object_meshes"
source: nbv_reconstruction_dataset
-split_file: "/data/hofee/data/sample.txt"
+split_file: "/data/hofee/data/new_full_data_list/OmniObject3d_train.txt"
type: test
cache: True
filter_degree: 75
eval_list:
- pose_diff
-ratio: 0.005
+ratio: 0.1
batch_size: 80
num_workers: 12
pts_num: 8192

View File

@@ -35,7 +35,7 @@ class NBVReconstructionDataset(BaseDataset):
#self.model_dir = config["model_dir"]
self.filter_degree = config["filter_degree"]
if self.type == namespace.Mode.TRAIN:
-scale_ratio = 50
+scale_ratio = 1
self.datalist = self.datalist*scale_ratio
if self.cache:
expr_root = ConfigManager.get("runner", "experiment", "root_dir")
@@ -149,7 +149,7 @@ class NBVReconstructionDataset(BaseDataset):
DataLoadUtil.load_from_preprocessed_pts(view_path)
)
downsampled_target_point_cloud = PtsUtil.random_downsample_point_cloud(
-target_point_cloud, self.pts_num, replace=False
+target_point_cloud, self.pts_num
)
scanned_views_pts.append(downsampled_target_point_cloud)
scanned_coverages_rate.append(coverage_rate)
@@ -177,11 +177,8 @@ class NBVReconstructionDataset(BaseDataset):
best_to_world_9d = np.concatenate(
[best_to_world_6d, best_to_world_trans], axis=0
)
-start_time = time.time()
combined_scanned_views_pts = np.concatenate(scanned_views_pts, axis=0)
-#Log.info(f"combined_scanned_views_pts shape: {combined_scanned_views_pts.shape}")
voxel_downsampled_combined_scanned_pts_np, inverse = self.voxel_downsample_with_mapping(combined_scanned_views_pts, 0.003)
random_downsampled_combined_scanned_pts_np, random_downsample_idx = PtsUtil.random_downsample_point_cloud(voxel_downsampled_combined_scanned_pts_np, self.pts_num, require_idx=True)
@@ -197,10 +194,6 @@ class NBVReconstructionDataset(BaseDataset):
view_unique_downsampled_idx_set = set(view_unique_downsampled_idx)
mask = np.array([idx in view_unique_downsampled_idx_set for idx in all_random_downsample_idx])
scanned_pts_mask.append(mask)
-#Log.info(f"random_downsampled_combined_scanned_pts_np shape: {random_downsampled_combined_scanned_pts_np.shape}")
-end_time = time.time()
-#Log.info(f"downsample time: {end_time - start_time}")
data_item = {
"scanned_pts": np.asarray(scanned_views_pts, dtype=np.float32), # Ndarray(S x Nv x 3)
"combined_scanned_pts": np.asarray(random_downsampled_combined_scanned_pts_np, dtype=np.float32), # Ndarray(N x 3)