7 Commits

SHA1 Message Date
872405e239 remove fps 2024-10-29 11:23:28 +00:00
b13e45bafc solve merge 2024-10-29 08:14:43 +00:00
9e39c6c6c9 solve merge 2024-10-28 18:27:16 +00:00
3c9e2c8d12 solve merge 2024-10-28 18:25:53 +00:00
bd27226f0f solve merge 2024-10-25 14:40:26 +00:00
0f61e1d64d Merge branch 'master' of https://git.hofee.top/hofee/nbv_reconstruction 2024-10-21 07:33:40 +00:00
9ca0851bf7 debug pipeline 2024-10-21 07:33:32 +00:00
9 changed files with 54 additions and 24 deletions

View File

@@ -5,5 +5,5 @@ from runners.data_spliter import DataSpliter
 class DataSplitApp:
     @staticmethod
     def start():
-        DataSpliter("configs/server/split_dataset_config.yaml").run()
+        DataSpliter("configs/server/server_split_dataset_config.yaml").run()

View File

@@ -22,8 +22,6 @@ runner:
 datasets:
   OmniObject3d:
-    root_dir: /home/data/hofee/project/nbv_rec_part2_preprocessed
-    from: 960
-    to: 1000 # -1 means end
+    root_dir: /data/hofee/nbv_rec_part2_preprocessed
+    from: 155
+    to: 165 # -1 means end

View File

@@ -0,0 +1,22 @@
+runner:
+  general:
+    seed: 0
+    device: cpu
+    cuda_visible_devices: "0,1,2,3,4,5,6,7"
+
+  experiment:
+    name: debug
+    root_dir: "experiments"
+
+  split: #
+    root_dir: "/data/hofee/data/packed_preprocessed_data"
+    type: "unseen_instance" # "unseen_category"
+    datasets:
+      OmniObject3d_train:
+        path: "/data/hofee/data/OmniObject3d_train.txt"
+        ratio: 0.9
+      OmniObject3d_test:
+        path: "/data/hofee/data/OmniObject3d_test.txt"
+        ratio: 0.1
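
For context, the 0.9/0.1 ratios above carve the scene list into train and test instance splits. A minimal sketch of how an "unseen_instance" split with these ratios might be materialized (the DataSpliter internals are not shown in this diff; split_unseen_instance and its inputs are illustrative assumptions):

import random

def split_unseen_instance(scene_list, ratios, seed=0):
    # Shuffle once so the partition is reproducible for a given seed.
    rng = random.Random(seed)
    scenes = list(scene_list)
    rng.shuffle(scenes)
    # Carve the shuffled list into consecutive chunks, one per ratio entry.
    splits, start = {}, 0
    for name, ratio in ratios.items():
        end = start + int(round(len(scenes) * ratio))
        splits[name] = scenes[start:end]
        start = end
    return splits

# Hypothetical usage mirroring the config above; each resulting list would
# then be written to the corresponding .txt path.
splits = split_unseen_instance(
    [f"scene_{i:04d}" for i in range(10)],
    {"OmniObject3d_train": 0.9, "OmniObject3d_test": 0.1},
)
print({name: len(scenes) for name, scenes in splits.items()})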

View File

@@ -13,7 +13,7 @@ runner:
     epoch: -1 # -1 stands for last epoch
     max_epochs: 5000
     save_checkpoint_interval: 1
-    test_first: True
+    test_first: False
   train:
     optimizer:
@@ -25,7 +25,7 @@ runner:
   test:
     frequency: 3 # test frequency
     dataset_list:
-      - OmniObject3d_test
+      #- OmniObject3d_test
       - OmniObject3d_val
   pipeline: nbv_reconstruction_global_pts_n_num_pipeline
@@ -41,7 +41,7 @@ dataset:
     ratio: 1
     batch_size: 160
     num_workers: 16
-    pts_num: 4096
+    pts_num: 8192
     load_from_preprocess: True
   OmniObject3d_test:
@@ -57,7 +57,7 @@ dataset:
     ratio: 0.05
     batch_size: 160
     num_workers: 12
-    pts_num: 4096
+    pts_num: 8192
     load_from_preprocess: True
   OmniObject3d_val:
@@ -73,7 +73,7 @@ dataset:
     ratio: 0.005
     batch_size: 160
     num_workers: 12
-    pts_num: 4096
+    pts_num: 8192
     load_from_preprocess: True

View File

@@ -1,4 +1,5 @@
 import torch
+import time
 from torch import nn
 import PytorchBoot.namespace as namespace
 import PytorchBoot.stereotype as stereotype
@@ -58,7 +59,10 @@ class NBVReconstructionGlobalPointsPipeline(nn.Module):
         return perturbed_x, random_t, target_score, std
     def forward_train(self, data):
+        start_time = time.time()
         main_feat = self.get_main_feat(data)
+        end_time = time.time()
+        print("get_main_feat time: ", end_time - start_time)
         """ get std """
         best_to_world_pose_9d_batch = data["best_to_world_pose_9d"]
         perturbed_x, random_t, target_score, std = self.pertube_data(
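
The added lines time get_main_feat with a raw start/end pair and a print. A context manager is a common way to factor this pattern out; the sketch below is an illustrative alternative, not code from this repo. For CUDA work, note that a torch.cuda.synchronize() before reading the clock is needed for accurate numbers, since kernels launch asynchronously.

import time
from contextlib import contextmanager

@contextmanager
def timed(label):
    # Minimal wall-clock timer: equivalent to the start_time/end_time pair
    # added around get_main_feat in the hunk above.
    start = time.time()
    try:
        yield
    finally:
        print(f"{label} time: {time.time() - start:.4f}s")

# Hypothetical usage inside forward_train:
#     with timed("get_main_feat"):
#         main_feat = self.get_main_feat(data)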

View File

@@ -8,7 +8,7 @@ import torch
 import os
 import sys
-sys.path.append(r"/home/data/hofee/project/nbv_rec/nbv_reconstruction")
+sys.path.append(r"/data/hofee/project/nbv_rec/nbv_reconstruction")
 from utils.data_load import DataLoadUtil
 from utils.pose import PoseUtil
@@ -31,7 +31,7 @@ class NBVReconstructionDataset(BaseDataset):
         self.load_from_preprocess = config.get("load_from_preprocess", False)
         if self.type == namespace.Mode.TEST:
-            self.model_dir = config["model_dir"]
+            #self.model_dir = config["model_dir"]
             self.filter_degree = config["filter_degree"]
         if self.type == namespace.Mode.TRAIN:
             scale_ratio = 100
@@ -66,6 +66,8 @@ class NBVReconstructionDataset(BaseDataset):
             if max_coverage_rate > scene_max_coverage_rate:
                 scene_max_coverage_rate = max_coverage_rate
             max_coverage_rate_list.append(max_coverage_rate)
+        if max_coverage_rate_list:
+            mean_coverage_rate = np.mean(max_coverage_rate_list)
         for seq_idx in range(seq_num):
@@ -112,6 +114,10 @@ class NBVReconstructionDataset(BaseDataset):
         except Exception as e:
             Log.error(f"Save cache failed: {e}")
+    def voxel_downsample_with_mask(self, pts, voxel_size):
+        pass
     def __getitem__(self, index):
         data_item_info = self.datalist[index]
         scanned_views = data_item_info["scanned_views"]
@@ -160,9 +166,8 @@ class NBVReconstructionDataset(BaseDataset):
         )
         combined_scanned_views_pts = np.concatenate(scanned_views_pts, axis=0)
-        fps_downsampled_combined_scanned_pts, fps_idx = PtsUtil.fps_downsample_point_cloud(
-            combined_scanned_views_pts, self.pts_num, require_idx=True
-        )
+        voxel_downsampled_combined_scanned_pts_np = PtsUtil.voxel_downsample_point_cloud(combined_scanned_views_pts, 0.002)
+        random_downsampled_combined_scanned_pts_np = PtsUtil.random_downsample_point_cloud(voxel_downsampled_combined_scanned_pts_np, self.pts_num)
         combined_scanned_views_pts_mask = np.zeros(len(combined_scanned_views_pts), dtype=np.uint8)
@@ -171,7 +176,6 @@ class NBVReconstructionDataset(BaseDataset):
             end_idx = start_idx + len(scanned_views_pts[i])
             combined_scanned_views_pts_mask[start_idx:end_idx] = i
             start_idx = end_idx
-        fps_downsampled_combined_scanned_pts_mask = combined_scanned_views_pts_mask[fps_idx]
         data_item = {
@@ -238,10 +242,9 @@ if __name__ == "__main__":
     torch.manual_seed(seed)
    np.random.seed(seed)
     config = {
-        "root_dir": "/home/data/hofee/project/nbv_rec/data/nbv_rec_data_512_preproc_npy",
-        "model_dir": "/home/data/hofee/project/nbv_rec/data/scaled_object_meshes",
+        "root_dir": "/data/hofee/data/packed_preprocessed_data",
         "source": "nbv_reconstruction_dataset",
-        "split_file": "/home/data/hofee/project/nbv_rec/data/OmniObject3d_test.txt",
+        "split_file": "/data/hofee/data/OmniObject3d_train.txt",
         "load_from_preprocess": True,
         "ratio": 0.5,
         "batch_size": 2,

View File

@@ -93,10 +93,8 @@ class StrategyGenerator(Runner):
             else:
                 nrm = np.load(nrm_path)
             nrm_list.append(nrm)
-            indices = np.load(idx_path)
             pts_list.append(pts)
+            indices = np.load(idx_path)
             scan_points_indices_list.append(indices)
             if pts.shape[0] > 0:
                 non_zero_cnt += 1

View File

@@ -53,6 +53,8 @@ class DataLoadUtil:
     @staticmethod
     def get_label_num(root, scene_name):
         label_dir = os.path.join(root, scene_name, "label")
+        if not os.path.exists(label_dir):
+            return 0
         return len(os.listdir(label_dir))
     @staticmethod
@staticmethod

View File

@@ -75,6 +75,7 @@ class ReconstructionUtil:
         cnt_processed_view = 0
         remaining_views.remove(init_view)
         curr_rec_pts_num = combined_point_cloud.shape[0]
+        drop_output_ratio = 0.4
         import time
         while remaining_views:
@@ -84,6 +85,8 @@ class ReconstructionUtil:
             best_covered_num = 0
             for view_index in remaining_views:
+                if np.random.rand() < drop_output_ratio:
+                    continue
                 if point_cloud_list[view_index].shape[0] == 0:
                     continue
                 if selected_views:
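
The new drop_output_ratio makes the greedy next-best-view search skip each candidate with probability 0.4, trading some coverage optimality for roughly 40% fewer evaluations per iteration. A standalone sketch of that pattern (all names here are illustrative, not the repo's API):

import numpy as np

def greedy_select_with_dropout(candidate_views, score_fn, drop_output_ratio=0.4, seed=0):
    # Greedy argmax over candidate views, where each candidate is skipped
    # with probability drop_output_ratio to cut evaluation cost.
    rng = np.random.default_rng(seed)
    best_view, best_score = None, -np.inf
    for view in candidate_views:
        if rng.random() < drop_output_ratio:
            continue  # randomly drop this candidate this round
        score = score_fn(view)
        if score > best_score:
            best_view, best_score = view, score
    return best_view

# Hypothetical usage: pick the view with the highest (toy) coverage score.
best = greedy_select_with_dropout(range(10), score_fn=lambda v: (v * 7) % 10)
print(best)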