Merge branch 'ab_global_only' of https://git.hofee.top/hofee/nbv_reconstruction into ab_global_only

commit 445e9dc00b
@@ -15,7 +15,7 @@ runner:
     - OmniObject3d_test

   blender_script_path: "/media/hofee/data/project/python/nbv_reconstruction/blender/data_renderer.py"
-  output_dir: "/media/hofee/data/data/new_inference_test_output"
+  output_dir: "/media/hofee/data/results/nbv_rec_inference/global_only_ycb_241204"
   pipeline: nbv_reconstruction_pipeline
   voxel_size: 0.003
   min_new_area: 1.0

@@ -34,8 +34,8 @@ dataset:
     # load_from_preprocess: True

   OmniObject3d_test:
-    root_dir: "/media/hofee/data/data/new_testset_output"
-    model_dir: "/media/hofee/data/data/scaled_object_meshes"
+    root_dir: "/media/hofee/data/results/ycb_preprocessed_dataset"
+    model_dir: "/media/hofee/data/data/ycb_obj"
     source: seq_reconstruction_dataset_preprocessed
     # split_file: "C:\\Document\\Datasets\\data_list\\OmniObject3d_test.txt"
     type: test

@@ -22,6 +22,6 @@ runner:

   datasets:
     OmniObject3d:
-      root_dir: /data/hofee/nbv_rec_part2_preprocessed
-      from: 155
-      to: 165 # ..-1 means end
+      root_dir: /media/hofee/data/results/ycb_view_data
+      from: 0
+      to: -1 # ..-1 means end

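The comment's "-1 means end" sentinel lets this runner sweep every scene in the new directory instead of the previous 155-165 slice. A minimal sketch of how such a sentinel is usually resolved; the helper name and signature are illustrative, not from the repository:

# Hypothetical helper resolving the "-1 means end" range convention.
def resolve_range(from_idx, to_idx, total):
    end = total if to_idx == -1 else to_idx
    return from_idx, end

# resolve_range(0, -1, 120) -> (0, 120): process all 120 scenes.
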
@@ -10,9 +10,9 @@ runner:
   port: 5002
   from: 1
   to: 50 # -1 means all
-  object_dir: C:\\Document\\Datasets\\scaled_object_meshes
-  table_model_path: C:\\Document\\Datasets\\table.obj
-  output_dir: C:\\Document\\Datasets\\debug_generate_view
+  object_dir: /media/hofee/data/data/ycb_obj
+  table_model_path: /media/hofee/data/data/others/table.obj
+  output_dir: /media/hofee/data/results/ycb_view_data
   binocular_vision: true
   plane_size: 10
   max_views: 512

@@ -180,9 +180,9 @@ if __name__ == "__main__":
     np.random.seed(seed)

     config = {
-        "root_dir": "/media/hofee/data/data/new_testset",
+        "root_dir": "/media/hofee/data/results/ycb_view_data",
         "source": "seq_reconstruction_dataset",
-        "split_file": "/media/hofee/data/data/OmniObject3d_test.txt",
+        "split_file": "/media/hofee/data/results/ycb_test.txt",
         "load_from_preprocess": True,
         "filter_degree": 75,
         "num_workers": 0,

@@ -190,7 +190,7 @@ if __name__ == "__main__":
         "type": namespace.Mode.TEST,
     }

-    output_dir = "/media/hofee/data/data/new_testset_output"
+    output_dir = "/media/hofee/data/results/ycb_preprocessed_dataset"
    os.makedirs(output_dir, exist_ok=True)

    ds = SeqReconstructionDataset(config)

@@ -21,7 +21,7 @@ class SeqReconstructionDatasetPreprocessed(BaseDataset):
         super(SeqReconstructionDatasetPreprocessed, self).__init__(config)
         self.config = config
         self.root_dir = config["root_dir"]
-        self.real_root_dir = r"/media/hofee/data/data/new_testset"
+        self.real_root_dir = r"/media/hofee/data/results/ycb_view_data"
         self.item_list = os.listdir(self.root_dir)

     def __getitem__(self, index):

@@ -164,10 +164,10 @@ def save_scene_data(root, scene, scene_idx=0, scene_total=1,file_type="txt"):

 if __name__ == "__main__":
     #root = "/media/hofee/repository/new_data_with_normal"
-    root = r"H:\AI\Datasets\nbv_rec_part2"
+    root = r"/media/hofee/data/results/ycb_view_data"
     scene_list = os.listdir(root)
     from_idx = 0 # 1000
-    to_idx = 600 # 1500
+    to_idx = len(scene_list) # 1500


     cnt = 0

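Switching to_idx from a fixed 600 to len(scene_list) makes the preprocessing pass cover whatever the new root directory holds. Presumably the two indices slice the scene list before the per-scene calls; a hedged sketch of that pattern, reusing the save_scene_data signature shown in the hunk header above:

# Assumed iteration pattern; the exact arguments are a guess.
for scene_idx, scene in enumerate(scene_list[from_idx:to_idx]):
    save_scene_data(root, scene, scene_idx, scene_total=to_idx - from_idx)
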
@@ -138,6 +138,8 @@ class Inferencer(Runner):
         import time
         while len(pred_cr_seq) < max_iter and retry < max_retry and success < max_success:
             Log.green(f"iter: {len(pred_cr_seq)}, retry: {retry}/{max_retry}, success: {success}/{max_success}")
+            combined_scanned_pts = np.vstack(scanned_view_pts)
+            voxel_downsampled_combined_scanned_pts_np, inverse = self.voxel_downsample_with_mapping(combined_scanned_pts, voxel_threshold)
             output = self.pipeline(input_data)
             pred_pose_9d = output["pred_pose_9d"]
             pred_pose = torch.eye(4, device=pred_pose_9d.device)

@@ -154,7 +156,7 @@ class Inferencer(Runner):
                 curr_overlap_area_threshold = overlap_area_threshold * 0.5

             downsampled_new_target_pts = PtsUtil.voxel_downsample_point_cloud(new_target_pts, voxel_threshold)
-            overlap, _ = ReconstructionUtil.check_overlap(downsampled_new_target_pts, down_sampled_model_pts, overlap_area_threshold = curr_overlap_area_threshold, voxel_size=voxel_threshold, require_new_added_pts_num = True)
+            overlap, _ = ReconstructionUtil.check_overlap(downsampled_new_target_pts, voxel_downsampled_combined_scanned_pts_np, overlap_area_threshold = curr_overlap_area_threshold, voxel_size=voxel_threshold, require_new_added_pts_num = True)
             if not overlap:
                 Log.yellow("no overlap!")
                 retry += 1

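Together with the two lines added in the previous hunk, this retargets the overlap test: a candidate view must now overlap the accumulated scanned points (voxel_downsampled_combined_scanned_pts_np) rather than the downsampled ground-truth model (down_sampled_model_pts), so the retry logic no longer leans on geometry the sensor has not actually observed. ReconstructionUtil.check_overlap itself is not shown in this diff; as a rough stand-in for the kind of voxel-level test it presumably performs:

import numpy as np

def voxel_overlap_count(new_pts, seen_pts, voxel_size):
    # Count voxels of the new view already occupied by previously seen points.
    new_vox = set(map(tuple, np.floor(new_pts / voxel_size).astype(np.int32)))
    seen_vox = set(map(tuple, np.floor(seen_pts / voxel_size).astype(np.int32)))
    return len(new_vox & seen_vox)

The real call also takes overlap_area_threshold and require_new_added_pts_num, so the repository version evidently thresholds this kind of count into a pass/fail decision plus a new-points tally rather than returning a bare intersection size.
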
@@ -241,6 +243,13 @@ class Inferencer(Runner):
         down_sampled_combined_point_cloud = PtsUtil.voxel_downsample_point_cloud(combined_point_cloud,threshold)
         return ReconstructionUtil.compute_coverage_rate(model_pts, down_sampled_combined_point_cloud, threshold)

+    def voxel_downsample_with_mapping(self, point_cloud, voxel_size=0.003):
+        voxel_indices = np.floor(point_cloud / voxel_size).astype(np.int32)
+        unique_voxels, inverse, counts = np.unique(voxel_indices, axis=0, return_inverse=True, return_counts=True)
+        idx_sort = np.argsort(inverse)
+        idx_unique = idx_sort[np.cumsum(counts)-counts]
+        downsampled_points = point_cloud[idx_unique]
+        return downsampled_points, inverse

     def save_inference_result(self, dataset_name, scene_name, output):
         dataset_dir = os.path.join(self.output_dir, dataset_name)

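The added voxel_downsample_with_mapping is what makes the new overlap test workable: a single np.unique over integer voxel coordinates yields both one (arbitrary) representative point per occupied voxel and an inverse array mapping every original point back to its voxel's row, since np.cumsum(counts) - counts indexes the first element of each group in the argsort(inverse) ordering. A standalone demo of the same trick, with made-up coordinates:

import numpy as np

pts = np.array([[0.001, 0.001, 0.001],
                [0.002, 0.001, 0.000],   # same 3 mm voxel as the first point
                [0.010, 0.000, 0.000]])  # a different voxel
voxel = np.floor(pts / 0.003).astype(np.int32)
uniq, inverse, counts = np.unique(voxel, axis=0, return_inverse=True, return_counts=True)
idx_unique = np.argsort(inverse)[np.cumsum(counts) - counts]  # one index per voxel
down = pts[idx_unique]  # down.shape == (2, 3)
# inverse[i] gives the row of `down` that point i collapsed into, so
# per-point attributes can later be pooled onto the downsampled cloud.
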
@@ -9,7 +9,7 @@ class ViewGenerator(Runner):
         self.config_path = config_path

     def run(self):
-        result = subprocess.run(['blender', '-b', '-P', '../blender/run_blender.py', '--', self.config_path])
+        result = subprocess.run(['/home/hofee/blender-4.0.2-linux-x64/blender', '-b', '-P', '../blender/run_blender.py', '--', self.config_path])
         print()

     def create_experiment(self, backup_name=None):
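Hard-coding /home/hofee/blender-4.0.2-linux-x64/blender unblocks this machine but ties the runner to one install. A more portable variant, offered as an assumption rather than the repository's code (BLENDER_BIN is an invented environment variable), would fall back through an explicit override and the PATH before the machine-specific binary:

import os
import shutil
import subprocess

def find_blender():
    # Explicit override, then PATH lookup, then the install used in this commit.
    return (os.environ.get("BLENDER_BIN")
            or shutil.which("blender")
            or "/home/hofee/blender-4.0.2-linux-x64/blender")

def run_blender(config_path):
    return subprocess.run([find_blender(), '-b', '-P',
                           '../blender/run_blender.py', '--', config_path])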