Compare commits
2 Commits
5c56dae24f
...
3b9c966fd9
Author | SHA1 | Date | |
---|---|---|---|
![]() |
3b9c966fd9 | ||
![]() |
a41571e79c |
@ -16,14 +16,14 @@ runner:
|
|||||||
compute_with_normal: False
|
compute_with_normal: False
|
||||||
scan_points_threshold: 10
|
scan_points_threshold: 10
|
||||||
overwrite: False
|
overwrite: False
|
||||||
seq_num: 15
|
seq_num: 10
|
||||||
dataset_list:
|
dataset_list:
|
||||||
- OmniObject3d
|
- OmniObject3d
|
||||||
|
|
||||||
datasets:
|
datasets:
|
||||||
OmniObject3d:
|
OmniObject3d:
|
||||||
root_dir: C:\\Document\\Local Project\\nbv_rec\\nbv_reconstruction\\temp
|
root_dir: H:\\AI\\Datasets\\nbv_rec_part2
|
||||||
from: 0
|
from: 0
|
||||||
to: 1 # -1 means end
|
to: 300 # -1 means end
|
||||||
|
|
||||||
|
|
||||||
|
48
preprocess/pack_preprocessed_data.py
Normal file
48
preprocess/pack_preprocessed_data.py
Normal file
@ -0,0 +1,48 @@
|
|||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
def pack_scene_data(root, scene, output_dir):
    """Move one scene's preprocessed artifacts from *root* into *output_dir*/<scene>.

    Each artifact (directory or file) is moved only if it exists; missing
    items are skipped silently, so packing is best-effort.

    Args:
        root: Dataset root containing one sub-directory per scene.
        scene: Name of the scene sub-directory to pack.
        output_dir: Destination root; <output_dir>/<scene> is created if needed.
    """
    scene_dir = os.path.join(output_dir, scene)
    # exist_ok avoids the check-then-create race of exists()+makedirs().
    os.makedirs(scene_dir, exist_ok=True)

    # Everything that makes up one preprocessed scene; order mirrors the
    # original copy-pasted stanzas.
    items = (
        "pts",
        "scan_points_indices",
        "scan_points.txt",
        "points_and_normals.txt",
        "camera_params",
        "scene_info.json",
    )
    for item in items:
        src = os.path.join(root, scene, item)
        if os.path.exists(src):
            shutil.move(src, os.path.join(scene_dir, item))
|
def pack_all_scenes(root, scene_list, output_dir):
    """Pack every scene in *scene_list*, printing per-scene progress."""
    idx = 0
    while idx < len(scene_list):
        scene = scene_list[idx]
        print(f"正在打包场景 {scene} ({idx+1}/{len(scene_list)})")
        pack_scene_data(root, scene, output_dir)
        idx += 1
|
if __name__ == "__main__":
    # Source dataset root (one sub-directory per scene) and pack destination.
    # NOTE(review): hard-coded Windows paths — adjust before running elsewhere.
    root = r"H:\AI\Datasets\nbv_rec_part2"
    output_dir = r"H:\AI\Datasets\scene_info_part2"
    scene_list = os.listdir(root)
    # Slice bounds allow packing only a sub-range of scenes; the defaults
    # cover the full listing.
    from_idx = 0
    to_idx = len(scene_list)
    print(f"正在打包场景 {scene_list[from_idx:to_idx]}")

    pack_all_scenes(root, scene_list[from_idx:to_idx], output_dir)
    print("打包完成")
|
|
41
preprocess/pack_upload_data.py
Normal file
41
preprocess/pack_upload_data.py
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
def pack_scene_data(root, scene, output_dir):
    """Move one scene's upload artifacts from *root* into *output_dir*/<scene>.

    Each artifact (directory or file) is moved only if it exists; missing
    items are skipped silently, so packing is best-effort.

    Args:
        root: Dataset root containing one sub-directory per scene.
        scene: Name of the scene sub-directory to pack.
        output_dir: Destination root; <output_dir>/<scene> is created if needed.
    """
    scene_dir = os.path.join(output_dir, scene)
    # exist_ok avoids the check-then-create race of exists()+makedirs().
    os.makedirs(scene_dir, exist_ok=True)

    # Items that make up one uploadable scene; order mirrors the original
    # copy-pasted stanzas.
    for item in ("pts", "camera_params", "scene_info.json", "label"):
        src = os.path.join(root, scene, item)
        if os.path.exists(src):
            shutil.move(src, os.path.join(scene_dir, item))
|
def pack_all_scenes(root, scene_list, output_dir):
    """Pack every scene in *scene_list*, printing per-scene progress."""
    idx = 0
    while idx < len(scene_list):
        scene = scene_list[idx]
        print(f"packing {scene} ({idx+1}/{len(scene_list)})")
        pack_scene_data(root, scene, output_dir)
        idx += 1
|
if __name__ == "__main__":
    # Source dataset root (one sub-directory per scene) and pack destination.
    # NOTE(review): hard-coded Windows paths — adjust before running elsewhere.
    root = r"H:\AI\Datasets\nbv_rec_part2"
    output_dir = r"H:\AI\Datasets\upload_part2"
    scene_list = os.listdir(root)
    # Slice bounds allow packing only a sub-range of scenes; the defaults
    # cover the full listing.
    from_idx = 0
    to_idx = len(scene_list)
    print(f"packing {scene_list[from_idx:to_idx]}")

    pack_all_scenes(root, scene_list[from_idx:to_idx], output_dir)
    print("packing done")
|
|
@ -164,10 +164,10 @@ def save_scene_data(root, scene, scene_idx=0, scene_total=1,file_type="txt"):
|
|||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
#root = "/media/hofee/repository/new_data_with_normal"
|
#root = "/media/hofee/repository/new_data_with_normal"
|
||||||
root = r"C:\Document\Datasets\nbv_rec_part2"
|
root = r"H:\AI\Datasets\nbv_rec_part2"
|
||||||
scene_list = os.listdir(root)
|
scene_list = os.listdir(root)
|
||||||
from_idx = 600 # 1000
|
from_idx = 0 # 1000
|
||||||
to_idx = len(scene_list) # 1500
|
to_idx = 600 # 1500
|
||||||
|
|
||||||
|
|
||||||
cnt = 0
|
cnt = 0
|
||||||
|
@ -211,6 +211,17 @@ class DataLoadUtil:
|
|||||||
pts = np.load(npy_path)
|
pts = np.load(npy_path)
|
||||||
return pts
|
return pts
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def load_from_preprocessed_nrm(path, file_type="npy"):
|
||||||
|
npy_path = os.path.join(
|
||||||
|
os.path.dirname(path), "nrm", os.path.basename(path) + "." + file_type
|
||||||
|
)
|
||||||
|
if file_type == "txt":
|
||||||
|
nrm = np.loadtxt(npy_path)
|
||||||
|
else:
|
||||||
|
nrm = np.load(npy_path)
|
||||||
|
return nrm
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def cam_pose_transformation(cam_pose_before):
|
def cam_pose_transformation(cam_pose_before):
|
||||||
offset = np.asarray([[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]])
|
offset = np.asarray([[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]])
|
||||||
|
25
utils/vis.py
25
utils/vis.py
@ -158,17 +158,22 @@ class visualizeUtil:
|
|||||||
np.savetxt(os.path.join(output_dir, "target_normal.txt"), sampled_visualized_normal)
|
np.savetxt(os.path.join(output_dir, "target_normal.txt"), sampled_visualized_normal)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def save_pts_nrm(pts_nrm, output_dir):
|
def save_pts_nrm(root, scene, frame_idx, output_dir, binocular=False):
|
||||||
pts = pts_nrm[:, :3]
|
path = DataLoadUtil.get_path(root, scene, frame_idx)
|
||||||
nrm = pts_nrm[:, 3:]
|
pts_world = DataLoadUtil.load_from_preprocessed_pts(path, "npy")
|
||||||
|
nrm_camera = DataLoadUtil.load_from_preprocessed_nrm(path, "npy")
|
||||||
|
cam_info = DataLoadUtil.load_cam_info(path, binocular=binocular)
|
||||||
|
cam_to_world = cam_info["cam_to_world"]
|
||||||
|
nrm_world = nrm_camera @ cam_to_world[:3, :3].T
|
||||||
visualized_nrm = []
|
visualized_nrm = []
|
||||||
num_samples = 10
|
num_samples = 10
|
||||||
for i in range(len(pts)):
|
for i in range(len(pts_world)):
|
||||||
visualized_nrm.append(pts[i] + 0.02*t * nrm[i] for t in range(num_samples))
|
for t in range(num_samples):
|
||||||
visualized_nrm = np.array(visualized_nrm).reshape(-1, 3)
|
visualized_nrm.append(pts_world[i] - 0.02 * t * nrm_world[i])
|
||||||
np.savetxt(os.path.join(output_dir, "nrm.txt"), visualized_nrm)
|
|
||||||
np.savetxt(os.path.join(output_dir, "pts.txt"), pts)
|
|
||||||
|
|
||||||
|
visualized_nrm = np.array(visualized_nrm)
|
||||||
|
np.savetxt(os.path.join(output_dir, "nrm.txt"), visualized_nrm)
|
||||||
|
np.savetxt(os.path.join(output_dir, "pts.txt"), pts_world)
|
||||||
|
|
||||||
# ------ Debug ------
|
# ------ Debug ------
|
||||||
|
|
||||||
@ -184,6 +189,4 @@ if __name__ == "__main__":
|
|||||||
# visualizeUtil.save_seq_cam_pos_and_cam_axis(root, scene, [0, 121, 286, 175, 111,366,45,230,232,225,255,17,199,78,60], output_dir)
|
# visualizeUtil.save_seq_cam_pos_and_cam_axis(root, scene, [0, 121, 286, 175, 111,366,45,230,232,225,255,17,199,78,60], output_dir)
|
||||||
# visualizeUtil.save_target_mesh_at_world_space(root, model_dir, scene)
|
# visualizeUtil.save_target_mesh_at_world_space(root, model_dir, scene)
|
||||||
#visualizeUtil.save_points_and_normals(root, scene,"10", output_dir, binocular=True)
|
#visualizeUtil.save_points_and_normals(root, scene,"10", output_dir, binocular=True)
|
||||||
pts_nrm = np.loadtxt(r"C:\Document\Local Project\nbv_rec\nbv_reconstruction\pts_nrm_target.txt")
|
visualizeUtil.save_pts_nrm(root, scene, "116", output_dir, binocular=True)
|
||||||
visualizeUtil.save_pts_nrm(pts_nrm, output_dir)
|
|
||||||
|
|
||||||
|
Loading…
x
Reference in New Issue
Block a user