commit 155b655938
parent 04d3a359e1

upd
@@ -53,9 +53,12 @@ class SeqReconstructionDataset(BaseDataset):
     def get_scene_name_list(self):
         return self.scene_name_list


     def get_datalist(self):
         datalist = []
-        for scene_name in self.scene_name_list:
+        total = len(self.scene_name_list)
+        for idx, scene_name in enumerate(self.scene_name_list):
+            print(f"processing {scene_name} ({idx}/{total})")
             seq_num = DataLoadUtil.get_label_num(self.root_dir, scene_name)
             scene_max_coverage_rate = 0
             max_coverage_rate_list = []
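Side note on the hunk above: the hand-rolled "(idx/total)" print works, and since the debug block further down already imports tqdm, the same progress report could come from tqdm directly. A minimal sketch; scene_name_list here is a hypothetical stand-in for self.scene_name_list:

    from tqdm import tqdm

    scene_name_list = ["scene_0", "scene_1"]  # hypothetical stand-in for self.scene_name_list

    # tqdm does the (idx/total) bookkeeping that the new print() does by hand
    for scene_name in tqdm(scene_name_list, desc="processing scenes"):
        pass  # per-scene work: DataLoadUtil.get_label_num(...), coverage stats, etc.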
@@ -178,23 +181,41 @@ class SeqReconstructionDataset(BaseDataset):
 # -------------- Debug ---------------- #
 if __name__ == "__main__":
     import torch
     from tqdm import tqdm
     import pickle
     import os

     seed = 0
     torch.manual_seed(seed)
     np.random.seed(seed)

     config = {
         "root_dir": "/data/hofee/data/new_full_data",
         "source": "seq_reconstruction_dataset",
-        "split_file": "/data/hofee/data/sample.txt",
+        "split_file": "/data/hofee/data/new_full_data_list/OmniObject3d_test.txt",
         "load_from_preprocess": True,
         "ratio": 0.5,
         "batch_size": 2,
         "filter_degree": 75,
         "num_workers": 0,
-        "pts_num": 4096,
-        "type": namespace.Mode.TRAIN,
+        "pts_num": 8192,
+        "type": namespace.Mode.TEST,
     }
-    ds = SeqReconstructionDataset(config)
-    print(len(ds))
-    print(ds.__getitem__(10))
+
+    output_dir = "/data/hofee/trash_can/output_inference_test"
+    new_output_dir = "/data/hofee/inference_test"
+    os.makedirs(output_dir, exist_ok=True)
+    os.makedirs(new_output_dir, exist_ok=True)
+
+    ds = SeqReconstructionDataset(config)
+    for i in tqdm(range(len(ds)), desc="processing dataset"):
+        output_path = os.path.join(output_dir, f"item_{i}.pkl")
+        if os.path.exists(output_path):
+            item = pickle.load(open(output_path, "rb"))
+        else:
+            item = ds.__getitem__(i)
+        for key, value in item.items():
+            if isinstance(value, np.ndarray):
+                item[key] = value.tolist()
+        new_output_path = os.path.join(new_output_dir, f"item_{i}.pkl")
+        with open(new_output_path, "wb") as f:
+            pickle.dump(item, f)
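One caveat in the new debug loop: pickle.load(open(output_path, "rb")) never closes its file handle. A hedged sketch of the same cache-then-convert-then-dump step using context managers; the helper name and directory arguments are illustrative, not part of the commit:

    import os
    import pickle

    import numpy as np

    def load_or_build_item(ds, i, cache_dir, out_dir):
        # cache-aware variant of the loop above (helper name is hypothetical)
        cache_path = os.path.join(cache_dir, f"item_{i}.pkl")
        if os.path.exists(cache_path):
            with open(cache_path, "rb") as f:  # 'with' closes the handle
                item = pickle.load(f)
        else:
            item = ds[i]  # same as ds.__getitem__(i)
        # tolist() makes the dump loadable without numpy installed
        for key, value in item.items():
            if isinstance(value, np.ndarray):
                item[key] = value.tolist()
        with open(os.path.join(out_dir, f"item_{i}.pkl"), "wb") as f:
            pickle.dump(item, f)
        return item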
@@ -25,6 +25,7 @@ class InferencerServer(Runner):
         self.pipeline:torch.nn.Module = ComponentFactory.create(namespace.Stereotype.PIPELINE, self.pipeline_name)
         self.pipeline = self.pipeline.to(self.device)
         self.pts_num = 8192
+        self.voxel_size = 0.002

         ''' Experiment '''
         self.load_experiment("inferencer_server")
@@ -34,8 +35,11 @@ class InferencerServer(Runner):
         scanned_pts = data["scanned_pts"]
         scanned_n_to_world_pose_9d = data["scanned_n_to_world_pose_9d"]
         combined_scanned_views_pts = np.concatenate(scanned_pts, axis=0)
+        voxel_downsampled_combined_scanned_pts = PtsUtil.voxel_downsample_point_cloud(
+            combined_scanned_views_pts, self.voxel_size
+        )
         fps_downsampled_combined_scanned_pts, fps_idx = PtsUtil.fps_downsample_point_cloud(
-            combined_scanned_views_pts, self.pts_num, require_idx=True
+            voxel_downsampled_combined_scanned_pts, self.pts_num, require_idx=True
         )
         # combined_scanned_views_pts_mask = np.zeros(len(scanned_pts), dtype=np.uint8)
         # start_idx = 0
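This hunk is the substance of the commit: farthest point sampling now runs on a voxel-thinned cloud instead of the raw concatenation of scans. PtsUtil's internals are not shown in this diff, so the following is only a minimal numpy sketch of the two-stage scheme, not the project's implementation (the commit's real values are voxel_size=0.002 and pts_num=8192; smaller numbers here keep the demo fast):

    import numpy as np

    def voxel_downsample(points, voxel_size):
        # keep one (arbitrary) point per occupied voxel
        keys = np.floor(points / voxel_size).astype(np.int64)
        _, idx = np.unique(keys, axis=0, return_index=True)
        return points[np.sort(idx)]

    def fps_downsample(points, n, require_idx=False):
        # plain O(n*m) farthest point sampling, seeded at point 0
        n = min(n, len(points))
        idx = np.zeros(n, dtype=np.int64)
        dist = np.full(len(points), np.inf)
        for i in range(1, n):
            dist = np.minimum(dist, np.linalg.norm(points - points[idx[i - 1]], axis=1))
            idx[i] = np.argmax(dist)
        return (points[idx], idx) if require_idx else points[idx]

    pts = np.random.rand(5000, 3).astype(np.float32)  # stand-in for the concatenated scans
    thinned = voxel_downsample(pts, 0.02)             # cheap density equalization first
    sampled, fps_idx = fps_downsample(thinned, 512, require_idx=True)

Voxel-thinning first bounds the quadratic FPS cost and keeps densely overlapping scan regions from dominating the sample; note that fps_idx then indexes the voxel-downsampled cloud, not combined_scanned_views_pts.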