commit 307994c20d (parent 20514be419)

    update
.gitignore (vendored): 1 line added
@@ -1,5 +1,6 @@
 # ---> Python
 # Byte-compiled / optimized / DLL files
+test/
 __pycache__/
 *.py[cod]
 *$py.class
load_normal.py (new file, 49 lines)
@@ -0,0 +1,49 @@
import cv2
import os
import numpy as np

def load_normal(path, binocular=False, left_only=False):
    if binocular and not left_only:
        normal_path_L = os.path.join(
            os.path.dirname(path), "normal", os.path.basename(path) + "_L.png"
        )
        normal_image_L = cv2.imread(normal_path_L, cv2.IMREAD_UNCHANGED)
        normal_path_R = os.path.join(
            os.path.dirname(path), "normal", os.path.basename(path) + "_R.png"
        )
        normal_image_R = cv2.imread(normal_path_R, cv2.IMREAD_UNCHANGED)
        normalized_normal_image_L = normal_image_L / 255.0 * 2.0 - 1.0
        normalized_normal_image_R = normal_image_R / 255.0 * 2.0 - 1.0
        return normalized_normal_image_L, normalized_normal_image_R
    else:
        if binocular and left_only:
            normal_path = os.path.join(
                os.path.dirname(path), "normal", os.path.basename(path) + "_L.png"
            )
        else:
            normal_path = os.path.join(
                os.path.dirname(path), "normal", os.path.basename(path) + ".png"
            )
        normal_image = cv2.imread(normal_path, cv2.IMREAD_UNCHANGED)
        normalized_normal_image = normal_image / 255.0 * 2.0 - 1.0
        return normalized_normal_image

def show_rgb(event, x, y, flags, param):
    if event == cv2.EVENT_MOUSEMOVE:
        pixel_value = param[y, x]
        print(f"RGB at ({x},{y}): {pixel_value}")

if __name__ == "__main__":
    path = "/Users/hofee/temp/1"
    normal_image = load_normal(path, binocular=True, left_only=True)
    display_image = ((normal_image + 1.0) / 2.0 * 255).astype(np.uint8)

    cv2.namedWindow("Normal Image")
    cv2.setMouseCallback("Normal Image", show_rgb, param=display_image)

    while True:
        cv2.imshow("Normal Image", display_image)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

    cv2.destroyAllWindows()
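For reference, a minimal usage sketch of the loader above, assuming a monocular capture laid out as <dir>/normal/<basename>.png; the frame path below is hypothetical and not part of the commit:

import numpy as np
from load_normal import load_normal

# Hypothetical frame path; the loader derives <dir>/normal/<basename>.png from it.
frame = "/data/scene_01/0001"

normal = load_normal(frame)                 # (H, W, 3), pixel values mapped to [-1, 1]
lengths = np.linalg.norm(normal, axis=-1)   # unit-length check for valid normal pixels
print("mean normal length:", float(lengths.mean()))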
@@ -88,10 +88,10 @@ class CADCloseLoopStrategyRunner(Runner):
 )
 np.savetxt(f"first_real_pts_{model_name}.txt", first_splitted_real_world_pts)
 """ register """
-Log.info("[Part 1/5] do registeration")
+Log.info("[Part 1/4] do registeration")
 real_world_to_cad = PtsUtil.register(first_splitted_real_world_pts, cad_model)
 cad_to_real_world = np.linalg.inv(real_world_to_cad)
-Log.success("[Part 1/5] finish init and register")
+Log.success("[Part 1/4] finish init and register")
 real_world_to_blender_world = np.eye(4)
 real_world_to_blender_world[:3, 3] = np.asarray([0, 0, 0.9215])
 cad_model_real_world: trimesh.Trimesh = cad_model.apply_transform(
@@ -108,7 +108,7 @@ class CADCloseLoopStrategyRunner(Runner):
 temp_dir = "/home/yan20/nbv_rec/project/franka_control/temp_output"
 cad_model_blender_world.export(os.path.join(temp_dir, f"{temp_name}.obj"))
 """ sample view """
-Log.info("[Part 2/5] start running renderer")
+Log.info("[Part 2/4] start running renderer")
 subprocess.run(
 [
 self.blender_bin_path,
@@ -121,12 +121,12 @@ class CADCloseLoopStrategyRunner(Runner):
 capture_output=True,
 text=True,
 )
-Log.success("[Part 2/5] finish running renderer")
+Log.success("[Part 2/4] finish running renderer")

 """ preprocess """
-Log.info("[Part 3/5] start preprocessing data")
+Log.info("[Part 3/4] start preprocessing data")
 save_scene_data(temp_dir, temp_name)
-Log.success("[Part 3/5] finish preprocessing data")
+Log.success("[Part 3/4] finish preprocessing data")

 pts_dir = os.path.join(temp_dir, temp_name, "pts")
 sample_view_pts_list = []
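The registration hunk above inverts the real-world-to-CAD matrix returned by PtsUtil.register and then lifts the model into the Blender world with a pure z translation of 0.9215 m. A minimal sketch of that transform bookkeeping with plain numpy; the registration values and the final composition are illustrative assumptions, and PtsUtil.register itself is the project's own helper, not reimplemented here:

import numpy as np

# Stand-in for the result of PtsUtil.register (hypothetical rigid transform).
real_world_to_cad = np.eye(4)
real_world_to_cad[:3, 3] = [0.05, -0.02, 0.10]

# Invert to get CAD -> real world, as in the runner.
cad_to_real_world = np.linalg.inv(real_world_to_cad)

# Translation-only real world -> Blender world transform (0.9215 m along z).
real_world_to_blender_world = np.eye(4)
real_world_to_blender_world[:3, 3] = np.asarray([0, 0, 0.9215])

# Assumed composition: CAD -> real world -> Blender world.
cad_to_blender_world = real_world_to_blender_world @ cad_to_real_world
print(cad_to_blender_world)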
vis_pts_and_nrm.py (new file, 54 lines)
@@ -0,0 +1,54 @@
# import numpy as np
# import matplotlib.pyplot as plt
# from mpl_toolkits.mplot3d import Axes3D

# # Assume points_and_normals is your Nx6 matrix
# # The first three columns are point coordinates, the last three are normals
# points_and_normals = np.loadtxt("/Users/hofee/Downloads/temp_output/cad_model_world/points_and_normals.txt")  # random points stand in for your data here
# points = points_and_normals[:100, :3]
# normals = points_and_normals[:100, 3:]

# # Create a 3D figure
# fig = plt.figure()
# ax = fig.add_subplot(111, projection='3d')

# # Plot the point cloud
# ax.scatter(points[:, 0], points[:, 1], points[:, 2], color='b', marker='o')

# # Plot the normals (a short arrow starting from each point)
# ax.quiver(points[:, 0], points[:, 1], points[:, 2],
#           normals[:, 0], normals[:, 1], normals[:, 2], length=0.1, color='r')

# plt.show()

import numpy as np

# Assume points_and_normals is your Nx6 matrix
# points_and_normals[:,:3] are the point coordinates
# points_and_normals[:,3:] are the normals
points_and_normals = np.loadtxt("/Users/hofee/Downloads/temp_output/cad_model_world/points_and_normals.txt")  # random points stand in for your data here
print(points_and_normals.shape)
points = points_and_normals[300:400, :3]
normals = points_and_normals[300:400, 3:]

# Set the distance range and number of samples along each normal direction
num_samples_per_point = 20  # number of samples per normal direction
sampling_distances = np.linspace(0, 0.5, num_samples_per_point)  # sampling distance range

# Create an empty list to hold the sampled points
sampled_points = []

# Sample along the normal direction for each point
for point, normal in zip(points, normals):
    for dist in sampling_distances:
        # Offset the point along its normal
        sampled_point = point + dist * normal
        sampled_points.append(sampled_point)

# Convert to a numpy array
sampled_points = np.array(sampled_points)

# Save as a point cloud file (e.g. .txt or .xyz format)
np.savetxt('sampled_points.txt', sampled_points)

print("Sampled point cloud saved as 'sampled_points.txt'")