Compare commits

...

2 Commits

Author  SHA1        Message         Date
hofee   2209acce1b  update          2024-10-07 21:48:24 +08:00
hofee   dc769c5c1f  remove pycache  2024-10-07 16:44:03 +08:00
12 changed files with 33050 additions and 26 deletions

.gitignore (vendored, new file, 162 lines added)

@@ -0,0 +1,162 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

Binary file not shown.

mesh_point_cloud.txt (new file, 32768 lines added)

File diff suppressed because it is too large


@@ -7,6 +7,10 @@ import PytorchBoot.stereotype as stereotype
from PytorchBoot.utils.log_util import Log
from PytorchBoot.status import status_manager
import sys
sys.path.append("/home/user/nbv_rec/nbv_rec_control")
from utils.control_util import ControlUtil
from utils.communicate_util import CommunicateUtil
from utils.pts_util import PtsUtil
@@ -101,7 +105,7 @@ class CADStrategyRunner(Runner):
status_info=self.status_info
)
''' extract cam_to world sequence '''
''' extract cam_to_world sequence '''
cam_to_world_seq = []
coveraget_rate_seq = []
@@ -132,13 +136,13 @@ class CADStrategyRunner(Runner):
if __name__ == "__main__":
model_path = "/home/yan20/nbv_rec/data/test_CAD/test_model/bear_scaled.ply"
model_path = "/home/user/nbv_rec/data/mesh.obj"
model = trimesh.load(model_path)
test_pts_L = np.loadtxt("/home/yan20/nbv_rec/data/test_CAD/cam_pts_0_L.txt")
test_pts_R = np.loadtxt("/home/yan20/nbv_rec/data/test_CAD/cam_pts_0_R.txt")
cam_to_world_L = PtsUtil.register_icp(test_pts_L, model)
cam_to_world_R = PtsUtil.register_icp(test_pts_R, model)
print(cam_to_world_L)
print("================================")
print(cam_to_world_R)
test_pts_L = np.loadtxt("/home/user/nbv_rec/data/cam_pts_0_L.txt")
test_pts_R = np.loadtxt("/home/user/nbv_rec/data/cam_pts_0_R.txt")
cad_to_cam_L = PtsUtil.register_icp(test_pts_L, model)
cad_to_cam_R = PtsUtil.register_icp(test_pts_R, model)
cad_pts_L = PtsUtil.transform_point_cloud(test_pts_L, cad_to_cam_L)
cad_pts_R = PtsUtil.transform_point_cloud(test_pts_R, cad_to_cam_R)
np.savetxt("/home/user/nbv_rec/data/cad_pts_0_L.txt", cad_pts_L)
np.savetxt("/home/user/nbv_rec/data/cad_pts_0_R.txt", cad_pts_R)


@@ -0,0 +1,80 @@
import os
import trimesh
import numpy as np
from PytorchBoot.runners.runner import Runner
from PytorchBoot.config import ConfigManager
import PytorchBoot.stereotype as stereotype
from PytorchBoot.utils.log_util import Log
import PytorchBoot.namespace as namespace
from PytorchBoot.status import status_manager
from utils.control_util import ControlUtil
from utils.communicate_util import CommunicateUtil
from utils.pts_util import PtsUtil
from utils.view_sample_util import ViewSampleUtil
from utils.reconstruction_util import ReconstructionUtil
@stereotype.runner("inferencer")
class Inferencer(Runner):
def __init__(self, config_path: str):
super().__init__(config_path)
self.load_experiment("inferencer")
self.reconstruct_config = ConfigManager.get("runner", "reconstruct")
self.voxel_size = self.reconstruct_config["voxel_size"]
def create_experiment(self, backup_name=None):
super().create_experiment(backup_name)
def load_experiment(self, backup_name=None):
super().load_experiment(backup_name)
def run_inference(self, model_name):
''' init robot '''
ControlUtil.init()
''' take first view '''
view_data = CommunicateUtil.get_view_data()
first_cam_pts = None
first_cam_pose = None
combined_pts = first_cam_pts
input_data = {
"scanned_target_points_num": [first_cam_pts.shape[0]],
"scanned_n_to_world_pose_9d": [first_cam_pose],
"combined_scanned_pts": combined_pts
}
''' enter loop '''
while True:
''' inference '''
inference_result = CommunicateUtil.get_inference_data(input_data)
cam_to_world = inference_result["cam_to_world"]
''' set pose '''
ControlUtil.set_pose(cam_to_world)
''' take view '''
view_data = CommunicateUtil.get_view_data()
curr_cam_pts = None
curr_cam_pose = None
''' update combined pts '''
combined_pts = np.concatenate([combined_pts, curr_cam_pts], axis=0)
combined_pts = PtsUtil.voxel_downsample_point_cloud(combined_pts, voxel_size=self.voxel_size)
''' update input data '''
def run(self):
self.run_inference()
if __name__ == "__main__":
model_path = "/home/yan20/nbv_rec/data/test_CAD/test_model/bear_scaled.ply"
model = trimesh.load(model_path)
test_pts_L = np.loadtxt("/home/yan20/nbv_rec/data/test_CAD/cam_pts_0_L.txt")
test_pts_R = np.loadtxt("/home/yan20/nbv_rec/data/test_CAD/cam_pts_0_R.txt")
cam_to_world_L = PtsUtil.register_icp(test_pts_L, model)
cam_to_world_R = PtsUtil.register_icp(test_pts_R, model)
print(cam_to_world_L)
print("================================")
print(cam_to_world_R)
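
In the new inferencer loop, each captured view is concatenated onto combined_pts and then reduced with PtsUtil.voxel_downsample_point_cloud. A minimal numpy sketch of voxel downsampling, assuming the helper keeps one point per occupied voxel (the repository's implementation may differ, e.g. by using Open3D):

import numpy as np

def voxel_downsample_point_cloud(points: np.ndarray, voxel_size: float) -> np.ndarray:
    # Quantize each point to the index of the voxel it falls into
    voxel_indices = np.floor(points / voxel_size).astype(np.int64)
    # Keep the first point encountered in each occupied voxel
    _, unique_idx = np.unique(voxel_indices, axis=0, return_index=True)
    return points[np.sort(unique_idx)]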

test.py (new file, 10 lines added)

@@ -0,0 +1,10 @@
import flask
app = flask.Flask(__name__)
@app.route('/hello')
def hello():
return "Hello, World!"
if __name__ == '__main__':
app.run(host="0.0.0.0", port=7999)
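
The new test.py is a minimal Flask smoke test: once the app is running it should answer a plain GET, for example curl http://127.0.0.1:7999/hello (host and port taken from the app.run call above).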


@@ -4,10 +4,10 @@ class CommunicateUtil:
INFERENCE_HOST = "127.0.0.1:5000"
def get_view_data() -> dict:
data = None
data = {}
return data
def get_inference_data() -> dict:
data = None
def get_inference_data(view_data:dict) -> dict:
data = {}
return data
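
Both methods remain stubs that return empty dicts; get_inference_data now also receives the current view_data. Purely as a hypothetical sketch of where this seems to be heading (the endpoint path and payload shape are assumptions, not part of the repository), the stub might eventually forward the data to the inference host:

import requests

INFERENCE_HOST = "127.0.0.1:5000"  # mirrors CommunicateUtil.INFERENCE_HOST

def get_inference_data(view_data: dict) -> dict:
    # "/inference" is a hypothetical endpoint; assumes view_data is JSON-serializable
    resp = requests.post(f"http://{INFERENCE_HOST}/inference", json=view_data)
    resp.raise_for_status()
    return resp.json()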


@@ -4,7 +4,7 @@ from autolab_core import RigidTransform
class ControlUtil:
__fa = FrankaArm(robot_num=2)
#__fa = FrankaArm(robot_num=2)
BASE_TO_WORLD:np.ndarray = np.asarray([
[1, 0, 0, -0.5],
@@ -114,7 +114,7 @@ class ControlUtil:
# ----------- Debug Test -------------
if __name__ == "__main__":
#ControlUtil.init()
ControlUtil.init()
import time
start = time.time()
rot_degree, cam_to_world = ControlUtil.solve_display_table_rot_and_cam_to_world(ControlUtil.INIT_POSE)


@@ -102,23 +102,23 @@ class PtsUtil:
return filtered_sampled_points[:, :3]
@staticmethod
def register_icp(pcl: np.ndarray, model: trimesh.Trimesh, threshold = 0.005) -> np.ndarray:
"""
Register point cloud to CAD model.
Returns the transformation matrix.
"""
def register_icp(pcl: np.ndarray, model: trimesh.Trimesh, threshold=0.5) -> np.ndarray:
mesh_points = np.asarray(model.vertices)
downsampled_mesh_points = PtsUtil.random_downsample_point_cloud(mesh_points, pcl.shape[0])
mesh_point_cloud = o3d.geometry.PointCloud()
mesh_point_cloud.points = o3d.utility.Vector3dVector(mesh_points)
mesh_point_cloud.points = o3d.utility.Vector3dVector(downsampled_mesh_points)
np.savetxt("mesh_point_cloud.txt", downsampled_mesh_points)
pcl_point_cloud = o3d.geometry.PointCloud()
pcl_point_cloud.points = o3d.utility.Vector3dVector(pcl)
initial_transform = np.eye(4)
reg_icp = o3d.pipelines.registration.registration_icp(
pcl_point_cloud, mesh_point_cloud, threshold,
np.eye(4),
initial_transform,
o3d.pipelines.registration.TransformationEstimationPointToPoint()
)
if np.allclose(reg_icp.transformation, np.eye(4)):
print("Registration failed. Check your initial alignment and point cloud quality.")
else:
print("Registration successful.")
print(reg_icp.transformation)
return reg_icp.transformation
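
register_icp now downsamples the mesh vertices to the size of the input cloud, loosens the correspondence threshold to 0.5, and treats a result that stays at the identity as a failed registration. For reference, a minimal standalone Open3D sketch of the same point-to-point ICP call, using synthetic data rather than the repository's point clouds; Open3D's RegistrationResult also exposes fitness and inlier_rmse alongside the transformation:

import numpy as np
import open3d as o3d

# Synthetic source/target clouds: target is the source shifted by 1 cm
source = o3d.geometry.PointCloud()
source.points = o3d.utility.Vector3dVector(np.random.rand(500, 3))
target = o3d.geometry.PointCloud()
target.points = o3d.utility.Vector3dVector(np.asarray(source.points) + 0.01)

result = o3d.pipelines.registration.registration_icp(
    source, target, 0.5, np.eye(4),
    o3d.pipelines.registration.TransformationEstimationPointToPoint()
)
print(result.fitness, result.inlier_rmse)  # inlier ratio and RMSE of the fit
print(result.transformation)               # 4x4 transform, as returned by register_icp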