def register_point_cloud_pair(ply_file_names, s, t, config):
print("reading %s ..." % ply_file_names[s])
source = o3d.io.read_point_cloud(ply_file_names[s])
print("reading %s ..." % ply_file_names[t])
target = o3d.io.read_point_cloud(ply_file_names[t])
(source_down, source_fpfh) = preprocess_point_cloud(source, config)
(target_down, target_fpfh) = preprocess_point_cloud(target, config)
(success, transformation, information) = \
compute_initial_registration(
s, t, source_down, target_down,
source_fpfh, target_fpfh, config["path_dataset"], config)
if t != s + 1 and not success:
return (False, np.identity(4), np.identity(6))
if config["debug_mode"]:
print(transformation)
print(information)
return (True, transformation, information)
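# Usage sketch (hypothetical driver loop, not part of the original example): run
# pairwise registration over fragment pairs. Adjacent pairs (t == s + 1) are
# odometry edges and are always kept; non-adjacent pairs are loop-closure
# candidates and are dropped when global registration fails.
#
# for s in range(len(ply_file_names)):
#     for t in range(s + 1, len(ply_file_names)):
#         success, trans, info = register_point_cloud_pair(
#             ply_file_names, s, t, config)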
# `cache` is assumed to be a module-level dict mapping file names to point clouds.
def load_file(file_name, voxel_size):
    if file_name not in cache:
        pcd = o3d.io.read_point_cloud(file_name)
        cache[file_name] = pcd
    pcd = cache[file_name]
    coords = np.array(pcd.points)
    feats = np.array(pcd.colors)
    # Map continuous coordinates to integer voxel indices.
    quantized_coords = np.floor(coords / voxel_size)
    # sparse_quantize keeps one point per occupied voxel. Depending on the
    # MinkowskiEngine version, return_index=True yields the indices directly
    # (0.4.x) or as part of a tuple (0.5+); the 0.4.x behaviour is assumed here.
    inds = ME.utils.sparse_quantize(quantized_coords, return_index=True)
    # Placeholder labels, one per kept point.
    random_labels = torch.zeros(len(inds))
    return quantized_coords[inds], feats[inds], random_labels
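# Example call (hypothetical file name and voxel size; assumes numpy, torch,
# open3d, MinkowskiEngine (ME) and the module-level `cache` are already available):
# coords, colors, labels = load_file("scene0000.ply", voxel_size=0.05)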
def make_pointcloud_for_fragment(path_dataset, color_files, depth_files,
fragment_id, n_fragments, intrinsic, config):
mesh = integrate_rgb_frames_for_fragment(
color_files, depth_files, fragment_id, n_fragments,
join(path_dataset,
config["template_fragment_posegraph_optimized"] % fragment_id),
intrinsic, config)
pcd = o3d.geometry.PointCloud()
pcd.points = mesh.vertices
pcd.colors = mesh.vertex_colors
pcd_name = join(path_dataset,
config["template_fragment_pointcloud"] % fragment_id)
o3d.io.write_point_cloud(pcd_name, pcd, False, True)
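    # The two positional flags above are write_ascii=False and compressed=True in
    # o3d.io.write_point_cloud, so each fragment is stored as a compressed binary
    # PLY that the pairwise registration step reloads later.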
def run_posegraph_optimization(pose_graph_name, pose_graph_optimized_name,
max_correspondence_distance,
preference_loop_closure):
# to display messages from o3d.registration.global_optimization
o3d.utility.set_verbosity_level(o3d.utility.VerbosityLevel.Debug)
method = o3d.registration.GlobalOptimizationLevenbergMarquardt()
criteria = o3d.registration.GlobalOptimizationConvergenceCriteria()
option = o3d.registration.GlobalOptimizationOption(
max_correspondence_distance=max_correspondence_distance,
edge_prune_threshold=0.25,
preference_loop_closure=preference_loop_closure,
reference_node=0)
pose_graph = o3d.io.read_pose_graph(pose_graph_name)
o3d.registration.global_optimization(pose_graph, method, criteria, option)
o3d.io.write_pose_graph(pose_graph_optimized_name, pose_graph)
o3d.utility.set_verbosity_level(o3d.utility.VerbosityLevel.Error)
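# Example call (hypothetical file names and values; the reconstruction-system
# config normally supplies them):
# run_posegraph_optimization(
#     join(path_dataset, "scene/global_registration.json"),
#     join(path_dataset, "scene/global_registration_optimized.json"),
#     max_correspondence_distance=voxel_size * 1.4,
#     preference_loop_closure=2.0)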
    # `mesh`, `rgbd_images`, and `camera` are assumed to be built earlier in this
    # example. With maximum_iteration = 0 the call below only projects the input
    # colors onto the mesh, producing the "before optimization" baseline.
    option = o3d.color_map.ColorMapOptimizationOption()
    option.maximum_iteration = 0
    o3d.color_map.color_map_optimization(mesh, rgbd_images, camera, option)
    o3d.visualization.draw_geometries([mesh])
o3d.io.write_triangle_mesh(
os.path.join(path, "scene", "color_map_before_optimization.ply"), mesh)
    # Optimize the color map and save the optimized mesh.
    # This is an implementation of the following paper:
    # Q.-Y. Zhou and V. Koltun,
    # Color Map Optimization for 3D Reconstruction with Consumer Depth Cameras,
    # SIGGRAPH 2014.
option.maximum_iteration = 300
option.non_rigid_camera_coordinate = True
o3d.color_map.color_map_optimization(mesh, rgbd_images, camera, option)
o3d.visualization.draw_geometries([mesh])
o3d.io.write_triangle_mesh(
os.path.join(path, "scene", "color_map_after_optimization.ply"), mesh)
# Open3D: www.open3d.org
# The MIT License (MIT)
# See license file or visit www.open3d.org for details
# examples/Python/Basic/mesh.py
import copy
import numpy as np
import open3d as o3d
if __name__ == "__main__":
print("Testing mesh in open3d ...")
mesh = o3d.io.read_triangle_mesh("../../TestData/knot.ply")
print(mesh)
print(np.asarray(mesh.vertices))
print(np.asarray(mesh.triangles))
print("")
print("Try to render a mesh with normals (exist: " +
str(mesh.has_vertex_normals()) + ") and colors (exist: " +
str(mesh.has_vertex_colors()) + ")")
o3d.visualization.draw_geometries([mesh])
print("A mesh with no normals and no colors does not seem good.")
print("Computing normal and rendering it.")
mesh.compute_vertex_normals()
print(np.asarray(mesh.triangle_normals))
o3d.visualization.draw_geometries([mesh])
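    # compute_vertex_normals() also computes the per-triangle normals printed
    # above, and the second draw_geometries() call now renders the mesh with
    # shading instead of a flat, unlit silhouette.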
feature1 = o3d.io.read_feature(
"../../TestData/Feature/cloud_bin_1.fpfh.bin")
fpfh_tree = o3d.geometry.KDTreeFlann(feature1)
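    # pcd0, pcd1 and feature0 are assumed to be loaded earlier in the full example;
    # the loop below colors every point of pcd0 by the distance to its nearest
    # feature-space match in pcd1 (white = distance 0, black = distance >= 0.2).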
for i in range(len(pcd0.points)):
[_, idx, _] = fpfh_tree.search_knn_vector_xd(feature0.data[:, i], 1)
dis = np.linalg.norm(pcd0.points[i] - pcd1.points[idx[0]])
c = (0.2 - np.fmin(dis, 0.2)) / 0.2
pcd0.colors[i] = [c, c, c]
o3d.visualization.draw_geometries([pcd0])
print("")
print("Load their L32D feature and evaluate.")
print("Black : matching distance > 0.2")
print("White : matching distance = 0")
feature0 = o3d.io.read_feature("../../TestData/Feature/cloud_bin_0.d32.bin")
feature1 = o3d.io.read_feature("../../TestData/Feature/cloud_bin_1.d32.bin")
fpfh_tree = o3d.geometry.KDTreeFlann(feature1)
for i in range(len(pcd0.points)):
[_, idx, _] = fpfh_tree.search_knn_vector_xd(feature0.data[:, i], 1)
dis = np.linalg.norm(pcd0.points[i] - pcd1.points[idx[0]])
c = (0.2 - np.fmin(dis, 0.2)) / 0.2
pcd0.colors[i] = [c, c, c]
o3d.visualization.draw_geometries([pcd0])
print("")
import numpy as np
import copy
import open3d as o3d
def draw_registration_result_original_color(source, target, transformation):
source_temp = copy.deepcopy(source)
source_temp.transform(transformation)
o3d.visualization.draw_geometries([source_temp, target])
if __name__ == "__main__":
print("1. Load two point clouds and show initial pose")
source = o3d.io.read_point_cloud("../../TestData/ColoredICP/frag_115.ply")
target = o3d.io.read_point_cloud("../../TestData/ColoredICP/frag_116.ply")
# draw initial alignment
current_transformation = np.identity(4)
draw_registration_result_original_color(source, target,
current_transformation)
# point to plane ICP
current_transformation = np.identity(4)
print("2. Point-to-plane ICP registration is applied on original point")
print(" clouds to refine the alignment. Distance threshold 0.02.")
result_icp = o3d.registration.registration_icp(
source, target, 0.02, current_transformation,
o3d.registration.TransformationEstimationPointToPlane())
print(result_icp)
    draw_registration_result_original_color(source, target,
                                             result_icp.transformation)
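    # Note: TransformationEstimationPointToPlane needs normals on the target
    # cloud; the ColoredICP fragments used here already contain them, otherwise
    # something like target.estimate_normals() would be required first.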