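"""Cull a reconstructed DTU mesh against the per-view object masks, then run
the standard DTU evaluation (eval.py) on the culled result.

A vertex is kept only if, in every input view, it either projects inside the
(dilated) foreground mask or falls outside the image entirely.
"""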
import argparse
import glob
import os
import sys
from pathlib import Path

import cv2
import numpy as np
import torch
import torch.nn.functional as F
import trimesh
from skimage.morphology import binary_dilation, disk

sys.path.append("../code")
import render_utils as rend_util
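# rend_util.load_K_Rt_from_P is assumed to match the IDR/VolSDF helper of the
# same name: it decomposes a 3x4 projection matrix into a 4x4 intrinsic matrix
# and a 4x4 camera-to-world pose (via cv2.decomposeProjectionMatrix).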


def cull_scan(scan, mesh_path, result_mesh_file):
    # load poses
    # NOTE: the dataset root below is hard-coded to the authors' environment
    instance_dir = os.path.join('/p300/wangchy/huangbb/anti-alising-gaussian-splatting/data/DTU_dense', 'scan{0}'.format(scan))
    image_dir = '{0}/images'.format(instance_dir)
    image_paths = sorted(glob.glob(os.path.join(image_dir, "*.png")))
    n_images = len(image_paths)

    cam_file = '{0}/cameras.npz'.format(instance_dir)
    camera_dict = np.load(cam_file)
    scale_mats = [camera_dict['scale_mat_%d' % idx].astype(np.float32) for idx in range(n_images)]
    world_mats = [camera_dict['world_mat_%d' % idx].astype(np.float32) for idx in range(n_images)]
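    # Following the IDR/DTU convention (an assumption about this dataset):
    # world_mat is the full projection K @ [R | t] padded to 4x4, and scale_mat
    # maps the unit-sphere-normalized scene back to world coordinates, so
    # P = world_mat @ scale_mat projects normalized coordinates to pixels.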
    intrinsics_all = []
    pose_all = []
    for scale_mat, world_mat in zip(scale_mats, world_mats):
        P = world_mat @ scale_mat
        P = P[:3, :4]
        intrinsics, pose = rend_util.load_K_Rt_from_P(None, P)
        intrinsics_all.append(torch.from_numpy(intrinsics).float())
        pose_all.append(torch.from_numpy(pose).float())
    # load masks
    mask_dir = '{0}/mask'.format(instance_dir)
    mask_paths = sorted(glob.glob(os.path.join(mask_dir, "*.png")))
    masks = []
    for p in mask_paths:
        mask = cv2.imread(p)
        masks.append(mask)

    # hard-coded DTU image resolution
    W, H = 1600, 1200
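    # 1600x1200 is the native DTU image resolution; the mask images are
    # assumed to share it, since the projected coordinates below are
    # normalized against (W, H) before sampling the masks.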
    # load mesh and lift its vertices to homogeneous coordinates
    mesh = trimesh.load(mesh_path)
    vertices = mesh.vertices

    # project and filter
    vertices = torch.from_numpy(vertices).cuda()
    vertices = torch.cat((vertices, torch.ones_like(vertices[:, :1])), dim=-1)
    vertices = vertices.permute(1, 0)
    vertices = vertices.float()

    sampled_masks = []
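    # For every view, project all mesh vertices into the image and sample the
    # dilated object mask; a vertex is culled only if some view sees it
    # clearly outside the mask (see the per-view test below).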
    for i in range(n_images):
        pose = pose_all[i]
        w2c = torch.inverse(pose).cuda()
        intrinsic = intrinsics_all[i].cuda()

        with torch.no_grad():
            # transform and project
            cam_points = intrinsic @ w2c @ vertices
            pix_coords = cam_points[:2, :] / (cam_points[2, :].unsqueeze(0) + 1e-6)
            pix_coords = pix_coords.permute(1, 0)
            pix_coords[..., 0] /= W - 1
            pix_coords[..., 1] /= H - 1
            pix_coords = (pix_coords - 0.5) * 2
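            # pix_coords is now in [-1, 1] normalized device coordinates, the
            # convention F.grid_sample expects with align_corners=True.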
            valid = ((pix_coords > -1.) & (pix_coords < 1.)).all(dim=-1).float()

            # dilate mask similar to unisurf
            maski = masks[i][:, :, 0].astype(np.float32) / 256.
            maski = torch.from_numpy(binary_dilation(maski, disk(24))).float()[None, None].cuda()
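            # binary_dilation treats nonzero pixels as foreground and grows
            # the silhouette by a radius-24 disk, so vertices near the mask
            # boundary are not culled too aggressively; [None, None] adds the
            # batch and channel dims grid_sample needs.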
            # debug visualization (disabled):
            # import matplotlib.pyplot as plt
            # plt.imshow(maski.cpu().numpy()[0, 0])
            # points = (cam_points[:2, :] / (cam_points[2, :].unsqueeze(0) + 1e-6)).permute(1, 0)[valid == 1].cpu().numpy()
            # scatters = points[np.random.permutation(len(points))[:10000]]
            # plt.scatter(scatters[:, 0], scatters[:, 1], color='r')
            # plt.savefig(f'test{i}')
            # plt.clf()
            # plt.close()
            sampled_mask = F.grid_sample(maski, pix_coords[None, None], mode='nearest', padding_mode='zeros', align_corners=True)[0, -1, 0]
            sampled_mask = sampled_mask + (1. - valid)
            sampled_masks.append(sampled_mask)
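            # sampled_mask is ~1 where a vertex lands inside the dilated mask;
            # adding (1 - valid) treats vertices that project outside the
            # image as "kept" for this view, so only vertices visibly outside
            # the mask can be culled.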
    sampled_masks = torch.stack(sampled_masks, -1)

    # filter
    mask = (sampled_masks > 0.).all(dim=-1).cpu().numpy()
    face_mask = mask[mesh.faces].all(axis=1)
    mesh.update_vertices(mask)
    mesh.update_faces(face_mask)
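    # A vertex survives only if every view kept it, and a face survives only
    # if all three of its vertices survive; face_mask is computed against the
    # original vertex indexing, before update_vertices remaps the faces.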
    # optional largest-connected-component filtering with Open3D (disabled):
    # with o3d.utility.VerbosityContextManager(o3d.utility.VerbosityLevel.Debug) as cm:
    #     triangle_clusters, cluster_n_triangles, cluster_area = mesh.cluster_connected_triangles()
    # triangle_clusters = np.asarray(triangle_clusters)
    # cluster_n_triangles = np.asarray(cluster_n_triangles)
    # cluster_area = np.asarray(cluster_area)
    # largest_cluster_idx = cluster_n_triangles.argmax()
    # triangles_to_remove = (triangle_clusters != largest_cluster_idx)
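    # scale_mat comes from the DTU unit-sphere normalization; its [0, 0] entry
    # is the (assumed isotropic) scale and [:3, 3] the translation, so the
    # step below undoes the normalization applied when the cameras were loaded.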
    # transform vertices back to world coordinates
    scale_mat = scale_mats[0]
    mesh.vertices = mesh.vertices * scale_mat[0, 0] + scale_mat[:3, 3][None]
    mesh.export(result_mesh_file)
    del mesh


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='Arguments to evaluate the mesh.'
    )
    parser.add_argument('--input_mesh', type=str, help='path to the mesh to be evaluated')
    parser.add_argument('--scan_id', type=str, help='scan id of the input mesh')
    parser.add_argument('--output_dir', type=str, default='evaluation_results_single', help='path to the output folder')
    parser.add_argument('--DTU', type=str, default='Offical_DTU_Dataset', help='path to the GT DTU point clouds')
    args = parser.parse_args()

    Offical_DTU_Dataset = args.DTU
    out_dir = args.output_dir
    Path(out_dir).mkdir(parents=True, exist_ok=True)

    scan = args.scan_id
    ply_file = args.input_mesh
    result_mesh_file = os.path.join(out_dir, "culled_mesh.ply")
    cull_scan(scan, ply_file, result_mesh_file)

    # hand the culled mesh to the standard DTU evaluation script
    cmd = f"python eval.py --data {result_mesh_file} --scan {scan} --mode mesh --dataset_dir {Offical_DTU_Dataset} --vis_out_dir {out_dir}"
    os.system(cmd)
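
# Example invocation (script filename and paths are illustrative, not taken
# from the source):
#   python evaluate_single_scene.py --input_mesh out/scan24/mesh.ply \
#       --scan_id 24 --output_dir evaluation_results_single --DTU Offical_DTU_Dataset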