KITTIDataset.py
import os

import numpy as np
import torch
from torch.utils.data import Dataset, DataLoader

from utils import *
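
# NOTE: `utils` is assumed to provide the rotation helpers RotX, RotY, RotZ used
# in __getitem__ (plus anything else the original module pulled in via the star
# import). As an assumption for reference, RotX(theta) is expected to return the
# standard 3x3 rotation about the x-axis:
#   [[1, 0,            0          ],
#    [0, cos(theta), -sin(theta)],
#    [0, sin(theta),  cos(theta)]]
# with RotY and RotZ defined analogously for the y- and z-axes.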

def downsample(src, N):
    """Randomly downsample a point cloud to at most N points.

    @param src: original source point cloud (num_points x 4)
    @param N:   number of points desired
    @return:    sampled point cloud (min(num_points, N) x 4)
    """
    num_src = src.shape[0]
    src_downsample_indices = np.arange(num_src)
    if num_src > N:
        src_downsample_indices = np.random.choice(num_src, N, replace=False)
    return src[src_downsample_indices, :]
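
# Example usage (hypothetical shapes, shown for illustration only):
#   cloud = np.random.rand(120000, 4).astype(np.float32)
#   downsample(cloud, 10000).shape    # -> (10000, 4)
#   downsample(cloud, 200000).shape   # unchanged: (120000, 4)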

class KITTIDataset(Dataset):
    def __init__(self, root, augment=True, rotate=True, split="train", N=10000):
        self.root = root
        self.split = split
        self.augment = augment
        self.N = N
        self.files = []
        self.points = []
        self.reflectances = []

        # path to point clouds + poses: <root>/sequences/<seq>/velodyne/*.bin
        for seq in ["00", "01", "02", "03"]:
            path = os.path.join(self.root, "sequences", seq, "velodyne")
            # only the first 50 scans per sequence (sorted so the subset is deterministic)
            for file in sorted(os.listdir(path))[:50]:
                print(f"Processing {file}")
                # get matching file index
                index = int(file.split(".")[0])
                # load point cloud (num_points x 4: x, y, z, reflectance)
                src = np.fromfile(os.path.join(path, file), dtype=np.float32, count=-1).reshape([-1, 4])
                # downsample if num points > N
                src = downsample(src, self.N)                          # N x 4
                # split into xyz coordinates and reflectances
                src_points = src[:, :3]                                # N x 3
                src_reflectance = np.expand_dims(src[:, -1], axis=1)   # N x 1
                self.files.append(file)
                self.points.append(src_points)
                self.reflectances.append(src_reflectance)

        print('# Total clouds', len(self.points))
    def __len__(self):
        return len(self.points)
    def __getitem__(self, index):
        # source point cloud
        src_points = self.points[index].T                # 3 x N
        src_reflectance = self.reflectances[index].T     # 1 x N
        # print("Loading file: ", self.files[index])

        # data augmentation
        if self.augment:
            # generate random angles for the rotation matrices
            theta_x = np.random.uniform(0, np.pi * 2)
            theta_y = np.random.uniform(0, np.pi * 2)
            theta_z = np.random.uniform(0, np.pi * 2)
            # generate a random translation
            translation_max = 1.0
            translation_min = -1.0
            t = np.random.uniform(translation_min, translation_max, (3, 1))
            # generate the target point cloud by applying a series of
            # random rotations to the source point cloud
            Rx = RotX(theta_x)
            Ry = RotY(theta_y)
            Rz = RotZ(theta_z)
            R = Rx @ Ry @ Rz
            # rotate and translate the source point cloud
            target_points = R @ src_points + t
        else:
            # no augmentation: target is the source under the identity transform
            R = np.eye(3)
            t = np.zeros((3, 1))
            target_points = src_points.copy()

        src_points = torch.from_numpy(src_points)
        target_points = torch.from_numpy(target_points)
        src_reflectance = torch.from_numpy(src_reflectance)
        R = torch.from_numpy(R)

        # return the source point cloud and the transformed (target) point cloud
        # src, target: 3 x N, R: 3 x 3, t: 3 x 1, reflectance: 1 x N
        return (src_points, target_points, R, t, src_reflectance)
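
# Example of indexing the dataset directly (illustrative; assumes `root` points
# at a KITTI odometry-style tree: <root>/sequences/<seq>/velodyne/*.bin):
#   ds = KITTIDataset(root='./data/KITTI', N=10000, augment=True)
#   src, target, R, t, refl = ds[0]   # 3 x N, 3 x N, 3 x 3, 3 x 1, 1 x N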

if __name__ == "__main__":
    data = KITTIDataset(root='./data/KITTI', N=10000, augment=True, split="train")
    loader = DataLoader(data, batch_size=1, shuffle=False)
    for src, target, R, t, src_reflectance in loader:
        print('Source:', src.shape)                  # B x 3 x N
        print('Target:', target.shape)               # B x 3 x N
        print('R', R.shape)                          # B x 3 x 3
        print('Reflectance', src_reflectance.shape)  # B x 1 x N
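        # Optional sanity check (a sketch added for illustration, not part of the
        # original script): the target cloud should match R @ src + t up to
        # floating-point error, since that is exactly how it was generated.
        recon = R[0].double() @ src[0].double() + t[0].double()
        err = (recon - target[0].double()).abs().max().item()
        print('Max |target - (R @ src + t)|:', err)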