# prepare_data.py
# NYU Depth v2 preprocessing script (original author: Benjamin Thomas Graham).
# NOTE(review): the lines that previously occupied this header were web-page
# scrape residue (file size, blame UI, display line numbers), not code.
import numpy as np
import torch
import glob, math, os
import scipy.io
import h5py
import pickle

# The 40 semantic class names of the NYUv2 40-class benchmark, in label order:
# list index i corresponds to 1-based class id i+1.
classes = [
'wall', 'floor', 'cabinet', 'bed',
'chair', 'sofa', 'table', 'door',
'window', 'bookshelf', 'picture', 'counter',
'blinds', 'desk', 'shelves', 'curtain',
'dresser', 'pillow', 'mirror', 'floor mat',
'clothes', 'ceiling', 'books', 'refridgerator',
'television', 'paper', 'towel', 'shower curtain',
'box', 'whiteboard', 'person', 'night stand',
'toilet', 'sink', 'lamp', 'bathtub',
'bag', 'otherstructure', 'otherfurniture', 'otherprop']

# Maps each raw NYUv2 (Silberman) label id to one of the 40 benchmark classes
# above. Entry at list index i is the 1-based 40-class id for raw label i+1
# (the lookup below does `[gt-1]`), so the list length equals the number of
# raw label ids — checked by the print() after the definitions.
corresponding_classes_in_Silberman_labeling = [40, 40,  3, 22,  5, 40, 12, 38, 40, 40,  2, 39, 40, 40, 26, 40, 24,
        40,  7, 40,  1, 40, 40, 34, 38, 29, 40,  8, 40, 40, 40, 40, 38, 40,
        40, 14, 40, 38, 40, 40, 40, 15, 39, 40, 30, 40, 40, 39, 40, 39, 38,
        40, 38, 40, 37, 40, 38, 38,  9, 40, 40, 38, 40, 11, 38, 40, 40, 40,
        40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 13, 40, 40,  6, 40, 23,
        40, 39, 10, 16, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40,
        40, 38, 40, 39, 40, 40, 40, 40, 39, 38, 40, 40, 40, 40, 40, 40, 18,
        40, 40, 19, 28, 33, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 27, 36,
        40, 40, 40, 40, 21, 40, 20, 35, 40, 40, 40, 40, 40, 40, 40, 40, 38,
        40, 40, 40,  4, 32, 40, 40, 39, 40, 39, 40, 40, 40, 40, 40, 17, 40,
        40, 25, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40,
        40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39,
        40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 38, 38, 40, 40, 39, 40, 39,
        40, 38, 39, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 38,
        40, 40, 38, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        38, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        40, 39, 40, 40, 40, 38, 40, 40, 39, 40, 40, 38, 40, 40, 40, 40, 40,
        40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        40, 40, 40, 40, 40, 40, 40, 31, 40, 40, 40, 40, 40, 40, 40, 38, 40,
        40, 38, 39, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 39, 40,
        40, 39, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 38, 39, 40,
        40, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        38, 39, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 38,
        40, 40, 40, 38, 40, 39, 40, 40, 40, 39, 39, 40, 40, 40, 40, 40, 40,
        40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        39, 39, 40, 40, 39, 39, 40, 40, 40, 40, 38, 40, 40, 38, 39, 39, 40,
        39, 40, 39, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40,
        38, 40, 39, 40, 40, 40, 40, 40, 39, 39, 40, 40, 40, 40, 40, 40, 39,
        39, 40, 40, 38, 39, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 39,
        40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 39, 40, 40, 39, 40, 40, 40,
        40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 38, 40, 40, 40,
        40, 40, 40, 40, 39, 38, 39, 40, 38, 39, 40, 39, 40, 39, 40, 40, 40,
        40, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 38, 40, 40, 39, 40, 40,
        40, 39, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 40, 40, 40,
        40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 38, 40, 40, 38,
        40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40,
        40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 40, 38, 40,
        40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 38, 38, 40, 40, 40, 38,
        40, 40, 40, 38, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        40, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        40, 40, 38, 40, 38, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
        40, 40, 40, 40, 39, 40, 39, 40, 40, 40, 40, 38, 38, 40, 40, 40, 38,
        40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40,
        39, 40, 40, 39, 39, 40, 40, 40, 40, 40, 40, 40, 40, 39, 39, 39, 40,
        40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40,
        40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38,
        40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 38, 40, 39, 40, 40, 40, 40,
        38, 40, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40,
        40, 40, 40, 40, 40, 40, 40, 39, 40, 40]
# Sanity check: one mapping entry per raw label id, 40 class names.
print(len(classes), len(corresponding_classes_in_Silberman_labeling))

# splits.mat stores the official NYUv2 test frame indices (1-based MATLAB);
# subtract 1 to make them 0-based.
split = scipy.io.loadmat('splits.mat')['testNdxs'] - 1  # 0-index
# Membership-test against a set instead of the raw numpy array: `x in split`
# on a 2-D ndarray is an O(n) scan per probe (1449^2 total) and relies on
# numpy's array-__contains__ semantics.
_test_set = set(int(v) for v in split.ravel())
testIdxs = [x for x in range(1449) if x in _test_set]
trainIdxs = [x for x in range(1449) if x not in _test_set]
print(len(trainIdxs), len(testIdxs))

def _export_split(idxs, prefix):
    """Convert the listed NYUv2 frames into point clouds and save each as
    <prefix><i>.pth containing [coords(int16 Nx3), colors(uint8 Nx4),
    labels(int8 N)].

    Replaces the original per-pixel Python loops with vectorized numpy
    (identical output values and pixel order); also fixes the inner loop
    variable that shadowed the frame index, and closes the HDF5 file.
    """
    # Raw label id -> 1-based 40-class id (index with raw_id - 1).
    label_map = np.array(corresponding_classes_in_Silberman_labeling)
    # Crop bounds used by the original loops: x in [40,600), y in [45,470).
    # NOTE(review): axis order follows the h5py (transposed MATLAB) layout of
    # the dataset; presumably x is image width, y height — confirm upstream.
    x0, x1, y0, y1 = 40, 600, 45, 470
    # Pixel coordinates recentered on (320, 240), x-major order to match the
    # original nested `for x: for y:` iteration.
    xs = np.repeat(np.arange(x0, x1), y1 - y0) - 320
    ys = np.tile(np.arange(y0, y1), x1 - x0) - 240
    with h5py.File('nyu_depth_v2_labeled.mat', 'r') as f:
        images = f.get('images')
        depths = f.get('depths')
        labels = f.get('labels')
        for i, frame in enumerate(idxs):
            print(i, frame)
            rgb = images[frame]          # (3, W, H) color channels
            depth = depths[frame] * 100  # depth scaled to centimeters
            depth -= depth.mean()        # center depth around zero
            gt = np.array(labels[frame], dtype='int16')
            g = gt[x0:x1, y0:y1].ravel()
            # 0-based 40-class label, or -100 (ignore) where unlabeled (gt < 1).
            cl = np.full(g.shape, -100, dtype='int8')
            valid = g >= 1
            cl[valid] = label_map[g[valid] - 1] - 1
            # int16 cast truncates the float depth toward zero, exactly as the
            # original np.array(..., dtype='int16') did.
            coords = np.stack(
                [xs, ys, depth[x0:x1, y0:y1].ravel()], 1).astype('int16')
            # Color rows are [255, r, g, b] — constant 255 leading channel.
            col = np.vstack([np.full((1, cl.size), 255),
                             rgb[:, x0:x1, y0:y1].reshape(3, -1)]).T.astype('uint8')
            torch.save([coords, col, cl], prefix + str(i) + '.pth')

_export_split(trainIdxs, 'train')
_export_split(testIdxs, 'test')