Upload 21 files

- .gitattributes +1 -0
- dataset/__pycache__/hypersim.cpython-311.pyc +0 -0
- dataset/__pycache__/hypersim.cpython-39.pyc +0 -0
- dataset/__pycache__/kitti.cpython-311.pyc +0 -0
- dataset/__pycache__/kitti.cpython-39.pyc +0 -0
- dataset/__pycache__/pbr.cpython-311.pyc +0 -0
- dataset/__pycache__/pbr.cpython-39.pyc +0 -0
- dataset/__pycache__/transform.cpython-311.pyc +0 -0
- dataset/__pycache__/transform.cpython-39.pyc +0 -0
- dataset/__pycache__/vkitti2.cpython-311.pyc +0 -0
- dataset/__pycache__/vkitti2.cpython-39.pyc +0 -0
- dataset/hypersim.py +74 -0
- dataset/kitti.py +57 -0
- dataset/pbr.py +71 -0
- dataset/splits/hypersim/train.txt +3 -0
- dataset/splits/hypersim/val.txt +0 -0
- dataset/splits/kitti/val.txt +0 -0
- dataset/splits/pbr/train.txt +0 -0
- dataset/splits/pbr/val.txt +265 -0
- dataset/splits/vkitti2/train.txt +0 -0
- dataset/transform.py +277 -0
- dataset/vkitti2.py +54 -0
.gitattributes
CHANGED
@@ -34,3 +34,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
 assets/compare_zoedepth.png filter=lfs diff=lfs merge=lfs -text
+dataset/splits/hypersim/train.txt filter=lfs diff=lfs merge=lfs -text
dataset/__pycache__/hypersim.cpython-311.pyc
ADDED
Binary file (5.13 kB).

dataset/__pycache__/hypersim.cpython-39.pyc
ADDED
Binary file (2.7 kB).

dataset/__pycache__/kitti.cpython-311.pyc
ADDED
Binary file (3.45 kB).

dataset/__pycache__/kitti.cpython-39.pyc
ADDED
Binary file (1.94 kB).

dataset/__pycache__/pbr.cpython-311.pyc
ADDED
Binary file (4.1 kB).

dataset/__pycache__/pbr.cpython-39.pyc
ADDED
Binary file (2.41 kB).

dataset/__pycache__/transform.cpython-311.pyc
ADDED
Binary file (12.2 kB).

dataset/__pycache__/transform.cpython-39.pyc
ADDED
Binary file (6.79 kB).

dataset/__pycache__/vkitti2.cpython-311.pyc
ADDED
Binary file (3.49 kB).

dataset/__pycache__/vkitti2.cpython-39.pyc
ADDED
Binary file (1.95 kB).
dataset/hypersim.py
ADDED
@@ -0,0 +1,74 @@
import cv2
import h5py
import numpy as np
import torch
from torch.utils.data import Dataset
from torchvision.transforms import Compose

from dataset.transform import Resize, NormalizeImage, PrepareForNet, Crop


def hypersim_distance_to_depth(npyDistance):
    intWidth, intHeight, fltFocal = 1024, 768, 886.81

    npyImageplaneX = np.linspace((-0.5 * intWidth) + 0.5, (0.5 * intWidth) - 0.5, intWidth).reshape(
        1, intWidth).repeat(intHeight, 0).astype(np.float32)[:, :, None]
    npyImageplaneY = np.linspace((-0.5 * intHeight) + 0.5, (0.5 * intHeight) - 0.5,
                                 intHeight).reshape(intHeight, 1).repeat(intWidth, 1).astype(np.float32)[:, :, None]
    npyImageplaneZ = np.full([intHeight, intWidth, 1], fltFocal, np.float32)
    npyImageplane = np.concatenate(
        [npyImageplaneX, npyImageplaneY, npyImageplaneZ], 2)

    npyDepth = npyDistance / np.linalg.norm(npyImageplane, 2, 2) * fltFocal
    return npyDepth


class Hypersim(Dataset):
    def __init__(self, filelist_path, mode, size=(518, 518)):

        self.mode = mode
        self.size = size

        with open(filelist_path, 'r') as f:
            self.filelist = f.read().splitlines()

        net_w, net_h = size
        self.transform = Compose([
            Resize(
                width=net_w,
                height=net_h,
                resize_target=True if mode == 'train' else False,
                keep_aspect_ratio=True,
                ensure_multiple_of=14,
                resize_method='lower_bound',
                image_interpolation_method=cv2.INTER_CUBIC,
            ),
            NormalizeImage(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
            PrepareForNet(),
        ] + ([Crop(size[0])] if self.mode == 'train' else []))

    def __getitem__(self, item):
        img_path = self.filelist[item].split(' ')[0]
        depth_path = self.filelist[item].split(' ')[1]

        image = cv2.imread(img_path)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) / 255.0

        depth_fd = h5py.File(depth_path, "r")
        distance_meters = np.array(depth_fd['dataset'])
        depth = hypersim_distance_to_depth(distance_meters)

        sample = self.transform({'image': image, 'depth': depth})

        sample['image'] = torch.from_numpy(sample['image'])
        sample['depth'] = torch.from_numpy(sample['depth'])

        sample['valid_mask'] = (torch.isnan(sample['depth']) == 0)
        sample['depth'][sample['valid_mask'] == 0] = 0

        sample['image_path'] = self.filelist[item].split(' ')[0]

        return sample

    def __len__(self):
        return len(self.filelist)
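Note: hypersim_distance_to_depth converts Hypersim's per-pixel Euclidean distance (camera center to surface point) into planar depth along the optical axis, using the fixed 1024x768 resolution and 886.81 focal length. A minimal sketch on a dummy distance map, assuming the function is imported from the file above:

# Sketch: the conversion leaves the image center almost unchanged but
# shrinks values toward the corners, where rays are oblique.
import numpy as np
from dataset.hypersim import hypersim_distance_to_depth

distance = np.full((768, 1024), 5.0, dtype=np.float32)  # 5 m everywhere
depth = hypersim_distance_to_depth(distance)

print(depth[384, 512])  # ~5.0 near the principal point (ray parallel to axis)
print(depth[0, 0])      # < 5.0 in the corner (same distance, oblique ray => smaller depth)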
dataset/kitti.py
ADDED
@@ -0,0 +1,57 @@
import cv2
import torch
from torch.utils.data import Dataset
from torchvision.transforms import Compose

from dataset.transform import Resize, NormalizeImage, PrepareForNet


class KITTI(Dataset):
    def __init__(self, filelist_path, mode, size=(518, 518)):
        if mode != 'val':
            raise NotImplementedError

        self.mode = mode
        self.size = size

        with open(filelist_path, 'r') as f:
            self.filelist = f.read().splitlines()

        net_w, net_h = size
        self.transform = Compose([
            Resize(
                width=net_w,
                height=net_h,
                resize_target=True if mode == 'train' else False,
                keep_aspect_ratio=True,
                ensure_multiple_of=14,
                resize_method='lower_bound',
                image_interpolation_method=cv2.INTER_CUBIC,
            ),
            NormalizeImage(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
            PrepareForNet(),
        ])

    def __getitem__(self, item):
        img_path = self.filelist[item].split(' ')[0]
        depth_path = self.filelist[item].split(' ')[1]

        image = cv2.imread(img_path)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) / 255.0

        depth = cv2.imread(depth_path, cv2.IMREAD_UNCHANGED).astype('float32')

        sample = self.transform({'image': image, 'depth': depth})

        sample['image'] = torch.from_numpy(sample['image'])
        sample['depth'] = torch.from_numpy(sample['depth'])
        sample['depth'] = sample['depth'] / 256.0  # convert to meters

        sample['valid_mask'] = sample['depth'] > 0

        sample['image_path'] = self.filelist[item].split(' ')[0]

        return sample

    def __len__(self):
        return len(self.filelist)
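Note: the class only supports mode='val'. A rough sketch of how the split added in this commit would be consumed; batch_size=1 is used because, with resize_target=False, depth stays at the original KITTI resolution while the image is resized, so samples of different sizes cannot be collated:

# Sketch of evaluation-time usage; loader settings are illustrative only.
from torch.utils.data import DataLoader
from dataset.kitti import KITTI

valset = KITTI('dataset/splits/kitti/val.txt', mode='val')
valloader = DataLoader(valset, batch_size=1, shuffle=False, num_workers=4)

for sample in valloader:
    image = sample['image']        # (1, 3, H, W), ImageNet-normalized, multiple of 14
    depth = sample['depth']        # (1, H_orig, W_orig) in meters (raw PNG / 256)
    mask = sample['valid_mask']    # pixels with a LiDAR measurement (depth > 0)
    break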
dataset/pbr.py
ADDED
@@ -0,0 +1,71 @@
import cv2
import torch
from torch.utils.data import Dataset
from torchvision.transforms import Compose

from dataset.transform import Resize, NormalizeImage, PrepareForNet, Crop

class PBRDataset(Dataset):
    def __init__(self, filelist_path, mode, size=(512, 512)):
        self.mode = mode
        self.size = size

        # Read filelist using @@ as delimiter
        self.filelist = []
        with open(filelist_path, 'r') as f:
            for line in f:
                line = line.strip()
                # Split on @@ delimiter
                if '@@' in line:  # Use @@ as delimiter between paths
                    parts = line.split('@@')
                    if len(parts) == 2:
                        self.filelist.append((parts[0].strip(), parts[1].strip()))

        print(f"Loaded {len(self.filelist)} image pairs")

        net_w, net_h = size
        self.transform = Compose([
            Resize(
                width=net_w,
                height=net_h,
                resize_target=True if mode == 'train' else False,
                keep_aspect_ratio=True,
                ensure_multiple_of=12,
                resize_method='lower_bound',
                image_interpolation_method=cv2.INTER_CUBIC,
            ),
            NormalizeImage(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
            PrepareForNet(),
        ] + ([Crop(size[0])] if self.mode == 'train' else []))

    def __getitem__(self, item):
        try:
            img_path, disp_path = self.filelist[item]

            image = cv2.imread(img_path)
            if image is None:
                print(f"Failed to load image: {img_path}")
                return self.__getitem__((item + 1) % len(self.filelist))
            image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) / 255.0

            depth = cv2.imread(disp_path, cv2.IMREAD_GRAYSCALE)
            if depth is None:
                print(f"Failed to load depth: {disp_path}")
                return self.__getitem__((item + 1) % len(self.filelist))
            depth = depth.astype('float32') / 255.0

            sample = self.transform({'image': image, 'depth': depth})

            sample['image'] = torch.from_numpy(sample['image'])
            sample['depth'] = torch.from_numpy(sample['depth'])
            sample['valid_mask'] = torch.ones_like(sample['depth'], dtype=torch.bool)
            sample['image_path'] = img_path

            return sample

        except Exception as e:
            print(f"Error loading {item}: {str(e)}")
            return self.__getitem__((item + 1) % len(self.filelist))

    def __len__(self):
        return len(self.filelist)
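Note: a rough sketch of how this dataset might be wired into a training loader; the split path matches the file added in this commit, but the loader settings are illustrative only and not taken from a training script:

# Sketch: PBR height-map training loader, assuming dataset/splits/pbr/train.txt
# uses the image@@height format read by PBRDataset above.
from torch.utils.data import DataLoader
from dataset.pbr import PBRDataset

trainset = PBRDataset('dataset/splits/pbr/train.txt', mode='train', size=(512, 512))
trainloader = DataLoader(trainset, batch_size=8, shuffle=True,
                         num_workers=4, drop_last=True)

for sample in trainloader:
    # 'train' mode appends Crop(512), so every image in the batch is 3x512x512
    # and every target is 512x512 with values in [0, 1] (height PNG / 255).
    images, heights = sample['image'], sample['depth']
    break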
dataset/splits/hypersim/train.txt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9f67054c519b4c008d7b58ada5735624780e5f89700bf07471747b3a1082b553
size 13754433
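Note: this split file is stored through Git LFS, so a clone without `git lfs pull` contains only the three-line pointer above instead of the 13 MB filelist. A small, assumption-laden guard that could catch this before Hypersim tries to parse the pointer as a split file:

# Sketch: detect an un-fetched Git LFS pointer before handing the path to a Dataset.
# The marker string is the standard LFS pointer header shown above.
def looks_like_lfs_pointer(path):
    with open(path, 'rb') as f:
        head = f.read(64)
    return head.startswith(b'version https://git-lfs.github.com/spec/v1')

if looks_like_lfs_pointer('dataset/splits/hypersim/train.txt'):
    raise RuntimeError('Split file is an LFS pointer; run `git lfs pull` first.')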
dataset/splits/hypersim/val.txt
ADDED
The diff for this file is too large to render. See raw diff.

dataset/splits/kitti/val.txt
ADDED
The diff for this file is too large to render. See raw diff.

dataset/splits/pbr/train.txt
ADDED
The diff for this file is too large to render. See raw diff.
dataset/splits/pbr/val.txt
ADDED
@@ -0,0 +1,265 @@
1 |
+
/mnt/f/Data/Test/valba/0002_particle_board_baked.png@@/mnt/f/Data/Test/valheight/0002_particle_board_height.png
|
2 |
+
/mnt/f/Data/Test/valba/0003_particle_board_baked.png@@/mnt/f/Data/Test/valheight/0003_particle_board_height.png
|
3 |
+
/mnt/f/Data/Test/valba/0004_particle_board_baked.png@@/mnt/f/Data/Test/valheight/0004_particle_board_height.png
|
4 |
+
/mnt/f/Data/Test/valba/0005_particle_board_baked.png@@/mnt/f/Data/Test/valheight/0005_particle_board_height.png
|
5 |
+
/mnt/f/Data/Test/valba/0006_particle_board_baked.png@@/mnt/f/Data/Test/valheight/0006_particle_board_height.png
|
6 |
+
/mnt/f/Data/Test/valba/0007_particle_board_baked.png@@/mnt/f/Data/Test/valheight/0007_particle_board_height.png
|
7 |
+
/mnt/f/Data/Test/valba/0009_particle_board_baked.png@@/mnt/f/Data/Test/valheight/0009_particle_board_height.png
|
8 |
+
/mnt/f/Data/Test/valba/0010_particle_board_baked.png@@/mnt/f/Data/Test/valheight/0010_particle_board_height.png
|
9 |
+
/mnt/f/Data/Test/valba/0011_particle_board_baked.png@@/mnt/f/Data/Test/valheight/0011_particle_board_height.png
|
10 |
+
/mnt/f/Data/Test/valba/0012_shingles_baked.png@@/mnt/f/Data/Test/valheight/0012_shingles_height.png
|
11 |
+
/mnt/f/Data/Test/valba/0013_shingles_baked.png@@/mnt/f/Data/Test/valheight/0013_shingles_height.png
|
12 |
+
/mnt/f/Data/Test/valba/0014_shingles_baked.png@@/mnt/f/Data/Test/valheight/0014_shingles_height.png
|
13 |
+
/mnt/f/Data/Test/valba/0015_shingles_baked.png@@/mnt/f/Data/Test/valheight/0015_shingles_height.png
|
14 |
+
/mnt/f/Data/Test/valba/0016_shingles_baked.png@@/mnt/f/Data/Test/valheight/0016_shingles_height.png
|
15 |
+
/mnt/f/Data/Test/valba/0017_shingles_baked.png@@/mnt/f/Data/Test/valheight/0017_shingles_height.png
|
16 |
+
/mnt/f/Data/Test/valba/0018_shingles_baked.png@@/mnt/f/Data/Test/valheight/0018_shingles_height.png
|
17 |
+
/mnt/f/Data/Test/valba/0019_shingles_baked.png@@/mnt/f/Data/Test/valheight/0019_shingles_height.png
|
18 |
+
/mnt/f/Data/Test/valba/0021_shingles_baked.png@@/mnt/f/Data/Test/valheight/0021_shingles_height.png
|
19 |
+
/mnt/f/Data/Test/valba/0022_shingles_baked.png@@/mnt/f/Data/Test/valheight/0022_shingles_height.png
|
20 |
+
/mnt/f/Data/Test/valba/0024_shingles_baked.png@@/mnt/f/Data/Test/valheight/0024_shingles_height.png
|
21 |
+
/mnt/f/Data/Test/valba/0025_shingles_baked.png@@/mnt/f/Data/Test/valheight/0025_shingles_height.png
|
22 |
+
/mnt/f/Data/Test/valba/0026_shingles_baked.png@@/mnt/f/Data/Test/valheight/0026_shingles_height.png
|
23 |
+
/mnt/f/Data/Test/valba/0027_shingles_baked.png@@/mnt/f/Data/Test/valheight/0027_shingles_height.png
|
24 |
+
/mnt/f/Data/Test/valba/0028_shingles_baked.png@@/mnt/f/Data/Test/valheight/0028_shingles_height.png
|
25 |
+
/mnt/f/Data/Test/valba/0029_shingles_baked.png@@/mnt/f/Data/Test/valheight/0029_shingles_height.png
|
26 |
+
/mnt/f/Data/Test/valba/0030_shingles_baked.png@@/mnt/f/Data/Test/valheight/0030_shingles_height.png
|
27 |
+
/mnt/f/Data/Test/valba/0031_shingles_baked.png@@/mnt/f/Data/Test/valheight/0031_shingles_height.png
|
28 |
+
/mnt/f/Data/Test/valba/0032_shingles_baked.png@@/mnt/f/Data/Test/valheight/0032_shingles_height.png
|
29 |
+
/mnt/f/Data/Test/valba/0033_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0033_organic_wood_height.png
|
30 |
+
/mnt/f/Data/Test/valba/0034_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0034_organic_wood_height.png
|
31 |
+
/mnt/f/Data/Test/valba/0035_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0035_organic_wood_height.png
|
32 |
+
/mnt/f/Data/Test/valba/0036_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0036_organic_wood_height.png
|
33 |
+
/mnt/f/Data/Test/valba/0037_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0037_organic_wood_height.png
|
34 |
+
/mnt/f/Data/Test/valba/0038_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0038_organic_wood_height.png
|
35 |
+
/mnt/f/Data/Test/valba/0039_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0039_organic_wood_height.png
|
36 |
+
/mnt/f/Data/Test/valba/0040_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0040_organic_wood_height.png
|
37 |
+
/mnt/f/Data/Test/valba/0041_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0041_organic_wood_height.png
|
38 |
+
/mnt/f/Data/Test/valba/0042_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0042_organic_wood_height.png
|
39 |
+
/mnt/f/Data/Test/valba/0043_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0043_organic_wood_height.png
|
40 |
+
/mnt/f/Data/Test/valba/0044_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0044_organic_wood_height.png
|
41 |
+
/mnt/f/Data/Test/valba/0045_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0045_organic_wood_height.png
|
42 |
+
/mnt/f/Data/Test/valba/0046_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0046_organic_wood_height.png
|
43 |
+
/mnt/f/Data/Test/valba/0047_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0047_organic_wood_height.png
|
44 |
+
/mnt/f/Data/Test/valba/0049_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0049_organic_wood_height.png
|
45 |
+
/mnt/f/Data/Test/valba/0050_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0050_organic_wood_height.png
|
46 |
+
/mnt/f/Data/Test/valba/0051_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0051_organic_wood_height.png
|
47 |
+
/mnt/f/Data/Test/valba/0052_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0052_organic_wood_height.png
|
48 |
+
/mnt/f/Data/Test/valba/0053_organic_wood_baked.png@@/mnt/f/Data/Test/valheight/0053_organic_wood_height.png
|
49 |
+
/mnt/f/Data/Test/valba/0055_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0055_cardboard_height.png
|
50 |
+
/mnt/f/Data/Test/valba/0056_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0056_cardboard_height.png
|
51 |
+
/mnt/f/Data/Test/valba/0057_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0057_cardboard_height.png
|
52 |
+
/mnt/f/Data/Test/valba/0058_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0058_cardboard_height.png
|
53 |
+
/mnt/f/Data/Test/valba/0059_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0059_cardboard_height.png
|
54 |
+
/mnt/f/Data/Test/valba/0060_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0060_cardboard_height.png
|
55 |
+
/mnt/f/Data/Test/valba/0061_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0061_cardboard_height.png
|
56 |
+
/mnt/f/Data/Test/valba/0062_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0062_cardboard_height.png
|
57 |
+
/mnt/f/Data/Test/valba/0063_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0063_cardboard_height.png
|
58 |
+
/mnt/f/Data/Test/valba/0064_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0064_cardboard_height.png
|
59 |
+
/mnt/f/Data/Test/valba/0065_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0065_cardboard_height.png
|
60 |
+
/mnt/f/Data/Test/valba/0066_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0066_cardboard_height.png
|
61 |
+
/mnt/f/Data/Test/valba/0067_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0067_cardboard_height.png
|
62 |
+
/mnt/f/Data/Test/valba/0068_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0068_cardboard_height.png
|
63 |
+
/mnt/f/Data/Test/valba/0070_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0070_cardboard_height.png
|
64 |
+
/mnt/f/Data/Test/valba/0071_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0071_cardboard_height.png
|
65 |
+
/mnt/f/Data/Test/valba/0072_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0072_cardboard_height.png
|
66 |
+
/mnt/f/Data/Test/valba/0073_cardboard_baked.png@@/mnt/f/Data/Test/valheight/0073_cardboard_height.png
|
67 |
+
/mnt/f/Data/Test/valba/0075_frog_leather_wrinkles_baked.png@@/mnt/f/Data/Test/valheight/0075_frog_leather_wrinkles_height.png
|
68 |
+
/mnt/f/Data/Test/valba/0076_frog_leather_wrinkles_baked.png@@/mnt/f/Data/Test/valheight/0076_frog_leather_wrinkles_height.png
|
69 |
+
/mnt/f/Data/Test/valba/0077_frog_leather_wrinkles_baked.png@@/mnt/f/Data/Test/valheight/0077_frog_leather_wrinkles_height.png
|
70 |
+
/mnt/f/Data/Test/valba/0078_frog_leather_wrinkles_baked.png@@/mnt/f/Data/Test/valheight/0078_frog_leather_wrinkles_height.png
|
71 |
+
/mnt/f/Data/Test/valba/0080_frog_leather_wrinkles_baked.png@@/mnt/f/Data/Test/valheight/0080_frog_leather_wrinkles_height.png
|
72 |
+
/mnt/f/Data/Test/valba/0081_frog_leather_wrinkles_baked.png@@/mnt/f/Data/Test/valheight/0081_frog_leather_wrinkles_height.png
|
73 |
+
/mnt/f/Data/Test/valba/0082_frog_leather_wrinkles_baked.png@@/mnt/f/Data/Test/valheight/0082_frog_leather_wrinkles_height.png
|
74 |
+
/mnt/f/Data/Test/valba/0083_frog_leather_wrinkles_baked.png@@/mnt/f/Data/Test/valheight/0083_frog_leather_wrinkles_height.png
|
75 |
+
/mnt/f/Data/Test/valba/0084_frog_leather_wrinkles_baked.png@@/mnt/f/Data/Test/valheight/0084_frog_leather_wrinkles_height.png
|
76 |
+
/mnt/f/Data/Test/valba/0085_frog_leather_wrinkles_baked.png@@/mnt/f/Data/Test/valheight/0085_frog_leather_wrinkles_height.png
|
77 |
+
/mnt/f/Data/Test/valba/0086_frog_leather_wrinkles_baked.png@@/mnt/f/Data/Test/valheight/0086_frog_leather_wrinkles_height.png
|
78 |
+
/mnt/f/Data/Test/valba/0087_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0087_stone_floor_height.png
|
79 |
+
/mnt/f/Data/Test/valba/0089_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0089_stone_floor_height.png
|
80 |
+
/mnt/f/Data/Test/valba/0090_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0090_stone_floor_height.png
|
81 |
+
/mnt/f/Data/Test/valba/0091_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0091_stone_floor_height.png
|
82 |
+
/mnt/f/Data/Test/valba/0092_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0092_stone_floor_height.png
|
83 |
+
/mnt/f/Data/Test/valba/0094_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0094_stone_floor_height.png
|
84 |
+
/mnt/f/Data/Test/valba/0095_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0095_stone_floor_height.png
|
85 |
+
/mnt/f/Data/Test/valba/0096_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0096_stone_floor_height.png
|
86 |
+
/mnt/f/Data/Test/valba/0097_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0097_stone_floor_height.png
|
87 |
+
/mnt/f/Data/Test/valba/0099_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0099_stone_floor_height.png
|
88 |
+
/mnt/f/Data/Test/valba/0100_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0100_stone_floor_height.png
|
89 |
+
/mnt/f/Data/Test/valba/0101_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0101_stone_floor_height.png
|
90 |
+
/mnt/f/Data/Test/valba/0102_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0102_stone_floor_height.png
|
91 |
+
/mnt/f/Data/Test/valba/0103_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0103_stone_floor_height.png
|
92 |
+
/mnt/f/Data/Test/valba/0106_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0106_stone_floor_height.png
|
93 |
+
/mnt/f/Data/Test/valba/0107_stone_floor_baked.png@@/mnt/f/Data/Test/valheight/0107_stone_floor_height.png
|
94 |
+
/mnt/f/Data/Test/valba/0109_cliff_baked.png@@/mnt/f/Data/Test/valheight/0109_cliff_height.png
|
95 |
+
/mnt/f/Data/Test/valba/0110_cliff_baked.png@@/mnt/f/Data/Test/valheight/0110_cliff_height.png
|
96 |
+
/mnt/f/Data/Test/valba/0111_cliff_baked.png@@/mnt/f/Data/Test/valheight/0111_cliff_height.png
|
97 |
+
/mnt/f/Data/Test/valba/0113_cliff_baked.png@@/mnt/f/Data/Test/valheight/0113_cliff_height.png
|
98 |
+
/mnt/f/Data/Test/valba/0114_cliff_baked.png@@/mnt/f/Data/Test/valheight/0114_cliff_height.png
|
99 |
+
/mnt/f/Data/Test/valba/0115_cliff_baked.png@@/mnt/f/Data/Test/valheight/0115_cliff_height.png
|
100 |
+
/mnt/f/Data/Test/valba/0116_cliff_baked.png@@/mnt/f/Data/Test/valheight/0116_cliff_height.png
|
101 |
+
/mnt/f/Data/Test/valba/0118_cliff_baked.png@@/mnt/f/Data/Test/valheight/0118_cliff_height.png
|
102 |
+
/mnt/f/Data/Test/valba/0119_cliff_baked.png@@/mnt/f/Data/Test/valheight/0119_cliff_height.png
|
103 |
+
/mnt/f/Data/Test/valba/0120_cliff_baked.png@@/mnt/f/Data/Test/valheight/0120_cliff_height.png
|
104 |
+
/mnt/f/Data/Test/valba/0121_cliff_baked.png@@/mnt/f/Data/Test/valheight/0121_cliff_height.png
|
105 |
+
/mnt/f/Data/Test/valba/0122_cliff_baked.png@@/mnt/f/Data/Test/valheight/0122_cliff_height.png
|
106 |
+
/mnt/f/Data/Test/valba/0123_cliff_baked.png@@/mnt/f/Data/Test/valheight/0123_cliff_height.png
|
107 |
+
/mnt/f/Data/Test/valba/0125_cliff_baked.png@@/mnt/f/Data/Test/valheight/0125_cliff_height.png
|
108 |
+
/mnt/f/Data/Test/valba/0126_cliff_baked.png@@/mnt/f/Data/Test/valheight/0126_cliff_height.png
|
109 |
+
/mnt/f/Data/Test/valba/0127_cliff_baked.png@@/mnt/f/Data/Test/valheight/0127_cliff_height.png
|
110 |
+
/mnt/f/Data/Test/valba/0129_steel_003_bitmap_baked.png@@/mnt/f/Data/Test/valheight/0129_steel_003_bitmap_height.png
|
111 |
+
/mnt/f/Data/Test/valba/0133_steel_003_bitmap_baked.png@@/mnt/f/Data/Test/valheight/0133_steel_003_bitmap_height.png
|
112 |
+
/mnt/f/Data/Test/valba/0134_steel_003_bitmap_baked.png@@/mnt/f/Data/Test/valheight/0134_steel_003_bitmap_height.png
|
113 |
+
/mnt/f/Data/Test/valba/0135_steel_003_bitmap_baked.png@@/mnt/f/Data/Test/valheight/0135_steel_003_bitmap_height.png
|
114 |
+
/mnt/f/Data/Test/valba/0138_steel_003_bitmap_baked.png@@/mnt/f/Data/Test/valheight/0138_steel_003_bitmap_height.png
|
115 |
+
/mnt/f/Data/Test/valba/0139_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0139_fabric_suit_vintage_height.png
|
116 |
+
/mnt/f/Data/Test/valba/0140_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0140_fabric_suit_vintage_height.png
|
117 |
+
/mnt/f/Data/Test/valba/0141_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0141_fabric_suit_vintage_height.png
|
118 |
+
/mnt/f/Data/Test/valba/0142_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0142_fabric_suit_vintage_height.png
|
119 |
+
/mnt/f/Data/Test/valba/0144_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0144_fabric_suit_vintage_height.png
|
120 |
+
/mnt/f/Data/Test/valba/0145_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0145_fabric_suit_vintage_height.png
|
121 |
+
/mnt/f/Data/Test/valba/0146_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0146_fabric_suit_vintage_height.png
|
122 |
+
/mnt/f/Data/Test/valba/0147_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0147_fabric_suit_vintage_height.png
|
123 |
+
/mnt/f/Data/Test/valba/0148_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0148_fabric_suit_vintage_height.png
|
124 |
+
/mnt/f/Data/Test/valba/0149_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0149_fabric_suit_vintage_height.png
|
125 |
+
/mnt/f/Data/Test/valba/0151_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0151_fabric_suit_vintage_height.png
|
126 |
+
/mnt/f/Data/Test/valba/0152_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0152_fabric_suit_vintage_height.png
|
127 |
+
/mnt/f/Data/Test/valba/0153_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0153_fabric_suit_vintage_height.png
|
128 |
+
/mnt/f/Data/Test/valba/0154_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0154_fabric_suit_vintage_height.png
|
129 |
+
/mnt/f/Data/Test/valba/0155_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0155_fabric_suit_vintage_height.png
|
130 |
+
/mnt/f/Data/Test/valba/0157_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0157_fabric_suit_vintage_height.png
|
131 |
+
/mnt/f/Data/Test/valba/0158_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0158_fabric_suit_vintage_height.png
|
132 |
+
/mnt/f/Data/Test/valba/0159_fabric_suit_vintage_baked.png@@/mnt/f/Data/Test/valheight/0159_fabric_suit_vintage_height.png
|
133 |
+
/mnt/f/Data/Test/valba/0160_metal_bolted_square_plate_baked.png@@/mnt/f/Data/Test/valheight/0160_metal_bolted_square_plate_height.png
|
134 |
+
/mnt/f/Data/Test/valba/0164_metal_bolted_square_plate_baked.png@@/mnt/f/Data/Test/valheight/0164_metal_bolted_square_plate_height.png
|
135 |
+
/mnt/f/Data/Test/valba/0165_metal_bolted_square_plate_baked.png@@/mnt/f/Data/Test/valheight/0165_metal_bolted_square_plate_height.png
|
136 |
+
/mnt/f/Data/Test/valba/0167_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0167_marco_b_broken_concrete_tiles_height.png
|
137 |
+
/mnt/f/Data/Test/valba/0168_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0168_marco_b_broken_concrete_tiles_height.png
|
138 |
+
/mnt/f/Data/Test/valba/0169_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0169_marco_b_broken_concrete_tiles_height.png
|
139 |
+
/mnt/f/Data/Test/valba/0170_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0170_marco_b_broken_concrete_tiles_height.png
|
140 |
+
/mnt/f/Data/Test/valba/0171_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0171_marco_b_broken_concrete_tiles_height.png
|
141 |
+
/mnt/f/Data/Test/valba/0172_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0172_marco_b_broken_concrete_tiles_height.png
|
142 |
+
/mnt/f/Data/Test/valba/0173_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0173_marco_b_broken_concrete_tiles_height.png
|
143 |
+
/mnt/f/Data/Test/valba/0174_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0174_marco_b_broken_concrete_tiles_height.png
|
144 |
+
/mnt/f/Data/Test/valba/0175_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0175_marco_b_broken_concrete_tiles_height.png
|
145 |
+
/mnt/f/Data/Test/valba/0179_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0179_marco_b_broken_concrete_tiles_height.png
|
146 |
+
/mnt/f/Data/Test/valba/0181_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0181_marco_b_broken_concrete_tiles_height.png
|
147 |
+
/mnt/f/Data/Test/valba/0182_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0182_marco_b_broken_concrete_tiles_height.png
|
148 |
+
/mnt/f/Data/Test/valba/0183_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0183_marco_b_broken_concrete_tiles_height.png
|
149 |
+
/mnt/f/Data/Test/valba/0184_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0184_marco_b_broken_concrete_tiles_height.png
|
150 |
+
/mnt/f/Data/Test/valba/0185_marco_b_broken_concrete_tiles_baked.png@@/mnt/f/Data/Test/valheight/0185_marco_b_broken_concrete_tiles_height.png
|
151 |
+
/mnt/f/Data/Test/valba/0186_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0186_aluminium_brushed_height.png
|
152 |
+
/mnt/f/Data/Test/valba/0188_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0188_aluminium_brushed_height.png
|
153 |
+
/mnt/f/Data/Test/valba/0189_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0189_aluminium_brushed_height.png
|
154 |
+
/mnt/f/Data/Test/valba/0190_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0190_aluminium_brushed_height.png
|
155 |
+
/mnt/f/Data/Test/valba/0191_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0191_aluminium_brushed_height.png
|
156 |
+
/mnt/f/Data/Test/valba/0192_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0192_aluminium_brushed_height.png
|
157 |
+
/mnt/f/Data/Test/valba/0193_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0193_aluminium_brushed_height.png
|
158 |
+
/mnt/f/Data/Test/valba/0194_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0194_aluminium_brushed_height.png
|
159 |
+
/mnt/f/Data/Test/valba/0195_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0195_aluminium_brushed_height.png
|
160 |
+
/mnt/f/Data/Test/valba/0196_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0196_aluminium_brushed_height.png
|
161 |
+
/mnt/f/Data/Test/valba/0199_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0199_aluminium_brushed_height.png
|
162 |
+
/mnt/f/Data/Test/valba/0201_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0201_aluminium_brushed_height.png
|
163 |
+
/mnt/f/Data/Test/valba/0202_aluminium_brushed_baked.png@@/mnt/f/Data/Test/valheight/0202_aluminium_brushed_height.png
|
164 |
+
/mnt/f/Data/Test/valba/0203_metal_aluminium_directional_baked.png@@/mnt/f/Data/Test/valheight/0203_metal_aluminium_directional_height.png
|
165 |
+
/mnt/f/Data/Test/valba/0205_metal_aluminium_directional_baked.png@@/mnt/f/Data/Test/valheight/0205_metal_aluminium_directional_height.png
|
166 |
+
/mnt/f/Data/Test/valba/0206_metal_aluminium_directional_baked.png@@/mnt/f/Data/Test/valheight/0206_metal_aluminium_directional_height.png
|
167 |
+
/mnt/f/Data/Test/valba/0207_metal_aluminium_directional_baked.png@@/mnt/f/Data/Test/valheight/0207_metal_aluminium_directional_height.png
|
168 |
+
/mnt/f/Data/Test/valba/0214_bark_old_ginko_baked.png@@/mnt/f/Data/Test/valheight/0214_bark_old_ginko_height.png
|
169 |
+
/mnt/f/Data/Test/valba/0215_bark_old_ginko_baked.png@@/mnt/f/Data/Test/valheight/0215_bark_old_ginko_height.png
|
170 |
+
/mnt/f/Data/Test/valba/0216_bark_old_ginko_baked.png@@/mnt/f/Data/Test/valheight/0216_bark_old_ginko_height.png
|
171 |
+
/mnt/f/Data/Test/valba/0217_bark_old_ginko_baked.png@@/mnt/f/Data/Test/valheight/0217_bark_old_ginko_height.png
|
172 |
+
/mnt/f/Data/Test/valba/0218_bark_old_ginko_baked.png@@/mnt/f/Data/Test/valheight/0218_bark_old_ginko_height.png
|
173 |
+
/mnt/f/Data/Test/valba/0220_bark_old_ginko_baked.png@@/mnt/f/Data/Test/valheight/0220_bark_old_ginko_height.png
|
174 |
+
/mnt/f/Data/Test/valba/0222_bark_old_ginko_baked.png@@/mnt/f/Data/Test/valheight/0222_bark_old_ginko_height.png
|
175 |
+
/mnt/f/Data/Test/valba/0223_bark_old_ginko_baked.png@@/mnt/f/Data/Test/valheight/0223_bark_old_ginko_height.png
|
176 |
+
/mnt/f/Data/Test/valba/0224_bark_old_ginko_baked.png@@/mnt/f/Data/Test/valheight/0224_bark_old_ginko_height.png
|
177 |
+
/mnt/f/Data/Test/valba/0226_stucco_baked.png@@/mnt/f/Data/Test/valheight/0226_stucco_height.png
|
178 |
+
/mnt/f/Data/Test/valba/0227_stucco_baked.png@@/mnt/f/Data/Test/valheight/0227_stucco_height.png
|
179 |
+
/mnt/f/Data/Test/valba/0229_stucco_baked.png@@/mnt/f/Data/Test/valheight/0229_stucco_height.png
|
180 |
+
/mnt/f/Data/Test/valba/0230_stucco_baked.png@@/mnt/f/Data/Test/valheight/0230_stucco_height.png
|
181 |
+
/mnt/f/Data/Test/valba/0231_stucco_baked.png@@/mnt/f/Data/Test/valheight/0231_stucco_height.png
|
182 |
+
/mnt/f/Data/Test/valba/0234_stucco_baked.png@@/mnt/f/Data/Test/valheight/0234_stucco_height.png
|
183 |
+
/mnt/f/Data/Test/valba/0235_stucco_baked.png@@/mnt/f/Data/Test/valheight/0235_stucco_height.png
|
184 |
+
/mnt/f/Data/Test/valba/0236_stucco_baked.png@@/mnt/f/Data/Test/valheight/0236_stucco_height.png
|
185 |
+
/mnt/f/Data/Test/valba/0237_stucco_baked.png@@/mnt/f/Data/Test/valheight/0237_stucco_height.png
|
186 |
+
/mnt/f/Data/Test/valba/0239_stucco_baked.png@@/mnt/f/Data/Test/valheight/0239_stucco_height.png
|
187 |
+
/mnt/f/Data/Test/valba/0240_stucco_baked.png@@/mnt/f/Data/Test/valheight/0240_stucco_height.png
|
188 |
+
/mnt/f/Data/Test/valba/0241_stucco_baked.png@@/mnt/f/Data/Test/valheight/0241_stucco_height.png
|
189 |
+
/mnt/f/Data/Test/valba/0242_stucco_baked.png@@/mnt/f/Data/Test/valheight/0242_stucco_height.png
|
190 |
+
/mnt/f/Data/Test/valba/0243_stucco_baked.png@@/mnt/f/Data/Test/valheight/0243_stucco_height.png
|
191 |
+
/mnt/f/Data/Test/valba/0244_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0244_grass_paver_height.png
|
192 |
+
/mnt/f/Data/Test/valba/0246_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0246_grass_paver_height.png
|
193 |
+
/mnt/f/Data/Test/valba/0247_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0247_grass_paver_height.png
|
194 |
+
/mnt/f/Data/Test/valba/0248_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0248_grass_paver_height.png
|
195 |
+
/mnt/f/Data/Test/valba/0249_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0249_grass_paver_height.png
|
196 |
+
/mnt/f/Data/Test/valba/0250_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0250_grass_paver_height.png
|
197 |
+
/mnt/f/Data/Test/valba/0252_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0252_grass_paver_height.png
|
198 |
+
/mnt/f/Data/Test/valba/0253_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0253_grass_paver_height.png
|
199 |
+
/mnt/f/Data/Test/valba/0254_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0254_grass_paver_height.png
|
200 |
+
/mnt/f/Data/Test/valba/0255_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0255_grass_paver_height.png
|
201 |
+
/mnt/f/Data/Test/valba/0256_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0256_grass_paver_height.png
|
202 |
+
/mnt/f/Data/Test/valba/0257_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0257_grass_paver_height.png
|
203 |
+
/mnt/f/Data/Test/valba/0260_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0260_grass_paver_height.png
|
204 |
+
/mnt/f/Data/Test/valba/0261_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0261_grass_paver_height.png
|
205 |
+
/mnt/f/Data/Test/valba/0262_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0262_grass_paver_height.png
|
206 |
+
/mnt/f/Data/Test/valba/0263_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0263_grass_paver_height.png
|
207 |
+
/mnt/f/Data/Test/valba/0264_grass_paver_baked.png@@/mnt/f/Data/Test/valheight/0264_grass_paver_height.png
|
208 |
+
/mnt/f/Data/Test/valba/0265_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0265_fabric_padded_wall_height.png
|
209 |
+
/mnt/f/Data/Test/valba/0267_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0267_fabric_padded_wall_height.png
|
210 |
+
/mnt/f/Data/Test/valba/0268_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0268_fabric_padded_wall_height.png
|
211 |
+
/mnt/f/Data/Test/valba/0269_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0269_fabric_padded_wall_height.png
|
212 |
+
/mnt/f/Data/Test/valba/0270_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0270_fabric_padded_wall_height.png
|
213 |
+
/mnt/f/Data/Test/valba/0271_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0271_fabric_padded_wall_height.png
|
214 |
+
/mnt/f/Data/Test/valba/0272_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0272_fabric_padded_wall_height.png
|
215 |
+
/mnt/f/Data/Test/valba/0273_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0273_fabric_padded_wall_height.png
|
216 |
+
/mnt/f/Data/Test/valba/0274_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0274_fabric_padded_wall_height.png
|
217 |
+
/mnt/f/Data/Test/valba/0275_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0275_fabric_padded_wall_height.png
|
218 |
+
/mnt/f/Data/Test/valba/0276_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0276_fabric_padded_wall_height.png
|
219 |
+
/mnt/f/Data/Test/valba/0277_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0277_fabric_padded_wall_height.png
|
220 |
+
/mnt/f/Data/Test/valba/0278_fabric_padded_wall_baked.png@@/mnt/f/Data/Test/valheight/0278_fabric_padded_wall_height.png
|
221 |
+
/mnt/f/Data/Test/valba/0279_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0279_concrete_006_height.png
|
222 |
+
/mnt/f/Data/Test/valba/0280_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0280_concrete_006_height.png
|
223 |
+
/mnt/f/Data/Test/valba/0281_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0281_concrete_006_height.png
|
224 |
+
/mnt/f/Data/Test/valba/0282_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0282_concrete_006_height.png
|
225 |
+
/mnt/f/Data/Test/valba/0283_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0283_concrete_006_height.png
|
226 |
+
/mnt/f/Data/Test/valba/0285_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0285_concrete_006_height.png
|
227 |
+
/mnt/f/Data/Test/valba/0286_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0286_concrete_006_height.png
|
228 |
+
/mnt/f/Data/Test/valba/0287_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0287_concrete_006_height.png
|
229 |
+
/mnt/f/Data/Test/valba/0288_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0288_concrete_006_height.png
|
230 |
+
/mnt/f/Data/Test/valba/0289_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0289_concrete_006_height.png
|
231 |
+
/mnt/f/Data/Test/valba/0290_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0290_concrete_006_height.png
|
232 |
+
/mnt/f/Data/Test/valba/0291_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0291_concrete_006_height.png
|
233 |
+
/mnt/f/Data/Test/valba/0292_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0292_concrete_006_height.png
|
234 |
+
/mnt/f/Data/Test/valba/0293_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0293_concrete_006_height.png
|
235 |
+
/mnt/f/Data/Test/valba/0294_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0294_concrete_006_height.png
|
236 |
+
/mnt/f/Data/Test/valba/0296_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0296_concrete_006_height.png
|
237 |
+
/mnt/f/Data/Test/valba/0297_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0297_concrete_006_height.png
|
238 |
+
/mnt/f/Data/Test/valba/0298_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0298_concrete_006_height.png
|
239 |
+
/mnt/f/Data/Test/valba/0299_concrete_006_baked.png@@/mnt/f/Data/Test/valheight/0299_concrete_006_height.png
|
240 |
+
/mnt/f/Data/Test/valba/0300_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0300_concrete_010_height.png
|
241 |
+
/mnt/f/Data/Test/valba/0301_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0301_concrete_010_height.png
|
242 |
+
/mnt/f/Data/Test/valba/0303_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0303_concrete_010_height.png
|
243 |
+
/mnt/f/Data/Test/valba/0304_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0304_concrete_010_height.png
|
244 |
+
/mnt/f/Data/Test/valba/0306_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0306_concrete_010_height.png
|
245 |
+
/mnt/f/Data/Test/valba/0307_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0307_concrete_010_height.png
|
246 |
+
/mnt/f/Data/Test/valba/0308_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0308_concrete_010_height.png
|
247 |
+
/mnt/f/Data/Test/valba/0309_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0309_concrete_010_height.png
|
248 |
+
/mnt/f/Data/Test/valba/0311_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0311_concrete_010_height.png
|
249 |
+
/mnt/f/Data/Test/valba/0313_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0313_concrete_010_height.png
|
250 |
+
/mnt/f/Data/Test/valba/0314_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0314_concrete_010_height.png
|
251 |
+
/mnt/f/Data/Test/valba/0315_concrete_010_baked.png@@/mnt/f/Data/Test/valheight/0315_concrete_010_height.png
|
252 |
+
/mnt/f/Data/Test/valba/0316_marble_wall_01_baked.png@@/mnt/f/Data/Test/valheight/0316_marble_wall_01_height.png
|
253 |
+
/mnt/f/Data/Test/valba/0317_marble_wall_01_baked.png@@/mnt/f/Data/Test/valheight/0317_marble_wall_01_height.png
|
254 |
+
/mnt/f/Data/Test/valba/0318_marble_wall_01_baked.png@@/mnt/f/Data/Test/valheight/0318_marble_wall_01_height.png
|
255 |
+
/mnt/f/Data/Test/valba/0320_marble_wall_01_baked.png@@/mnt/f/Data/Test/valheight/0320_marble_wall_01_height.png
|
256 |
+
/mnt/f/Data/Test/valba/0322_marble_wall_01_baked.png@@/mnt/f/Data/Test/valheight/0322_marble_wall_01_height.png
|
257 |
+
/mnt/f/Data/Test/valba/0323_marble_wall_01_baked.png@@/mnt/f/Data/Test/valheight/0323_marble_wall_01_height.png
|
258 |
+
/mnt/f/Data/Test/valba/0324_marble_wall_01_baked.png@@/mnt/f/Data/Test/valheight/0324_marble_wall_01_height.png
|
259 |
+
/mnt/f/Data/Test/valba/0325_marble_wall_01_baked.png@@/mnt/f/Data/Test/valheight/0325_marble_wall_01_height.png
|
260 |
+
/mnt/f/Data/Test/valba/0327_marble_wall_01_baked.png@@/mnt/f/Data/Test/valheight/0327_marble_wall_01_height.png
|
261 |
+
/mnt/f/Data/Test/valba/0328_marble_wall_01_baked.png@@/mnt/f/Data/Test/valheight/0328_marble_wall_01_height.png
|
262 |
+
/mnt/f/Data/Test/valba/0331_marble_wall_01_baked.png@@/mnt/f/Data/Test/valheight/0331_marble_wall_01_height.png
|
263 |
+
/mnt/f/Data/Test/valba/27_baked.png@@/mnt/f/Data/Test/valheight/27_height.png
|
264 |
+
/mnt/f/Data/Test/valba/29_baked.png@@/mnt/f/Data/Test/valheight/29_height.png
|
265 |
+
/mnt/f/Data/Test/valba/30_baked.png@@/mnt/f/Data/Test/valheight/30_height.png
|
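Note: the val split pairs each *_baked.png render with its *_height.png ground truth using the '@@' delimiter that PBRDataset expects. A sketch of how such a list could be regenerated from the two directories; the directory names mirror the paths above, everything else is an assumption:

# Sketch: rebuild a PBR split file in the image@@height format used above.
import os

baked_dir = '/mnt/f/Data/Test/valba'        # baked renders (inputs)
height_dir = '/mnt/f/Data/Test/valheight'   # height maps (targets)

lines = []
for name in sorted(os.listdir(baked_dir)):
    if not name.endswith('_baked.png'):
        continue
    target = name.replace('_baked.png', '_height.png')
    target_path = os.path.join(height_dir, target)
    if os.path.exists(target_path):  # skip renders without a matching height map
        lines.append(f"{os.path.join(baked_dir, name)}@@{target_path}")

with open('dataset/splits/pbr/val.txt', 'w') as f:
    f.write('\n'.join(lines) + '\n')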
dataset/splits/vkitti2/train.txt
ADDED
The diff for this file is too large to render. See raw diff.
dataset/transform.py
ADDED
@@ -0,0 +1,277 @@
import cv2
import math
import numpy as np
import torch
import torch.nn.functional as F


def apply_min_size(sample, size, image_interpolation_method=cv2.INTER_AREA):
    """Resize the sample to ensure the given size. Keeps aspect ratio.

    Args:
        sample (dict): sample
        size (tuple): image size

    Returns:
        tuple: new size
    """
    shape = list(sample["disparity"].shape)

    if shape[0] >= size[0] and shape[1] >= size[1]:
        return sample

    scale = [0, 0]
    scale[0] = size[0] / shape[0]
    scale[1] = size[1] / shape[1]

    scale = max(scale)

    shape[0] = math.ceil(scale * shape[0])
    shape[1] = math.ceil(scale * shape[1])

    # resize
    sample["image"] = cv2.resize(
        sample["image"], tuple(shape[::-1]), interpolation=image_interpolation_method
    )

    sample["disparity"] = cv2.resize(
        sample["disparity"], tuple(shape[::-1]), interpolation=cv2.INTER_NEAREST
    )
    sample["mask"] = cv2.resize(
        sample["mask"].astype(np.float32),
        tuple(shape[::-1]),
        interpolation=cv2.INTER_NEAREST,
    )
    sample["mask"] = sample["mask"].astype(bool)

    return tuple(shape)


class Resize(object):
    """Resize sample to given size (width, height).
    """

    def __init__(
        self,
        width,
        height,
        resize_target=True,
        keep_aspect_ratio=False,
        ensure_multiple_of=1,
        resize_method="lower_bound",
        image_interpolation_method=cv2.INTER_AREA,
    ):
        """Init.

        Args:
            width (int): desired output width
            height (int): desired output height
            resize_target (bool, optional):
                True: Resize the full sample (image, mask, target).
                False: Resize image only.
                Defaults to True.
            keep_aspect_ratio (bool, optional):
                True: Keep the aspect ratio of the input sample.
                Output sample might not have the given width and height, and
                resize behaviour depends on the parameter 'resize_method'.
                Defaults to False.
            ensure_multiple_of (int, optional):
                Output width and height are constrained to be a multiple of this parameter.
                Defaults to 1.
            resize_method (str, optional):
                "lower_bound": Output will be at least as large as the given size.
                "upper_bound": Output will be at most as large as the given size. (Output size might be smaller than given size.)
                "minimal": Scale as little as possible. (Output size might be smaller than given size.)
                Defaults to "lower_bound".
        """
        self.__width = width
        self.__height = height

        self.__resize_target = resize_target
        self.__keep_aspect_ratio = keep_aspect_ratio
        self.__multiple_of = ensure_multiple_of
        self.__resize_method = resize_method
        self.__image_interpolation_method = image_interpolation_method

    def constrain_to_multiple_of(self, x, min_val=0, max_val=None):
        y = (np.round(x / self.__multiple_of) * self.__multiple_of).astype(int)

        if max_val is not None and y > max_val:
            y = (np.floor(x / self.__multiple_of) * self.__multiple_of).astype(int)

        if y < min_val:
            y = (np.ceil(x / self.__multiple_of) * self.__multiple_of).astype(int)

        return y

    def get_size(self, width, height):
        # determine new height and width
        scale_height = self.__height / height
        scale_width = self.__width / width

        if self.__keep_aspect_ratio:
            if self.__resize_method == "lower_bound":
                # scale such that output size is lower bound
                if scale_width > scale_height:
                    # fit width
                    scale_height = scale_width
                else:
                    # fit height
                    scale_width = scale_height
            elif self.__resize_method == "upper_bound":
                # scale such that output size is upper bound
                if scale_width < scale_height:
                    # fit width
                    scale_height = scale_width
                else:
                    # fit height
                    scale_width = scale_height
            elif self.__resize_method == "minimal":
                # scale as little as possible
                if abs(1 - scale_width) < abs(1 - scale_height):
                    # fit width
                    scale_height = scale_width
                else:
                    # fit height
                    scale_width = scale_height
            else:
                raise ValueError(
                    f"resize_method {self.__resize_method} not implemented"
                )

        if self.__resize_method == "lower_bound":
            new_height = self.constrain_to_multiple_of(
                scale_height * height, min_val=self.__height
            )
            new_width = self.constrain_to_multiple_of(
                scale_width * width, min_val=self.__width
            )
        elif self.__resize_method == "upper_bound":
            new_height = self.constrain_to_multiple_of(
                scale_height * height, max_val=self.__height
            )
            new_width = self.constrain_to_multiple_of(
                scale_width * width, max_val=self.__width
            )
        elif self.__resize_method == "minimal":
            new_height = self.constrain_to_multiple_of(scale_height * height)
            new_width = self.constrain_to_multiple_of(scale_width * width)
        else:
            raise ValueError(f"resize_method {self.__resize_method} not implemented")

        return (new_width, new_height)

    def __call__(self, sample):
        width, height = self.get_size(
            sample["image"].shape[1], sample["image"].shape[0]
        )

        # resize sample
        sample["image"] = cv2.resize(
            sample["image"],
            (width, height),
            interpolation=self.__image_interpolation_method,
        )

        if self.__resize_target:
            if "disparity" in sample:
                sample["disparity"] = cv2.resize(
                    sample["disparity"],
                    (width, height),
                    interpolation=cv2.INTER_NEAREST,
                )

            if "depth" in sample:
                sample["depth"] = cv2.resize(
                    sample["depth"], (width, height), interpolation=cv2.INTER_NEAREST
                )

            if "semseg_mask" in sample:
                # sample["semseg_mask"] = cv2.resize(
                #     sample["semseg_mask"], (width, height), interpolation=cv2.INTER_NEAREST
                # )
                sample["semseg_mask"] = F.interpolate(torch.from_numpy(sample["semseg_mask"]).float()[None, None, ...], (height, width), mode='nearest').numpy()[0, 0]

            if "mask" in sample:
                sample["mask"] = cv2.resize(
                    sample["mask"].astype(np.float32),
                    (width, height),
                    interpolation=cv2.INTER_NEAREST,
                )
                # sample["mask"] = sample["mask"].astype(bool)

        # print(sample['image'].shape, sample['depth'].shape)
        return sample


class NormalizeImage(object):
    """Normalize image by given mean and std.
    """

    def __init__(self, mean, std):
        self.__mean = mean
        self.__std = std

    def __call__(self, sample):
        sample["image"] = (sample["image"] - self.__mean) / self.__std

        return sample


class PrepareForNet(object):
    """Prepare sample for usage as network input.
    """

    def __init__(self):
        pass

    def __call__(self, sample):
        image = np.transpose(sample["image"], (2, 0, 1))
        sample["image"] = np.ascontiguousarray(image).astype(np.float32)

        if "mask" in sample:
            sample["mask"] = sample["mask"].astype(np.float32)
            sample["mask"] = np.ascontiguousarray(sample["mask"])

        if "depth" in sample:
            depth = sample["depth"].astype(np.float32)
            sample["depth"] = np.ascontiguousarray(depth)

        if "semseg_mask" in sample:
            sample["semseg_mask"] = sample["semseg_mask"].astype(np.float32)
            sample["semseg_mask"] = np.ascontiguousarray(sample["semseg_mask"])

        return sample


class Crop(object):
    """Crop sample for batch-wise training. Image is of shape CxHxW
    """

    def __init__(self, size):
        if isinstance(size, int):
            self.size = (size, size)
        else:
            self.size = size

    def __call__(self, sample):
        h, w = sample['image'].shape[-2:]
        assert h >= self.size[0] and w >= self.size[1], 'Wrong size'

        h_start = np.random.randint(0, h - self.size[0] + 1)
        w_start = np.random.randint(0, w - self.size[1] + 1)
        h_end = h_start + self.size[0]
        w_end = w_start + self.size[1]

        sample['image'] = sample['image'][:, h_start: h_end, w_start: w_end]

        if "depth" in sample:
            sample["depth"] = sample["depth"][h_start: h_end, w_start: w_end]

        if "mask" in sample:
            sample["mask"] = sample["mask"][h_start: h_end, w_start: w_end]

        if "semseg_mask" in sample:
            sample["semseg_mask"] = sample["semseg_mask"][h_start: h_end, w_start: w_end]

        return sample
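Note: a small sketch that pushes a dummy image through the Resize/NormalizeImage/PrepareForNet stack with the same settings the datasets in this commit use; it only illustrates the output layout handed to the network:

# Sketch: run the transform stack on a random image and check the output layout.
import numpy as np
import cv2
from torchvision.transforms import Compose
from dataset.transform import Resize, NormalizeImage, PrepareForNet

transform = Compose([
    Resize(width=518, height=518, resize_target=False, keep_aspect_ratio=True,
           ensure_multiple_of=14, resize_method='lower_bound',
           image_interpolation_method=cv2.INTER_CUBIC),
    NormalizeImage(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    PrepareForNet(),
])

image = np.random.rand(480, 640, 3)          # HxWx3 float image in [0, 1]
out = transform({'image': image})['image']   # CxHxW float32, ready for the network

print(out.shape)  # (3, 518, 686): short side >= 518, both sides multiples of 14
assert out.shape[1] % 14 == 0 and out.shape[2] % 14 == 0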
dataset/vkitti2.py
ADDED
@@ -0,0 +1,54 @@
import cv2
import torch
from torch.utils.data import Dataset
from torchvision.transforms import Compose

from dataset.transform import Resize, NormalizeImage, PrepareForNet, Crop


class VKITTI2(Dataset):
    def __init__(self, filelist_path, mode, size=(518, 518)):

        self.mode = mode
        self.size = size

        with open(filelist_path, 'r') as f:
            self.filelist = f.read().splitlines()

        net_w, net_h = size
        self.transform = Compose([
            Resize(
                width=net_w,
                height=net_h,
                resize_target=True if mode == 'train' else False,
                keep_aspect_ratio=True,
                ensure_multiple_of=14,
                resize_method='lower_bound',
                image_interpolation_method=cv2.INTER_CUBIC,
            ),
            NormalizeImage(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
            PrepareForNet(),
        ] + ([Crop(size[0])] if self.mode == 'train' else []))

    def __getitem__(self, item):
        img_path = self.filelist[item].split(' ')[0]
        depth_path = self.filelist[item].split(' ')[1]

        image = cv2.imread(img_path)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) / 255.0

        depth = cv2.imread(depth_path, cv2.IMREAD_ANYCOLOR | cv2.IMREAD_ANYDEPTH) / 100.0  # cm to m

        sample = self.transform({'image': image, 'depth': depth})

        sample['image'] = torch.from_numpy(sample['image'])
        sample['depth'] = torch.from_numpy(sample['depth'])

        sample['valid_mask'] = (sample['depth'] <= 80)

        sample['image_path'] = self.filelist[item].split(' ')[0]

        return sample

    def __len__(self):
        return len(self.filelist)
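Note: the four dataset classes return the same sample dictionary (image, depth, valid_mask, image_path), so they can in principle be mixed for training. A sketch of that idea; the actual training script is not part of this commit, and the split paths are simply the file names added above:

# Sketch: mixed synthetic training set built from the classes in this commit.
# Not the author's training script; just one way the shared sample format allows mixing.
from torch.utils.data import ConcatDataset, DataLoader
from dataset.hypersim import Hypersim
from dataset.vkitti2 import VKITTI2

trainset = ConcatDataset([
    Hypersim('dataset/splits/hypersim/train.txt', mode='train', size=(518, 518)),
    VKITTI2('dataset/splits/vkitti2/train.txt', mode='train', size=(518, 518)),
])
trainloader = DataLoader(trainset, batch_size=4, shuffle=True, num_workers=4)

for sample in trainloader:
    # Crop(518) in 'train' mode makes all images 3x518x518, so default collation works.
    loss_mask = sample['valid_mask']  # excludes NaN depth (Hypersim) / depth > 80 m (VKITTI2)
    break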