doevent committed
Commit 3ec7fc5 · 1 parent: f855070

Upload utils/util.py

Files changed (1)
  1. utils/util.py +178 -0
utils/util.py ADDED
@@ -0,0 +1,178 @@
+ from __future__ import division
+ from __future__ import print_function
+ import os, glob, shutil, math, json
+ from queue import Queue
+ from threading import Thread
+ from skimage.segmentation import mark_boundaries
+ import matplotlib.pyplot as plt  # needed by batchGray2Colormap (plt.get_cmap)
+ import numpy as np
+ from PIL import Image
+ import cv2, torch
+
+ def get_gauss_kernel(size, sigma):
+     '''Function to mimic the 'fspecial' gaussian MATLAB function.'''
+     x, y = np.mgrid[-size//2 + 1:size//2 + 1, -size//2 + 1:size//2 + 1]
+     g = np.exp(-((x**2 + y**2) / (2.0 * sigma**2)))
+     return g / g.sum()
+
+
+ def batchGray2Colormap(gray_batch):
+     colormap = plt.get_cmap('viridis')
+     heatmap_batch = []
+     for i in range(gray_batch.shape[0]):
+         # map the single-channel image through the colormap, keep RGB channels only
+         gray_map = gray_batch[i, :, :, 0]
+         heatmap = (colormap(gray_map) * 2**16).astype(np.uint16)[:, :, :3]
+         heatmap_batch.append(heatmap / 127.5 - 1.0)
+     return np.array(heatmap_batch)
+
+
+ class PlotterThread():
+     '''log tensorboard data in a background thread to save time'''
+     def __init__(self, writer):
+         self.writer = writer
+         self.task_queue = Queue(maxsize=0)
+         worker = Thread(target=self.do_work, args=(self.task_queue,))
+         worker.daemon = True
+         worker.start()
+
+     def do_work(self, q):
+         while True:
+             content = q.get()
+             if content[-1] == 'image':
+                 self.writer.add_image(*content[:-1])
+             elif content[-1] == 'scalar':
+                 self.writer.add_scalar(*content[:-1])
+             else:
+                 raise ValueError
+             q.task_done()
+
+     def add_data(self, name, value, step, data_type='scalar'):
+         self.task_queue.put([name, value, step, data_type])
+
+     def __len__(self):
+         return self.task_queue.qsize()
+
+
+ def save_images_from_batch(img_batch, save_dir, filename_list, batch_no=-1, suffix=None):
+     N, H, W, C = img_batch.shape
+     if C == 3:
+         #! rgb color image
+         for i in range(N):
+             # [-1,1] >>> [0,255]
+             image = Image.fromarray((127.5 * (img_batch[i, :, :, :] + 1.)).astype(np.uint8))
+             save_name = filename_list[i] if batch_no == -1 else '%05d.png' % (batch_no * N + i)
+             save_name = save_name.replace('.png', '-%s.png' % suffix) if suffix else save_name
+             image.save(os.path.join(save_dir, save_name), 'PNG')
+     elif C == 1:
+         #! single-channel gray image
+         for i in range(N):
+             # [-1,1] >>> [0,255]
+             image = Image.fromarray((127.5 * (img_batch[i, :, :, 0] + 1.)).astype(np.uint8))
+             save_name = filename_list[i] if batch_no == -1 else '%05d.png' % (batch_no * N + i)
+             save_name = save_name.replace('.png', '-%s.png' % suffix) if suffix else save_name
+             image.save(os.path.join(save_dir, save_name), 'PNG')
+     else:
+         #! multi-channel: save each channel as a single image
+         for i in range(N):
+             # [-1,1] >>> [0,255]
+             for j in range(C):
+                 image = Image.fromarray((127.5 * (img_batch[i, :, :, j] + 1.)).astype(np.uint8))
+                 if batch_no == -1:
+                     _, file_name = os.path.split(filename_list[i])
+                     name_only, _ = os.path.splitext(file_name)
+                     save_name = name_only + '_c%d.png' % j
+                 else:
+                     save_name = '%05d_c%d.png' % (batch_no * N + i, j)
+                 save_name = save_name.replace('.png', '-%s.png' % suffix) if suffix else save_name
+                 image.save(os.path.join(save_dir, save_name), 'PNG')
+     return None
+
+
+ def save_normLabs_from_batch(img_batch, save_dir, filename_list, batch_no=-1, suffix=None):
+     N, H, W, C = img_batch.shape
+     if C != 3:
+         print('@Warning: the Lab images are NOT in 3 channels!')
+         return None
+     # denormalization: L: (L+1.0)*50.0 | a: a*110.0 | b: b*110.0
+     img_batch[:, :, :, 0] = img_batch[:, :, :, 0] * 50.0 + 50.0
+     img_batch[:, :, :, 1:3] = img_batch[:, :, :, 1:3] * 110.0
+     #! convert into RGB color image
+     for i in range(N):
+         # cv2.cvtColor expects a float32 array for Lab->RGB conversion
+         rgb_img = cv2.cvtColor(img_batch[i, :, :, :].astype(np.float32), cv2.COLOR_LAB2RGB)
+         image = Image.fromarray((rgb_img * 255.0).astype(np.uint8))
+         save_name = filename_list[i] if batch_no == -1 else '%05d.png' % (batch_no * N + i)
+         save_name = save_name.replace('.png', '-%s.png' % suffix) if suffix else save_name
+         image.save(os.path.join(save_dir, save_name), 'PNG')
+     return None
+
+
+ def save_markedSP_from_batch(img_batch, spix_batch, save_dir, filename_list, batch_no=-1, suffix=None):
+     #! img_batch: BGR nd-array (range: 0~1)
+     #! spix_batch: single-channel superpixel map
+     N, H, W, C = img_batch.shape
+     #print('----------', img_batch.shape, spix_batch.shape)
+     for i in range(N):
+         norm_image = img_batch[i, :, :, :] * 0.5 + 0.5
+         spixel_bd_image = mark_boundaries(norm_image, spix_batch[i, :, :, 0].astype(int), color=(1, 1, 1))
+         #spixel_bd_image = cv2.cvtColor(spixel_bd_image, cv2.COLOR_BGR2RGB)
+         image = Image.fromarray((spixel_bd_image * 255.0).astype(np.uint8))
+         save_name = filename_list[i] if batch_no == -1 else '%05d.png' % (batch_no * N + i)
+         save_name = save_name.replace('.png', '-%s.png' % suffix) if suffix else save_name
+         image.save(os.path.join(save_dir, save_name), 'PNG')
+     return None
+
+
+ def get_filelist(data_dir):
+     file_list = glob.glob(os.path.join(data_dir, '*.*'))
+     file_list.sort()
+     return file_list
+
+
+ def collect_filenames(data_dir):
+     file_list = get_filelist(data_dir)
+     name_list = []
+     for file_path in file_list:
+         _, file_name = os.path.split(file_path)
+         name_list.append(file_name)
+     name_list.sort()
+     return name_list
+
+
+ def exists_or_mkdir(path, need_remove=False):
+     if not os.path.exists(path):
+         os.makedirs(path)
+     elif need_remove:
+         shutil.rmtree(path)
+         os.makedirs(path)
+     return None
+
+
+ def save_list(save_path, data_list, append_mode=False):
+     n = len(data_list)
+     if append_mode:
+         # append mode: write only the most recent entry
+         with open(save_path, 'a') as f:
+             f.writelines([str(data_list[i]) + '\n' for i in range(n - 1, n)])
+     else:
+         with open(save_path, 'w') as f:
+             f.writelines([str(data_list[i]) + '\n' for i in range(n)])
+     return None
+
+
+ def save_dict(save_path, data_dict):
+     # write the dictionary to a JSON file
+     with open(save_path, 'w') as f:
+         json.dump(data_dict, f)
+     return None
+
+
+ if __name__ == '__main__':
+     data_dir = '../PolyNet/PolyNet/cache/'
+     #visualizeLossCurves(data_dir)
+     # NOTE: GamutIndex is not defined in this module; it must be provided elsewhere.
+     clbar = GamutIndex()
+     ab, ab_gamut_mask = clbar._get_gamut_mask()
+     ab2q = clbar._get_ab_to_q(ab_gamut_mask)
+     q2ab = clbar._get_q_to_ab(ab, ab_gamut_mask)
+     maps = ab_gamut_mask * 255.0
+     image = Image.fromarray(maps.astype(np.uint8))
+     image.save('gamut.png', 'PNG')
+     print(ab2q.shape)
+     print(q2ab.shape)
+     print('label range:', np.min(ab2q), np.max(ab2q))
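
Below is a minimal usage sketch, not part of the commit: it assumes the module is importable as utils.util, that ./inputs holds source .png images, and that ./results is a writable output directory; the batch is random data only to exercise save_images_from_batch.

    import numpy as np
    from utils.util import exists_or_mkdir, collect_filenames, save_images_from_batch

    save_dir = './results'                      # assumed output directory
    exists_or_mkdir(save_dir)
    names = collect_filenames('./inputs')       # assumed folder of input .png images
    # fake batch of RGB images in [-1, 1], shape (N, H, W, 3)
    batch = np.random.uniform(-1.0, 1.0, size=(len(names), 256, 256, 3)).astype(np.float32)
    save_images_from_batch(batch, save_dir, names, batch_no=-1, suffix='demo')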