List of commits:
Subject Hash Author Date (UTC)
fix the dataloader for shanghaitech 5f2aee9f316e6555e6a70c6ad037a4e6b491867b Thai Thien 2020-02-02 09:19:50
context aware visualize seem ok 1bdb6ffe77ca4e40ef8f299b2506df2266243db4 Thai Thien 2020-02-02 05:07:10
visualize eval context aware network seem ok f3fe45c23dfeab3730624737efabb0b14d23c25b Thai Thien 2020-02-02 04:50:34
visualize_shanghaitech_pacnn_with_perspective run without error 12366a2de2bd60ff4bd36e6132d44e37dedf7462 Thai Thien 2020-02-02 04:21:16
eval context aware network on ShanghaiTechB can run e8c454d2b6d287c830c1286c9a37884b3cfc615f Thai Thien 2020-02-02 04:09:14
import ShanghaiTechDataPath in data_util e81eb56315d44375ff5c0e747d61456601492f8f Thai Thien 2020-02-02 04:04:36
add model_context_aware_network.py 2a36025c001d85afc064c090f4d22987b328977b Thai Thien 2020-02-02 03:46:38
PACNN (TODO: test this) 44d5ae7ec57c760fb4f105dd3e3492148a0cc075 Thai Thien 2020-02-02 03:40:26
add data path 80134de767d0137a663f343e4606bafc57a1bc1f Thai Thien 2020-02-02 03:38:21
test if ShanghaiTech datapath is correct 97ee84944a4393ec3732879b24f614826f8e7798 Thai Thien 2020-02-01 03:57:31
refactor and test ShanghaiTech datapath 9542ebc00f257edc38690180b7a4353794be4019 Thai Thien 2020-02-01 03:53:49
fix the unzip flow b53c5989935335377eb6a88c942713d3eccc5df7 Thai Thien 2020-02-01 03:53:13
data_script run seem ok 67420c08fc1c10a66404d3698994865726a106cd Thai Thien 2020-02-01 03:33:18
add perspective 642d6fff8c9f31e510fda85a7fb631fb855d8a6d Thai Thien 2019-10-06 16:54:44
fix padding with p 86c2fa07822d956a34b3b37e14da485a4249f01b Thai Thien 2019-10-06 02:52:58
pacnn perspective loss fb673e38a5f24ae9004fe2b7b93c88991e0c2304 Thai Thien 2019-10-06 01:38:28
data_flow shanghaitech_pacnn_with_perspective seem working 91d350a06f358e03223966297d124daee94123d0 Thai Thien 2019-10-06 01:31:11
multiscale loss and final loss only mode c65dd0e74ad28503821e5c8651a3b47b4a0c7c64 Thai Thien 2019-10-05 15:58:19
wip : perspective map eac63f2671dc5b064753acc4f40bf0f9f216ad2a Thai Thien 2019-10-04 16:26:56
shell script f2106e700b6f6174d4dd276f25ec6f3d9ff239bb thient 2019-10-04 07:42:51
Commit 5f2aee9f316e6555e6a70c6ad037a4e6b491867b - fix the dataloader for shanghaitech
Author: Thai Thien
Author date (UTC): 2020-02-02 09:19
Committer name: Thai Thien
Committer date (UTC): 2020-02-02 09:19
Parent(s): 7addc5d8f97a0f21972eca71b4938f7e4d126b88
Signing key:
Tree: 520339abf7f4fa27bc2a43e7bd07a43eab7300ff
File Lines added Lines deleted
args_util.py 3 3
data_flow.py 85 20
models/__init__.py 2 1
train_script/train_can_short.sh 0 0
visualize_data_loader.py 21 1
File args_util.py changed (mode: 100644) (index 2e74c07..fa2f9fc)
@@ ... @@ contain dummy args with config
 helpfull for copy paste Kaggle
 """
 import argparse
+from hard_code_variable import HardCodeVariable
 
 def make_args(gpu="0", task="task_one_"):
     """
@@ ... @@ def real_args_parse():
     parser.add_argument("--task_id", action="store", default="dev")
     parser.add_argument('-a', action="store_true", default=False)
 
-    parser.add_argument('--input', action="store", type=str)
-    parser.add_argument('--output', action="store", type=str)
+    parser.add_argument('--input', action="store", type=str, default=HardCodeVariable().SHANGHAITECH_PATH_PART_A)
+    parser.add_argument('--output', action="store", type=str, default="saved_model")
     parser.add_argument('--model', action="store", default="pacnn")
 
     # args with default value
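
The new defaults reference HardCodeVariable, whose definition is not part of this commit. Below is a minimal sketch of the shape it plausibly has, assuming it is a plain container of dataset paths; only the attribute names SHANGHAITECH_PATH and SHANGHAITECH_PATH_PART_A appear in these diffs, and the path values are placeholders, not the repository's real configuration:

    # hard_code_variable.py (hypothetical sketch, not the committed file)
    class HardCodeVariable:
        def __init__(self):
            # Placeholder locations; substitute your local dataset layout.
            self.SHANGHAITECH_PATH = "data/ShanghaiTech"
            self.SHANGHAITECH_PATH_PART_A = "data/ShanghaiTech/part_A"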
File data_flow.py changed (mode: 100644) (index a3f3d3c..ac150b0)
@@ ... @@ def get_train_val_list(data_path, test_size=0.1):
 
 
 def load_data(img_path, train=True):
+    """
+    get a sample
+    :deprecate: use load_data_shanghaiTech now
+    :param img_path:
+    :param train:
+    :return:
+    """
     gt_path = img_path.replace('.jpg', '.h5').replace('images', 'ground-truth-h5')
     img = Image.open(img_path).convert('RGB')
     gt_file = h5py.File(gt_path, 'r')
@@ ... @@ def load_data(img_path, train=True):
     return img, target
 
 
+def load_data_shanghaitech(img_path, train=True):
+    gt_path = img_path.replace('.jpg', '.h5').replace('images', 'ground-truth-h5')
+    img = Image.open(img_path).convert('RGB')
+    gt_file = h5py.File(gt_path, 'r')
+    target = np.asarray(gt_file['density'])
+
+    if train:
+        crop_size = (int(img.size[0] / 2), int(img.size[1] / 2))
+        if random.randint(0, 9) <= -1:
+
+            dx = int(random.randint(0, 1) * img.size[0] * 1. / 2)
+            dy = int(random.randint(0, 1) * img.size[1] * 1. / 2)
+        else:
+            dx = int(random.random() * img.size[0] * 1. / 2)
+            dy = int(random.random() * img.size[1] * 1. / 2)
+
+        img = img.crop((dx, dy, crop_size[0] + dx, crop_size[1] + dy))
+        target = target[dy:crop_size[1] + dy, dx:crop_size[0] + dx]
+
+        if random.random() > 0.8:
+            target = np.fliplr(target)
+            img = img.transpose(Image.FLIP_LEFT_RIGHT)
+
+    target1 = cv2.resize(target, (int(target.shape[1] / 8), int(target.shape[0] / 8)),
+                         interpolation=cv2.INTER_CUBIC) * 64
+    target1 = target1.unsqueeze(0)  # make dim (batch size, channel size, x, y) to make model output
+    return img, target1
+
+
+def load_data_shanghaitech_keepfull(img_path, train=True):
+    gt_path = img_path.replace('.jpg', '.h5').replace('images', 'ground-truth-h5')
+    img = Image.open(img_path).convert('RGB')
+    gt_file = h5py.File(gt_path, 'r')
+    target = np.asarray(gt_file['density'])
+
+    if train:
+        if random.random() > 0.8:
+            target = np.fliplr(target)
+            img = img.transpose(Image.FLIP_LEFT_RIGHT)
+
+    target1 = cv2.resize(target, (int(target.shape[1] / 8), int(target.shape[0] / 8)),
+                         interpolation=cv2.INTER_CUBIC) * 64
+
+    target1 = np.expand_dims(target1, axis=0)  # make dim (batch size, channel size, x, y) to make model output
+    # np.expand_dims(target1, axis=0)  # again
+    return img, target1
+
+
 def load_data_ucf_cc50(img_path, train=True):
     gt_path = img_path.replace('.jpg', '.h5')
     img = Image.open(img_path).convert('RGB')
@@ ... @@ class ListDataset(Dataset):
         self.dataset_name = dataset_name
         # load data fn
         if dataset_name is "shanghaitech":
-            self.load_data_fn = load_data
+            self.load_data_fn = load_data_shanghaitech
+        if dataset_name is "shanghaitech_keepfull":
+            self.load_data_fn = load_data_shanghaitech_keepfull
         elif dataset_name is "ucf_cc_50":
             self.load_data_fn = load_data_ucf_cc50
         elif dataset_name is "ucf_cc_50_pacnn":
@@ ... @@ class ListDataset(Dataset):
         return img, target
 
 
-def get_dataloader(train_list, val_list, test_list, dataset_name="shanghaitech"):
+def get_dataloader(train_list, val_list, test_list, dataset_name="shanghaitech", visualize_mode=False):
+    if visualize_mode:
+        transformer = transforms.Compose([
+            transforms.ToTensor()
+        ])
+    else:
+        transformer = transforms.Compose([
+            transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406],
+                                                        std=[0.229, 0.224, 0.225]),
+        ])
+
     train_loader = torch.utils.data.DataLoader(
         ListDataset(train_list,
-                    shuffle=True,
-                    transform=transforms.Compose([
-                        transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406],
-                                                                    std=[0.229, 0.224, 0.225]),
-                    ]),
-                    train=True,
-                    batch_size=1,
-                    num_workers=4, dataset_name=dataset_name),
-        batch_size=1, num_workers=4)
+                    shuffle=True,
+                    transform=transformer,
+                    train=True,
+                    batch_size=1,
+                    num_workers=4,
+                    dataset_name=dataset_name),
+        batch_size=1,
+        num_workers=4)
 
     val_loader = torch.utils.data.DataLoader(
         ListDataset(val_list,
-                    shuffle=False,
-                    transform=transforms.Compose([
-                        transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406],
-                                                                    std=[0.229, 0.224, 0.225]),
-                    ]), train=False, dataset_name=dataset_name),
+                    shuffle=False,
+                    transform=transformer,
+                    train=False,
+                    dataset_name=dataset_name),
         batch_size=1)
+
     if test_list is not None:
         test_loader = torch.utils.data.DataLoader(
             ListDataset(test_list,
                         shuffle=False,
-                        transform=transforms.Compose([
-                            transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406],
-                                                                        std=[0.229, 0.224, 0.225]),
-                        ]), train=False, dataset_name=dataset_name),
+                        transform=transformer,
+                        train=False,
+                        dataset_name=dataset_name),
             batch_size=1)
     else:
         test_loader = None
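
Two details of the new loaders deserve a note. In load_data_shanghaitech, target1 is produced by cv2.resize and is therefore a NumPy array, which has no unsqueeze method; the np.expand_dims call in load_data_shanghaitech_keepfull is the working NumPy equivalent. The trailing * 64 exists because downsampling the density map by 8 along each axis leaves 64x fewer pixels, shrinking its sum (the crowd count) by roughly the same factor. (Separately, the dataset_name is "shanghaitech" comparisons rely on string interning; == is the reliable form, and the new keepfull branch would read more consistently as elif.) A small self-contained check of the resize arithmetic and the expand_dims fix, using a synthetic density map with illustrative shapes:

    import numpy as np
    import cv2

    # Synthetic density map; its sum stands in for the crowd count.
    target = np.random.rand(480, 640).astype(np.float32)
    print("original count:", target.sum())

    # Downsample by 8 per axis: 64x fewer pixels, so the sum drops ~64x;
    # the * 64 used in the loaders restores the original count.
    target1 = cv2.resize(target, (target.shape[1] // 8, target.shape[0] // 8),
                         interpolation=cv2.INTER_CUBIC) * 64
    print("rescaled count:", target1.sum())

    # Add the leading channel dimension with NumPy; cv2.resize returns an
    # ndarray, so tensor-style target1.unsqueeze(0) would raise AttributeError.
    target1 = np.expand_dims(target1, axis=0)
    print("shape:", target1.shape)  # (1, 60, 80)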
File models/__init__.py changed (mode: 100644) (index 7424e78..413a1dd)
 from .csrnet import CSRNet
-from .pacnn import PACNN, PACNNWithPerspectiveMap
+from .pacnn import PACNN, PACNNWithPerspectiveMap
+from .context_aware_network import CANNet
File train_script/train_can_short.sh copied from file playground/__init__.py (similarity 100%)
File visualize_data_loader.py changed (mode: 100644) (index 09c9f3b..03c78fe)
@@ ... @@ def visualize_ucf_cc_50_pacnn():
     print("count3 ", label[2].numpy()[0].sum())
 
 
+def visualize_shanghaitech_keepfull():
+    HARD_CODE = HardCodeVariable()
+    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
+    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
+    saved_folder = "visualize/test_dataloader_shanghaitech"
+    os.makedirs(saved_folder, exist_ok=True)
+    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train, test_size=0.2)
+    test_list = None
+    train_loader, val_loader, test_loader = get_dataloader(train_list, val_list, test_list, dataset_name="shanghaitech_keepfull", visualize_mode=True)
+
+    # do with train loader
+    train_loader_iter = iter(train_loader)
+    for i in range(10):
+        img, label = next(train_loader_iter)
+        save_img(img, os.path.join(saved_folder, "train_img" + str(i) + ".png"))
+        save_density_map(label.numpy()[0][0], os.path.join(saved_folder, "train_label" + str(i) + ".png"))
+
+
+
 def visualize_shanghaitech_pacnn_with_perspective():
     HARD_CODE = HardCodeVariable()
     shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
@@ ... @@ def visualize_shanghaitech_pacnn_with_perspective():
     print("s5 ", label[4].shape)
 
 if __name__ == "__main__":
-    visualize_shanghaitech_pacnn_with_perspective()
+    # visualize_shanghaitech_pacnn_with_perspective()
+    visualize_shanghaitech_keepfull()
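
save_img and save_density_map are existing repo helpers whose bodies are not shown in this diff. A plausible stand-in for the density-map side, assuming it simply renders the 2D array through a colormap (the matplotlib backend and cmap choice are assumptions, not the committed implementation):

    import matplotlib
    matplotlib.use("Agg")  # headless backend, suitable for batch scripts
    import matplotlib.pyplot as plt

    def save_density_map_sketch(density, path):
        # Hypothetical equivalent of the repo's save_density_map helper:
        # write the 2D density array to disk as a color-mapped image.
        plt.imsave(path, density, cmap="jet")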