List of commits:
Subject Hash Author Date (UTC)
fix shanghaitech_non_overlap 9332377476d2d7524822ba98e2321c75be90f709 Thai Thien 2020-07-12 15:57:08
fix flatten collate bb25738b0510915712a17daeb590b668b446b0be Thai Thien 2020-07-12 15:17:14
ccnn_v7_t6_shb c4f59ee01cbdab3f506302588b9667cd1c9f6411 Thai Thien 2020-07-12 14:19:03
do not *4 root if we have flatten augmentation list c534aa36bf314ea32643e92231194bd020d7bf1f Thai Thien 2020-07-12 14:19:00
train val split shb 61581543d16aaa2640bdee0b3573e41d1843770d Thai Thien 2020-07-12 14:06:25
flatten collate c04708ae0defc81dbf441395e1d27de6a1d598fc Thai Thien 2020-07-12 13:54:01
travis remove nightly d4b0c714823046bbafcd3c816d56f7079c76d126 Thai Thien 2020-07-12 13:27:55
travis e6368ec3102e01f1bdc71a80a78f0db3617d7e08 Thai Thien 2020-07-12 12:33:21
flatten_collate 1e460396875c205c42de27449f56e73cd4ec10e0 Thai Thien 2020-07-12 12:23:40
train val split test ratio to 0.1 5091da3f0b45d875a38c2829e4fec5e61116e869 Thai Thien 2020-07-11 03:14:26
f 1defee5dc452c2da5fb540ff1050f6e01fe1878b Thai Thien 2020-07-10 17:04:58
fix log db4d655a313b8f3951baf225a4b197fce4bcdd4b Thai Thien 2020-07-10 16:50:03
typo, trunc 3.0 not 4.0 ee29c49efd0a5087f11662997b8992d25671a33a Thai Thien 2020-07-10 16:44:55
let try with trunc 3.0 f8eac179b1fa79d6451f19a9e6a35b82b94646a4 Thai Thien 2020-07-10 16:43:41
train script 7e6984a9eaa6609182ad6c786f6742dd29f1d017 Thai Thien 2020-06-14 10:23:05
fix H3 71d41ac82f273857f7643d8e63e502c701888a5b Thai Thien 2020-06-14 10:10:39
typo bc78059e62b17a5a617e3e0294f5c72dd4bd347e Thai Thien 2020-06-14 10:06:35
H3_t1_sha H3_t2_sha H3_t2_shb_fixed ccnn_adam_t7_shb ccnn_adam_t7_sha ca7e1ccab41314f8a30e48609421db3a2c41bbe0 Thai Thien 2020-06-14 09:59:17
train script da9ad066d6d835f38eb9e9c2b771ff412124552a Thai Thien 2020-06-13 18:06:22
train script ccnnv9 8ab76b70dad11b1458f8c1bfc2f1765c6ece984d Thai Thien 2020-06-13 17:57:02
Commit 9332377476d2d7524822ba98e2321c75be90f709 - fix shanghaitech_non_overlap
Author: Thai Thien
Author date (UTC): 2020-07-12 15:57
Committer name: Thai Thien
Committer date (UTC): 2020-07-12 15:57
Parent(s): bb25738b0510915712a17daeb590b668b446b0be
Signing key:
Tree: a8c78567ca0af14d49975dae611a5a2712a77e0d
File Lines added Lines deleted
data_flow.py 7 8
data_util/dataset_utils.py 15 3
data_util/test_dataset_utils.py 10 8
debug/explore_shb_fatten_list.py 8 3
File data_flow.py changed (mode: 100644) (index 806ba3c..eea892e)
... ... def load_data_shanghaitech_non_overlap(img_path, train=True):
439 439 :return: :return:
440 440 """ """
441 441 gt_path = img_path.replace('.jpg', '.h5').replace('images', 'ground-truth-h5') gt_path = img_path.replace('.jpg', '.h5').replace('images', 'ground-truth-h5')
442 img = Image.open(img_path).convert('RGB')
442 img_origin = Image.open(img_path).convert('RGB')
443 crop_size = (int(img_origin.size[0] / 2), int(img_origin.size[1] / 2))
443 444 gt_file = h5py.File(gt_path, 'r') gt_file = h5py.File(gt_path, 'r')
444 target = np.asarray(gt_file['density'])
445 target_origin = np.asarray(gt_file['density'])
445 446 target_factor = 8 target_factor = 8
446 447
447 448 if train: if train:
 
... ... def load_data_shanghaitech_non_overlap(img_path, train=True):
452 453 crop_label = [] crop_label = []
453 454 for i in range(2): for i in range(2):
454 455 for j in range(2): for j in range(2):
455 crop_size = (int(img.size[0] / 2), int(img.size[1] / 2))
456
457 456 # crop non-overlap # crop non-overlap
458 dx = int(i * img.size[0] * 1. / 2)
459 dy = int(j * img.size[1] * 1. / 2)
460 img = img.crop((dx, dy, crop_size[0] + dx, crop_size[1] + dy))
461 target = target[dy:crop_size[1] + dy, dx:crop_size[0] + dx]
457 dx = int(i * img_origin.size[0] * 1. / 2)
458 dy = int(j * img_origin.size[1] * 1. / 2)
459 img = img_origin.crop((dx, dy, crop_size[0] + dx, crop_size[1] + dy))
460 target = target_origin[dy:crop_size[1] + dy, dx:crop_size[0] + dx]
462 461
463 462 # flip # flip
464 463 for x in range(2): for x in range(2):
File data_util/dataset_utils.py changed (mode: 100644) (index fdb1754..c8e710e)
... ... def flatten_collate_broken(batch):
30 30 return out_batch return out_batch
31 31
32 32
33 def flatten_collate(batch):
33 def _flatten_collate(batch):
34 34 """ """
35 35
36 :param batch: tuple of (data, label)
37 :return:
36 :param batch: tuple of (data, label) with type(data) == list, type(label) == list
37 :return: flatten data, label
38 38 """ """
39 39 # remove null batch # remove null batch
40 40 batch = list(filter(lambda x: x is not None, batch)) batch = list(filter(lambda x: x is not None, batch))
 
... ... def flatten_collate(batch):
51 51 out_batch = [(img, label) for data_pair in batch for img, label in zip(*data_pair)] out_batch = [(img, label) for data_pair in batch for img, label in zip(*data_pair)]
52 52
53 53 return out_batch return out_batch
54
55
56 def flatten_collate(batch):
57 """
58
59 :param batch: tuple of (data, label) with type(data) == list, type(label) == list
60 :return: flatten data, label
61 """
62 # remove null batch
63 batch1 = _flatten_collate(batch)
64 out_batch = torch.utils.data.dataloader.default_collate(batch1)
65 return out_batch
File data_util/test_dataset_utils.py changed (mode: 100644) (index f62719e..744f4cd)
1 1 import pytest import pytest
2 from data_util.dataset_utils import flatten_collate
2 from data_util.dataset_utils import _flatten_collate
3 3
4 4
5 def test_flatten_collate_should_remove_null():
6 in_batch = [None, "a", "b", None, "c"]
7 expected_output = ["a", "b", "c"]
8 actual_output = flatten_collate(in_batch)
9 assert actual_output == expected_output
5 # def test_flatten_collate_should_remove_null():
6 # in_batch = [None, "a", "b", None, "c"]
7 # expected_output = ["a", "b", "c"]
8 # actual_output = _flatten_collate(in_batch)
9 # assert actual_output == expected_output
10 10
11 11
12 12 def test_flatten_list(): def test_flatten_list():
13 13 in_batch = [(["d11", "d12", "d13"],["l11", "l12", "l13"]),(["d21", "d22", "d23"],["l21", "l22", "l23"]), (["d31", "d32", "d33"],["l31", "l32", "l33"])] in_batch = [(["d11", "d12", "d13"],["l11", "l12", "l13"]),(["d21", "d22", "d23"],["l21", "l22", "l23"]), (["d31", "d32", "d33"],["l31", "l32", "l33"])]
14 14 out_batch = [("d11", "l11"), ("d12", "l12"), ("d13", "l13"), ("d21", "l21"), ("d22", "l22"), ("d23", "l23"), ("d31", "l31"), ("d32", "l32"), ("d33", "l33")] out_batch = [("d11", "l11"), ("d12", "l12"), ("d13", "l13"), ("d21", "l21"), ("d22", "l22"), ("d23", "l23"), ("d31", "l31"), ("d32", "l32"), ("d33", "l33")]
15 actual_output = flatten_collate(in_batch)
16 assert actual_output == out_batch
15 actual_output = _flatten_collate(in_batch)
16 assert actual_output == out_batch
17 for data in actual_output:
18 print(len(data))
File debug/explore_shb_fatten_list.py changed (mode: 100644) (index c9fd3d5..6e74d94)
... ... if __name__ == "__main__":
31 31 val_list = create_image_list(VAL_PATH) val_list = create_image_list(VAL_PATH)
32 32 test_list = create_image_list(TEST_PATH) test_list = create_image_list(TEST_PATH)
33 33
34 # train_loader, train_loader_eval, val_loader, test_loader = get_dataloader(train_list, val_list, test_list,
35 # dataset_name="shanghaitech_more_random"
36 # , batch_size=1,
37 # train_loader_for_eval_check=True)
38
34 39 train_loader, train_loader_eval, val_loader, test_loader = get_dataloader(train_list, val_list, test_list, train_loader, train_loader_eval, val_loader, test_loader = get_dataloader(train_list, val_list, test_list,
35 40 dataset_name="shanghaitech_non_overlap" dataset_name="shanghaitech_non_overlap"
36 , batch_size=20,
41 , batch_size=1,
37 42 train_loader_for_eval_check=True) train_loader_for_eval_check=True)
38 43 print(len(train_loader)) print(len(train_loader))
39 44 print(len(val_loader)) print(len(val_loader))
40 45
41 for obs in train_loader:
42 print(len(obs))
46 for img, label in train_loader:
47 print(img.shape, label.shape)
Hints:
Before your first commit, do not forget to set up your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/hahattpro/crowd_counting_framework

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/hahattpro/crowd_counting_framework

Clone this repository using git:
git clone git://git.rocketgit.com/user/hahattpro/crowd_counting_framework

You are allowed to anonymously push to this repository.
This means that your pushed commits will automatically be transformed into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main