List of commits:
Subject Hash Author Date (UTC)
data_flow shanghaitech_pacnn_with_perspective seem working 91d350a06f358e03223966297d124daee94123d0 Thai Thien 2019-10-06 01:31:11
multiscale loss and final loss only mode c65dd0e74ad28503821e5c8651a3b47b4a0c7c64 Thai Thien 2019-10-05 15:58:19
wip : perspective map eac63f2671dc5b064753acc4f40bf0f9f216ad2a Thai Thien 2019-10-04 16:26:56
shell script f2106e700b6f6174d4dd276f25ec6f3d9ff239bb thient 2019-10-04 07:42:51
WIP 42c7c8e1d772fbbda61a4bdf9e329f74e1efb600 tthien 2019-10-03 17:52:47
add readme 580cf43d1edddd67b1f6a2c57fdd5cee3dba925c Thai Thien 2019-10-02 17:44:49
update script, debug ddb68b95389be1c1d398118677dd227a8bb2b70b Thai Thien 2019-10-02 15:52:31
add d (output density map) to loss function) a0c71bf4bf2ab7393d60b06a84db8dfbbfb1a6c2 tthien 2019-09-30 16:32:39
fix the args, add save interval for model, so we don't save them all 9fdf9daa2ac4bd12b7b62521d81e520db0debd01 tthien 2019-09-30 16:30:00
meow 1ad19a22a310992e27a26471feeb37375124d075 tthien 2019-09-29 18:25:43
fix pacnn perspective map 453ece3ccb818889ba895bfc4285f7905d33cba5 Thai Thien 2019-09-25 17:20:33
apex not work so well da8c0dd57297f972201f31d57e66897177922f48 Thai Thien 2019-09-24 17:25:59
fix data loader pacnn so it will scale up with correct number of people 11d55b50d764511f2491291f0208fee0905dec49 Thai Thien 2019-09-24 15:40:56
add comet ml a9d4b89ce594f5e241168ccafdcdf0f150ea0ebb Thai Thien 2019-09-23 17:07:58
fix pacnn avg schema c2140a96886195782e5689c24aeeb4fe7a2db7ad Thai Thien 2019-09-22 17:35:01
debug number not divisible by 8 a568fd7f294a8bd31b3db78437b4b6b51b5b41b9 Thai Thien 2019-09-22 04:36:06
pacnn 967074890d14ab0eefc277801860270a468e8f9f Thai Thien 2019-09-22 03:54:48
wip: pacnn 2192d7c7b449fecf3868877d9cfbc09bb6f7ae98 Thai Thien 2019-09-22 03:44:56
wip: pacnn 37620e5a9bc0f9516ea964ec58d9bdaa1c40ff36 Thai Thien 2019-09-22 03:14:42
fix training flow 2b87b1b26c7296b64493fdc49fedb421b249dfa3 Thai Thien 2019-09-17 18:00:35
Commit 91d350a06f358e03223966297d124daee94123d0 - data_flow shanghaitech_pacnn_with_perspective seem working
Author: Thai Thien
Author date (UTC): 2019-10-06 01:31
Committer name: Thai Thien
Committer date (UTC): 2019-10-06 01:31
Parent(s): c65dd0e74ad28503821e5c8651a3b47b4a0c7c64
Signing key:
Tree: 2d619b8ad96bbeae323dcc638a9e794f932051de
File Lines added Lines deleted
data_flow.py 9 6
hard_code_variable.py 2 1
train_script/train_pacnn_shanghaitechA.sh 10 1
visualize_data_loader.py 42 1
File data_flow.py changed (mode: 100644) (index 19347e3..a7b2270)
... ... def load_data_shanghaitech_pacnn_with_perspective(img_path, train=True):
114 114 :return: :return:
115 115 """ """
116 116 gt_path = img_path.replace('.jpg', '.h5').replace('images', 'ground-truth-h5') gt_path = img_path.replace('.jpg', '.h5').replace('images', 'ground-truth-h5')
117 p_path = img_path.replace(".jpg", ".mat").replace("images", "p_map")
117 p_path = img_path.replace(".jpg", ".mat").replace("images", "pmap")
118 118 img = Image.open(img_path).convert('RGB') img = Image.open(img_path).convert('RGB')
119 119 gt_file = h5py.File(gt_path, 'r') gt_file = h5py.File(gt_path, 'r')
120 120 target = np.asarray(gt_file['density']) target = np.asarray(gt_file['density'])
121 perspective = np.array(h5py.File(p_path, "r"))
122
121 perspective = np.array(h5py.File(p_path, "r")['pmap'])
122 perspective = np.rot90(perspective, k=3)
123 123 if train: if train:
124 124 crop_size = (int(img.size[0] / 2), int(img.size[1] / 2)) crop_size = (int(img.size[0] / 2), int(img.size[1] / 2))
125 125 if random.randint(0, 9) <= -1: if random.randint(0, 9) <= -1:
 
... ... def load_data_shanghaitech_pacnn_with_perspective(img_path, train=True):
132 132
133 133 img = img.crop((dx, dy, crop_size[0] + dx, crop_size[1] + dy)) img = img.crop((dx, dy, crop_size[0] + dx, crop_size[1] + dy))
134 134 target = target[dy:crop_size[1] + dy, dx:crop_size[0] + dx] target = target[dy:crop_size[1] + dy, dx:crop_size[0] + dx]
135 perspective = target[dy:crop_size[1] + dy, dx:crop_size[0] + dx]
135 perspective = perspective[dy:crop_size[1] + dy, dx:crop_size[0] + dx]
136 136 if random.random() > 0.8: if random.random() > 0.8:
137 137 target = np.fliplr(target) target = np.fliplr(target)
138 138 img = img.transpose(Image.FLIP_LEFT_RIGHT) img = img.transpose(Image.FLIP_LEFT_RIGHT)
 
... ... def load_data_shanghaitech_pacnn_with_perspective(img_path, train=True):
148 148 perspective_s = cv2.resize(perspective, (int(perspective.shape[1] / 16), int(perspective.shape[0] / 16)), perspective_s = cv2.resize(perspective, (int(perspective.shape[1] / 16), int(perspective.shape[0] / 16)),
149 149 interpolation=cv2.INTER_CUBIC) * 256 interpolation=cv2.INTER_CUBIC) * 256
150 150
151 perspective_m = cv2.resize(perspective, (int(perspective.shape[1] / 8), int(perspective.shape[0] / 8)),
151 perspective_p = cv2.resize(perspective, (int(perspective.shape[1] / 8), int(perspective.shape[0] / 8)),
152 152 interpolation=cv2.INTER_CUBIC) * 64 interpolation=cv2.INTER_CUBIC) * 64
153 153
154 return img, (target1, target2, target3, perspective_s, perspective_m)
154 return img, (target1, target2, target3, perspective_s, perspective_p)
155
155 156
156 157 def load_data_ucf_cc50_pacnn(img_path, train=True): def load_data_ucf_cc50_pacnn(img_path, train=True):
157 158 """ """
 
... ... class ListDataset(Dataset):
258 259 self.load_data_fn = load_data_ucf_cc50_pacnn self.load_data_fn = load_data_ucf_cc50_pacnn
259 260 elif dataset_name is "shanghaitech_pacnn": elif dataset_name is "shanghaitech_pacnn":
260 261 self.load_data_fn = load_data_shanghaitech_pacnn self.load_data_fn = load_data_shanghaitech_pacnn
262 elif dataset_name is "shanghaitech_pacnn_with_perspective":
263 self.load_data_fn = load_data_shanghaitech_pacnn_with_perspective
261 264
262 265 def __len__(self): def __len__(self):
263 266 return self.nSamples return self.nSamples
File hard_code_variable.py changed (mode: 100644) (index 836144a..3f5b17a)
1 1 class HardCodeVariable(): class HardCodeVariable():
2 2 def __init__(self): def __init__(self):
3 self.UCF_CC_50_PATH = "/data/cv_data/UCFCrowdCountingDataset_CVPR13_with_people_density_map/UCF_CC_50"
3 self.UCF_CC_50_PATH = "/data/cv_data/UCFCrowdCountingDataset_CVPR13_with_people_density_map/UCF_CC_50"
4 self.SHANGHAITECH_PATH = "data/ShanghaiTech/part_A/train_data"
File train_script/train_pacnn_shanghaitechA.sh changed (mode: 100644) (index 9f2955d..f3944dd)
45 45 #--load_model saved_model/train_state1_attemp6_120_checkpoint.pth.tar \ #--load_model saved_model/train_state1_attemp6_120_checkpoint.pth.tar \
46 46 #--epochs 300 \ #--epochs 300 \
47 47 #--lr 1e-9 \ #--lr 1e-9 \
48 #--task_id train_state1_attemp7
48 #--task_id train_state1_attemp7
49
50 #### no loss for d1, d2, d3 but only count d_final
51 python main_pacnn.py \
52 --input data/ShanghaiTech/part_A \
53 --load_model saved_model/train_state1_attemp7_180_checkpoint.pth.tar \
54 --epochs 300 \
55 --lr 1e-9 \
56 --PACNN_MUTILPLE_SCALE_LOSS False \
57 --task_id train_state1_attemp8_finalloss
File visualize_data_loader.py changed (mode: 100644) (index b515bbd..f4aa432)
... ... def visualize_ucf_cc_50_pacnn():
48 48 print("count2 ", label[1].numpy()[0].sum()) print("count2 ", label[1].numpy()[0].sum())
49 49 print("count3 ", label[2].numpy()[0].sum()) print("count3 ", label[2].numpy()[0].sum())
50 50
51 def visualize_shanghaitech_pacnn_with_perspective():
52 HARD_CODE = HardCodeVariable()
53 saved_folder = "visualize/test_dataloader"
54 os.makedirs(saved_folder, exist_ok=True)
55 DATA_PATH = HARD_CODE.SHANGHAITECH_PATH
56 train_list, val_list = get_train_val_list(DATA_PATH, test_size=0.2)
57 test_list = None
58
59 # create data loader
60 train_loader, val_loader, test_loader = get_dataloader(train_list, val_list, test_list, dataset_name="ucf_cc_50")
61 train_loader_pacnn = torch.utils.data.DataLoader(
62 ListDataset(train_list,
63 shuffle=True,
64 transform=transforms.Compose([
65 transforms.ToTensor()
66 ]),
67 train=True,
68 batch_size=1,
69 num_workers=4, dataset_name="shanghaitech_pacnn_with_perspective", debug=True),
70 batch_size=1, num_workers=4)
71
72 img, label = next(iter(train_loader_pacnn))
73
74 print(img.shape)
75 save_img(img, os.path.join(saved_folder, "pacnn_loader_img.png"))
76 save_density_map(label[0].numpy()[0], os.path.join(saved_folder,"pacnn_loader_with_p_density1.png"))
77 save_density_map(label[1].numpy()[0], os.path.join(saved_folder,"pacnn_loader_with_p_density2.png"))
78 save_density_map(label[2].numpy()[0], os.path.join(saved_folder,"pacnn_loader_with_p_density3.png"))
79 save_density_map(label[3].numpy()[0], os.path.join(saved_folder, "pacnn_loader_p_s_4.png"))
80 save_density_map(label[4].numpy()[0], os.path.join(saved_folder, "pacnn_loader_p_5.png"))
81 print("count1 ", label[0].numpy()[0].sum())
82 print("count2 ", label[1].numpy()[0].sum())
83 print("count3 ", label[2].numpy()[0].sum())
84 print("count4 ", label[3].numpy()[0].sum())
85 print("count5 ", label[4].numpy()[0].sum())
86
87 print("s1 ", label[0].shape)
88 print("s2 ", label[1].shape)
89 print("s3 ", label[2].shape)
90 print("s4 ", label[3].shape)
91 print("s5 ", label[4].shape)
51 92
52 93 if __name__ == "__main__": if __name__ == "__main__":
53 visualize_ucf_cc_50_pacnn()
94 visualize_shanghaitech_pacnn_with_perspective()
Hints:
Before your first commit, do not forget to set up your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/hahattpro/crowd_counting_framework

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/hahattpro/crowd_counting_framework

Clone this repository using git:
git clone git://git.rocketgit.com/user/hahattpro/crowd_counting_framework

You are allowed to push to this repository anonymously.
This means that your pushed commits will automatically be turned into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main