List of commits:
Subject Hash Author Date (UTC)
fix data loader pacnn so it will scale up with correct number of people 11d55b50d764511f2491291f0208fee0905dec49 Thai Thien 2019-09-24 15:40:56
add comet ml a9d4b89ce594f5e241168ccafdcdf0f150ea0ebb Thai Thien 2019-09-23 17:07:58
fix pacnn avg schema c2140a96886195782e5689c24aeeb4fe7a2db7ad Thai Thien 2019-09-22 17:35:01
debug number not divisible by 8 a568fd7f294a8bd31b3db78437b4b6b51b5b41b9 Thai Thien 2019-09-22 04:36:06
pacnn 967074890d14ab0eefc277801860270a468e8f9f Thai Thien 2019-09-22 03:54:48
wip: pacnn 2192d7c7b449fecf3868877d9cfbc09bb6f7ae98 Thai Thien 2019-09-22 03:44:56
wip: pacnn 37620e5a9bc0f9516ea964ec58d9bdaa1c40ff36 Thai Thien 2019-09-22 03:14:42
fix training flow 2b87b1b26c7296b64493fdc49fedb421b249dfa3 Thai Thien 2019-09-17 18:00:35
dataset script bc5c052f5f956510ab95ef9a45434fd486c57fae Thai Thien 2019-09-16 17:21:13
evaluator ffc5bf8290ae0c469a9a18a2d061cfd1bfeee822 Thai Thien 2019-09-14 04:56:35
some more test for data loader 25173578cde7d4e9fe6c6140d1ee01caa4fcfc32 Thai Thien 2019-09-14 02:51:58
some visualize to debug data loader e4f52007616acf307bddbde79c0fb4f8c649c785 Thai Thien 2019-09-13 17:35:45
wip d7d44cad6774355bdfa45414258763f6c6a0c299 Thai Thien 2019-08-31 16:58:16
commit all 6dad7a58f7dbf9fc288ce9dd3e92be538851c2a7 Thai Thien 2019-08-29 19:10:44
input d1,d2,d3 match fc2a809241f8b6356d964c63d40cbebd55ca5f6c Thai Thien 2019-08-28 17:57:05
WIP 39eab26d061e61dfffbf164dbd5fd878299b7250 thient 2019-08-28 11:09:12
output of de is ok dd770386674df3e0fbebafdfc48a9352bc28967d thient 2019-08-28 10:54:09
code pacnn c49537b5cc91e96e4e35c9338d2c95b9bb41c672 Thai Thien 2019-08-27 16:35:27
crowd counting stuff da9f27a39cba9bdd021b6b5c562f5f7c2be50190 Thai Thien 2019-08-24 18:27:44
seem ok 53fa176c31669a0e89b04adf290cb398f0316c45 Thai Thien 2019-08-24 18:26:31
Commit 11d55b50d764511f2491291f0208fee0905dec49 - fix data loader pacnn so it will scale up with correct number of people
Author: Thai Thien
Author date (UTC): 2019-09-24 15:40
Committer name: Thai Thien
Committer date (UTC): 2019-09-24 15:40
Parent(s): a9d4b89ce594f5e241168ccafdcdf0f150ea0ebb
Signing key:
Tree: a91a77d79de0e3f35d41ab15f080fcc619f39893
File Lines added Lines deleted
data_flow.py 4 4
visualize_data_loader.py 4 1
File data_flow.py changed (mode: 100644) (index 924b307..2d67c33)
... ... def load_data_shanghaitech_pacnn(img_path, train=True):
        target1 = cv2.resize(target, (int(target.shape[1] / 8), int(target.shape[0] / 8)),
                             interpolation=cv2.INTER_CUBIC) * 64
        target2 = cv2.resize(target, (int(target.shape[1] / 16), int(target.shape[0] / 16)),
-                            interpolation=cv2.INTER_CUBIC) * 64 *2
+                            interpolation=cv2.INTER_CUBIC) * 256
        target3 = cv2.resize(target, (int(target.shape[1] / 32), int(target.shape[0] / 32)),
-                            interpolation=cv2.INTER_CUBIC) * 64 *4
+                            interpolation=cv2.INTER_CUBIC) * 1024

        return img, (target1, target2, target3)

... ... def load_data_ucf_cc50_pacnn(img_path, train=True):
        target1 = cv2.resize(target, (int(target.shape[1] / 8), int(target.shape[0] / 8)),
                             interpolation=cv2.INTER_CUBIC) * 64
        target2 = cv2.resize(target, (int(target.shape[1] / 16), int(target.shape[0] / 16)),
-                            interpolation=cv2.INTER_CUBIC) * 64 #*2
+                            interpolation=cv2.INTER_CUBIC) * 256
        target3 = cv2.resize(target, (int(target.shape[1] / 32), int(target.shape[0] / 32)),
-                            interpolation=cv2.INTER_CUBIC) * 64 #*4
+                            interpolation=cv2.INTER_CUBIC) * 1024

        return img, (target1, target2, target3)
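The change above is a count-preservation fix. A PACNN density map integrates to the number of annotated people, but cv2.resize interpolates pixel values rather than redistributing mass, so downscaling the map by a factor k shrinks its sum by roughly k². The 1/8 target therefore needs *64, the 1/16 target *256 and the 1/32 target *1024; before this commit the ShanghaiTech loader used 64*2 and 64*4 (128 and 256) and the UCF-CC-50 loader used a plain 64 with the extra factors commented out, so the smaller targets no longer summed to the true head count. A minimal sketch (not code from this repository) illustrating the rule:

# Hedged sketch: multiplying a downscaled density map by k*k keeps its
# integral roughly equal to the head count (the behaviour this commit fixes).
import cv2
import numpy as np

rng = np.random.default_rng(0)
density = np.zeros((512, 512), dtype=np.float32)
heads = rng.integers(0, 512, size=(200, 2))             # 200 synthetic head positions
np.add.at(density, (heads[:, 0], heads[:, 1]), 1.0)     # one unit of mass per head
density = cv2.GaussianBlur(density, (15, 15), 4)         # typical density-map smoothing

print("full-resolution count:", density.sum())           # ~200
for k in (8, 16, 32):
    small = cv2.resize(density,
                       (density.shape[1] // k, density.shape[0] // k),
                       interpolation=cv2.INTER_CUBIC) * (k * k)
    print(f"1/{k} count:", small.sum())                   # each in the same ballpark

Cubic downsampling is only approximately count-preserving, and the error grows with the scale factor, which is why the loader is also checked by printing the sums in the visualization change below.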
File visualize_data_loader.py changed (mode: 100644) (index cd7d5eb..b515bbd)
... ... def visualize_ucf_cc_50_pacnn():
            ]),
            train=True,
            batch_size=1,
-           num_workers=4, dataset_name="ucf_cc_50_pacnn", debug=True),
+           num_workers=4, dataset_name="shanghaitech_pacnn", debug=True),
        batch_size=1, num_workers=4)

    img, label = next(iter(train_loader_pacnn))

... ... def visualize_ucf_cc_50_pacnn():
    save_density_map(label[0].numpy()[0], os.path.join(saved_folder,"pacnn_loader_density1.png"))
    save_density_map(label[1].numpy()[0], os.path.join(saved_folder,"pacnn_loader_density2.png"))
    save_density_map(label[2].numpy()[0], os.path.join(saved_folder,"pacnn_loader_density3.png"))
+   print("count1 ", label[0].numpy()[0].sum())
+   print("count2 ", label[1].numpy()[0].sum())
+   print("count3 ", label[2].numpy()[0].sum())


if __name__ == "__main__":
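The added print statements are a quick sanity check: with the corrected scale factors, the three printed counts should all be close to each other and to the number of annotated heads in the sampled image. A hedged sketch of how the same check could be made automatic, assuming the train_loader_pacnn and label variables from visualize_data_loader.py above:

# Hypothetical tolerance check, not part of the repository.
img, label = next(iter(train_loader_pacnn))
counts = [label[i].numpy()[0].sum() for i in range(3)]
# All three downscaled density maps should integrate to roughly the same count.
assert max(counts) - min(counts) <= 0.05 * max(counts) + 1.0, counts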