List of commits:
Subject Hash Author Date (UTC)
some predict demo 7f2214ad2882874d5407a2d4d5bd115b7089a020 Thai Thien 2020-12-02 14:55:22
demo 3d5fa0d764843d4fd780e44547fd0b13a367c6bc Thai Thien 2020-11-27 19:28:37
notable image b0f2b0e9732c2f2031831eb80eeaf7e6dab1f5bd Thai Thien 2020-11-22 07:11:16
notebook 5b8b840bc1fd0d806a13646477988534f4982fac Thai Thien 2020-11-22 07:10:42
l2_adamw2_bigtail13i_t13_sha 7fd22510845dec808100058190a202b805bf6a02 Thai Thien 2020-09-23 14:17:00
l2_adamw2_bigtail13i_t12_sha bf67476db55f55f348068bc7683f68834659cc96 Thai Thien 2020-09-23 14:14:35
l2_adamw2_bigtail13i_t11_sha 1365b305bf541a3d941d173b169fa14aa811beb1 Thai Thien 2020-09-22 19:31:17
l2_adamw2_bigtail13i_t10_sha 4f6f2b8d3ac4654cc2ed94ca6900643f3ca07be9 Thai Thien 2020-09-22 19:27:24
l2_adamw2_bigtail13i_t9_sha 576b424688dff640a0a7d448cbd615cfb4f33d54 Thai Thien 2020-09-22 19:21:17
t8 9e00ee3a50c93bbd726ebd8554a68f535d836691 Thai Thien 2020-09-22 19:11:37
split a a7b2115bee8ab76497528b84aee09c79631575b1 Thai Thien 2020-09-22 17:48:37
load_data_shanghaitech_256_v2 and l2_adamw2_bigtail13i_t7_sha 07bca1a460c30ed7ec7269d72dad99eef6fc96b3 Thai Thien 2020-09-22 17:04:45
adamw1_ccnnv7_t7_jhu c5f106103aa3696827627f5e815105d9432e2acb Thai Thien 2020-09-16 18:21:22
adamw1_ccnnv7_t6_jhu.sh 5c8644da7445b3dd4f6d2611d564592e8e8ed45c Thai Thien 2020-09-16 18:19:07
adamw1_CompactCNNV7_t6_jhu 1fa1d544a254cd042bcdd36e8de2984fe3d5244f Thai Thien 2020-09-16 17:56:15
adamw1_CompactCNNV7_t5_jhu 40720162c9e9b731fb852ed7e3e191228e421ced Thai Thien 2020-09-16 17:46:20
test_data cache 73633cbd6d70448268a3e1534440601ddcf75276 Thai Thien 2020-09-16 17:12:57
fix test data not exist 474f506204251e26825801499f6bc503f44f8410 Thai Thien 2020-09-16 16:41:00
fix test data not exist 6377e18d87e0fabbf9b4ed143e26ae5912c0b872 Thai Thien 2020-09-16 16:40:06
jhucrowd_downsample_512 30b8b9a63e93a5631f003858ef2ab63dc62c6563 Thai Thien 2020-09-16 16:17:02
Commit 7f2214ad2882874d5407a2d4d5bd115b7089a020 - some predict demo
Author: Thai Thien
Author date (UTC): 2020-12-02 14:55
Committer name: Thai Thien
Committer date (UTC): 2020-12-02 14:55
Parent(s): 3d5fa0d764843d4fd780e44547fd0b13a367c6bc
Signing key:
Tree: 5b33bdbdba44ee8f50c73caa8012c07236eb8ac6
File Lines added Lines deleted
data_flow.py 3 1
demo_app/predict_only.py 30 3
models/dccnn.py 68 0
predict/.gitkeep 1 0
visualize_util.py 23 0
File data_flow.py changed (mode: 100644) (index 029f79b..812e169)
... ... def get_dataloader(train_list, val_list, test_list, dataset_name="shanghaitech",
1456 1456 def simple_predict_data_load_fn(img_path): def simple_predict_data_load_fn(img_path):
1457 1457 img_name = img_path.split("/")[-1] img_name = img_path.split("/")[-1]
1458 1458 # when debug, give information on p_count and img_name # when debug, give information on p_count and img_name
1459 img_id = img_name.split(".")[0].split("_")[-1]
1459 1460 debug_info = {"img_path":img_path, debug_info = {"img_path":img_path,
1460 "name": img_name}
1461 "name": img_name,
1462 "id": img_id}
1461 1463 img_origin = Image.open(img_path).convert('RGB') img_origin = Image.open(img_path).convert('RGB')
1462 1464 return img_origin, debug_info return img_origin, debug_info
1463 1465
File demo_app/predict_only.py changed (mode: 100644) (index 6d18a33..d677298)
1 1 import os import os
2 import torch
2 3 from data_flow import get_predict_dataloader from data_flow import get_predict_dataloader
4 from models.dccnn import DCCNN
5 from visualize_util import save_density_map_normalize, save_density_map
6
3 7 if __name__ == "__main__": if __name__ == "__main__":
4 8 """ """
5 9 predict all in folder predict all in folder
6 10 output into another folder output into another folder
7 11 output density map and count in csv output density map and count in csv
8 12 """ """
9 INPUT_FOLDER = "/data/ShanghaiTech/part_B/test_data/images/"
13 NAME="bao2"
14 # INPUT_FOLDER = "/data/ShanghaiTech/part_B/test_data/images/"
15 INPUT_FOLDER = "/home/tt/Downloads/bao2"
10 16 OUTPUT_FOLDER = "/data/apps/tmp" OUTPUT_FOLDER = "/data/apps/tmp"
17 MODEL = "/home/tt/project/C-3-folder/trained_model/adamw1_bigtail13i_t1_shb_checkpoint_valid_mae=-7.574910521507263.pth"
11 18 input_list = [os.path.join(INPUT_FOLDER, dir) for dir in os.listdir(INPUT_FOLDER)] input_list = [os.path.join(INPUT_FOLDER, dir) for dir in os.listdir(INPUT_FOLDER)]
12 19 loader = get_predict_dataloader(input_list) loader = get_predict_dataloader(input_list)
20 loaded_file = torch.load(MODEL)
21 model = DCCNN()
22 model.load_state_dict(loaded_file['model'])
23 model.eval()
24 os.mkdir(os.path.join(OUTPUT_FOLDER, NAME))
25 log_file = open(os.path.join(OUTPUT_FOLDER, NAME, NAME +".log"), 'w')
26 limit_count = 100
27 count = 0
13 28 for img, info in loader: for img, info in loader:
14 print(img.shape)
15 print(info)
29 if count > limit_count:
30 break
31 predict_name = "PRED_" + info["name"][0]
32
33 predict_path = os.path.join(OUTPUT_FOLDER, NAME, predict_name)
34 pred = model(img)
35 pred = pred.detach().numpy()[0][0]
36 pred_count = pred.sum()
37 log_line = info["name"][0] + "," + str(pred_count.item()) +"\n"
38 log_file.write(log_line)
39 save_density_map(pred, predict_path)
40 print("save to ", predict_path)
41 count += 1
42 log_file.close()
File models/dccnn.py added (mode: 100644) (index 0000000..87b5e97)
1 import torch.nn as nn
2 import torch
3 import collections
4 import torch.nn.functional as F
5
6 class DCCNN(nn.Module):
7 """
8 A REAL-TIME DEEP NETWORK FOR CROWD COUNTING
9 https://arxiv.org/pdf/2002.06515.pdf
10 """
11 def __init__(self, load_weights=False):
12 super(DCCNN, self).__init__()
13 self.model_note = "BigTail12i, batchnorm default setting, add bn red, green, blue, i mean discard inplace"
14 self.red_cnn = nn.Conv2d(3, 10, 9, padding=4)
15 self.green_cnn = nn.Conv2d(3, 14, 7, padding=3)
16 self.blue_cnn = nn.Conv2d(3, 16, 5, padding=2)
17
18 self.c0 = nn.Conv2d(40, 40, 3, padding=2, dilation=2)
19
20 self.max_pooling = nn.MaxPool2d(kernel_size=2, stride=2)
21 self.avg_pooling = nn.AvgPool2d(kernel_size=2, stride=2)
22
23 self.c1 = nn.Conv2d(40, 60, 3, padding=2, dilation=2)
24 self.c2 = nn.Conv2d(60, 40, 3, padding=2, dilation=2)
25 self.c3 = nn.Conv2d(40, 20, 3, padding=2, dilation=2)
26 self.c4 = nn.Conv2d(20, 10, 3, padding=2, dilation=2)
27 self.output = nn.Conv2d(10, 1, 1)
28
29 self.bn_red = nn.BatchNorm2d(10)
30 self.bn_green = nn.BatchNorm2d(14)
31 self.bn_blue = nn.BatchNorm2d(16)
32
33 self.bn00 = nn.BatchNorm2d(40)
34 self.bn0 = nn.BatchNorm2d(40)
35 self.bn1 = nn.BatchNorm2d(60)
36 self.bn2 = nn.BatchNorm2d(40)
37 self.bn3 = nn.BatchNorm2d(20)
38 self.bn4 = nn.BatchNorm2d(10)
39
40 def forward(self,x):
41 x_red = F.relu(self.red_cnn(x))
42 x_red = self.bn_red(x_red)
43 x_green = F.relu(self.green_cnn(x))
44 x_green = self.bn_green(x_green)
45 x_blue = F.relu(self.blue_cnn(x))
46 x_blue = self.bn_blue(x_blue)
47
48 x = torch.cat((x_red, x_green, x_blue), 1)
49 x = self.bn00(x)
50 x = self.max_pooling(x)
51
52 x = F.relu(self.c0(x))
53 x = self.bn0(x)
54 x = F.relu(self.c1(x))
55 x = self.bn1(x)
56 x = self.avg_pooling(x)
57
58 x = F.relu(self.c2(x))
59 x = self.bn2(x)
60
61 x = F.relu(self.c3(x))
62 x = self.bn3(x)
63 x = self.avg_pooling(x)
64
65 x = F.relu(self.c4(x))
66 x = self.bn4(x)
67 x = self.output(x)
68 return x
File predict/.gitkeep added (mode: 100644) (index 0000000..02448eb)
1 keep the folder
File visualize_util.py changed (mode: 100644) (index 1b0f845..449bb48)
... ... def save_density_map(density_map, name):
17 17 plt.savefig(name, dpi=600, bbox_inches='tight', pad_inches=0) plt.savefig(name, dpi=600, bbox_inches='tight', pad_inches=0)
18 18 plt.close() plt.close()
19 19
20
21 def save_density_map_normalize(density_map, name):
22 den = density_map / np.max(density_map + 1e-20)
23 plt.figure(dpi=600)
24 plt.axis('off')
25 plt.margins(0, 0)
26 plt.imshow(den, cmap=CM.jet)
27 plt.savefig(name, dpi=600, bbox_inches='tight', pad_inches=0)
28 plt.close()
29
30
31
20 32 def save_density_map_with_colorrange(density_map, name, vmin, vmax): def save_density_map_with_colorrange(density_map, name, vmin, vmax):
21 33 plt.figure(dpi=600) plt.figure(dpi=600)
22 34 plt.axis('off') plt.axis('off')
 
... ... def save_density_map_with_colorrange(density_map, name, vmin, vmax):
26 38 plt.savefig(name, dpi=600, bbox_inches='tight', pad_inches=0) plt.savefig(name, dpi=600, bbox_inches='tight', pad_inches=0)
27 39 plt.close() plt.close()
28 40
41 def save_density_map_with_colorrange_max(density_map, name, vmin, vmax):
42 den = density_map/np.max(density_map+1e-20)
43 plt.figure(dpi=600)
44 plt.axis('off')
45 plt.margins(0, 0)
46 plt.imshow(den, cmap=CM.jet)
47 plt.clim(vmin, vmax)
48 plt.savefig(name, dpi=600, bbox_inches='tight', pad_inches=0)
49 plt.close()
50
51
29 52 def save_img(imgnp, name): def save_img(imgnp, name):
30 53 # plt.imshow(imgnp[0].permute(1, 2, 0).numpy()) # plt.imshow(imgnp[0].permute(1, 2, 0).numpy())
31 54 plt.imsave(name, imgnp[0].permute(1, 2, 0).numpy()) plt.imsave(name, imgnp[0].permute(1, 2, 0).numpy())
Hints:
Before your first commit, do not forget to set up your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/hahattpro/crowd_counting_framework

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/hahattpro/crowd_counting_framework

Clone this repository using git:
git clone git://git.rocketgit.com/user/hahattpro/crowd_counting_framework

You are allowed to anonymously push to this repository.
This means that your pushed commits will automatically be transformed into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main