List of commits:
Subject Hash Author Date (UTC)
fix evaluation shb 51cbe92724973f64cb046f7f49fb1976400827e4 Thai Thien 2020-08-23 09:18:35
typo 1cba17e02cc79ee73c4ad5c9f1faab4913f92b01 Thai Thien 2020-08-23 08:53:21
fix file, mae, mse 245396d814f5d83dff1fd1ecc9fcd403be1805cb Thai Thien 2020-08-23 08:52:16
file name strage stuff caeb9f9608e91cf6a1323d9ae9f4fb215c4dd6ea Thai Thien 2020-08-23 08:44:54
TypeError: can only concatenate str (not "list") to str e8aebcfb782966c11c3ca116b5a9cc254021a73d Thai Thien 2020-08-23 08:41:02
key error 5732100d6aca8e3fea6a4d25270edefbc8148a2a Thai Thien 2020-08-23 08:39:09
fix target fdbda2c6923dd164560448a445cd64ff413fc804 Thai Thien 2020-08-23 08:37:48
test path 06d268f873e6ceea93a8e8741d819a03b324cedb Thai Thien 2020-08-23 08:27:38
a 1d73d926894edbc600db678316b9b24a583c4cb8 Thai Thien 2020-08-23 08:25:58
test set bb1e40fc7806c8bef5e94fbaa54ac9af3b599041 Thai Thien 2020-08-23 08:23:30
remove epoch stuff 2cc6434aa298b6da90b4577ce529f971119b86c7 Thai Thien 2020-08-23 07:53:36
evaluation_shb_CompactCNNV7i_t1 28cf202a306b775967c6e466120b019ae1eb6a4d Thai Thien 2020-08-23 07:49:51
evaluation shb 44f3dfdc9e3ad2787cc5286a64f1b28363796b19 Thai Thien 2020-08-23 07:32:50
performance_shb_CompactCNNV7i_t1 2698927959f377bb8308e54aebf029d8a18abf39 Thai Thien 2020-08-14 16:47:58
t7 test size 1 again 06fd0b91bf3c4c7d7713972f91c75ad33e0f25e7 Thai Thien 2020-08-14 16:38:33
t6 pin mem c573feae8f739502b53497c3482dae7a5c55aff7 Thai Thien 2020-08-14 16:35:19
testsize 20 77b4d29cbc8305943c2ec025d60b3937e1686d33 Thai Thien 2020-08-14 16:31:53
t4 increase core 80bcac87ccb6b10ce6e438fcce2f52e52fe1600a Thai Thien 2020-08-14 16:30:02
t3 eb03a48f24f38a078e8a77ba7a25fc647ecb642d Thai Thien 2020-08-14 16:27:21
eval and no grad c6d1e1fdeb6c7ebcc5318139ef22abb53b14e34e Thai Thien 2020-08-14 16:25:11
Commit 51cbe92724973f64cb046f7f49fb1976400827e4 - fix evaluation shb
Author: Thai Thien
Author date (UTC): 2020-08-23 09:18
Committer name: Thai Thien
Committer date (UTC): 2020-08-23 09:18
Parent(s): 1cba17e02cc79ee73c4ad5c9f1faab4913f92b01
Signer:
Signing key:
Signing status: N
Tree: 9087a55d8ed5e9ee4d2f5629ba9f7075b50ea392
File Lines added Lines deleted
debug/evaluate_shb.py 9 16
debug/evaluation_shb_CompactCNNV7i_t1.txt 1 0
train_script/debug/a1/local.sh 21 0
train_script/debug/evaluation_shb_CompactCNNV7i_t1.sh 3 0
File debug/evaluate_shb.py changed (mode: 100644) (index 1e9801e..1d637ab)
... ... from data_flow import get_train_val_list, get_dataloader, create_training_image_
33 33 This file evaluation on SHB and get information on evaluation process This file evaluation on SHB and get information on evaluation process
34 34 """ """
35 35
36 "/data/ShanghaiTech/part_A/test_data"
36 37
37 38 def _parse(): def _parse():
38 39 parser = argparse.ArgumentParser(description='evaluatiuon SHB') parser = argparse.ArgumentParser(description='evaluatiuon SHB')
39 40 parser.add_argument('--input', action="store", type=str, default=HardCodeVariable().SHANGHAITECH_PATH_PART_A) parser.add_argument('--input', action="store", type=str, default=HardCodeVariable().SHANGHAITECH_PATH_PART_A)
40 41 parser.add_argument('--output', action="store", type=str, default="visualize/verify_dataloader_shanghaitech") parser.add_argument('--output', action="store", type=str, default="visualize/verify_dataloader_shanghaitech")
41 parser.add_argument('--load_model', action="store", type=str, default="visualize/verify_dataloader_shanghaitech")
42 parser.add_argument('--load_model', action="store", type=str, default=None)
42 43 parser.add_argument('--model', action="store", type=str, default="visualize/verify_dataloader_shanghaitech") parser.add_argument('--model', action="store", type=str, default="visualize/verify_dataloader_shanghaitech")
43 44 parser.add_argument('--meta_data', action="store", type=str, default="data_info.txt") parser.add_argument('--meta_data', action="store", type=str, default="data_info.txt")
44 45 parser.add_argument('--datasetname', action="store", default="shanghaitech_keepfull_r50") parser.add_argument('--datasetname', action="store", default="shanghaitech_keepfull_r50")
 
... ... def visualize_evaluation_shanghaitech_keepfull(model, args):
60 61 mae_s = 0 mae_s = 0
61 62 mse_s = 0 mse_s = 0
62 63 n = 0 n = 0
64 train_loader_iter = iter(train_loader)
65 _, gt_density,_ = next(train_loader_iter)
63 66 with torch.no_grad(): with torch.no_grad():
64 67 for item in test_loader: for item in test_loader:
65 68 img, gt_density, debug_info = item img, gt_density, debug_info = item
 
... ... def visualize_evaluation_shanghaitech_keepfull(model, args):
69 72 file_name_only = file_name[0].split(".")[0] file_name_only = file_name[0].split(".")[0]
70 73 save_path = os.path.join(saved_folder, "label_" + file_name_only +".png") save_path = os.path.join(saved_folder, "label_" + file_name_only +".png")
71 74 save_pred_path = os.path.join(saved_folder, "pred_" + file_name_only +".png") save_pred_path = os.path.join(saved_folder, "pred_" + file_name_only +".png")
72 save_density_map(gt_density.numpy()[0][0], save_path)
75 save_density_map(gt_density.numpy()[0], save_path)
73 76 pred = model(img.cuda()) pred = model(img.cuda())
74 77 predicted_density_map = pred.detach().cpu().clone().numpy() predicted_density_map = pred.detach().cpu().clone().numpy()
75 78 predicted_density_map_enlarge = cv2.resize(np.squeeze(predicted_density_map[0][0]), (int(predicted_density_map.shape[3] * 8), int(predicted_density_map.shape[2] * 8)), interpolation=cv2.INTER_CUBIC) / 64 predicted_density_map_enlarge = cv2.resize(np.squeeze(predicted_density_map[0][0]), (int(predicted_density_map.shape[3] * 8), int(predicted_density_map.shape[2] * 8)), interpolation=cv2.INTER_CUBIC) / 64
 
... ... if __name__ == "__main__":
104 107 args = _parse() args = _parse()
105 108 print(args) print(args)
106 109
107 DATA_PATH = args.input
108 TRAIN_PATH = os.path.join(DATA_PATH, "train_data_train_split")
109 VAL_PATH = os.path.join(DATA_PATH, "train_data_validate_split")
110 TEST_PATH = os.path.join(DATA_PATH, "test_data")
111 dataset_name = args.datasetname
112 if dataset_name == "shanghaitech":
113 print("will use shanghaitech dataset with crop ")
114 elif dataset_name == "shanghaitech_keepfull":
115 print("will use shanghaitech_keepfull")
116 else:
117 print("cannot detect dataset_name")
118 print("current dataset_name is ", dataset_name)
110
119 111
120 112 # # create list # # create list
121 113 # train_list = create_image_list(TRAIN_PATH) # train_list = create_image_list(TRAIN_PATH)
 
... ... if __name__ == "__main__":
200 192 print("error: you didn't pick a model") print("error: you didn't pick a model")
201 193 exit(-1) exit(-1)
202 194 model = model.to(device) model = model.to(device)
203 checkpoint = torch.load(args.load_model)
204 model.load_state_dict(checkpoint["model"])
195 if args.load_model is not None:
196 checkpoint = torch.load(args.load_model)
197 model.load_state_dict(checkpoint["model"])
205 198 model.eval() model.eval()
206 199 visualize_evaluation_shanghaitech_keepfull(model, args) visualize_evaluation_shanghaitech_keepfull(model, args)
207 200
File debug/evaluation_shb_CompactCNNV7i_t1.txt added (mode: 100644) (index 0000000..9ff254b)
1 IMG_98 146.8988037109375 153369.7569580078125
File train_script/debug/a1/local.sh added (mode: 100644) (index 0000000..63114cc)
1 task="evaluation_shb_CompactCNNV7i_t1"
2 CUDA_VISIBLE_DEVICES=2 OMP_NUM_THREADS=4 PYTHONWARNINGS="ignore" HTTPS_PROXY="http://10.60.28.99:86" nohup python debug/evaluate_shb.py \
3 --model "CompactCNNV7i" \
4 --input /data/ShanghaiTech/part_A/test_data \
5 --output visualize/$task \
6 --load_model saved_model_best/g1_ccnn_v7_t3_shb/g1_ccnn_v7_t3_shb_checkpoint_valid_mae=-8.881268501281738.pth \
7 --meta_data logs/$task.txt \
8 --datasetname shanghaitech_non_overlap \
9 > logs/$task.log &
10
11 echo logs/$task.log
12
13
14 "/data/ShanghaiTech/part_A/test_data"
15
16
17 --model "CompactCNNV7i" \
18 --input /data/ShanghaiTech/part_A/test_data
19 --output visualize/evaluation_shb_CompactCNNV7i_t1
20 --meta_data logs/evaluation_shb_CompactCNNV7i_t1.txt
21 --datasetname shanghaitech_non_overlap
File train_script/debug/evaluation_shb_CompactCNNV7i_t1.sh changed (mode: 100644) (index cff9849..b785539)
... ... CUDA_VISIBLE_DEVICES=2 OMP_NUM_THREADS=4 PYTHONWARNINGS="ignore" HTTPS_PROXY="ht
10 10
11 11 echo logs/$task.log echo logs/$task.log
12 12
13
14 "/data/ShanghaiTech/part_A/test_data"
15
13 16 ## ##
14 17 #def _parse(): #def _parse():
15 18 # parser = argparse.ArgumentParser(description='evaluatiuon SHB') # parser = argparse.ArgumentParser(description='evaluatiuon SHB')
Hints:
Before your first commit, do not forget to set up your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/hahattpro/crowd_counting_framework

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/hahattpro/crowd_counting_framework

Clone this repository using git:
git clone git://git.rocketgit.com/user/hahattpro/crowd_counting_framework

You are allowed to anonymously push to this repository.
This means that your pushed commits will automatically be transformed into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main