List of commits:
Subject Hash Author Date (UTC)
load_data_shanghaitech_256_v2 and l2_adamw2_bigtail13i_t7_sha 07bca1a460c30ed7ec7269d72dad99eef6fc96b3 Thai Thien 2020-09-22 17:04:45
adamw1_ccnnv7_t7_jhu c5f106103aa3696827627f5e815105d9432e2acb Thai Thien 2020-09-16 18:21:22
adamw1_ccnnv7_t6_jhu.sh 5c8644da7445b3dd4f6d2611d564592e8e8ed45c Thai Thien 2020-09-16 18:19:07
adamw1_CompactCNNV7_t6_jhu 1fa1d544a254cd042bcdd36e8de2984fe3d5244f Thai Thien 2020-09-16 17:56:15
adamw1_CompactCNNV7_t5_jhu 40720162c9e9b731fb852ed7e3e191228e421ced Thai Thien 2020-09-16 17:46:20
test_data cache 73633cbd6d70448268a3e1534440601ddcf75276 Thai Thien 2020-09-16 17:12:57
fix test data not exist 474f506204251e26825801499f6bc503f44f8410 Thai Thien 2020-09-16 16:41:00
fix test data not exist 6377e18d87e0fabbf9b4ed143e26ae5912c0b872 Thai Thien 2020-09-16 16:40:06
jhucrowd_downsample_512 30b8b9a63e93a5631f003858ef2ab63dc62c6563 Thai Thien 2020-09-16 16:17:02
fix 77d74f09874a7be0d8fcec141597be4ce331bf0c Thai Thien 2020-09-16 16:08:39
fix 897856c014a517da679a8a1c46c6d0b90c965876 Thai Thien 2020-09-16 16:05:29
jhucrowd_downsample_256 b992d3acc7f1e1aa0ed6265c48e9f065fa1d3085 Thai Thien 2020-09-16 16:02:19
adamw1_bigtail13i_t7_jhu 81e1a8ff445b76f0a9c9c9882a11da65b8b13088 Thai Thien 2020-09-10 18:19:34
adamw1_CompactCNNV7_t1_jhu ed5c3696081241e7ebd7370863ea08ec83467a0b Thai Thien 2020-09-10 18:12:05
no force cache 33946d1f09a1e77c7758acbcd61e385a1800e854 Thai Thien 2020-09-10 17:47:06
skip train eval 186cb449855cb567fc61801bd3a1a4c0767a7d35 Thai Thien 2020-09-10 17:17:10
70 cuda mem but why ? f63f482418b6c85d2a289da9e9e43ca2b50cdd5d Thai Thien 2020-09-10 16:42:00
n_thread int f6243317336e07b9b6177c89ec3072c7d18189c6 Thai Thien 2020-09-10 16:22:36
t3 fix num thread 8981cf96a953c643b3443f39ad73487d2e7e044d Thai Thien 2020-09-10 16:20:28
jhu float32 c0b004d6a733f83e469bb3d48f7e20a9c7113957 Thai Thien 2020-09-10 16:14:09
Commit 07bca1a460c30ed7ec7269d72dad99eef6fc96b3 - load_data_shanghaitech_256_v2 and l2_adamw2_bigtail13i_t7_sha
Author: Thai Thien
Author date (UTC): 2020-09-22 17:04
Committer name: Thai Thien
Committer date (UTC): 2020-09-22 17:04
Parent(s): c5f106103aa3696827627f5e815105d9432e2acb
Signing key:
Tree: ecc352453c516cccb5da64243045686a5821a983
File Lines added Lines deleted
data_flow.py 45 1
experiment_main.py 2 2
train_script/learnstuff/l2/l2_adamw2_bigtail13i_t7_sha.sh 6 9
File data_flow.py changed (mode: 100644) (index c4ebf96..44b669e)
... ... def load_data(img_path, train=True):
94 94 img = Image.open(img_path).convert('RGB')
95 95 gt_file = h5py.File(gt_path, 'r')
96 96 target = np.asarray(gt_file['density'])
97 gt_file.close()
97 98
98 99 target = cv2.resize(target, (int(target.shape[1] / 8), int(target.shape[0] / 8)),
99 100 interpolation=cv2.INTER_CUBIC) * 64
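This hunk, and the three that follow, add the same one-line fix: gt_file.close() right after the density map is read. h5py keeps the file handle open until it is closed or garbage-collected, so a loader that runs thousands of times per epoch (possibly across several DataLoader workers) can otherwise exhaust file descriptors. A minimal sketch of the same read using a context manager, which closes the handle even on error (hypothetical helper, not part of the commit):

import h5py
import numpy as np

def read_density_map(gt_path):
    # 'with' closes the HDF5 handle even if an exception is raised,
    # matching the intent of the explicit gt_file.close() added here.
    with h5py.File(gt_path, 'r') as gt_file:
        # np.asarray materializes the dataset before the file closes.
        target = np.asarray(gt_file['density'])
    return target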
 
... ... def load_data_shanghaitech(img_path, train=True):
106 107 img = Image.open(img_path).convert('RGB')
107 108 gt_file = h5py.File(gt_path, 'r')
108 109 target = np.asarray(gt_file['density'])
110 gt_file.close()
109 111
110 112 if train:
111 113 crop_size = (int(img.size[0] / 2), int(img.size[1] / 2))
 
... ... def load_data_shanghaitech_rnd(img_path, train=True):
142 144 img = Image.open(img_path).convert('RGB')
143 145 gt_file = h5py.File(gt_path, 'r')
144 146 target = np.asarray(gt_file['density'])
145
147 gt_file.close()
146 148 if train:
147 149 crop_size = (int(img.size[0] / 2), int(img.size[1] / 2))
148 150 if random.randint(0, 9) <= 4:
 
... ... def load_data_shanghaitech_256(img_path, train=True):
735 737 img = Image.open(img_path).convert('RGB')
736 738 gt_file = h5py.File(gt_path, 'r')
737 739 target = np.asarray(gt_file['density'])
740 gt_file.close()
738 741 target_factor = 8
739 742 crop_sq_size = 256
740 743 if train:
 
... ... def load_data_shanghaitech_256(img_path, train=True):
756 759 target1 = np.expand_dims(target1, axis=0) # make dim (batch size, channel size, x, y) to make model output
757 760 return img, target1
758 761
762 def load_data_shanghaitech_256_v2(img_path, train=True):
763 """
764 crop fixed 256, allow batch in non-uniform dataset
765 :param img_path:
766 :param train:
767 :return:
768 """
769 gt_path = img_path.replace('.jpg', '.h5').replace('images', 'ground-truth-h5')
770 img_origin = Image.open(img_path).convert('RGB')
771 gt_file = h5py.File(gt_path, 'r')
772 target = np.asarray(gt_file['density'])
773 gt_file.close()
774 target_factor = 8
775 crop_sq_size = 256
776 if train:
777 crop_size = (crop_sq_size, crop_sq_size)
778 dx = int(random.random() * (img_origin.size[0] - crop_sq_size))
779 dy = int(random.random() * (img_origin.size[1] - crop_sq_size))
780 if img_origin.size[0] - crop_sq_size < 0 or img_origin.size[1] - crop_sq_size < 0: # we crop more than we can chew, so...
781 # TODO if exception, do something here
782 return None, None
783 img = img_origin.crop((dx, dy, crop_size[0] + dx, crop_size[1] + dy))
784 target = target[dy:crop_size[1] + dy, dx:crop_size[0] + dx]
785
786 if random.random() > 0.8:
787 target = np.fliplr(target)
788 img = img.transpose(Image.FLIP_LEFT_RIGHT)
789
790 if not train:
791 # get correct people head count from head annotation
792 mat_path = img_path.replace('.jpg', '.mat').replace('images', 'ground-truth').replace('IMG', 'GT_IMG')
793 gt_count = count_gt_annotation_sha(mat_path)
794 return img_origin, gt_count
795
796 target1 = cv2.resize(target, (int(target.shape[1] / target_factor), int(target.shape[0] / target_factor)),
797 interpolation=cv2.INTER_CUBIC) * target_factor * target_factor
798 # target1 = target1.unsqueeze(0) # make dim (batch size, channel size, x, y) to make model output
799 target1 = np.expand_dims(target1, axis=0) # make dim (batch size, channel size, x, y) to make model output
800 return img, target1
759 801
760 802 def load_data_shanghaitech_same_size_density_map(img_path, train=True):
761 803 gt_path = img_path.replace('.jpg', '.h5').replace('images', 'ground-truth-h5')
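The new loader differs from load_data_shanghaitech_256 in one key way: every training sample is a fixed 256x256 crop, so samples from images of different sizes can be stacked into one batch ("allow batch in non-uniform dataset" in the docstring, and --batch_size 60 in the script below). A minimal standalone sketch of the cropping idea, with hypothetical names:

import random
import numpy as np
from PIL import Image

def random_fixed_crop(img, target, size=256):
    # Returns (None, None) when the image is smaller than the crop window,
    # mirroring the guard in load_data_shanghaitech_256_v2.
    if img.size[0] < size or img.size[1] < size:
        return None, None
    dx = int(random.random() * (img.size[0] - size))
    dy = int(random.random() * (img.size[1] - size))
    img_crop = img.crop((dx, dy, dx + size, dy + size))
    target_crop = target[dy:dy + size, dx:dx + size]
    return img_crop, target_crop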
 
... ... class ListDataset(Dataset):
1255 1297 self.load_data_fn = load_data_shanghaitech_180
1256 1298 elif dataset_name == "shanghaitech_256":
1257 1299 self.load_data_fn = load_data_shanghaitech_256
1300 elif dataset_name == "shanghaitech_256_v2":
1301 self.load_data_fn = load_data_shanghaitech_256_v2
1258 1302 elif dataset_name == "jhucrowd_downsample_512":
1259 1303 self.load_data_fn = load_data_jhucrowd_downsample_512
1260 1304 elif dataset_name == "jhucrowd_downsample_testonly_512":
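Registering the name means the new loader can be selected purely by flag. A hypothetical usage sketch; the actual ListDataset constructor in data_flow.py may take further arguments (transforms, train flag, cache settings):

from torch.utils.data import DataLoader
from data_flow import ListDataset  # class shown in the hunk above

# train_image_list is a hypothetical list of .jpg paths.
train_set = ListDataset(train_image_list, dataset_name="shanghaitech_256_v2")
# Fixed-size crops give every sample the same shape, so batch_size > 1 works
# even though the source ShanghaiTech images vary in resolution.
train_loader = DataLoader(train_set, batch_size=60, shuffle=True, num_workers=4)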
File experiment_main.py changed (mode: 100644) (index 917f86e..f8ecc52)
1 1 from comet_ml import Experiment
2
2 import sys
3 3 from args_util import meow_parse, lr_scheduler_milestone_builder
4 4 from data_flow import get_dataloader, create_image_list
5 5 from ignite.engine import Events, create_supervised_trainer, create_supervised_evaluator
 
... ... if __name__ == "__main__":
140 140 model = CompactCNNV9()
141 141 else:
142 142 print("error: you didn't pick a model")
143 exit(-1)
143 sys.exit(-1)
144 144 n_param = very_simple_param_count(model)
145 145 experiment.log_other("n_param", n_param)
146 146 if hasattr(model, 'model_note'):
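The exit(-1) to sys.exit(-1) change is small but deliberate: the bare exit() builtin is injected by the site module for interactive use and is not guaranteed to exist in every runtime (for example under python -S or in some frozen builds), while sys.exit() simply raises SystemExit. A sketch of the corrected pattern (the condition is hypothetical):

import sys

if model is None:  # hypothetical "no model picked" condition
    print("error: you didn't pick a model")
    sys.exit(-1)  # raises SystemExit(-1); the POSIX exit status becomes 255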
File train_script/learnstuff/l2/l2_adamw2_bigtail13i_t7_sha.sh copied from file train_script/learnstuff/l2/l2_adamw2_bigtail13i_t6_sha.sh (similarity 53%) (mode: 100644) (index f4f68b2..835c329)
1 task="l2_adamw2_bigtail13i_t6_sha"
1 task="l2_adamw2_bigtail13i_t7_sha"
2 2
3 CUDA_VISIBLE_DEVICES=4 OMP_NUM_THREADS=2 PYTHONWARNINGS="ignore" HTTPS_PROXY="http://10.60.28.99:86" nohup python experiment_main.py \
3 CUDA_VISIBLE_DEVICES=4 OMP_NUM_THREADS=4 PYTHONWARNINGS="ignore" HTTPS_PROXY="http://10.60.28.99:86" nohup python experiment_main.py \
4 4 --task_id $task \
5 --note "keepfull with lr scheduler, starting -3" \
5 --note "256 lr and decay -3" \
6 6 --model "BigTail13i" \
7 7 --input /data/rnd/thient/thient_data/ShanghaiTech/part_A \
8 8 --lr 1e-3 \
9 --lr_scheduler \
10 --step_list 30,50,70 \
11 --lr_list 1e-3,1e-4,1e-5 \
12 --decay 1e-5 \
9 --decay 1e-3 \
13 10 --loss_fn "MSEL1Mean" \
14 --datasetname shanghaitech_keepfull_r50 \
11 --datasetname shanghaitech_256_v2 \
15 12 --optim adamw \
16 --cache \
13 --batch_size 60 \
17 14 --epochs 1201 > logs/$task.log &
18 15
19 16 echo logs/$task.log # for convenience
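Net effect of the script changes: the t7 run drops the milestone LR scheduler and the cache flag, raises weight decay from 1e-5 to 1e-3, switches the dataset to shanghaitech_256_v2, and uses the batch size of 60 that the fixed-size crops make possible. Assuming the framework maps --optim adamw, --lr, and --decay onto PyTorch's optimizer in the usual way, the configuration corresponds roughly to:

import torch

# Rough equivalent of "--optim adamw --lr 1e-3 --decay 1e-3"; the real
# optimizer construction happens inside the framework, not in this script.
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3, weight_decay=1e-3)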