List of commits:
Subject Hash Author Date (UTC)
jhucrowd_downsample_512 30b8b9a63e93a5631f003858ef2ab63dc62c6563 Thai Thien 2020-09-16 16:17:02
fix 77d74f09874a7be0d8fcec141597be4ce331bf0c Thai Thien 2020-09-16 16:08:39
fix 897856c014a517da679a8a1c46c6d0b90c965876 Thai Thien 2020-09-16 16:05:29
jhucrowd_downsample_256 b992d3acc7f1e1aa0ed6265c48e9f065fa1d3085 Thai Thien 2020-09-16 16:02:19
adamw1_bigtail13i_t7_jhu 81e1a8ff445b76f0a9c9c9882a11da65b8b13088 Thai Thien 2020-09-10 18:19:34
adamw1_CompactCNNV7_t1_jhu ed5c3696081241e7ebd7370863ea08ec83467a0b Thai Thien 2020-09-10 18:12:05
no force cache 33946d1f09a1e77c7758acbcd61e385a1800e854 Thai Thien 2020-09-10 17:47:06
skip train eval 186cb449855cb567fc61801bd3a1a4c0767a7d35 Thai Thien 2020-09-10 17:17:10
70 cuda mem but why ? f63f482418b6c85d2a289da9e9e43ca2b50cdd5d Thai Thien 2020-09-10 16:42:00
n_thread int f6243317336e07b9b6177c89ec3072c7d18189c6 Thai Thien 2020-09-10 16:22:36
t3 fix num thread 8981cf96a953c643b3443f39ad73487d2e7e044d Thai Thien 2020-09-10 16:20:28
jhu float32 c0b004d6a733f83e469bb3d48f7e20a9c7113957 Thai Thien 2020-09-10 16:14:09
fix float 8867f5d5e160c38a1a7d67b2fac728cf60e6649e Thai Thien 2020-09-10 16:03:15
try catch empty file e7d392cd6ec807f3ae242ef5090f5725ced9f897 Thai Thien 2020-09-10 15:40:14
case no label at all 51b423b2ee9271e3639a332ecab092944636d8f2 Thai Thien 2020-09-09 13:49:29
remove experiment 06478164cb8e00ed5512a0d4db0dbc6edc1b5ad1 Thai Thien 2020-09-08 15:10:33
val train test 5a43d57ba16667cb96f9514dcf4b81167ff8bd5a Thai Thien 2020-09-08 15:01:51
parallel, try catch excemption 9ac6a36880a113c1820b1e6a702ed5c08ebcb03f Thai Thien 2020-09-08 14:55:08
now show the number of point in log 54a1a16adf32c866ed65808d16da5f997d27b54f Thai Thien 2020-09-07 17:19:18
gt to ground-truth 3996d1c19d68f3669b5f54e5b621635f0b87a9fc Thai Thien 2020-09-07 17:06:15
Commit 30b8b9a63e93a5631f003858ef2ab63dc62c6563 - jhucrowd_downsample_512
Author: Thai Thien
Author date (UTC): 2020-09-16 16:17
Committer name: Thai Thien
Committer date (UTC): 2020-09-16 16:17
Parent(s): 77d74f09874a7be0d8fcec141597be4ce331bf0c
Signing key:
Tree: 2fe1b1e308f8d4c9e2a16e985a44edde0838e41d
File Lines added Lines deleted
data_flow.py 13 11
train_script/learnstuff/l3/adamw1_ccnnv7_t2_jhu.sh 1 1
File data_flow.py changed (mode: 100644) (index d7fed10..23088d0)
@@ def load_data_jhucrowd_256(img_path, train=True, debug=False):
     return img, target1


-def load_data_jhucrowd_downsample_256(img_path, train=True, debug=False):
+def load_data_jhucrowd_downsample_512(img_path, train=True, debug=False):
     """
     for jhucrowd
     crop fixed 256, allow batch in non-uniform dataset
@@ def load_data_jhucrowd_downsample_256(img_path, train=True, debug=False):
     """
     gt_path = img_path.replace('.jpg', '.h5').replace('images', 'ground-truth-h5')
     img_origin = Image.open(img_path).convert('RGB')
-    # downsample by half
     gt_file = h5py.File(gt_path, 'r')
     target = np.asarray(gt_file['density']).astype('float32')
-    downsample_factor = 2
-    target_factor = 8 / downsample_factor
-    crop_sq_size = 256 * downsample_factor
+    downsample_rate = 2
+    target_factor = 8 * downsample_rate
+    crop_sq_size = 512
+
     if train:
         crop_size = (crop_sq_size, crop_sq_size)
         dx = int(random.random() * (img_origin.size[0] - crop_sq_size))
@@ def load_data_jhucrowd_downsample_256(img_path, train=True, debug=False):
         if img_origin.size[0] - crop_sq_size < 0 or img_origin.size[1] - crop_sq_size < 0:  # we crop more than we can chew, so...
             return None, None
         img = img_origin.crop((dx, dy, crop_size[0] + dx, crop_size[1] + dy))
-        img2 = img.resize((int(img.size[0] / 2), int(img.size[1] / 2)), resample=Image.ANTIALIAS)
+        img2 = img.resize((int(img.size[0] / downsample_rate), int(img.size[1] / downsample_rate)),
+                          resample=Image.ANTIALIAS)
         target = target[dy:crop_size[1] + dy, dx:crop_size[0] + dx]

         if random.random() > 0.8:
@@ def load_data_jhucrowd_downsample_256(img_path, train=True, debug=False):
         # get correct people head count from head annotation
         txt_path = img_path.replace('.jpg', '.txt').replace('images', 'ground-truth')
         gt_count = count_gt_annotation_jhu(txt_path)
-        img_out = img_origin.resize((int(img_origin.size[0] / 2), int(img_origin.size[1] / 2)), resample=Image.ANTIALIAS)
+        img_eval = img_origin.resize((int(img_origin.size[0] / downsample_rate), int(img_origin.size[1] / downsample_rate)),
+                                     resample=Image.ANTIALIAS)
         if debug:
             gt_file = h5py.File(gt_path, 'r')
             target = np.asarray(gt_file['density'])
-            return img_origin, gt_count, target
-        return img_out, gt_count
+            return img_eval, gt_count, target
+        return img_eval, gt_count

     target1 = cv2.resize(target, (int(target.shape[1] / target_factor), int(target.shape[0] / target_factor)),
                          interpolation=cv2.INTER_CUBIC) * target_factor * target_factor
@@ class ListDataset(Dataset):
             self.load_data_fn = load_data_shanghaitech_180
         elif dataset_name == "shanghaitech_256":
             self.load_data_fn = load_data_shanghaitech_256
-        elif dataset_name == "jhucrowd_downsample_256":
-            self.load_data_fn = load_data_jhucrowd_downsample_256
+        elif dataset_name == "jhucrowd_downsample_512":
+            self.load_data_fn = load_data_jhucrowd_downsample_512
         elif dataset_name == "shanghaitech_non_overlap":
             self.load_data_fn = load_data_shanghaitech_non_overlap
         elif dataset_name == "shanghaitech_non_overlap_downsample":
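Net effect of the data_flow.py change: the new loader crops a 512x512 patch, halves it to 256x256 before it reaches the network, and shrinks the density map by target_factor = 8 * downsample_rate = 16 per side (the factor of 8 presumably matches the network's output stride), rescaling so the crowd count is preserved. A minimal, runnable sketch of that count-preserving resize, assuming a NumPy density map; the helper name is illustrative, not from the repo:

import numpy as np
import cv2

def downsample_density_map(target, downsample_rate=2):
    # Shrink by 8 * downsample_rate = 16 per side, then multiply by the
    # area ratio so target.sum() (the head count) stays roughly unchanged.
    target_factor = 8 * downsample_rate
    return cv2.resize(
        target,
        (int(target.shape[1] / target_factor), int(target.shape[0] / target_factor)),
        interpolation=cv2.INTER_CUBIC,
    ) * target_factor * target_factor

density = np.random.rand(512, 512).astype('float32')  # stands in for a cropped gt_file['density']
small = downsample_density_map(density)
print(small.shape)                 # (32, 32)
print(density.sum(), small.sum())  # roughly equal; INTER_CUBIC makes it approximate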
File train_script/learnstuff/l3/adamw1_ccnnv7_t2_jhu.sh changed (mode: 100644) (index 63fd722..bde6dfb)
@@ CUDA_VISIBLE_DEVICES=4 OMP_NUM_THREADS=6 PYTHONWARNINGS="ignore" HTTPS_PROXY="ht
 --decay 0.1 \
 --loss_fn "MSEL1Mean" \
 --batch_size 50 \
---datasetname jhucrowd_downsample_256 \
+--datasetname jhucrowd_downsample_512 \
 --optim adamw \
 --skip_train_eval \
 --epochs 401 > logs/$task.log &
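The script change itself only flips --datasetname; at run time that string is matched by the if/elif chain in ListDataset (shown in the data_flow.py diff above) to select the loader. A condensed sketch of that dispatch, with the loader body stubbed and the chain folded into a dict for brevity; the repo uses if/elif, and pick_loader is a hypothetical helper:

def load_data_jhucrowd_downsample_512(img_path, train=True, debug=False):
    # Stub: the real loader crops a 512x512 patch, halves the image, and
    # shrinks the density map 16x per side (see the data_flow.py diff above).
    raise NotImplementedError

LOADERS = {
    "jhucrowd_downsample_512": load_data_jhucrowd_downsample_512,
}

def pick_loader(dataset_name):
    # In this sketch unknown names raise; the real chain's fallback
    # behavior is not visible in the diff.
    try:
        return LOADERS[dataset_name]
    except KeyError:
        raise ValueError("unknown dataset_name: %r" % dataset_name)

print(pick_loader("jhucrowd_downsample_512").__name__)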