List of commits:
Subject Hash Author Date (UTC)
python code to split train data to train and validate 0dfb94063b0bdc7aa660b78ab61b9ee5e61a4199 Thai Thien 2020-05-18 16:19:21
fix dim mismatch 93ea7669d891301e9c00aadccdea27bb5e138656 Thai Thien 2020-05-12 17:19:33
train h1 bigtail ea6391257cd243098cbbb771e705f1f115b845df Thai Thien 2020-05-12 16:58:26
mse mean e96c22a36e305681d7fed415a5a949fa0c1791c9 Thai Thien 2020-05-10 18:32:02
no fix 7bd97e91de5d7c2d307407287c82e60e893c0c92 Thai Thien 2020-05-10 18:22:45
no fix fc20ae6922c2e53f7d37f4228fb921894cd78eab Thai Thien 2020-05-10 18:19:59
t9 d8ef865ea602670548e897d8b7ac4c925cc9b393 Thai Thien 2020-05-10 18:19:30
test with L1 loss 6492b65da4bdf6351b661f39b6bce6f08d37f17c Thai Thien 2020-05-10 18:10:49
H2 1d6d11b2eeecb67dd7d329e38de61b872870a9aa Thai Thien 2020-05-06 17:42:52
do something with l1 loss 5268c4fc163bb512f293fbac381a64a75c4fe462 Thai Thien 2020-05-06 17:32:45
typo b7b8e2303ce99b2196402ec93334598598e71e5a Thai Thien 2020-05-05 17:32:31
increase epoch 67f89509e4294c4310b42e790425c82279df16b3 Thai Thien 2020-05-05 17:25:17
H1 t8 1c692b37536bd72abaa0995001d3a396b82bc2f0 Thai Thien 2020-05-05 17:24:56
OMP_NUM_THREADS=5 ac76431f8ca1ada27ca7ffdaa289996baee064c1 Thai Thien 2020-05-05 17:14:41
train da020f46703ca4fae867a09960593ef6818b4a91 Thai Thien 2020-05-05 17:05:06
batch_size 10 6b6478b9570f9133489c8a9427a857c14a14fb13 Thai Thien 2020-05-02 11:26:38
change dataset preprocess for t3 267d31931fd80178714812fced9f86a27479d54f Thai Thien 2020-05-02 11:23:19
t3 e2a1c6f6e8a6d34b36aa8d6c86a5509bc8d41cdd Thai Thien 2020-05-02 11:20:05
batch size 20 ea5737c694cb2967cb041db99ca391d06a66100d Thai Thien 2020-05-02 11:19:18
ccn v7 shb fixed 15 4b28c4049c4b25a6afeb563864f76907a1e2360e Thai Thien 2020-05-02 11:16:14
Commit 0dfb94063b0bdc7aa660b78ab61b9ee5e61a4199 - python code to split train data to train and validate
Author: Thai Thien
Author date (UTC): 2020-05-18 16:19
Committer name: Thai Thien
Committer date (UTC): 2020-05-18 16:19
Parent(s): 93ea7669d891301e9c00aadccdea27bb5e138656
Signing key:
Tree: 9b74da59021482fc3a8fe38aa74c49ef7f7a5ddb
File Lines added Lines deleted
args_util.py 8 0
dataset_script/shanghaitech-fixed-for-kaggle.py 3 3
dataset_script/sht_train_validate_spllit.py 61 0
File args_util.py changed (mode: 100644) (index d790b68..69122ee)
... ... def meow_parse():
128 128 arg = parser.parse_args() arg = parser.parse_args()
129 129 return arg return arg
130 130
131
131 132 def sanity_check_dataloader_parse(): def sanity_check_dataloader_parse():
132 133 parser = argparse.ArgumentParser(description='Dataloader') parser = argparse.ArgumentParser(description='Dataloader')
133 134 parser.add_argument('--input', action="store", type=str, default=HardCodeVariable().SHANGHAITECH_PATH_PART_A) parser.add_argument('--input', action="store", type=str, default=HardCodeVariable().SHANGHAITECH_PATH_PART_A)
 
... ... def sanity_check_dataloader_parse():
136 137 return arg return arg
137 138
138 139
def train_test_split_parse():
    """Parse CLI arguments for the train/validate split script.

    --input: dataset root containing the images/, ground-truth/ and
             ground-truth-h5/ sub-folders (defaults to the hard-coded
             ShanghaiTech part A path).

    :return: the parsed argparse namespace
    """
    # NOTE: description was previously 'Dataloader', copy-pasted from
    # sanity_check_dataloader_parse(); fixed to describe this script.
    parser = argparse.ArgumentParser(description='Train/validate split')
    parser.add_argument('--input', action="store", type=str,
                        default=HardCodeVariable().SHANGHAITECH_PATH_PART_A)
    arg = parser.parse_args()
    return arg
146
139 147 def real_args_parse(): def real_args_parse():
140 148 """ """
141 149 this is not dummy this is not dummy
File dataset_script/shanghaitech-fixed-for-kaggle.py changed (mode: 100644) (index ca24ff4..bb31edc)
... ... __DATASET_ROOT = "../input/shanghaitech_h5_empty/ShanghaiTech/"
40 40 __OUTPUT_NAME = "ShanghaiTech_PartA_Train/" __OUTPUT_NAME = "ShanghaiTech_PartA_Train/"
41 41
42 42
43 __DATASET_ROOT =
44 __OUTPUT_NAME =
45 __PART =
43 __DATASET_ROOT = "ROOT_HERE"
44 __OUTPUT_NAME = "OUTPUT_HERE"
45 __PART = "PART_HERE"
46 46
47 47 def gaussian_filter_density_fixed(gt, sigma): def gaussian_filter_density_fixed(gt, sigma):
48 48 print(gt.shape) print(gt.shape)
File dataset_script/sht_train_validate_spllit.py added (mode: 100644) (index 0000000..d2f811e)
1 from sklearn.model_selection import train_test_split
2 import glob
3 import os
4 from shutil import copyfile
5 from args_util import train_test_split_parse
6
7
def copy_data(image_list, dest_path, src_path=None):
    """Copy each image plus its ground-truth files into dest_path.

    For every image path, the matching ground-truth files are derived by
    string substitution on the path:
        images/IMG_x.jpg -> ground-truth-h5/IMG_x.h5
        images/IMG_x.jpg -> ground-truth/GT_IMG_x.mat

    :param image_list: list of .jpg paths located under src_path/images
    :param dest_path: destination root; must already contain the three
                      sub-folders images, ground-truth, ground-truth-h5
    :param src_path: source root prefix to replace with dest_path in each
                     path; defaults to the module-level DATA_PATH global
                     (preserves the original, script-coupled behavior)
    """
    if src_path is None:
        # Backward compatible: the original implementation read the
        # script's DATA_PATH global directly.
        src_path = DATA_PATH
    for image_path in image_list:
        gt_path = image_path.replace('.jpg', '.h5').replace('images', 'ground-truth-h5')
        gt_mat_path = image_path.replace('.jpg', '.mat').replace('images', 'ground-truth').replace('IMG', 'GT_IMG')

        # destination counterparts: swap the source root for the dest root
        dest_image_path = image_path.replace(src_path, dest_path)
        dest_gt_path = gt_path.replace(src_path, dest_path)
        dest_gt_mat_path = gt_mat_path.replace(src_path, dest_path)

        copyfile(image_path, dest_image_path)
        copyfile(gt_path, dest_gt_path)
        copyfile(gt_mat_path, dest_gt_mat_path)
        print("copy ", image_path, dest_image_path)
22
23
if __name__ == "__main__":
    # Split a ShanghaiTech-style training set into train/validate folders.
    # The input root must contain 3 sub-folders: images, ground-truth,
    # ground-truth-h5 (e.g. .../part_A/train_data).
    #
    # Fix: the original hard-coded DATA_PATH assignment was dead code
    # (immediately overwritten by args.input) and the manual trailing-slash
    # check crashed on an empty string; rstrip handles both safely.
    args = train_test_split_parse()
    DATA_PATH = args.input.rstrip("/")  # normalize: no trailing slash

    # get list of sample images
    image_list = glob.glob(os.path.join(DATA_PATH, "images", "*.jpg"))

    # fixed random_state so the split is reproducible across runs
    train_image_list, validate_image_list = train_test_split(
        image_list, test_size=0.2, random_state=19051890)

    print("train count ", len(train_image_list))
    print("validate count ", len(validate_image_list))

    # output roots are created as siblings of the input folder
    train_path = DATA_PATH + "_train_split"
    validate_path = DATA_PATH + "_validate_split"

    # create the expected sub-folder layout in both destinations
    for root in (train_path, validate_path):
        for sub in ("images", "ground-truth-h5", "ground-truth"):
            os.makedirs(os.path.join(root, sub), exist_ok=True)

    copy_data(train_image_list, train_path)
    copy_data(validate_image_list, validate_path)
61
Hints:
Before first commit, do not forget to setup your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/hahattpro/crowd_counting_framework

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/hahattpro/crowd_counting_framework

Clone this repository using git:
git clone git://git.rocketgit.com/user/hahattpro/crowd_counting_framework

You are allowed to anonymously push to this repository.
This means that your pushed commits will automatically be transformed into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main