List of commits:
Subject Hash Author Date (UTC)
debug sha keepful e14c3307499356c7a7ed0d5f33818bd798276d94 Thai Thien 2020-08-03 15:44:02
adamw2_bigtail13i_t3_sha f769948ddd511865f62cddc80957663508e25976 Thai Thien 2020-08-02 04:25:08
g 56048b30fb4a19ae52c641b07754756f43ddcefb Thai Thien 2020-08-02 04:00:46
a d4e0e0d63613536a18f7ae0f39e4798f12abe7e8 Thai Thien 2020-08-02 03:59:14
adamw1_bigtail13i_t4_sha bc07d19cfcc88c4271d1c6f1984c1c96ff5a9413 Thai Thien 2020-08-01 16:19:17
adamw1_bigtail13i_t3_sha 17c169d6ef648e8c4f2c5383fb9226cfce48d626 Thai Thien 2020-08-01 16:11:33
adamw1_bigtail13i_t2_sha 8bc23f1401eacdc2e28f30a825503f0dd96bacc9 Thai Thien 2020-08-01 15:59:29
adamw1_bigtail13i_t2_shb aadc7244daabdad64a548342865a22beda073d94 Thai Thien 2020-08-01 15:47:08
gpu4 95c356f61c49d9f1927da7f86135a9bb60d21a59 Thai Thien 2020-08-01 14:46:30
adamw1_bigtail13i_t1_sha 34434bd013a4b05134c5ce42aa8f9956d424467d Thai Thien 2020-08-01 14:44:56
a 140443bb072f8086b6cef253bcadfb0d925ba606 Thai Thien 2020-08-01 12:09:09
adamw 57f61e52a024e91f57347c85a8c14813df16d15d Thai Thien 2020-08-01 12:05:36
adam1_bigtail13i_t1_shb 713dd1d5a0f13a2c881b00687fabb1174383da5b Thai Thien 2020-08-01 10:22:17
g1_BigTail14i_t8_sha eb361a5e64d38062fa6e5aa61ae21adc5a536aef Thai Thien 2020-07-31 15:45:50
g1_BigTail14i_t7_sha 32ff746fa0c272af36eadcd713cda94ef8f1b8e2 Thai Thien 2020-07-31 14:57:15
g1_BigTail14i_t6_sha 0656f2ac088bd2790b560931839bfa1d4bed98c4 Thai Thien 2020-07-30 19:32:13
sgd -6 g1_BigTail14i_t6_sha.sh 4c2877d478c40382109800e5a4b873d7a11b345c Thai Thien 2020-07-30 19:29:58
t5 070bf50a1a9bba8b8406c5c58b5806e39e99b56e Thai Thien 2020-07-30 19:26:48
g1_BigTail14i_t4_sha 71b259dab3cec69c9b40944cfe9e83ee723c8675 Thai Thien 2020-07-30 19:21:34
g1_BigTail14i_t3_sha 2a3ab6ec6283df6dfff8d05206403debca6f5259 Thai Thien 2020-07-30 19:11:39
Commit e14c3307499356c7a7ed0d5f33818bd798276d94 - debug sha keepful
Author: Thai Thien
Author date (UTC): 2020-08-03 15:44
Committer name: Thai Thien
Committer date (UTC): 2020-08-03 15:44
Parent(s): f769948ddd511865f62cddc80957663508e25976
Signing key:
Tree: 6fba0419a8389d80577a15b095ee108c3e3f2b1b
File                 Lines added   Lines deleted
data_flow.py                  15               5
debug/debug_sha.py            43               0
File data_flow.py changed (mode: 100644) (index 1df0ec7..26d0022)

@@ -1082,8 +1082,8 @@ class ListDataset(Dataset):
     def __getitem__(self, index):
         assert index <= len(self), 'index range error'
         img_path = self.lines[index]
-        if self.debug:
-            print(img_path)
+        # if self.debug:
+        #     print(img_path)
         # try to check cache item if exist
         if self.cache and self.train and index in self.cache_train.keys():
             img, target = self.cache_train[index]
@@ -1107,12 +1107,18 @@ class ListDataset(Dataset):
             self.cache_train[index] = (img, target)
         else:
             self.cache_eval[index] = (img, target)
-
-        return img, target
+        if self.debug:
+            _, p_count = self.load_data_fn(img_path, train=False)
+            print(img_path + " " + str(target.sum()) + " " + str(p_count))
+            return img, target, p_count
+        else:
+            return img, target
 
 
 def get_dataloader(train_list, val_list, test_list, dataset_name="shanghaitech", visualize_mode=False, batch_size=1,
-                   train_loader_for_eval_check=False, cache=False, pin_memory=False):
+                   train_loader_for_eval_check=False, cache=False, pin_memory=False,
+                   debug=False):
+
     if visualize_mode:
         transformer = transforms.Compose([
             transforms.ToTensor()
@@ -1134,6 +1140,7 @@ def get_dataloader(train_list, val_list, test_list, dataset_name="shanghaitech",
                     train=True,
                     batch_size=batch_size,
                     num_workers=0,
+                    debug=debug,
                     dataset_name=dataset_name, cache=cache),
         batch_size=batch_size,
         num_workers=0,
@@ -1146,6 +1153,7 @@ def get_dataloader(train_list, val_list, test_list, dataset_name="shanghaitech",
                     train=False,
                     batch_size=batch_size,
                     num_workers=0,
+                    debug=debug,
                     dataset_name=dataset_name, cache=cache),
         batch_size=1,
         num_workers=0,
@@ -1157,6 +1165,7 @@ def get_dataloader(train_list, val_list, test_list, dataset_name="shanghaitech",
                     shuffle=False,
                     transform=transformer,
                     train=False,
+                    debug=debug,
                     dataset_name=dataset_name, cache=cache),
         num_workers=0,
         batch_size=1,
@@ -1170,6 +1179,7 @@ def get_dataloader(train_list, val_list, test_list, dataset_name="shanghaitech",
                     shuffle=False,
                     transform=transformer,
                     train=False,
+                    debug=debug,
                     dataset_name=dataset_name),
         num_workers=0,
         batch_size=1,
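After this change, ListDataset.__getitem__ returns (img, target) in normal mode and (img, target, p_count) in debug mode, where p_count is the ground-truth point count reloaded via self.load_data_fn(img_path, train=False); printing it next to target.sum() makes it easy to spot samples whose density map no longer integrates to the annotated head count. A minimal self-contained sketch of the same pattern (the DebugListDataset class and its stubbed samples are hypothetical, not the repository's code):

import torch
from torch.utils.data import Dataset, DataLoader

class DebugListDataset(Dataset):
    """Hypothetical stand-in for ListDataset's debug mode."""
    def __init__(self, samples, debug=False):
        # samples: list of (img, density_map, point_count) triples
        self.samples = samples
        self.debug = debug

    def __len__(self):
        return len(self.samples)

    def __getitem__(self, index):
        img, target, p_count = self.samples[index]
        if self.debug:
            # sanity check: the density map should integrate to roughly the point count
            print("sum=%.2f count=%d" % (target.sum().item(), p_count))
            return img, target, p_count
        return img, target

samples = [(torch.rand(3, 32, 32), torch.rand(1, 32, 32) * 0.01, 7)]
loader = DataLoader(DebugListDataset(samples, debug=True), batch_size=1)
img, label, count = next(iter(loader))  # a 3-tuple only because debug=True

Because the tuple arity depends on the flag, any consumer (such as the debug script below) has to be written against the same debug setting the loader was built with.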
File debug/debug_sha.py added (mode: 100644) (index 0000000..4a6691f)

# sha shanghaitech_keepfull is not convergent
from args_util import real_args_parse
from data_flow import get_train_val_list, get_dataloader, create_training_image_list
from ignite.engine import Events, create_supervised_trainer, create_supervised_evaluator
from ignite.metrics import Loss, MeanAbsoluteError, MeanSquaredError
from crowd_counting_error_metrics import CrowdCountingMeanAbsoluteError, CrowdCountingMeanSquaredError
import torch
from torch import nn
import torch.nn.functional as F
from models import CSRNet, PACNN
import os
import cv2
from torchvision import datasets, transforms
from data_flow import ListDataset
import pytorch_ssim

from hard_code_variable import HardCodeVariable
from data_util import ShanghaiTechDataPath
from visualize_util import save_img, save_density_map


def visualize_shanghaitech_keepfull():
    HARD_CODE = HardCodeVariable()
    shanghaitech_data = ShanghaiTechDataPath(root=HARD_CODE.SHANGHAITECH_PATH)
    shanghaitech_data_part_a_train = shanghaitech_data.get_a().get_train().get()
    saved_folder = "visualize/debug_dataloader_shanghaitech"
    os.makedirs(saved_folder, exist_ok=True)
    train_list, val_list = get_train_val_list(shanghaitech_data_part_a_train, test_size=0.2)
    test_list = None
    train_loader, val_loader, test_loader = get_dataloader(train_list, val_list, test_list,
                                                           dataset_name="shanghaitech_keepfull",
                                                           visualize_mode=True, debug=True)

    # walk the train loader and dump the first 10 samples
    train_loader_iter = iter(train_loader)
    for i in range(10):
        img, label, count = next(train_loader_iter)
        save_img(img, os.path.join(saved_folder, "train_img" + str(i) + ".png"))
        save_path = os.path.join(saved_folder, "train_label" + str(i) + ".png")
        save_density_map(label.numpy()[0][0], save_path)
        print("saved " + save_path)


if __name__ == "__main__":
    visualize_shanghaitech_keepfull()
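save_img and save_density_map come from the repository's visualize_util, which this commit does not touch; as a rough idea (an assumption, not the repository's implementation), a density-map writer along these lines would suffice:

import matplotlib
matplotlib.use("Agg")  # render to files without a display
import matplotlib.pyplot as plt

def save_density_map(density_map, path):
    # density_map: 2-D array; saved as a heatmap so crowd hot spots are visible
    plt.figure(dpi=150)
    plt.imshow(density_map, cmap="jet")
    plt.colorbar()
    plt.axis("off")
    plt.savefig(path, bbox_inches="tight")
    plt.close()

Run from the repository root (assuming HardCodeVariable points at a local ShanghaiTech copy), python debug/debug_sha.py should write ten train_img*.png / train_label*.png pairs into visualize/debug_dataloader_shanghaitech while the debug branch in ListDataset prints each image path with its density sum and raw point count.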