From a6ed0a3b95d0beaf6c6528d75cae8c29201599f0 Mon Sep 17 00:00:00 2001
From: tingsong
Date: Fri, 2 Aug 2019 10:46:28 +0800
Subject: [PATCH] change path

---
 Code/.DS_Store                           | Bin 0 -> 6148 bytes
 Code/1_data_prepare/1_2_split_dataset.py |  18 +++++++++---------
 Code/1_data_prepare/1_3_generate_txt.py  |  11 ++++++-----
 Code/2_model/2_finetune.py               |   6 +++---
 Code/4_viewer/.DS_Store                  | Bin 0 -> 6148 bytes
 Code/4_viewer/1_tensorboardX_demo.py     |  15 ++++++++-------
 Code/4_viewer/2_visual_weights.py        |   5 +++--
 Code/4_viewer/3_visual_featuremaps.py    |   7 ++++---
 Code/4_viewer/4_hist_grad_weight.py      |   7 ++++---
 Code/4_viewer/6_hook_for_grad_cam.py     |   6 +++---
 Code/main_training/main.py               |   6 +++---
 11 files changed, 43 insertions(+), 38 deletions(-)
 create mode 100644 Code/.DS_Store
 create mode 100644 Code/4_viewer/.DS_Store

diff --git a/Code/.DS_Store b/Code/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..530dfc5d41f9275e207926dc884d731fa3a98229
Binary files /dev/null and b/Code/.DS_Store differ
diff --git a/Code/4_viewer/1_tensorboardX_demo.py b/Code/4_viewer/1_tensorboardX_demo.py
index 9da8523..acb60ba 100644
--- a/Code/4_viewer/1_tensorboardX_demo.py
+++ b/Code/4_viewer/1_tensorboardX_demo.py
@@ -1,4 +1,5 @@
 # coding: utf-8
+import os
 import torch
 import torchvision.utils as vutils
 import numpy as np
@@ -7,7 +8,7 @@ from torchvision import datasets
 from tensorboardX import SummaryWriter
 
 resnet18 = models.resnet18(False)
-writer = SummaryWriter('../../Result/runs')
+writer = SummaryWriter(os.path.join("..", "..", "Result", "runs"))
 sample_rate = 44100
 freqs = [262, 294, 330, 349, 392, 440, 440, 440, 440, 440, 440]
@@ -23,10 +24,10 @@ for n_iter in range(100):
     s1 = torch.rand(1)  # value to keep
     s2 = torch.rand(1)
     # data grouping by `slash`
-    writer.add_scalar('data/scalar_systemtime', s1[0], n_iter)
+    writer.add_scalar(os.path.join("data", "scalar_systemtime"), s1[0], n_iter)
     # data grouping by `slash`
-    writer.add_scalar('data/scalar_customtime', s1[0], n_iter, walltime=n_iter)
-    writer.add_scalars('data/scalar_group', {"xsinx": n_iter * np.sin(n_iter),
+    writer.add_scalar(os.path.join("data", "scalar_customtime"), s1[0], n_iter, walltime=n_iter)
+    writer.add_scalars(os.path.join("data", "scalar_group"), {"xsinx": n_iter * np.sin(n_iter),
                                              "xcosx": n_iter * np.cos(n_iter),
                                              "arctanx": np.arctan(n_iter)}, n_iter)
     x = torch.rand(32, 3, 64, 64)  # output from network
@@ -56,15 +57,15 @@ for n_iter in range(100):
                             precision, recall, n_iter)
 
 # export scalar data to JSON for external processing
-writer.export_scalars_to_json("../../Result/all_scalars.json")
+writer.export_scalars_to_json(os.path.join("..", "..", "Result", "all_scalars.json"))
 
-dataset = datasets.MNIST('../../Data/mnist', train=False, download=True)
+dataset = datasets.MNIST(os.path.join("..", "..", "Data", "mnist"), train=False, download=True)
 images = dataset.test_data[:100].float()
 label = dataset.test_labels[:100]
 features = images.view(100, 784)
 writer.add_embedding(features, metadata=label, label_img=images.unsqueeze(1))
 writer.add_embedding(features, global_step=1, tag='noMetadata')
-dataset = datasets.MNIST('../../Data/mnist', train=True, download=True)
+dataset = datasets.MNIST(os.path.join("..", "..", "Data", "mnist"), train=True, download=True)
 images_train = dataset.train_data[:100].float()
 labels_train = dataset.train_labels[:100]
 features_train = images_train.view(100, 784)
diff --git a/Code/4_viewer/2_visual_weights.py b/Code/4_viewer/2_visual_weights.py
index cc1e340..fa9fda7 100644
--- a/Code/4_viewer/2_visual_weights.py
+++ b/Code/4_viewer/2_visual_weights.py
@@ -1,4 +1,5 @@
 # coding: utf-8
+import os
 import torch
 import torchvision.utils as vutils
 from tensorboardX import SummaryWriter
@@ -42,10 +43,10 @@ class Net(nn.Module):
 
 net = Net()  # create a network
-pretrained_dict = torch.load('../2_model/net_params.pkl')
+pretrained_dict = torch.load(os.path.join("..", "2_model", "net_params.pkl"))
 net.load_state_dict(pretrained_dict)
 
-writer = SummaryWriter(log_dir='../../Result/visual_weights')
+writer = SummaryWriter(log_dir=os.path.join("..", "..", "Result", "visual_weights"))
 params = net.state_dict()
 for k, v in params.items():
     if 'conv' in k and 'weight' in k:
diff --git a/Code/4_viewer/3_visual_featuremaps.py b/Code/4_viewer/3_visual_featuremaps.py
index 07ef0aa..a1dd4d3 100644
--- a/Code/4_viewer/3_visual_featuremaps.py
+++ b/Code/4_viewer/3_visual_featuremaps.py
@@ -1,4 +1,5 @@
 # coding: utf-8
+import os
 import torch
 import torchvision.utils as vutils
 import numpy as np
@@ -12,9 +13,9 @@ from torch.utils.data import DataLoader
 
 vis_layer = 'conv1'
-log_dir = '../../Result/visual_featuremaps'
-txt_path = '../../Data/visual.txt'
-pretrained_path = '../../Data/net_params_72p.pkl'
+log_dir = os.path.join("..", "..", "Result", "visual_featuremaps")
+txt_path = os.path.join("..", "..", "Data", "visual.txt")
+pretrained_path = os.path.join("..", "..", "Data", "net_params_72p.pkl")
 
 net = Net()
 pretrained_dict = torch.load(pretrained_path)
diff --git a/Code/4_viewer/4_hist_grad_weight.py b/Code/4_viewer/4_hist_grad_weight.py
index 677df78..79b00d8 100644
--- a/Code/4_viewer/4_hist_grad_weight.py
+++ b/Code/4_viewer/4_hist_grad_weight.py
@@ -9,13 +9,14 @@ from torch.autograd import Variable
 import torch.nn as nn
 import torch.optim as optim
 import sys
+import os
 sys.path.append("..")
 from utils.utils import MyDataset, validate, show_confMat, Net
 from tensorboardX import SummaryWriter
 from datetime import datetime
 
-train_txt_path = '../../Data/train.txt'
-valid_txt_path = '../../Data/valid.txt'
+train_txt_path = os.path.join("..", "..", "Data", "train.txt")
+valid_txt_path = os.path.join("..", "..", "Data", "valid.txt")
 
 classes_name = ['plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
@@ -25,7 +26,7 @@ lr_init = 0.001
 max_epoch = 1
 
 # log
-log_dir = '../../Result/hist_grad_weight'
+log_dir = os.path.join("..", "..", "Result", "hist_grad_weight")
 
 writer = SummaryWriter(log_dir=log_dir)
diff --git a/Code/4_viewer/6_hook_for_grad_cam.py b/Code/4_viewer/6_hook_for_grad_cam.py
index b6926d3..faf4b88 100644
--- a/Code/4_viewer/6_hook_for_grad_cam.py
+++ b/Code/4_viewer/6_hook_for_grad_cam.py
@@ -130,9 +130,9 @@ def gen_cam(feature_map, grads):
 
 if __name__ == '__main__':
     BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-    path_img = os.path.join(BASE_DIR, "..", "..", "Data/cam_img/", "test_img_8.png")
-    path_net = os.path.join(BASE_DIR, "..", "..", "Data/", "net_params_72p.pkl")
-    output_dir = os.path.join(BASE_DIR, "..", "..", "Result/backward_hook_cam/")
"Result/backward_hook_cam/") + path_img = os.path.join(BASE_DIR, "..", "..", "Data", "cam_img", "test_img_8.png") + path_net = os.path.join(BASE_DIR, "..", "..", "Data", "net_params_72p.pkl") + output_dir = os.path.join(BASE_DIR, "..", "..", "Result", "backward_hook_cam") classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck') fmap_block = list() diff --git a/Code/main_training/main.py b/Code/main_training/main.py index 13f3cf7..8fd0c52 100644 --- a/Code/main_training/main.py +++ b/Code/main_training/main.py @@ -15,8 +15,8 @@ from utils.utils import MyDataset, validate, show_confMat from tensorboardX import SummaryWriter from datetime import datetime -train_txt_path = '../../Data/train.txt' -valid_txt_path = '../../Data/valid.txt' +train_txt_path = os.path.join("..", "..", "Data", "train.txt") +valid_txt_path = os.path.join("..", "..", "Data", "valid.txt") classes_name = ['plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck'] @@ -26,7 +26,7 @@ lr_init = 0.001 max_epoch = 1 # log -result_dir = '../../Result/' +result_dir = os.path.join("..", "..", "Result") now_time = datetime.now() time_str = datetime.strftime(now_time, '%m-%d_%H-%M-%S') -- GitLab