提交 a6ed0a3b 编写于 作者: T tingsong

change path

上级 b92619da
文件已添加
......@@ -8,10 +8,10 @@ import glob
import random
import shutil
dataset_dir = '../../Data/cifar-10-png/raw_test/'
train_dir = '../../Data/train/'
valid_dir = '../../Data/valid/'
test_dir = '../../Data/test/'
dataset_dir = os.path.join("..", "..", "Data", "cifar-10-png", "raw_test")
train_dir = os.path.join("..", "..", "Data", "train")
valid_dir = os.path.join("..", "..", "Data", "valid")
test_dir = os.path.join("..", "..", "Data", "test")
train_per = 0.8
valid_per = 0.1
......@@ -27,7 +27,7 @@ if __name__ == '__main__':
for root, dirs, files in os.walk(dataset_dir):
for sDir in dirs:
imgs_list = glob.glob(os.path.join(root, sDir)+'/*.png')
imgs_list = glob.glob(os.path.join(root, sDir, '*.png'))
random.seed(666)
random.shuffle(imgs_list)
imgs_num = len(imgs_list)
......@@ -37,14 +37,14 @@ if __name__ == '__main__':
for i in range(imgs_num):
if i < train_point:
out_dir = train_dir + sDir + '/'
out_dir = os.path.join(train_dir, sDir)
elif i < valid_point:
out_dir = valid_dir + sDir + '/'
out_dir = os.path.join(valid_dir, sDir)
else:
out_dir = test_dir + sDir + '/'
out_dir = os.path.join(test_dir, sDir)
makedir(out_dir)
out_path = out_dir + os.path.split(imgs_list[i])[-1]
out_path = os.path.join(out_dir, os.path.split(imgs_list[i])[-1])
shutil.copy(imgs_list[i], out_path)
print('Class:{}, train:{}, valid:{}, test:{}'.format(sDir, train_point, valid_point-train_point, imgs_num-valid_point))
......@@ -4,11 +4,11 @@ import os
为数据集生成对应的txt文件
'''
train_txt_path = '../../Data/train.txt'
train_dir = '../../Data/train/'
train_txt_path = os.path.join("..", "..", "Data", "train.txt")
train_dir = os.path.join("..", "..", "Data", "train")
valid_txt_path = '../../Data/valid.txt'
valid_dir = '../../Data/valid/'
valid_txt_path = os.path.join("..", "..", "Data", "valid.txt")
valid_dir = os.path.join("..", "..", "Data", "valid")
def gen_txt(txt_path, img_dir):
......@@ -30,4 +30,5 @@ def gen_txt(txt_path, img_dir):
if __name__ == '__main__':
gen_txt(train_txt_path, train_dir)
gen_txt(valid_txt_path, valid_dir)
\ No newline at end of file
gen_txt(valid_txt_path, valid_dir)
......@@ -14,8 +14,8 @@ sys.path.append("..")
from utils.utils import MyDataset, validate, show_confMat
from datetime import datetime
train_txt_path = '../../Data/train.txt'
valid_txt_path = '../../Data/valid.txt'
train_txt_path = os.path.join("..", "..", "Data", "train.txt")
valid_txt_path = os.path.join("..", "..", "Data", "valid.txt")
classes_name = ['plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
......@@ -25,7 +25,7 @@ lr_init = 0.001
max_epoch = 1
# log
result_dir = '../../Result/'
result_dir = os.path.join("..", "..", "Result")
now_time = datetime.now()
time_str = datetime.strftime(now_time, '%m-%d_%H-%M-%S')
......
# coding: utf-8
import os
import torch
import torchvision.utils as vutils
import numpy as np
......@@ -7,7 +8,7 @@ from torchvision import datasets
from tensorboardX import SummaryWriter
resnet18 = models.resnet18(False)
writer = SummaryWriter('../../Result/runs')
writer = SummaryWriter(os.path.join("..", "..", "Result", "runs"))
sample_rate = 44100
freqs = [262, 294, 330, 349, 392, 440, 440, 440, 440, 440, 440]
......@@ -23,10 +24,10 @@ for n_iter in range(100):
s1 = torch.rand(1) # value to keep
s2 = torch.rand(1)
# data grouping by `slash`
writer.add_scalar('data/scalar_systemtime', s1[0], n_iter)
writer.add_scalar('data/scalar_systemtime', s1[0], n_iter)  # tag uses literal '/', not a filesystem path
# data grouping by `slash`
writer.add_scalar('data/scalar_customtime', s1[0], n_iter, walltime=n_iter)
writer.add_scalars('data/scalar_group', {"xsinx": n_iter * np.sin(n_iter),
writer.add_scalar('data/scalar_customtime', s1[0], n_iter, walltime=n_iter)  # tag uses literal '/', not a filesystem path
writer.add_scalars('data/scalar_group', {"xsinx": n_iter * np.sin(n_iter),
"xcosx": n_iter * np.cos(n_iter),
"arctanx": np.arctan(n_iter)}, n_iter)
x = torch.rand(32, 3, 64, 64) # output from network
......@@ -56,15 +57,15 @@ for n_iter in range(100):
precision,
recall, n_iter)
# export scalar data to JSON for external processing
writer.export_scalars_to_json("../../Result/all_scalars.json")
writer.export_scalars_to_json(os.path.join("..", "..", "Result", "all_scalars.json"))
dataset = datasets.MNIST('../../Data/mnist', train=False, download=True)
dataset = datasets.MNIST(os.path.join("..", "..", "Data", "mnist"), train=False, download=True)
images = dataset.test_data[:100].float()
label = dataset.test_labels[:100]
features = images.view(100, 784)
writer.add_embedding(features, metadata=label, label_img=images.unsqueeze(1))
writer.add_embedding(features, global_step=1, tag='noMetadata')
dataset = datasets.MNIST('../../Data/mnist', train=True, download=True)
dataset = datasets.MNIST(os.path.join("..", "..", "Data", "mnist"), train=True, download=True)
images_train = dataset.train_data[:100].float()
labels_train = dataset.train_labels[:100]
features_train = images_train.view(100, 784)
......
# coding: utf-8
import os
import torch
import torchvision.utils as vutils
from tensorboardX import SummaryWriter
......@@ -42,10 +43,10 @@ class Net(nn.Module):
net = Net() # 创建一个网络
pretrained_dict = torch.load('../2_model/net_params.pkl')
pretrained_dict = torch.load(os.path.join("..", "2_model", "net_params.pkl"))
net.load_state_dict(pretrained_dict)
writer = SummaryWriter(log_dir='../../Result/visual_weights')
writer = SummaryWriter(log_dir=os.path.join("..", "..", "Result", "visual_weights"))
params = net.state_dict()
for k, v in params.items():
if 'conv' in k and 'weight' in k:
......
# coding: utf-8
import os
import torch
import torchvision.utils as vutils
import numpy as np
......@@ -12,9 +13,9 @@ from torch.utils.data import DataLoader
vis_layer = 'conv1'
log_dir = '../../Result/visual_featuremaps'
txt_path = '../../Data/visual.txt'
pretrained_path = '../../Data/net_params_72p.pkl'
log_dir = os.path.join("..", "..", "Result", "visual_featuremaps")
txt_path = os.path.join("..", "..", "Data", "visual.txt")
pretrained_path = os.path.join("..", "..", "Data", "net_params_72p.pkl")
net = Net()
pretrained_dict = torch.load(pretrained_path)
......
......@@ -9,13 +9,14 @@ from torch.autograd import Variable
import torch.nn as nn
import torch.optim as optim
import sys
import os
sys.path.append("..")
from utils.utils import MyDataset, validate, show_confMat, Net
from tensorboardX import SummaryWriter
from datetime import datetime
train_txt_path = '../../Data/train.txt'
valid_txt_path = '../../Data/valid.txt'
train_txt_path = os.path.join("..", "..", "Data", "train.txt")
valid_txt_path = os.path.join("..", "..", "Data", "valid.txt")
classes_name = ['plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
......@@ -25,7 +26,7 @@ lr_init = 0.001
max_epoch = 1
# log
log_dir = '../../Result/hist_grad_weight'
log_dir = os.path.join("..", "..", "Result", "hist_grad_weight")
writer = SummaryWriter(log_dir=log_dir)
......
......@@ -130,9 +130,9 @@ def gen_cam(feature_map, grads):
if __name__ == '__main__':
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
path_img = os.path.join(BASE_DIR, "..", "..", "Data/cam_img/", "test_img_8.png")
path_net = os.path.join(BASE_DIR, "..", "..", "Data/", "net_params_72p.pkl")
output_dir = os.path.join(BASE_DIR, "..", "..", "Result/backward_hook_cam/")
path_img = os.path.join(BASE_DIR, "..", "..", "Data", "cam_img", "test_img_8.png")
path_net = os.path.join(BASE_DIR, "..", "..", "Data", "net_params_72p.pkl")
output_dir = os.path.join(BASE_DIR, "..", "..", "Result", "backward_hook_cam")
classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck')
fmap_block = list()
......
......@@ -15,8 +15,8 @@ from utils.utils import MyDataset, validate, show_confMat
from tensorboardX import SummaryWriter
from datetime import datetime
train_txt_path = '../../Data/train.txt'
valid_txt_path = '../../Data/valid.txt'
train_txt_path = os.path.join("..", "..", "Data", "train.txt")
valid_txt_path = os.path.join("..", "..", "Data", "valid.txt")
classes_name = ['plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
......@@ -26,7 +26,7 @@ lr_init = 0.001
max_epoch = 1
# log
result_dir = '../../Result/'
result_dir = os.path.join("..", "..", "Result")
now_time = datetime.now()
time_str = datetime.strftime(now_time, '%m-%d_%H-%M-%S')
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册