Unverified · Commit cf2e96b6 · authored by S shiyutang, committed by GitHub

Use tempfile to place the temporary files (#43237)

* tempfile_fix

* update

* fix_CI

* update_word2vec.inference.model

* remove_change_in_word2vec_book

* fix_word2vec_book
Parent 8bc1c82d
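Every hunk below applies the same pattern: create a tempfile.TemporaryDirectory(), build each output path under its .name with os.path.join, and remove the directory afterwards, so tests stop littering the working directory and stop colliding when run concurrently. A minimal sketch of the unittest form of the pattern (class and method names here are illustrative, not taken from the diff):

import os
import tempfile
import unittest


class ExampleTest(unittest.TestCase):

    def setUp(self):
        # One scratch directory per test, created before each test method.
        self.temp_dir = tempfile.TemporaryDirectory()

    def tearDown(self):
        # Recursively deletes the directory and everything written into it.
        self.temp_dir.cleanup()

    def test_save(self):
        # Paths are built under temp_dir.name instead of the CWD.
        save_dirname = os.path.join(self.temp_dir.name,
                                    "example.inference.model")
        with open(save_dirname, "w") as f:
            f.write("model bytes would go here")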
......@@ -24,6 +24,7 @@ import unittest
import os
import copy
import numpy as np
+import tempfile
from paddle.static.amp import decorate
paddle.enable_static()
......@@ -272,18 +273,25 @@ def infer(use_cuda, save_dirname=None):
clip_extra=True)
-def main(net_type, use_cuda, is_local=True):
-    if use_cuda and not fluid.core.is_compiled_with_cuda():
-        return
+class TestImageClassification(unittest.TestCase):
-    # Directory for saving the trained model
-    save_dirname = "image_classification_" + net_type + ".inference.model"
+    def setUp(self):
+        self.temp_dir = tempfile.TemporaryDirectory()
-    train(net_type, use_cuda, save_dirname, is_local)
-    #infer(use_cuda, save_dirname)
+    def tearDown(self):
+        self.temp_dir.cleanup()
+    def main(self, net_type, use_cuda, is_local=True):
+        if use_cuda and not fluid.core.is_compiled_with_cuda():
+            return
-class TestImageClassification(unittest.TestCase):
+        # Directory for saving the trained model
+        save_dirname = os.path.join(
+            self.temp_dir.name,
+            "image_classification_" + net_type + ".inference.model")
+        train(net_type, use_cuda, save_dirname, is_local)
+        #infer(use_cuda, save_dirname)
def test_amp_lists(self):
white_list = copy.copy(
......@@ -413,11 +421,11 @@ class TestImageClassification(unittest.TestCase):
def test_vgg_cuda(self):
with self.scope_prog_guard():
-            main('vgg', use_cuda=True)
+            self.main('vgg', use_cuda=True)
def test_resnet_cuda(self):
with self.scope_prog_guard():
-            main('resnet', use_cuda=True)
+            self.main('resnet', use_cuda=True)
@contextlib.contextmanager
def scope_prog_guard(self):
......
......@@ -25,6 +25,7 @@ import math
import sys
import os
import struct
+import tempfile
paddle.enable_static()
......@@ -192,11 +193,13 @@ def main(use_cuda, is_local=True, use_bf16=False, pure_bf16=False):
if use_bf16 and not fluid.core.is_compiled_with_mkldnn():
return
+    temp_dir = tempfile.TemporaryDirectory()
# Directory for saving the trained model
save_dirname = "fit_a_line.inference.model"
save_dirname = os.path.join(temp_dir.name, "fit_a_line.inference.model")
train(use_cuda, save_dirname, is_local, use_bf16, pure_bf16)
infer(use_cuda, save_dirname, use_bf16)
+    temp_dir.cleanup()
class TestFitALineBase(unittest.TestCase):
......
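For module-level drivers such as main() above, the commit creates the directory explicitly and calls cleanup() at the end. A hypothetical equivalent using the context-manager form, which also removes the directory when train() or infer() raises partway through (train/infer stand for the functions in the test file):

import os
import tempfile


def main_sketch(use_cuda, is_local=True):
    # The with-block cleans up even on an exception, which the explicit
    # cleanup() call in the diff above does not.
    with tempfile.TemporaryDirectory() as temp_dir:
        save_dirname = os.path.join(temp_dir, "fit_a_line.inference.model")
        # train(use_cuda, save_dirname, is_local) and
        # infer(use_cuda, save_dirname) would run here.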
......@@ -22,6 +22,7 @@ import sys
import numpy
import unittest
import os
+import tempfile
import numpy as np
paddle.enable_static()
......@@ -244,10 +245,13 @@ def main(net_type, use_cuda, is_local=True):
return
# Directory for saving the trained model
save_dirname = "image_classification_" + net_type + ".inference.model"
temp_dir = tempfile.TemporaryDirectory()
save_dirname = os.path.join(
temp_dir.name, "image_classification_" + net_type + ".inference.model")
train(net_type, use_cuda, save_dirname, is_local)
infer(use_cuda, save_dirname)
+    temp_dir.cleanup()
class TestImageClassification(unittest.TestCase):
......
......@@ -20,6 +20,7 @@ import numpy as np
import os
import time
import unittest
+import tempfile
import paddle
import paddle.dataset.conll05 as conll05
......@@ -354,12 +355,16 @@ def main(use_cuda, is_local=True):
if use_cuda and not fluid.core.is_compiled_with_cuda():
return
+    temp_dir = tempfile.TemporaryDirectory()
# Directory for saving the trained model
save_dirname = "label_semantic_roles.inference.model"
save_dirname = os.path.join(temp_dir.name,
"label_semantic_roles.inference.model")
train(use_cuda, save_dirname, is_local)
infer(use_cuda, save_dirname)
+    temp_dir.cleanup()
class TestLabelSemanticRoles(unittest.TestCase):
......
......@@ -23,6 +23,7 @@ import paddle.fluid as fluid
import paddle.fluid.framework as framework
import paddle.fluid.layers as layers
import paddle.fluid.nets as nets
+import tempfile
from paddle.fluid.executor import Executor
from paddle.fluid.optimizer import SGDOptimizer
......@@ -318,10 +319,13 @@ def main(use_cuda):
return
# Directory for saving the inference model
save_dirname = "recommender_system.inference.model"
temp_dir = tempfile.TemporaryDirectory()
save_dirname = os.path.join(temp_dir.name,
"recommender_system.inference.model")
train(use_cuda, save_dirname)
infer(use_cuda, save_dirname)
+    temp_dir.cleanup()
if __name__ == '__main__':
......
......@@ -23,7 +23,9 @@ import paddle.fluid.layers as layers
import contextlib
import math
import sys
+import os
import unittest
+import tempfile
from paddle.fluid.executor import Executor
import paddle
......@@ -266,10 +268,13 @@ def main(use_cuda):
return
# Directory for saving the trained model
save_dirname = "rnn_encoder_decoder.inference.model"
temp_dir = tempfile.TemporaryDirectory()
save_dirname = os.path.join(temp_dir.name,
"rnn_encoder_decoder.inference.model")
train(use_cuda, save_dirname)
infer(use_cuda, save_dirname)
+    temp_dir.cleanup()
class TestRnnEncoderDecoder(unittest.TestCase):
......
......@@ -22,6 +22,7 @@ import os
import numpy as np
import math
import sys
+import tempfile
paddle.enable_static()
......@@ -247,7 +248,7 @@ def infer(target, save_dirname=None):
infer_inputs = [to_infer_tensor(t) for t in infer_inputs]
infer_config = fluid.core.NativeConfig()
-    infer_config.model_dir = 'word2vec.inference.model'
+    infer_config.model_dir = save_dirname
if target == "cuda":
infer_config.use_gpu = True
infer_config.device = 0
......@@ -273,8 +274,9 @@ def main(target, is_sparse, is_parallel, use_bf16, pure_bf16):
if use_bf16 and not fluid.core.is_compiled_with_mkldnn():
return
+    temp_dir = tempfile.TemporaryDirectory()
if not is_parallel:
save_dirname = "word2vec.inference.model"
save_dirname = os.path.join(temp_dir.name, "word2vec.inference.model")
else:
save_dirname = None
......@@ -290,6 +292,7 @@ def main(target, is_sparse, is_parallel, use_bf16, pure_bf16):
use_bf16=use_bf16,
pure_bf16=pure_bf16)
infer(target, save_dirname)
+    temp_dir.cleanup()
FULL_TEST = os.getenv('FULL_TEST',
......
......@@ -17,8 +17,10 @@ import paddle
paddle.set_default_dtype("float64")
from paddle.fluid.layers import sequence_mask
+import os
import numpy as np
import unittest
+import tempfile
from convert import convert_params_for_net
from rnn_numpy import SimpleRNN, LSTM, GRU
......@@ -336,16 +338,18 @@ def predict_test_util(place, mode, stop_gradient=True):
rnn = paddle.jit.to_static(
rnn, [paddle.static.InputSpec(shape=[None, None, 16], dtype=x.dtype)])
-    paddle.jit.save(rnn, "./inference/%s_infer" % mode)
+    temp_dir = tempfile.TemporaryDirectory()
+    save_dirname = os.path.join(temp_dir.name, "./inference/%s_infer" % mode)
+    paddle.jit.save(rnn, save_dirname)
paddle.enable_static()
new_scope = paddle.static.Scope()
with paddle.static.scope_guard(new_scope):
exe = paddle.static.Executor(place)
-        [inference_program, feed_target_names, fetch_targets
-         ] = paddle.static.load_inference_model("./inference/%s_infer" % mode,
-                                                exe)
+        [inference_program, feed_target_names,
+         fetch_targets] = paddle.static.load_inference_model(save_dirname, exe)
results = exe.run(inference_program,
feed={feed_target_names[0]: x.numpy()},
fetch_list=fetch_targets)
......@@ -353,6 +357,8 @@ def predict_test_util(place, mode, stop_gradient=True):
y.numpy(), results[0]) # eval results equal predict results
paddle.disable_static()
+    temp_dir.cleanup()
def load_tests(loader, tests, pattern):
suite = unittest.TestSuite()
......
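The same idea covers paddle.jit.save, which writes a family of files (.pdmodel, .pdiparams, and friends) next to the given prefix; keeping the prefix inside a temporary directory keeps those artifacts out of the source tree. A self-contained sketch under the assumption of a trivial layer (TinyNet is illustrative, not from the diff):

import os
import tempfile

import paddle


class TinyNet(paddle.nn.Layer):

    def __init__(self):
        super(TinyNet, self).__init__()
        self.fc = paddle.nn.Linear(16, 4)

    def forward(self, x):
        return self.fc(x)


temp_dir = tempfile.TemporaryDirectory()
# The save prefix lives inside the temporary directory, so the files
# produced by paddle.jit.save never land in the working directory.
prefix = os.path.join(temp_dir.name, "tiny_infer")
net = paddle.jit.to_static(
    TinyNet(), [paddle.static.InputSpec(shape=[None, 16], dtype='float32')])
paddle.jit.save(net, prefix)
loaded = paddle.jit.load(prefix)
temp_dir.cleanup()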
......@@ -172,8 +172,12 @@ class TestDataset(unittest.TestCase):
"""
Testcase for InMemoryDataset from create to run.
"""
filename1 = "afs:test_in_memory_dataset_run_a.txt"
filename2 = "afs:test_in_memory_dataset_run_b.txt"
temp_dir = tempfile.TemporaryDirectory()
filename1 = os.path.join(temp_dir.name,
"afs:test_in_memory_dataset_run_a.txt")
filename2 = os.path.join(temp_dir.name,
"afs:test_in_memory_dataset_run_b.txt")
with open(filename1, "w") as f:
data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
......@@ -223,19 +227,24 @@ class TestDataset(unittest.TestCase):
except Exception as e:
self.assertTrue(False)
-        os.remove(filename1)
-        os.remove(filename2)
+        temp_dir.cleanup()
def test_in_memory_dataset_run(self):
"""
Testcase for InMemoryDataset from create to run.
"""
with open("test_in_memory_dataset_run_a.txt", "w") as f:
temp_dir = tempfile.TemporaryDirectory()
filename1 = os.path.join(temp_dir.name,
"test_in_memory_dataset_run_a.txt")
filename2 = os.path.join(temp_dir.name,
"test_in_memory_dataset_run_b.txt")
with open(filename1, "w") as f:
data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
f.write(data)
with open("test_in_memory_dataset_run_b.txt", "w") as f:
with open(filename2, "w") as f:
data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
......@@ -257,10 +266,7 @@ class TestDataset(unittest.TestCase):
pipe_command="cat",
use_var=slots_vars)
dataset._init_distributed_settings(fea_eval=True, candidate_size=1)
-        dataset.set_filelist([
-            "test_in_memory_dataset_run_a.txt",
-            "test_in_memory_dataset_run_b.txt"
-        ])
+        dataset.set_filelist([filename1, filename2])
dataset.load_into_memory()
dataset.slots_shuffle(["slot1"])
dataset.local_shuffle()
......@@ -282,14 +288,19 @@ class TestDataset(unittest.TestCase):
except Exception as e:
self.assertTrue(False)
os.remove("./test_in_memory_dataset_run_a.txt")
os.remove("./test_in_memory_dataset_run_b.txt")
temp_dir.cleanup()
def test_in_memory_dataset_masterpatch(self):
"""
Testcase for InMemoryDataset from create to run.
"""
with open("test_in_memory_dataset_masterpatch_a.txt", "w") as f:
temp_dir = tempfile.TemporaryDirectory()
filename1 = os.path.join(temp_dir.name,
"test_in_memory_dataset_masterpatch_a.txt")
filename2 = os.path.join(temp_dir.name,
"test_in_memory_dataset_masterpatch_b.txt")
with open(filename1, "w") as f:
data = "1 id1 1 1 2 3 3 4 5 5 5 5 1 1\n"
data += "1 id1 1 2 2 3 4 4 6 6 6 6 1 2\n"
data += "1 id2 1 1 1 1 1 0 1 0\n"
......@@ -300,7 +311,7 @@ class TestDataset(unittest.TestCase):
data += "1 id5 1 1 1 1 1 0 1 0\n"
data += "1 id5 1 1 1 1 1 0 1 0\n"
f.write(data)
with open("test_in_memory_dataset_masterpatch_b.txt", "w") as f:
with open(filename2, "w") as f:
data = "1 id6 1 4 2 3 3 4 5 5 5 5 1 4\n"
data += "1 id6 1 1 2 3 4 4 6 6 6 6 1 5\n"
data += "1 id6 1 6 2 3 5 4 7 7 7 7 1 6\n"
......@@ -353,14 +364,19 @@ class TestDataset(unittest.TestCase):
dataset.update_settings(merge_size=2)
dataset.dataset.merge_by_lineid()
os.remove("./test_in_memory_dataset_masterpatch_a.txt")
os.remove("./test_in_memory_dataset_masterpatch_b.txt")
temp_dir.cleanup()
def test_in_memory_dataset_masterpatch1(self):
"""
Testcase for InMemoryDataset from create to run.
"""
with open("test_in_memory_dataset_masterpatch1_a.txt", "w") as f:
temp_dir = tempfile.TemporaryDirectory()
filename1 = os.path.join(temp_dir.name,
"test_in_memory_dataset_masterpatch1_a.txt")
filename2 = os.path.join(temp_dir.name,
"test_in_memory_dataset_masterpatch1_b.txt")
with open(filename1, "w") as f:
data = "1 id1 1 1 2 3 3 4 5 5 5 5 1 1\n"
data += "1 id1 1 2 2 3 4 4 6 6 6 6 1 2\n"
data += "1 id2 1 1 1 1 1 0 1 0\n"
......@@ -371,7 +387,7 @@ class TestDataset(unittest.TestCase):
data += "1 id5 1 1 1 1 1 0 1 0\n"
data += "1 id5 1 1 1 1 1 0 1 0\n"
f.write(data)
with open("test_in_memory_dataset_masterpatch1_b.txt", "w") as f:
with open(filename2, "w") as f:
data = "1 id6 1 4 2 3 3 4 5 5 5 5 1 4\n"
data += "1 id6 1 1 2 3 4 4 6 6 6 6 1 5\n"
data += "1 id6 1 6 2 3 5 4 7 7 7 7 1 6\n"
......@@ -427,8 +443,7 @@ class TestDataset(unittest.TestCase):
dataset._set_merge_by_lineid(2)
dataset.dataset.merge_by_lineid()
os.remove("./test_in_memory_dataset_masterpatch1_a.txt")
os.remove("./test_in_memory_dataset_masterpatch1_b.txt")
temp_dir.cleanup()
def test_in_memory_dataset_run_2(self):
"""
......@@ -436,12 +451,18 @@ class TestDataset(unittest.TestCase):
Use CUDAPlace
Use float type id
"""
with open("test_in_memory_dataset_run_a.txt", "w") as f:
temp_dir = tempfile.TemporaryDirectory()
filename1 = os.path.join(temp_dir.name,
"test_in_memory_dataset_run_a.txt")
filename2 = os.path.join(temp_dir.name,
"test_in_memory_dataset_run_b.txt")
with open(filename1, "w") as f:
data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
f.write(data)
with open("test_in_memory_dataset_run_b.txt", "w") as f:
with open(filename2, "w") as f:
data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
......@@ -462,10 +483,7 @@ class TestDataset(unittest.TestCase):
thread_num=3,
pipe_command="cat",
use_var=slots_vars)
-        dataset.set_filelist([
-            "test_in_memory_dataset_run_a.txt",
-            "test_in_memory_dataset_run_b.txt"
-        ])
+        dataset.set_filelist([filename1, filename2])
dataset.load_into_memory()
dataset.local_shuffle()
......@@ -540,19 +558,22 @@ class TestDataset(unittest.TestCase):
fleet_ptr.set_client2client_config(1, 1, 1)
fleet_ptr.get_cache_threshold(0)
os.remove("./test_in_memory_dataset_run_a.txt")
os.remove("./test_in_memory_dataset_run_b.txt")
temp_dir.cleanup()
def test_queue_dataset_run(self):
"""
Testcase for QueueDataset from create to run.
"""
with open("test_queue_dataset_run_a.txt", "w") as f:
temp_dir = tempfile.TemporaryDirectory()
filename1 = os.path.join(temp_dir.name, "test_queue_dataset_run_a.txt")
filename2 = os.path.join(temp_dir.name, "test_queue_dataset_run_b.txt")
with open(filename1, "w") as f:
data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
f.write(data)
with open("test_queue_dataset_run_b.txt", "w") as f:
with open(filename2, "w") as f:
data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
......@@ -573,8 +594,7 @@ class TestDataset(unittest.TestCase):
thread_num=3,
pipe_command="cat",
use_var=slots_vars)
-        dataset.set_filelist(
-            ["test_queue_dataset_run_a.txt", "test_queue_dataset_run_b.txt"])
+        dataset.set_filelist([filename1, filename2])
exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())
......@@ -605,10 +625,7 @@ class TestDataset(unittest.TestCase):
except Exception as e:
self.assertTrue(False)
if os.path.exists("./test_queue_dataset_run_a.txt"):
os.remove("./test_queue_dataset_run_a.txt")
if os.path.exists("./test_queue_dataset_run_b.txt"):
os.remove("./test_queue_dataset_run_b.txt")
temp_dir.cleanup()
def test_queue_dataset_run_2(self):
"""
......@@ -616,12 +633,16 @@ class TestDataset(unittest.TestCase):
Use CUDAPlace
Use float type id
"""
with open("test_queue_dataset_run_a.txt", "w") as f:
temp_dir = tempfile.TemporaryDirectory()
filename1 = os.path.join(temp_dir.name, "test_queue_dataset_run_a.txt")
filename2 = os.path.join(temp_dir.name, "test_queue_dataset_run_b.txt")
with open(filename1, "w") as f:
data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
f.write(data)
with open("test_queue_dataset_run_b.txt", "w") as f:
with open(filename2, "w") as f:
data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
......@@ -642,8 +663,7 @@ class TestDataset(unittest.TestCase):
thread_num=3,
pipe_command="cat",
use_var=slots_vars)
-        dataset.set_filelist(
-            ["test_queue_dataset_run_a.txt", "test_queue_dataset_run_b.txt"])
+        dataset.set_filelist([filename1, filename2])
exe = fluid.Executor(fluid.CPUPlace(
) if not core.is_compiled_with_cuda() else fluid.CUDAPlace(0))
......@@ -662,10 +682,7 @@ class TestDataset(unittest.TestCase):
except Exception as e:
self.assertTrue(False)
if os.path.exists("./test_queue_dataset_run_a.txt"):
os.remove("./test_queue_dataset_run_a.txt")
if os.path.exists("./test_queue_dataset_run_b.txt"):
os.remove("./test_queue_dataset_run_b.txt")
temp_dir.cleanup()
def test_queue_dataset_run_3(self):
"""
......@@ -673,13 +690,17 @@ class TestDataset(unittest.TestCase):
Use CUDAPlace
Use float type id
"""
with open("test_queue_dataset_run_a.txt", "w") as f:
temp_dir = tempfile.TemporaryDirectory()
filename1 = os.path.join(temp_dir.name, "test_queue_dataset_run_a.txt")
filename2 = os.path.join(temp_dir.name, "test_queue_dataset_run_b.txt")
with open(filename1, "w") as f:
data = "2 1 2 2 5 4 2 2 7 2 1 3\n"
data += "2 6 2 2 1 4 2 2 4 2 2 3\n"
data += "2 5 2 2 9 9 2 2 7 2 1 3\n"
data += "2 7 2 2 1 9 2 3 7 2 5 3\n"
f.write(data)
with open("test_queue_dataset_run_b.txt", "w") as f:
with open(filename2, "w") as f:
data = "2 1 2 2 5 4 2 2 7 2 1 3\n"
data += "2 6 2 2 1 4 2 2 4 2 2 3\n"
data += "2 5 2 2 9 9 2 2 7 2 1 3\n"
......@@ -701,8 +722,7 @@ class TestDataset(unittest.TestCase):
input_type=1,
pipe_command="cat",
use_var=slots_vars)
-        dataset.set_filelist(
-            ["test_queue_dataset_run_a.txt", "test_queue_dataset_run_b.txt"])
+        dataset.set_filelist([filename1, filename2])
dataset.load_into_memory()
exe = fluid.Executor(fluid.CPUPlace(
......@@ -722,10 +742,7 @@ class TestDataset(unittest.TestCase):
except Exception as e:
self.assertTrue(False)
if os.path.exists("./test_queue_dataset_run_a.txt"):
os.remove("./test_queue_dataset_run_a.txt")
if os.path.exists("./test_queue_dataset_run_b.txt"):
os.remove("./test_queue_dataset_run_b.txt")
temp_dir.cleanup()
class TestDatasetWithDataLoader(TestDataset):
......@@ -789,12 +806,18 @@ class TestDatasetWithFetchHandler(unittest.TestCase):
"""
Test Dataset With Fetch Handler. TestCases.
"""
with open("test_queue_dataset_run_a.txt", "w") as f:
self.temp_dir = tempfile.TemporaryDirectory()
self.filename1 = os.path.join(self.temp_dir.name,
"test_queue_dataset_run_a.txt")
self.filename2 = os.path.join(self.temp_dir.name,
"test_queue_dataset_run_b.txt")
with open(self.filename1, "w") as f:
data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
f.write(data)
with open("test_queue_dataset_run_b.txt", "w") as f:
with open(self.filename2, "w") as f:
data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
......@@ -805,15 +828,14 @@ class TestDatasetWithFetchHandler(unittest.TestCase):
"""
Test Dataset With Fetch Handler. TestCases.
"""
os.remove("./test_queue_dataset_run_a.txt")
os.remove("./test_queue_dataset_run_b.txt")
self.temp_dir.cleanup()
def test_dataset_none(self):
"""
Test Dataset With Fetch Handler. TestCases.
"""
slots_vars, out = self.net()
files = ["test_queue_dataset_run_a.txt", "test_queue_dataset_run_b.txt"]
files = [self.filename1, self.filename2]
dataset = self.get_dataset(slots_vars, files)
exe = fluid.Executor(fluid.CPUPlace())
......@@ -835,7 +857,7 @@ class TestDatasetWithFetchHandler(unittest.TestCase):
Test Dataset With Fetch Handler. TestCases.
"""
slots_vars, out = self.net()
files = ["test_queue_dataset_run_a.txt", "test_queue_dataset_run_b.txt"]
files = [self.filename1, self.filename2]
dataset = self.get_dataset(slots_vars, files)
exe = fluid.Executor(fluid.CPUPlace())
......@@ -853,7 +875,7 @@ class TestDatasetWithFetchHandler(unittest.TestCase):
Test Dataset With Fetch Handler. TestCases.
"""
slots_vars, out = self.net()
files = ["test_queue_dataset_run_a.txt", "test_queue_dataset_run_b.txt"]
files = [self.filename1, self.filename2]
dataset = self.get_dataset(slots_vars, files)
exe = fluid.Executor(fluid.CPUPlace())
......@@ -888,15 +910,20 @@ class TestDataset2(unittest.TestCase):
"""
Testcase for InMemoryDataset from create to run.
"""
+        temp_dir = tempfile.TemporaryDirectory()
+        filename1 = os.path.join(temp_dir.name,
+                                 "test_in_memory_dataset2_run_a.txt")
+        filename2 = os.path.join(temp_dir.name,
+                                 "test_in_memory_dataset2_run_b.txt")
self.skipTest("parameter server will add pslib UT later")
with open("test_in_memory_dataset2_run_a.txt", "w") as f:
with open(filename1, "w") as f:
data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
f.write(data)
with open("test_in_memory_dataset2_run_b.txt", "w") as f:
with open(filename2, "w") as f:
data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
......@@ -939,27 +966,29 @@ class TestDataset2(unittest.TestCase):
thread_num=3,
pipe_command="cat",
use_var=slots_vars)
-        dataset.set_filelist([
-            "test_in_memory_dataset2_run_a.txt",
-            "test_in_memory_dataset2_run_b.txt"
-        ])
+        dataset.set_filelist([filename1, filename2])
dataset.load_into_memory()
fleet._opt_info = None
fleet._fleet_ptr = None
os.remove("./test_in_memory_dataset2_run_a.txt")
os.remove("./test_in_memory_dataset2_run_b.txt")
temp_dir.cleanup()
def test_dataset_fleet2(self):
"""
Testcase for InMemoryDataset from create to run.
"""
with open("test_in_memory_dataset2_run2_a.txt", "w") as f:
temp_dir = tempfile.TemporaryDirectory()
filename1 = os.path.join(temp_dir.name,
"test_in_memory_dataset2_run2_a.txt")
filename2 = os.path.join(temp_dir.name,
"test_in_memory_dataset2_run2_b.txt")
with open(filename1, "w") as f:
data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
f.write(data)
with open("test_in_memory_dataset2_run2_b.txt", "w") as f:
with open(filename2, "w") as f:
data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
......@@ -1011,10 +1040,7 @@ class TestDataset2(unittest.TestCase):
thread_num=3,
pipe_command="cat",
use_var=slots_vars)
-        dataset.set_filelist([
-            "test_in_memory_dataset2_run2_a.txt",
-            "test_in_memory_dataset2_run2_b.txt"
-        ])
+        dataset.set_filelist([filename1, filename2])
dataset.load_into_memory()
try:
dataset.global_shuffle(fleet)
......@@ -1073,19 +1099,24 @@ class TestDataset2(unittest.TestCase):
except:
print("warning: catch expected error")
os.remove("./test_in_memory_dataset2_run2_a.txt")
os.remove("./test_in_memory_dataset2_run2_b.txt")
temp_dir.cleanup()
def test_bosps_dataset_fleet2(self):
"""
Testcase for InMemoryDataset from create to run.
"""
with open("test_in_memory_dataset2_run2_a.txt", "w") as f:
temp_dir = tempfile.TemporaryDirectory()
filename1 = os.path.join(temp_dir.name,
"test_in_memory_dataset2_run2_a.txt")
filename2 = os.path.join(temp_dir.name,
"test_in_memory_dataset2_run2_b.txt")
with open(filename1, "w") as f:
data = "1 1 2 3 3 4 5 5 5 5 1 1\n"
data += "1 2 2 3 4 4 6 6 6 6 1 2\n"
data += "1 3 2 3 5 4 7 7 7 7 1 3\n"
f.write(data)
with open("test_in_memory_dataset2_run2_b.txt", "w") as f:
with open(filename2, "w") as f:
data = "1 4 2 3 3 4 5 5 5 5 1 4\n"
data += "1 5 2 3 4 4 6 6 6 6 1 5\n"
data += "1 6 2 3 5 4 7 7 7 7 1 6\n"
......@@ -1137,10 +1168,7 @@ class TestDataset2(unittest.TestCase):
thread_num=3,
pipe_command="cat",
use_var=slots_vars)
-        dataset.set_filelist([
-            "test_in_memory_dataset2_run2_a.txt",
-            "test_in_memory_dataset2_run2_b.txt"
-        ])
+        dataset.set_filelist([filename1, filename2])
dataset.load_into_memory()
try:
dataset.global_shuffle(fleet)
......@@ -1190,6 +1218,7 @@ class TestDataset2(unittest.TestCase):
#dataset.get_pv_data_size()
dataset.get_memory_data_size()
dataset.get_shuffle_data_size()
+        temp_dir.cleanup()
if __name__ == '__main__':
......
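For the dataset tests above, the fixture files feeding set_filelist move into the temporary directory as well; since the dataset only ever sees the paths in the filelist, handing it absolute paths under temp_dir.name is sufficient. A minimal sketch of the fixture preparation (the slot data and filenames mirror the fixtures above; the dataset call is left as a comment because it needs the fluid graph set up first):

import os
import tempfile

temp_dir = tempfile.TemporaryDirectory()
filelist = []
for tag in ("a", "b"):
    # Each fixture file is created under the scratch directory.
    filename = os.path.join(temp_dir.name,
                            "test_queue_dataset_run_%s.txt" % tag)
    with open(filename, "w") as f:
        f.write("1 1 2 3 3 4 5 5 5 5 1 1\n")
    filelist.append(filename)

# dataset.set_filelist(filelist)   # consumes the absolute temp paths
temp_dir.cleanup()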
......@@ -18,6 +18,7 @@ import numpy as np
import six
import os
import unittest
+import tempfile
from simple_nets import simple_fc_net_with_inputs
BATCH_SIZE = 32
......@@ -27,8 +28,6 @@ EPOCH_NUM = 4
IMAGE_SHAPE = [2, 3]
LABEL_SHAPE = [1]
-ALL_WRITTEN_FILES = set()
def get_place_string(p):
if isinstance(p, (fluid.CPUPlace or fluid.CUDAPlace)):
......@@ -42,13 +41,7 @@ def get_place_string(p):
return 'CUDAPlace()'
-def remove_all_written_files():
-    for filename in ALL_WRITTEN_FILES:
-        os.remove(filename)
def write_reader_data_to_file(filename, reader):
-    ALL_WRITTEN_FILES.add(filename)
with open(filename, 'w') as fid:
for instance_list in reader():
for i, instance in enumerate(instance_list):
......@@ -81,10 +74,10 @@ class DatasetLoaderTestBase(unittest.TestCase):
def setUp(self):
self.dataset_name = "QueueDataset"
self.drop_last = False
+        self.temp_dir = tempfile.TemporaryDirectory()
def tearDown(self):
-        return
-        remove_all_written_files()
+        self.temp_dir.cleanup()
def build_network(self):
main_prog = fluid.Program()
......@@ -129,7 +122,8 @@ class DatasetLoaderTestBase(unittest.TestCase):
random_delta_batch_size = np.zeros(shape=[file_num])
for i in six.moves.range(file_num):
-            filename = 'dataset_test_{}.txt'.format(i)
+            filename = os.path.join(self.temp_dir.name,
+                                    'dataset_test_{}.txt'.format(i))
filelist.append(filename)
write_reader_data_to_file(
filename,
......@@ -214,6 +208,7 @@ class QueueDatasetTestWithoutDropLast(DatasetLoaderTestBase):
def setUp(self):
self.dataset_name = "QueueDataset"
self.drop_last = True
+        self.temp_dir = tempfile.TemporaryDirectory()
class InMemoryDatasetTestWithoutDropLast(DatasetLoaderTestBase):
......@@ -221,6 +216,7 @@ class InMemoryDatasetTestWithoutDropLast(DatasetLoaderTestBase):
def setUp(self):
self.dataset_name = "InMemoryDataset"
self.drop_last = False
+        self.temp_dir = tempfile.TemporaryDirectory()
class InMemoryDatasetTestWithDropLast(DatasetLoaderTestBase):
......@@ -228,6 +224,7 @@ class InMemoryDatasetTestWithDropLast(DatasetLoaderTestBase):
def setUp(self):
self.dataset_name = "InMemoryDataset"
self.drop_last = True
+        self.temp_dir = tempfile.TemporaryDirectory()
if __name__ == '__main__':
......
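Note that each subclass above overrides setUp() without calling the base implementation, so the TemporaryDirectory line has to be repeated in every override. A hypothetical variant that avoids the duplication (not what the commit does):

class InMemoryDatasetTestWithDropLast(DatasetLoaderTestBase):

    def setUp(self):
        # Reuse the base setUp, which already creates self.temp_dir.
        super(InMemoryDatasetTestWithDropLast, self).setUp()
        self.dataset_name = "InMemoryDataset"
        self.drop_last = True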
......@@ -12,10 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import os
import unittest
import numpy as np
import paddle
from test_nms_op import nms
+import tempfile
def _find(condition):
......@@ -79,6 +81,11 @@ class TestOpsNMS(unittest.TestCase):
self.devices = ['cpu']
if paddle.is_compiled_with_cuda():
self.devices.append('gpu')
+        self.temp_dir = tempfile.TemporaryDirectory()
+        self.path = os.path.join(self.temp_dir.name, './net')
+    def tearDown(self):
+        self.temp_dir.cleanup()
def test_nms(self):
for device in self.devices:
......@@ -169,7 +176,6 @@ class TestOpsNMS(unittest.TestCase):
categories, 10)
return out
path = "./net"
boxes = np.random.rand(64, 4).astype('float32')
boxes[:, 2] = boxes[:, 0] + boxes[:, 2]
boxes[:, 3] = boxes[:, 1] + boxes[:, 3]
......@@ -177,14 +183,14 @@ class TestOpsNMS(unittest.TestCase):
origin = fun(paddle.to_tensor(boxes))
paddle.jit.save(
fun,
-            path,
+            self.path,
input_spec=[
paddle.static.InputSpec(shape=[None, 4],
dtype='float32',
name='x')
],
)
-        load_func = paddle.jit.load(path)
+        load_func = paddle.jit.load(self.path)
res = load_func(paddle.to_tensor(boxes))
self.assertTrue(
np.array_equal(origin, res),
......
......@@ -20,7 +20,7 @@ import os
os.environ['FLAGS_cudnn_deterministic'] = '1'
import unittest
+import tempfile
import numpy as np
import paddle
......@@ -101,7 +101,9 @@ class TestHapiWithAmp(unittest.TestCase):
batch_size=64,
num_iters=2,
log_freq=1)
-        model.save('./lenet_amp')
+        temp_dir = tempfile.TemporaryDirectory()
+        lenet_amp_path = os.path.join(temp_dir.name, './lenet_amp')
+        model.save(lenet_amp_path)
with paddle.fluid.unique_name.guard():
paddle.seed(2021)
......@@ -119,7 +121,8 @@ class TestHapiWithAmp(unittest.TestCase):
model._scaler.state_dict()['incr_count']))
# equal after load
-        new_model.load('./lenet_amp')
+        new_model.load(lenet_amp_path)
+        temp_dir.cleanup()
self.assertEqual(new_model._scaler.state_dict()['incr_count'],
model._scaler.state_dict()['incr_count'])
self.assertEqual(new_model._scaler.state_dict()['decr_count'],
......
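hapi Model checkpoints behave the same way: model.save(prefix) writes several files next to the prefix and load() takes the same prefix back. A standalone sketch of that round trip through a temporary directory (LeNet here is just a convenient built-in; the test above uses its own AMP-enabled model):

import os
import tempfile

import paddle
from paddle.vision.models import LeNet

temp_dir = tempfile.TemporaryDirectory()
ckpt_path = os.path.join(temp_dir.name, "lenet_ckpt")

# Save a prepared model; the checkpoint files land in the temp directory.
model = paddle.Model(LeNet())
model.prepare(paddle.optimizer.Adam(parameters=model.parameters()),
              paddle.nn.CrossEntropyLoss())
model.save(ckpt_path)

# Reload into a fresh model from the same prefix.
new_model = paddle.Model(LeNet())
new_model.prepare(paddle.optimizer.Adam(parameters=new_model.parameters()),
                  paddle.nn.CrossEntropyLoss())
new_model.load(ckpt_path)
temp_dir.cleanup()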
......@@ -16,6 +16,7 @@ import os
import cv2
import shutil
import unittest
+import tempfile
import numpy as np
import paddle
......@@ -26,23 +27,25 @@ class TestReadFile(unittest.TestCase):
def setUp(self):
fake_img = (np.random.random((400, 300, 3)) * 255).astype('uint8')
-        cv2.imwrite('fake.jpg', fake_img)
+        self.temp_dir = tempfile.TemporaryDirectory()
+        self.img_path = os.path.join(self.temp_dir.name, 'fake.jpg')
+        cv2.imwrite(self.img_path, fake_img)
def tearDown(self):
-        os.remove('fake.jpg')
+        self.temp_dir.cleanup()
def read_file_decode_jpeg(self):
if not paddle.is_compiled_with_cuda():
return
-        img_bytes = read_file('fake.jpg')
+        img_bytes = read_file(self.img_path)
img = decode_jpeg(img_bytes, mode='gray')
img = decode_jpeg(img_bytes, mode='rgb')
img = decode_jpeg(img_bytes)
-        img_cv2 = cv2.imread('fake.jpg')
+        img_cv2 = cv2.imread(self.img_path)
if paddle.in_dynamic_mode():
np.testing.assert_equal(img.shape, img_cv2.transpose(2, 0, 1).shape)
else:
......
......@@ -926,7 +926,8 @@ class TestFunctional(unittest.TestCase):
fake_img = Image.fromarray((np.random.random(
(32, 32, 3)) * 255).astype('uint8'))
-        path = 'temp.jpg'
+        temp_dir = tempfile.TemporaryDirectory()
+        path = os.path.join(temp_dir.name, 'temp.jpg')
fake_img.save(path)
set_image_backend('pil')
......@@ -939,7 +940,7 @@ class TestFunctional(unittest.TestCase):
np_img = image_load(path)
-        os.remove(path)
+        temp_dir.cleanup()
def test_affine(self):
np_img = (np.random.rand(32, 26, 3) * 255).astype('uint8')
......