Unverified Commit 9ee24688 authored by Cleber Rosa

Merge remote-tracking branch 'ldoktor/multiple-tmpdirs5'

Signed-off-by: Cleber Rosa <crosa@redhat.com>
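
This merge reworks the Job lifecycle: setup() now creates the per-job results directory, logging handlers and temporary directory, run() asserts that setup() was called, and cleanup() stops job logging and removes the tmpdir; __enter__/__exit__ wire these into a context manager, as used by TestProgram and the Run plugin in the diff below. The following is a minimal usage sketch, not part of this commit; the reference list and base_logdir values are placeholders standing in for any test references and an existing writable directory, mirroring the argparse.Namespace usage visible in the unit tests further down.

```python
# Minimal usage sketch of the Job lifecycle introduced by this merge
# (not part of the commit). The reference and base_logdir values are
# placeholders; any test references and an existing writable directory work.
import argparse

from avocado.core import job

args = argparse.Namespace(reference=['/bin/true'],
                          base_logdir='/tmp/avocado-logs')

# Preferred form: __enter__() calls setup() and __exit__() calls cleanup(),
# so the per-job tmpdir is always removed, even on errors.
with job.Job(args) as my_job:
    exit_status = my_job.run()

# Equivalent explicit form, as used by the unit tests in this diff:
my_job = job.Job(args)
my_job.setup()                   # creates logdir, result, log handlers, tmpdir
try:
    exit_status = my_job.run()   # asserts that setup() was called first
finally:
    my_job.cleanup()             # stops job logging and removes the tmpdir
```
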
......@@ -85,9 +85,6 @@ class Job(object):
if unique_id is None:
self.args.unique_job_id = "0" * 40
self.args.sysinfo = False
base_logdir = getattr(self.args, "base_logdir", None)
if base_logdir is None:
self.args.base_logdir = tempfile.mkdtemp(prefix="avocado-dry-run-")
unique_id = getattr(self.args, 'unique_job_id', None)
if unique_id is None:
......@@ -97,7 +94,9 @@ class Job(object):
#: directory. If it's set to None, it means that the job results
#: directory has not yet been created.
self.logdir = None
self._setup_job_results()
self.logfile = None
self.tmpdir = None
self.__remove_tmpdir = False
raw_log_level = settings.get_value('job.output', 'loglevel',
default='debug')
mapping = {'info': logging.INFO,
......@@ -111,7 +110,7 @@ class Job(object):
self.loglevel = logging.DEBUG
self.status = "RUNNING"
self.result = result.Result(self)
self.result = None
self.sysinfo = None
self.timeout = getattr(self.args, 'job_timeout', 0)
#: The time at which the job has started or `-1` if it has not been
......@@ -124,7 +123,6 @@ class Job(object):
#: or `-1` if it has not been started by means of the `run()` method
self.time_elapsed = -1
self.__logging_handlers = {}
self.__start_job_logging()
self.funcatexit = data_structures.CallbackRegister("JobExit %s"
% self.unique_id,
LOG_JOB)
......@@ -145,15 +143,33 @@ class Job(object):
self._result_events_dispatcher = dispatcher.ResultEventsDispatcher(self.args)
output.log_plugin_failures(self._result_events_dispatcher.load_failures)
# Checking whether we will keep the Job tmp_dir or not.
# If yes, we set the basedir for a stable location.
basedir = None
keep_tmp = getattr(self.args, "keep_tmp", None)
if keep_tmp == 'on':
basedir = self.logdir
# Calling get_tmp_dir() early as the basedir will be set
# in the first call.
data_dir.get_tmp_dir(basedir)
def __enter__(self):
self.setup()
return self
def __exit__(self, _exc_type, _exc_value, _traceback):
self.cleanup()
def setup(self):
"""
Setup the temporary job handlers (dirs, global setting, ...)
"""
assert self.tmpdir is None, "Job.setup() already called"
if getattr(self.args, "dry_run", False): # Create the dry-run dirs
base_logdir = getattr(self.args, "base_logdir", None)
if base_logdir is None:
self.args.base_logdir = tempfile.mkdtemp(prefix="avocado-dry-run-")
self._setup_job_results()
self.result = result.Result(self)
self.__start_job_logging()
# Use "logdir" in case "keep_tmp" is set enabled
if getattr(self.args, "keep_tmp", None) == "on":
base_tmpdir = self.logdir
else:
base_tmpdir = data_dir.get_tmp_dir()
self.__remove_tmpdir = True
self.tmpdir = tempfile.mkdtemp(prefix="avocado_job_",
dir=base_tmpdir)
def _setup_job_results(self):
"""
......@@ -400,9 +416,8 @@ class Job(object):
for line in lines.splitlines():
LOG_JOB.info(line)
@staticmethod
def _log_tmp_dir():
LOG_JOB.info('Temporary dir: %s', data_dir.get_tmp_dir())
def _log_tmp_dir(self):
LOG_JOB.info('Temporary dir: %s', self.tmpdir)
LOG_JOB.info('')
def _log_job_debug_info(self, variants):
......@@ -512,6 +527,7 @@ class Job(object):
:return: Integer with overall job status. See
:mod:`avocado.core.exit_codes` for more information.
"""
assert self.tmpdir is not None, "Job.setup() not called"
if self.time_start == -1:
self.time_start = time.time()
runtime.CURRENT_JOB = self
......@@ -549,7 +565,14 @@ class Job(object):
if self.time_end == -1:
self.time_end = time.time()
self.time_elapsed = self.time_end - self.time_start
self.__stop_job_logging()
def cleanup(self):
"""
Cleanup the temporary job handlers (dirs, global setting, ...)
"""
self.__stop_job_logging()
if self.__remove_tmpdir and os.path.exists(self.tmpdir):
shutil.rmtree(self.tmpdir)
class TestProgram(object):
......@@ -591,10 +614,10 @@ class TestProgram(object):
self.args.standalone = True
self.args.show = ["test"]
output.reconfigure(self.args)
self.job = Job(self.args)
exit_status = self.job.run()
if self.args.remove_test_results is True:
shutil.rmtree(self.job.logdir)
with Job(self.args) as self.job:
exit_status = self.job.run()
if self.args.remove_test_results is True:
shutil.rmtree(self.job.logdir)
sys.exit(exit_status)
def __del__(self):
......
......@@ -25,6 +25,7 @@ import pipes
import re
import shutil
import sys
import tempfile
import time
import unittest
......@@ -394,11 +395,14 @@ class Test(unittest.TestCase, TestData):
self.__runner_queue = runner_queue
self.__workdir = os.path.join(data_dir.get_tmp_dir(),
base_tmpdir = getattr(job, "tmpdir", None)
# When tmpdir not specified by job, use logdir to preserve all data
if base_tmpdir is None:
base_tmpdir = tempfile.mkdtemp(prefix="tmp_dir", dir=self.logdir)
self.__workdir = os.path.join(base_tmpdir,
self.name.str_filesystem)
self.__srcdir_internal_access = False
self.__srcdir_warning_logged = False
self.__srcdir = None
self.__srcdir = utils_path.init_dir(self.__workdir, 'src')
self.log.debug("Test metadata:")
if self.filename:
......@@ -540,14 +544,12 @@ class Test(unittest.TestCase, TestData):
This property is deprecated and will be removed in the future.
The :meth:`workdir` function should be used instead.
"""
if not (self.__srcdir_internal_access or self.__srcdir_warning_logged):
if not self.__srcdir_warning_logged:
LOG_JOB.warn("DEPRECATION NOTICE: the test's \"srcdir\" property "
"is deprecated and is planned to be removed no later "
"than May 11 2018. Please use the \"workdir\" "
"property instead.")
self.__srcdir_warning_logged = True
if self.__srcdir is None:
self.__srcdir = utils_path.init_dir(self.workdir, 'src')
return self.__srcdir
@property
......@@ -971,14 +973,7 @@ class Test(unittest.TestCase, TestData):
os.environ['AVOCADO_TEST_OUTPUTDIR'] = self.outputdir
if self.__sysinfo_enabled:
os.environ['AVOCADO_TEST_SYSINFODIR'] = self.__sysinfodir
# srcdir is deprecated and will cause a test warning when
# accessed. It seems unfair to return a warning for all
# tests because Avocado itself will access that property.
# this is a hack to be removed when srcdir is also removed
# for good.
self.__srcdir_internal_access = True
os.environ['AVOCADO_TEST_SRCDIR'] = self.srcdir
self.__srcdir_internal_access = False
os.environ['AVOCADO_TEST_SRCDIR'] = self.__srcdir
def run_avocado(self):
"""
......
......@@ -207,17 +207,17 @@ class Run(CLICmd):
except ValueError as e:
LOG_UI.error(e.args[0])
sys.exit(exit_codes.AVOCADO_FAIL)
job_instance = job.Job(args)
pre_post_dispatcher = JobPrePostDispatcher()
try:
# Run JobPre plugins
output.log_plugin_failures(pre_post_dispatcher.load_failures)
pre_post_dispatcher.map_method('pre', job_instance)
job_run = job_instance.run()
finally:
# Run JobPost plugins
pre_post_dispatcher.map_method('post', job_instance)
with job.Job(args) as job_instance:
pre_post_dispatcher = JobPrePostDispatcher()
try:
# Run JobPre plugins
output.log_plugin_failures(pre_post_dispatcher.load_failures)
pre_post_dispatcher.map_method('pre', job_instance)
job_run = job_instance.run()
finally:
# Run JobPost plugins
pre_post_dispatcher.map_method('post', job_instance)
result_dispatcher = ResultDispatcher()
if result_dispatcher.extensions:
......
......@@ -64,8 +64,10 @@ class RemoteTestRunnerTest(unittest.TestCase):
'/tests/other/test',
'passtest.py'])
job = None
try:
job = Job(job_args)
job.setup()
runner = avocado_runner_remote.RemoteTestRunner(job, job.result)
return_value = (True, (version.MAJOR, version.MINOR))
runner.check_remote_avocado = mock.Mock(return_value=return_value)
......@@ -102,7 +104,8 @@ class RemoteTestRunnerTest(unittest.TestCase):
ignore_status=True,
timeout=61)
finally:
shutil.rmtree(job.args.base_logdir)
if job:
shutil.rmtree(job.args.base_logdir)
if __name__ == '__main__':
......
......@@ -50,8 +50,10 @@ class VMTestRunnerSetup(unittest.TestCase):
'passtest.py'],
dry_run=True,
env_keep=None)
job = None
try:
job = Job(job_args)
job.setup()
with mock.patch('avocado_runner_vm.vm_connect',
return_value=mock_vm):
# VMTestRunner()
......@@ -63,7 +65,8 @@ class VMTestRunnerSetup(unittest.TestCase):
mock_vm.stop.assert_called_once_with()
mock_vm.restore_snapshot.assert_called_once_with()
finally:
shutil.rmtree(job.args.base_logdir)
if job:
shutil.rmtree(job.args.base_logdir)
if __name__ == '__main__':
......
......@@ -20,6 +20,7 @@ from avocado.utils import path as utils_path
class JobTest(unittest.TestCase):
def setUp(self):
self.job = None
data_dir._tmp_tracker.unittest_refresh_dir_tracker()
self.tmpdir = tempfile.mkdtemp(prefix="avocado_" + __name__)
......@@ -37,32 +38,77 @@ class JobTest(unittest.TestCase):
def test_job_empty_suite(self):
args = argparse.Namespace(base_logdir=self.tmpdir)
empty_job = job.Job(args)
self.assertIsNone(empty_job.test_suite)
self.job = job.Job(args)
# Job without setup called
self.assertIsNone(self.job.logdir)
self.assertIsNone(self.job.logfile)
self.assertIsNone(self.job.replay_sourcejob)
self.assertIsNone(self.job.result)
self.assertIsNone(self.job.sysinfo)
self.assertIsNone(self.job.test_runner)
self.assertIsNone(self.job.test_suite)
self.assertIsNone(self.job.tmpdir)
self.assertFalse(self.job._Job__remove_tmpdir)
self.assertEquals(self.job.args, args)
self.assertEquals(self.job.exitcode, exit_codes.AVOCADO_ALL_OK)
self.assertEquals(self.job.references, [])
self.assertEquals(self.job.status, "RUNNING")
uid = self.job.unique_id
# Job with setup called
self.job.setup()
self.assertIsNotNone(self.job.logdir)
self.assertIsNotNone(self.job.logfile)
self.assertIsNotNone(self.job.result)
self.assertIsNotNone(self.job.tmpdir)
self.assertTrue(self.job._Job__remove_tmpdir)
self.assertEquals(uid, self.job.unique_id)
self.assertEquals(self.job.status, "RUNNING")
# Calling setup twice
self.assertRaises(AssertionError, self.job.setup)
# Job with cleanup called
self.job.cleanup()
def test_job_empty_has_id(self):
args = argparse.Namespace(base_logdir=self.tmpdir)
empty_job = job.Job(args)
self.assertIsNotNone(empty_job.unique_id)
self.job = job.Job(args)
self.assertIsNotNone(self.job.unique_id)
def test_two_jobs(self):
args = argparse.Namespace(base_logdir=self.tmpdir)
with job.Job(args) as self.job, job.Job(args) as job2:
job1 = self.job
# uids, logdirs and tmpdirs must be different
self.assertNotEqual(job1.unique_id, job2.unique_id)
self.assertNotEqual(job1.logdir, job2.logdir)
self.assertNotEqual(job1.tmpdir, job2.tmpdir)
# tmpdirs should share the same base-dir per process
self.assertEqual(os.path.dirname(job1.tmpdir), os.path.dirname(job2.tmpdir))
# due to args logdirs should share the same base-dir
self.assertEqual(os.path.dirname(job1.logdir), os.path.dirname(job2.logdir))
def test_job_test_suite_not_created(self):
args = argparse.Namespace(base_logdir=self.tmpdir)
myjob = job.Job(args)
self.assertIsNone(myjob.test_suite)
self.job = job.Job(args)
self.assertIsNone(self.job.test_suite)
def test_job_create_test_suite_empty(self):
args = argparse.Namespace(base_logdir=self.tmpdir)
myjob = job.Job(args)
self.job = job.Job(args)
self.job.setup()
self.assertRaises(exceptions.OptionValidationError,
myjob.create_test_suite)
self.job.create_test_suite)
def test_job_create_test_suite_simple(self):
simple_tests_found = self._find_simple_test_candidates()
args = argparse.Namespace(reference=simple_tests_found,
base_logdir=self.tmpdir)
myjob = job.Job(args)
myjob.create_test_suite()
self.assertEqual(len(simple_tests_found), len(myjob.test_suite))
self.job = job.Job(args)
self.job.setup()
self.job.create_test_suite()
self.assertEqual(len(simple_tests_found), len(self.job.test_suite))
def test_job_pre_tests(self):
class JobFilterTime(job.Job):
......@@ -77,21 +123,23 @@ class JobTest(unittest.TestCase):
simple_tests_found = self._find_simple_test_candidates()
args = argparse.Namespace(reference=simple_tests_found,
base_logdir=self.tmpdir)
myjob = JobFilterTime(args)
myjob.create_test_suite()
self.job = JobFilterTime(args)
self.job.setup()
self.job.create_test_suite()
try:
myjob.pre_tests()
self.job.pre_tests()
finally:
myjob.post_tests()
self.assertLessEqual(len(myjob.test_suite), 1)
self.job.post_tests()
self.assertLessEqual(len(self.job.test_suite), 1)
def test_job_run_tests(self):
simple_tests_found = self._find_simple_test_candidates(['true'])
args = argparse.Namespace(reference=simple_tests_found,
base_logdir=self.tmpdir)
myjob = job.Job(args)
myjob.create_test_suite()
self.assertEqual(myjob.run_tests(),
self.job = job.Job(args)
self.job.setup()
self.job.create_test_suite()
self.assertEqual(self.job.run_tests(),
exit_codes.AVOCADO_ALL_OK)
def test_job_post_tests(self):
......@@ -103,15 +151,16 @@ class JobTest(unittest.TestCase):
simple_tests_found = self._find_simple_test_candidates()
args = argparse.Namespace(reference=simple_tests_found,
base_logdir=self.tmpdir)
myjob = JobLogPost(args)
myjob.create_test_suite()
self.job = JobLogPost(args)
self.job.setup()
self.job.create_test_suite()
try:
myjob.pre_tests()
myjob.run_tests()
self.job.pre_tests()
self.job.run_tests()
finally:
myjob.post_tests()
with open(os.path.join(myjob.logdir, "reversed_id")) as reverse_id_file:
self.assertEqual(myjob.unique_id[::-1],
self.job.post_tests()
with open(os.path.join(self.job.logdir, "reversed_id")) as reverse_id_file:
self.assertEqual(self.job.unique_id[::-1],
reverse_id_file.read())
def test_job_run(self):
......@@ -132,61 +181,69 @@ class JobTest(unittest.TestCase):
simple_tests_found = self._find_simple_test_candidates()
args = argparse.Namespace(reference=simple_tests_found,
base_logdir=self.tmpdir)
myjob = JobFilterLog(args)
self.assertEqual(myjob.run(),
self.job = JobFilterLog(args)
self.job.setup()
self.assertEqual(self.job.run(),
exit_codes.AVOCADO_ALL_OK)
self.assertLessEqual(len(myjob.test_suite), 1)
with open(os.path.join(myjob.logdir, "reversed_id")) as reverse_id_file:
self.assertEqual(myjob.unique_id[::-1],
self.assertLessEqual(len(self.job.test_suite), 1)
with open(os.path.join(self.job.logdir, "reversed_id")) as reverse_id_file:
self.assertEqual(self.job.unique_id[::-1],
reverse_id_file.read())
def test_job_run_account_time(self):
args = argparse.Namespace(base_logdir=self.tmpdir)
myjob = job.Job(args)
myjob.run()
self.assertNotEqual(myjob.time_start, -1)
self.assertNotEqual(myjob.time_end, -1)
self.assertNotEqual(myjob.time_elapsed, -1)
self.job = job.Job(args)
self.job.setup()
self.job.run()
self.assertNotEqual(self.job.time_start, -1)
self.assertNotEqual(self.job.time_end, -1)
self.assertNotEqual(self.job.time_elapsed, -1)
def test_job_self_account_time(self):
args = argparse.Namespace(base_logdir=self.tmpdir)
myjob = job.Job(args)
myjob.time_start = 10.0
myjob.run()
myjob.time_end = 20.0
self.job = job.Job(args)
self.job.setup()
self.job.time_start = 10.0
self.job.run()
self.job.time_end = 20.0
# forcing a different value to check if it's not being
# calculated when time_start or time_end are manually set
myjob.time_elapsed = 100.0
self.assertEqual(myjob.time_start, 10.0)
self.assertEqual(myjob.time_end, 20.0)
self.assertEqual(myjob.time_elapsed, 100.0)
self.job.time_elapsed = 100.0
self.assertEqual(self.job.time_start, 10.0)
self.assertEqual(self.job.time_end, 20.0)
self.assertEqual(self.job.time_elapsed, 100.0)
def test_job_dryrun_no_unique_job_id(self):
args = argparse.Namespace(dry_run=True, base_logdir=self.tmpdir)
empty_job = job.Job(args)
self.assertIsNotNone(empty_job.args.unique_job_id)
self.job = job.Job(args)
self.job.setup()
self.assertIsNotNone(self.job.args.unique_job_id)
def test_job_no_base_logdir(self):
args = argparse.Namespace()
with mock.patch('avocado.core.job.data_dir.get_logs_dir',
return_value=self.tmpdir):
empty_job = job.Job(args)
self.assertTrue(os.path.isdir(empty_job.logdir))
self.assertEqual(os.path.dirname(empty_job.logdir), self.tmpdir)
self.assertTrue(os.path.isfile(os.path.join(empty_job.logdir, 'id')))
self.job = job.Job(args)
self.job.setup()
self.assertTrue(os.path.isdir(self.job.logdir))
self.assertEqual(os.path.dirname(self.job.logdir), self.tmpdir)
self.assertTrue(os.path.isfile(os.path.join(self.job.logdir, 'id')))
def test_job_dryrun_no_base_logdir(self):
args = argparse.Namespace(dry_run=True)
empty_job = job.Job(args)
self.job = job.Job(args)
self.job.setup()
try:
self.assertTrue(os.path.isdir(empty_job.logdir))
self.assertTrue(os.path.isfile(os.path.join(empty_job.logdir, 'id')))
self.assertTrue(os.path.isdir(self.job.logdir))
self.assertTrue(os.path.isfile(os.path.join(self.job.logdir, 'id')))
finally:
shutil.rmtree(empty_job.args.base_logdir)
shutil.rmtree(self.job.args.base_logdir)
def tearDown(self):
data_dir._tmp_tracker.unittest_refresh_dir_tracker()
shutil.rmtree(self.tmpdir)
if self.job is not None:
self.job.cleanup()
if __name__ == '__main__':
......