提交 08b22ed0 编写于 作者: A Amador Pahim

avocado.core.jobdata cleanup and refactoring

This patch makes the avocado.core.jobdata code more clean and
efficient.
Signed-off-by: Amador Pahim <apahim@redhat.com>
上级 b3e5d548
......@@ -12,127 +12,147 @@
# Copyright: Red Hat Inc. 2016
# Author: Amador Pahim <apahim@redhat.com>
"""
Record/retrieve job information
"""
import ast
import glob
import json
import os
import pickle
import sys
from . import exit_codes
from .test import ReplaySkipTest
from .settings import settings
from ..utils import path
from ..utils.path import init_dir
"""
Record/retrieve job information for job replay
"""
JOB_DATA_DIR = 'replay'
CONFIG_FILENAME = 'config'
URLS_FILENAME = 'urls'
MUX_FILENAME = 'multiplex'
PWD_FILENAME = 'pwd'
ARGS_FILENAME = 'args'
CMDLINE_FILENAME = 'cmdline'
def record(args, logdir, mux, urls=None, cmdline=None):
    """
    Records all required job information to the job data directory.

    :param args: job arguments namespace; its __dict__ is pickled.
    :param logdir: path to the job results directory.
    :param mux: multiplexer object to be pickled.
    :param urls: list of test urls executed by the job (optional).
    :param cmdline: command line used to start the job (optional).
    """
    base_dir = init_dir(logdir, JOB_DATA_DIR)
    path_cfg = os.path.join(base_dir, CONFIG_FILENAME)
    path_urls = os.path.join(base_dir, URLS_FILENAME)
    path_mux = os.path.join(base_dir, MUX_FILENAME)
    path_pwd = os.path.join(base_dir, PWD_FILENAME)
    path_args = os.path.join(base_dir, ARGS_FILENAME)
    path_cmdline = os.path.join(base_dir, CMDLINE_FILENAME)

    # urls are optional; the other resources are always recorded.
    if urls:
        with open(path_urls, 'w') as urls_file:
            urls_file.write('%s' % urls)

    with open(path_cfg, 'w') as config_file:
        settings.config.write(config_file)

    # NOTE(review): files are opened in text mode for pickling, which is
    # only safe on Python 2 — confirm before porting to Python 3.
    with open(path_mux, 'w') as mux_file:
        pickle.dump(mux, mux_file, pickle.HIGHEST_PROTOCOL)

    with open(path_pwd, 'w') as pwd_file:
        pwd_file.write('%s' % os.getcwd())

    with open(path_args, 'w') as args_file:
        pickle.dump(args.__dict__, args_file, pickle.HIGHEST_PROTOCOL)

    with open(path_cmdline, 'w') as cmdline_file:
        cmdline_file.write('%s' % cmdline)
def _retrieve(resultsdir, resource):
    """
    Returns the path of a recorded job data resource.

    :param resultsdir: path to the job results directory.
    :param resource: file name of the resource inside the job data dir.
    :return: the full path to the resource, or None when the resource
             was not recorded.
    """
    path = os.path.join(resultsdir, JOB_DATA_DIR, resource)
    if not os.path.exists(path):
        return None
    return path
def retrieve_pwd(resultsdir):
    """
    Retrieves the recorded job pwd from the results directory.

    :param resultsdir: path to the job results directory.
    :return: the recorded working directory as a string, or None when
             not recorded.
    """
    recorded_pwd = _retrieve(resultsdir, PWD_FILENAME)
    if recorded_pwd is None:
        return None
    with open(recorded_pwd, 'r') as pwd_file:
        return pwd_file.read()
def retrieve_urls(resultsdir):
    """
    Retrieves the recorded job urls from the results directory.

    :param resultsdir: path to the job results directory.
    :return: the urls rebuilt as a Python object, or None when not
             recorded.
    """
    recorded_urls = _retrieve(resultsdir, URLS_FILENAME)
    if recorded_urls is None:
        return None
    with open(recorded_urls, 'r') as urls_file:
        # The urls were written with str() formatting by record(), so
        # rebuild the original Python object safely with literal_eval.
        return ast.literal_eval(urls_file.read())
def retrieve_mux(resultsdir):
    """
    Retrieves the recorded job multiplexer object from the results
    directory.

    :param resultsdir: path to the job results directory.
    :return: the unpickled multiplexer object, or None when not
             recorded.
    """
    recorded_mux = _retrieve(resultsdir, MUX_FILENAME)
    if recorded_mux is None:
        return None
    # NOTE(review): unpickling executes arbitrary code; this assumes the
    # file was locally recorded by a trusted job run.
    with open(recorded_mux, 'r') as mux_file:
        return pickle.load(mux_file)
def retrieve_args(resultsdir):
    """
    Retrieves the recorded job args from the results directory.

    The former retrieve_replay_map() logic is not a jobdata concern and
    lives in the replay plugin as _create_replay_map().

    :param resultsdir: path to the job results directory.
    :return: the unpickled args dictionary, or None when not recorded.
    """
    recorded_args = _retrieve(resultsdir, ARGS_FILENAME)
    if recorded_args is None:
        return None
    # NOTE(review): unpickling executes arbitrary code; this assumes the
    # file was locally recorded by a trusted job run.
    with open(recorded_args, 'r') as args_file:
        return pickle.load(args_file)
def retrieve_config(resultsdir):
    """
    Retrieves the path to the recorded job settings from the results
    directory.

    :param resultsdir: path to the job results directory.
    :return: path to the recorded config file, or None when not
             recorded.
    """
    # _retrieve() already returns None when the resource is missing, so
    # the previous "if ... is None: return None" branch was redundant —
    # both paths returned the same value.
    return _retrieve(resultsdir, CONFIG_FILENAME)
def retrieve_cmdline(resultsdir):
    """
    Retrieves the recorded job command line from the results directory.

    :param resultsdir: path to the job results directory.
    :return: the command line rebuilt as a Python object, or None when
             not recorded.
    """
    recorded_cmdline = _retrieve(resultsdir, CMDLINE_FILENAME)
    if recorded_cmdline is None:
        return None
    with open(recorded_cmdline, 'r') as cmdline_file:
        # Written with str() formatting by record(); rebuild the
        # original Python object safely with literal_eval.
        return ast.literal_eval(cmdline_file.read())
def get_resultsdir(logdir, jobid):
if jobid == 'latest':
"""
Gets the job results directory using a Job ID.
"""
if os.path.isdir(jobid):
return os.path.expanduser(jobid)
elif os.path.isfile(jobid):
return os.path.dirname(os.path.expanduser(jobid))
elif jobid == 'latest':
try:
actual_dir = os.readlink(os.path.join(logdir, 'latest'))
return os.path.join(logdir, actual_dir)
except:
except IOError:
return None
matches = 0
......@@ -157,14 +177,20 @@ def get_resultsdir(logdir, jobid):
def get_id(path, jobid):
if jobid == 'latest':
"""
Gets the full Job ID using the results directory path and a partial
Job ID or the string 'latest'.
"""
if os.path.isdir(jobid) or os.path.isfile(jobid):
jobid = ''
elif jobid == 'latest':
jobid = os.path.basename(os.path.dirname(path))[-7:]
if not os.path.exists(path):
return None
with open(path, 'r') as f:
content = f.read().strip('\n')
with open(path, 'r') as jobid_file:
content = jobid_file.read().strip('\n')
if content.startswith(jobid):
return content
else:
......
......@@ -13,6 +13,7 @@
# Author: Amador Pahim <apahim@redhat.com>
import argparse
import json
import logging
import os
import sys
......@@ -20,8 +21,10 @@ import sys
from avocado.core import exit_codes
from avocado.core import jobdata
from avocado.core import status
from avocado.core.plugin_interfaces import CLI
from avocado.core.settings import settings
from avocado.core.test import ReplaySkipTest
class Replay(CLI):
......@@ -88,9 +91,32 @@ class Replay(CLI):
return ignore_list
def load_config(self, resultsdir):
    """
    Loads the recorded job settings, when available.

    :param resultsdir: path to the replayed job results directory.
    """
    # jobdata.retrieve_config() returns None when the job did not
    # record a config file; in that case there is nothing to load.
    config = jobdata.retrieve_config(resultsdir)
    if config is not None:
        settings.process_config_path(config)
def _create_replay_map(self, resultsdir, replay_filter):
    """
    Creates a mapping to be used as filter for the replay. Given the
    replay_filter, tests that should be filtered out get a
    correspondent ReplaySkipTest class in the map, while tests that
    should be replayed get a correspondent None.
    """
    results_path = os.path.join(resultsdir, "results.json")
    if not os.path.exists(results_path):
        return None

    with open(results_path, 'r') as results_file:
        results = json.load(results_file)

    return [None if test['status'] in replay_filter else ReplaySkipTest
            for test in results['tests']]
def run(self, args):
if getattr(args, 'replay_jobid', None) is None:
......@@ -190,7 +216,7 @@ class Replay(CLI):
setattr(args, "multiplex_files", mux)
if args.replay_teststatus:
replay_map = jobdata.retrieve_replay_map(resultsdir,
replay_map = self._create_replay_map(resultsdir,
args.replay_teststatus)
setattr(args, 'replay_map', replay_map)
......
......@@ -44,6 +44,9 @@ class ReplayTests(unittest.TestCase):
return result
def test_run_replay_noid(self):
"""
Runs a replay job with an invalid jobid.
"""
cmd_line = ('./scripts/avocado run --replay %s '
'--job-results-dir %s --replay-data-dir %s --sysinfo=off' %
('foo', self.tmpdir, self.jobdir))
......@@ -51,19 +54,39 @@ class ReplayTests(unittest.TestCase):
self.run_and_check(cmd_line, expected_rc)
def test_run_replay_latest(self):
    """
    Runs a replay job using the 'latest' keyword.
    """
    dirs = (self.tmpdir, self.jobdir)
    cmd_line = ('./scripts/avocado run --replay latest '
                '--job-results-dir %s --replay-data-dir %s --sysinfo=off' %
                dirs)
    self.run_and_check(cmd_line, exit_codes.AVOCADO_ALL_OK)
def test_run_replay_jobdir(self):
    """
    Runs a replay job pointing the --job-results-dir without
    the --replay-data-dir.
    """
    run_cmd = ('./scripts/avocado run --replay %s '
               '--job-results-dir %s --sysinfo=off' %
               (self.jobdir, self.tmpdir))
    self.run_and_check(run_cmd, exit_codes.AVOCADO_ALL_OK)
def test_run_replay_data(self):
    """
    Checks if all expected job data files were recorded by the job,
    including the 'cmdline' file introduced with the replay support.
    """
    file_list = ['multiplex', 'config', 'urls', 'pwd', 'args', 'cmdline']
    for filename in file_list:
        path = os.path.join(self.jobdir, 'replay', filename)
        self.assertTrue(glob.glob(path))
def test_run_replay(self):
"""
Runs a replay job.
"""
cmd_line = ('./scripts/avocado run --replay %s '
'--job-results-dir %s --replay-data-dir %s --sysinfo=off'
% (self.jobid, self.tmpdir, self.jobdir))
......@@ -71,6 +94,9 @@ class ReplayTests(unittest.TestCase):
self.run_and_check(cmd_line, expected_rc)
def test_run_replay_partialid(self):
"""
Runs a replay job with a partial jobid.
"""
partial_id = self.jobid[:5]
cmd_line = ('./scripts/avocado run --replay %s '
'--job-results-dir %s --replay-data-dir %s --sysinfo=off'
......@@ -79,6 +105,9 @@ class ReplayTests(unittest.TestCase):
self.run_and_check(cmd_line, expected_rc)
def test_run_replay_invalidignore(self):
"""
Runs a replay job with an invalid option for '--replay-ignore'
"""
cmd_line = ('./scripts/avocado run --replay %s --replay-ignore foo'
'--job-results-dir %s --replay-data-dir %s --sysinfo=off'
% (self.jobid, self.tmpdir, self.jobdir))
......@@ -89,6 +118,9 @@ class ReplayTests(unittest.TestCase):
self.assertIn(msg, result.stderr)
def test_run_replay_ignoremux(self):
"""
Runs a replay job ignoring the mux.
"""
cmd_line = ('./scripts/avocado run --replay %s --replay-ignore mux '
'--job-results-dir %s --replay-data-dir %s --sysinfo=off'
% (self.jobid, self.tmpdir, self.jobdir))
......@@ -98,6 +130,9 @@ class ReplayTests(unittest.TestCase):
self.assertIn(msg, result.stderr)
def test_run_replay_invalidstatus(self):
"""
Runs a replay job with an invalid option for '--replay-test-status'
"""
cmd_line = ('./scripts/avocado run --replay %s --replay-test-status E '
'--job-results-dir %s --replay-data-dir %s --sysinfo=off'
% (self.jobid, self.tmpdir, self.jobdir))
......@@ -108,6 +143,9 @@ class ReplayTests(unittest.TestCase):
self.assertIn(msg, result.stderr)
def test_run_replay_statusfail(self):
"""
Runs a replay job only with tests that failed.
"""
cmd_line = ('./scripts/avocado run --replay %s --replay-test-status '
'FAIL --job-results-dir %s --replay-data-dir %s '
'--sysinfo=off' % (self.jobid, self.tmpdir, self.jobdir))
......@@ -117,6 +155,9 @@ class ReplayTests(unittest.TestCase):
self.assertIn(msg, result.stdout)
def test_run_replay_remotefail(self):
"""
Runs a replay job using remote plugin (not supported).
"""
cmd_line = ('./scripts/avocado run --replay %s --remote-hostname '
'localhost --job-results-dir %s --replay-data-dir %s '
'--sysinfo=off' % (self.jobid, self.tmpdir, self.jobdir))
......@@ -126,6 +167,9 @@ class ReplayTests(unittest.TestCase):
self.assertIn(msg, result.stderr)
def test_run_replay_status_and_mux(self):
"""
Runs a replay job with custom a mux and using '--replay-test-status'
"""
cmd_line = ('./scripts/avocado run --replay %s --multiplex '
'examples/mux-environment.yaml --replay-test-status FAIL '
'--job-results-dir %s --replay-data-dir %s '
......@@ -137,6 +181,9 @@ class ReplayTests(unittest.TestCase):
self.assertIn(msg, result.stderr)
def test_run_replay_status_and_urls(self):
"""
Runs a replay job with custom urls and '--replay-test-status'.
"""
cmd_line = ('./scripts/avocado run sleeptest --replay %s '
'--replay-test-status FAIL --job-results-dir %s '
'--replay-data-dir %s --sysinfo=off' %
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册