Unverified commit ac3fca8f
Authored on Jun 08, 2017 by Amador Pahim

    Merge branch 'ldoktor-jobdata-varianter2'

    Signed-off-by: Amador Pahim <apahim@redhat.com>

Parents: 26ca45c0, d31578d3

Showing 5 changed files with 168 additions and 21 deletions (+168, -21).
Changed files:
  avocado/core/jobdata.py                     +28   -5
  avocado/core/tree.py                        +31   -0
  avocado/core/varianter.py                   +104  -6
  avocado/plugins/replay.py                   +3    -8
  selftests/functional/test_replay_basic.py   +2    -2
avocado/core/jobdata.py

@@ -18,9 +18,12 @@ Record/retrieve job information
 import ast
 import glob
+import json
 import os
 import pickle
 
+from . import varianter
+from .output import LOG_UI, LOG_JOB
 from .settings import settings
 from ..utils.path import init_dir
 

@@ -30,7 +33,9 @@ JOB_DATA_FALLBACK_DIR = 'replay'
 CONFIG_FILENAME = 'config'
 TEST_REFERENCES_FILENAME = 'test_references'
 TEST_REFERENCES_FILENAME_LEGACY = 'urls'
-VARIANTS_FILENAME = 'multiplex'
+VARIANTS_FILENAME = 'variants'
+# TODO: Remove when 36lts is discontinued
+VARIANTS_FILENAME_LEGACY = 'multiplex'
 PWD_FILENAME = 'pwd'
 ARGS_FILENAME = 'args'
 CMDLINE_FILENAME = 'cmdline'

@@ -40,14 +45,18 @@ def record(args, logdir, mux, references=None, cmdline=None):
     """
     Records all required job information.
     """
+    def json_bad_mux_obj(item):
+        for log in [LOG_UI, LOG_JOB]:
+            log.warning("jobdata.variants: Unable to serialize '%s'", item)
+        return str(item)
     base_dir = init_dir(logdir, JOB_DATA_DIR)
     path_cfg = os.path.join(base_dir, CONFIG_FILENAME)
     path_references = os.path.join(base_dir, TEST_REFERENCES_FILENAME)
     path_references_legacy = os.path.join(base_dir,
                                           TEST_REFERENCES_FILENAME_LEGACY)
-    path_mux = os.path.join(base_dir, VARIANTS_FILENAME)
+    path_mux = os.path.join(base_dir, VARIANTS_FILENAME + ".json")
     path_pwd = os.path.join(base_dir, PWD_FILENAME)
-    path_args = os.path.join(base_dir, ARGS_FILENAME)
+    path_args = os.path.join(base_dir, ARGS_FILENAME + ".json")
     path_cmdline = os.path.join(base_dir, CMDLINE_FILENAME)
 
     if references:

@@ -63,7 +72,7 @@ def record(args, logdir, mux, references=None, cmdline=None):
         os.fsync(config_file)
 
     with open(path_mux, 'w') as mux_file:
-        pickle.dump(mux, mux_file, pickle.HIGHEST_PROTOCOL)
+        json.dump(mux.dump(), mux_file, default=json_bad_mux_obj)
         mux_file.flush()
         os.fsync(mux_file)

@@ -73,7 +82,7 @@ def record(args, logdir, mux, references=None, cmdline=None):
         os.fsync(pwd_file)
 
     with open(path_args, 'w') as args_file:
-        pickle.dump(args.__dict__, args_file, pickle.HIGHEST_PROTOCOL)
+        json.dump(args.__dict__, args_file, default=lambda x: None)
         args_file.flush()
         os.fsync(args_file)
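Before the retrieval hunks below, a note on the `default=` handler introduced in record(): json.dump calls it for every object it cannot serialize natively, so the job data is degraded to strings instead of failing the whole dump. A minimal standalone sketch of that pattern follows; the payload and logger names here are hypothetical, not part of the commit.

    import json
    import logging

    logging.basicConfig(level=logging.WARNING)
    log = logging.getLogger("sketch")


    def json_bad_obj(item):
        # Called by json.dump()/dumps() for each object it cannot serialize;
        # log a warning and fall back to the object's string representation.
        log.warning("Unable to serialize '%s'", item)
        return str(item)


    # Hypothetical args-like payload: the set value is not JSON-serializable.
    payload = {"references": ["passtest.py"], "tags": {"fast", "net"}}

    print(json.dumps(payload, default=json_bad_obj))
    # -> {"references": ["passtest.py"], "tags": "{'fast', 'net'}"}  (set order may vary)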
@@ -121,9 +130,17 @@ def retrieve_variants(resultsdir):
"""
Retrieves the job Mux object from the results directory.
"""
recorded_mux
=
_retrieve
(
resultsdir
,
VARIANTS_FILENAME
+
".json"
)
if
recorded_mux
:
# new json-based dump
with
open
(
recorded_mux
,
'r'
)
as
mux_file
:
return
varianter
.
Varianter
(
state
=
json
.
load
(
mux_file
))
recorded_mux
=
_retrieve
(
resultsdir
,
VARIANTS_FILENAME
)
if
recorded_mux
is
None
:
recorded_mux
=
_retrieve
(
resultsdir
,
VARIANTS_FILENAME_LEGACY
)
if
recorded_mux
is
None
:
return
None
# old pickle-based dump
# TODO: Remove when 36lts is discontinued
with
open
(
recorded_mux
,
'r'
)
as
mux_file
:
return
pickle
.
load
(
mux_file
)
...
...
@@ -132,9 +149,15 @@ def retrieve_args(resultsdir):
"""
Retrieves the job args from the results directory.
"""
recorded_args
=
_retrieve
(
resultsdir
,
ARGS_FILENAME
+
".json"
)
if
recorded_args
:
with
open
(
recorded_args
,
'r'
)
as
args_file
:
return
json
.
load
(
args_file
)
recorded_args
=
_retrieve
(
resultsdir
,
ARGS_FILENAME
)
if
recorded_args
is
None
:
return
None
# old pickle-based dump
# TODO: Remove when 36lts is discontinued
with
open
(
recorded_args
,
'r'
)
as
args_file
:
return
pickle
.
load
(
args_file
)
...
...
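The retrieval side now prefers the new JSON dump and only falls back to the pickle-based files. A condensed, hedged sketch of that lookup order follows; the _find helper and the hard-coded "jobdata" layout are simplified stand-ins for jobdata._retrieve(), not the real module code.

    import json
    import os
    import pickle


    def retrieve_variants_sketch(resultsdir):
        # Simplified stand-in for jobdata._retrieve(): return the path if it exists.
        def _find(name):
            path = os.path.join(resultsdir, "jobdata", name)
            return path if os.path.exists(path) else None

        new_dump = _find("variants.json")                    # new json-based dump
        if new_dump:
            with open(new_dump, "r") as mux_file:
                return json.load(mux_file)                   # real code wraps this in Varianter(state=...)
        old_dump = _find("variants") or _find("multiplex")   # old pickle-based dumps (36lts name last)
        if old_dump is None:
            return None
        with open(old_dump, "rb") as mux_file:
            return pickle.load(mux_file)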
avocado/core/tree.py

@@ -92,6 +92,37 @@ class TreeEnvironment(dict):
                                        str(self.filter_out)))
 
 
+class TreeNodeEnvOnly(object):
+
+    """
+    Minimal TreeNode-like class providing interface for AvocadoParams
+    """
+
+    def __init__(self, path, environment=None):
+        """
+        :param path: Path of this node (must not end with '/')
+        :param environment: List of pair/key/value items
+        """
+        self.name = path.rsplit("/")[-1]
+        self.path = path
+        self.environment = TreeEnvironment()
+        if environment:
+            self.__load_environment(environment)
+
+    def __load_environment(self, environment):
+        nodes = {}
+        for path, key, value in environment:
+            self.environment[key] = value
+            if path not in nodes:
+                nodes[path] = TreeNodeEnvOnly(path)
+            self.environment.origin[key] = nodes[path]
+
+    def get_environment(self):
+        return self.environment
+
+    def get_path(self):
+        return self.path
+
+
 class TreeNode(object):
 
     """
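A small usage sketch of the new TreeNodeEnvOnly class, assuming avocado.core.tree from this revision is importable. The environment list follows the (path, key, value) format documented in the docstring above; the paths and values themselves are made up for illustration.

    from avocado.core import tree

    # Environment expressed as (origin-path, key, value) triples, as documented above.
    env = [("/run/foo", "bar", "baz"),
           ("/run/foo/aaa", "bbb", "ccc")]

    node = tree.TreeNodeEnvOnly("/run/foo/aaa", env)

    print(node.name)                            # "aaa" (last path component)
    print(node.get_path())                      # "/run/foo/aaa"
    print(node.get_environment()["bar"])        # "baz"
    print(node.environment.origin["bar"].path)  # "/run/foo" (node that defined the value)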
avocado/core/varianter.py

@@ -26,7 +26,6 @@ from . import dispatcher
 from .output import LOG_JOB
 
-# TODO: Create multiplexer plugin and split these functions into multiple files
 
 
 class NoMatchError(KeyError):
     pass

@@ -310,24 +309,57 @@ class AvocadoParam(object):
             yield (leaf.environment.origin[key].path, key, value)
 
 
+class FakeVariantDispatcher(object):
+
+    """
+    This object can act instead of VarianterDispatcher to report loaded
+    variants.
+    """
+
+    def __init__(self, state):
+        for variant in state:
+            variant["variant"] = [tree.TreeNodeEnvOnly(path, env)
+                                  for path, env in variant["variant"]]
+        self.variants = state
+
+    def map_method(self, method, *args, **kwargs):
+        """
+        Reports list containing one result of map_method on self
+        """
+        if hasattr(self, method):
+            return [getattr(self, method)(*args, **kwargs)]
+        else:
+            return []
+
+    def __iter__(self):
+        return iter(self.variants)
+
+    def __len__(self):
+        return sum(1 for _ in self)
+
+
 class Varianter(object):
 
     """
     This object takes care of producing test variants
     """
 
-    def __init__(self, debug=False):
+    def __init__(self, debug=False, state=None):
         """
         :param debug: Store whether this instance should debug the mux
+        :param state: Force-varianter state
         :note: people need to check whether mux uses debug and reflect that
               in order to provide the right results.
         """
         self.default_params = {}
         self._default_params = None
-        self.debug = debug
-        self.node_class = tree.TreeNode if not debug else tree.TreeNodeDebug
-        self._variant_plugins = dispatcher.VarianterDispatcher()
-        self._no_variants = None
+        if state is None:
+            self.debug = debug
+            self.node_class = tree.TreeNodeDebug if debug else tree.TreeNode
+            self._variant_plugins = dispatcher.VarianterDispatcher()
+            self._no_variants = None
+        else:
+            self.load(state)
 
     def parse(self, args):
         """
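The dump()/load() hunk below completes the picture. As a quick, hedged illustration of the dispatcher swap just added: FakeVariantDispatcher is fed a state list (matching the dump() format described below) and proxies map_method calls onto itself. The variant id, paths, and values in this sketch are invented; only the class and its behavior come from the diff above.

    from avocado.core.varianter import FakeVariantDispatcher

    # One loaded variant, using the (path, [(path, key, value), ...]) node format.
    state = [{"mux_path": ["/run/*"],
              "variant_id": "aaa-26c0",
              "variant": [("/run/aaa", [("/run", "bar", "baz"),
                                        ("/run/aaa", "bbb", "ccc")])]}]

    fake = FakeVariantDispatcher(state)

    print(len(fake))                   # 1 -- one recorded variant reported
    print(fake.map_method("__len__"))  # [1] -- method proxied onto the dispatcher itself
    print(fake.map_method("no_such"))  # [] -- unknown methods report nothing
    for variant in fake:
        # variant["variant"] entries were turned into TreeNodeEnvOnly objects.
        print(variant["variant_id"], [node.path for node in variant["variant"]])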
@@ -410,6 +442,72 @@ class Varianter(object):
         else:
             return len(test_suite)
 
+    def dump(self):
+        """
+        Dump the variants in loadable-state
+
+        This is lossy representation which takes all yielded variants and
+        replaces the list of nodes with TreeNodeEnvOnly representations::
+
+            [{'mux_path': mux_path,
+              'variant_id': variant_id,
+              'variant': dump_tree_nodes(original_variant)},
+             {'mux_path': [str, str, ...],
+              'variant_id': str,
+              'variant': [(str, [(str, str, object), ...])]},
+             {'mux_path': ['/run/*'],
+              'variant_id': 'aaa-26c0',
+              'variant': [('/foo/aaa',
+                           [('/foo', 'bar', 'baz'),
+                            ('/foo/aaa', 'bbb', 'ccc')])]},
+             ...]
+
+        where `dump_tree_nodes` looks like::
+
+            [(node.path, environment_representation),
+             (node.path, [(path1, key1, value1), (path2, key2, value2), ...]),
+             ('/foo/aaa', [('/foo', 'bar', 'baz')])]
+
+        :return: loadable Varianter representation
+        """
+
+        def dump_tree_node(node):
+            """
+            Turns TreeNode-like object into tuple(path, env_representation)
+            """
+            return (str(node.path),
+                    [(str(node.environment.origin[key].path), str(key), value)
+                     for key, value in node.environment.iteritems()])
+
+        if not self.is_parsed():
+            raise NotImplementedError("Dumping Varianter state before "
+                                      "multiplexation is not supported.")
+        variants = []
+        for variant in self.itertests():
+            safe_variant = {}
+            safe_variant["mux_path"] = [str(pth)
+                                        for pth in variant.get("mux_path")]
+            safe_variant["variant_id"] = str(variant.get("variant_id"))
+            safe_variant["variant"] = [dump_tree_node(_)
+                                       for _ in variant.get("variant", [])]
+            variants.append(safe_variant)
+        return variants
+
+    def load(self, state):
+        """
+        Load the variants state
+
+        Current implementation supports loading from a list of loadable
+        variants. It replaces the VariantDispatcher with fake implementation
+        which reports the loaded (and initialized) variants.
+
+        :param state: loadable Varianter representation
+        """
+        self.debug = False
+        self.node_class = tree.TreeNode
+        self._variant_plugins = FakeVariantDispatcher(state)
+        self._no_variants = sum(self._variant_plugins.map_method("__len__"))
+
     def itertests(self):
         """
         Yields all variants of all plugins
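Putting the pieces together, a sketch of the round trip this commit enables: at record time the parsed Varianter is dumped to variants.json, and on replay an equivalent Varianter is rebuilt from that state, which is exactly what the new retrieve_variants() does. The record side below is only commented pseudocode (the real code uses json_bad_mux_obj rather than str, and obtains the Varianter from the running job); only dump(), load(), and the state= keyword come from this commit.

    import json

    from avocado.core.varianter import Varianter

    # Record side (sketch only, assumes an already-parsed varianter):
    #   state = varianter.dump()
    #   with open("jobdata/variants.json", "w") as out:
    #       json.dump(state, out, default=str)

    # Replay side: rebuild a Varianter that reports the recorded variants
    # through the FakeVariantDispatcher shown earlier.
    with open("jobdata/variants.json", "r") as dumped:
        replayed = Varianter(state=json.load(dumped))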
avocado/plugins/replay.py

@@ -254,14 +254,9 @@ class Replay(CLI):
                 LOG_UI.error('Source job variants data not found. Aborting.')
                 sys.exit(exit_codes.AVOCADO_FAIL)
             else:
-                # Ignore data manipulation. This is necessary, because
-                # we replaced the unparsed object with parsed one. There
-                # are other plugins running before/after this which might
-                # want to alter the variants object.
-                if args.avocado_variants.is_parsed():
-                    LOG_UI.warning("Using src job Mux data only, use "
-                                   "`--replay-ignore variants` to override "
-                                   "them.")
+                LOG_UI.warning("Using src job Mux data only, use "
+                               "`--replay-ignore variants` to override "
+                               "them.")
                 setattr(args, "avocado_variants", variants)
 
         # Extend "replay_test_status" of "INTERRUPTED" when --replay-resume
selftests/functional/test_replay_basic.py

@@ -60,8 +60,8 @@ class ReplayTests(unittest.TestCase):
         """
         Checks if all expected files are there.
         """
-        file_list = ['multiplex', 'config', 'test_references', 'pwd', 'args',
-                     'cmdline']
+        file_list = ['variants.json', 'config', 'test_references', 'pwd',
+                     'args.json', 'cmdline']
         for filename in file_list:
             path = os.path.join(self.jobdir, 'jobdata', filename)
             self.assertTrue(glob.glob(path))