提交 3a86c366 编写于 作者: O openeuler-ci-bot 提交者: Gitee

!1 initialize

Merge pull request !1 from tracedeng/master
#!/usr/bin/env bash
# Gate check: fail (exit 1) when any Go file in the repo is not
# formatted according to goimports.
goimports="goimports"
# List every *.go file, pruning build-output and vendored/third-party trees.
find_files() {
find . -not \( \
\( \
-wholename './output' \
-o -wholename './_output' \
-o -wholename './_gopath' \
-o -wholename './release' \
-o -wholename './target' \
-o -wholename '*/third_party/*' \
-o -wholename '*/vendor/*' \
\) -prune \
\) -name '*.go'
}
# goimports -d prints a diff for each badly formatted file; any output means failure.
diff=$(find_files | xargs ${goimports} -d 2>&1)
if [[ -n "${diff}" ]]; then
echo "${diff}"
exit 1
fi
#!/usr/bin/env bash
# Jenkins build step: create a per-build GOPATH, install the Go tooling
# the gate needs, then run the format-check script above.
export GOPATH=$WORKSPACE/$BUILD_ID
go get golang.org/x/crypto/ssh
go install golang.org/x/crypto/ssh
go get golang.org/x/tools/cmd/goimports
go install golang.org/x/tools/cmd/goimports
# make the freshly installed tools (goimports) visible on PATH
export PATH=$PATH:$WORKSPACE/$BUILD_ID/bin
#go vet ./...
/bin/bash $WORKSPACE/$BUILD_ID/openeuler-jenkins/golang/scripts/format
# 基于K8s集群的打包方案
## 单包构建任务
### 设计逻辑
- 部署x86-64和aarch64架构下的k8s集群
- 将集群配置为**Jenkins slave**
- **Jenkins master** 运行在x86-64架构k8s集群内
### 流水线任务
> 相同任务只运行一个实例
#### trigger
- 码云触发
- 并行跑门禁任务,cpu架构不限,失败则中止任务并对pr评论
- 成功传递参数给下游 **job**
- 项目名(**repo**)
- 分支(**branch**)
- pull request id(**prid**)
- 发起者(**committer**)
#### multiarch
- 支持x86_64和aarch64架构
- trigger成功后触发
- 执行[**python osc_build_k8s.py $repo $arch $WORKSPACE**](https://gitee.com/src-openeuler/ci_check/blob/k8s/private_build/build/osc_build_k8s.py)进行构建
#### comment
- 收集门禁、build结果
- 调用接口[**提交Pull Request评论**](https://gitee.com/wuyu15255872976/gitee-python-client/blob/master/docs/PullRequestsApi.md#post_v5_repos_owner_repo_pulls)反馈结果给码云
- cpu架构不限
## 制作jenkins/obs镜像
### 机制
- k8s集群中部署docker service 服务,对外提供的内部服务地址为tcp://docker.jenkins:2376
- jenkins安装docker插件,并配置连接到k8s集群docker service服务
- jenkins中配置制作镜像流水线任务obs-image
- 触发方式:代码仓库ci_check打tag后手动触发,jenkins需安装build with parameters插件支持
### 流水线任务obs-image
> 运行该任务的K8s agent需带docker client
#### 任务:_trigger
- 检查Dockerfile文件【optional】
- 设置参数 【环境变量?】
- name 【jenkins/obs】
- version 【取自tag】
#### 任务:build-image-aarch64 & build-image-x86-64
- 构建过程选择 **Build/Publish Docker Image**
- 配置推送镜像的 **Registry Credentials**
#### 任务:manifest
多arch支持
> docker manifest push时Registry Credentials?
# 门禁检查
## 如何加入检查项
1. 在ci_check/src/ac目录下新建文件夹
2. 在ac_conf.yaml中增加配置项,可选
### 配置文件说明
```yaml
示例=>
spec: # ac项目名称
hint: check_spec # gitee中显示名,缺省使用check_+项目名称
module: spec.check_spec # ac项目模块名称,缺省使用"项目名称+check_+项目名称"
entry: Entry # ac项目入口,入口属性具备callable,缺省使用"run"
exclude: true # 忽略该项检查
ignored: [] # ac项目内忽略的检查项,就算失败也不影响最终ac项目结果
```
### entry实现模板
```python
class Entry(object):
def __call__(self, *args, **kwargs):
# do the work
...
```
### 检查结果
| 返回码 | 描述 | emoji |
| --- | --- | --- |
| 0 | SUCCESS | :white_check_mark:|
| 1 | WARNING | :bug: |
| 2 | FAILED | :x:|
# -*- encoding=utf-8 -*-
import os
import shutil
import logging
from src.proxy.git_proxy import GitProxy
from src.ac.framework.ac_base import BaseCheck
from src.ac.framework.ac_result import FAILED, WARNING, SUCCESS
from src.ac.common.gitee_repo import GiteeRepo
from src.ac.common.linter import LinterCheck
from src.ac.common.rpm_spec_adapter import RPMSpecAdapter
logger = logging.getLogger("ac")
class CheckCodeStyle(BaseCheck):
    """
    Check the code style of files touched by the latest commit, both files
    stored directly in the repo and files carried inside its patches.
    """

    def __init__(self, workspace, repo, conf):
        super(CheckCodeStyle, self).__init__(workspace, repo, conf)

        # directory where source archives are decompressed
        self._work_tar_dir = os.path.join(workspace, "code")
        self._gr = GiteeRepo(self._work_dir, self._work_tar_dir)

    def check_compressed_file(self):
        """
        Decompress every source archive of the repo.
        :return: SUCCESS when all archives decompress, FAILED otherwise
        """
        return SUCCESS if 0 == self._gr.decompress_all() else FAILED

    def check_patch(self):
        """
        Apply all patches listed in the spec file.
        :return: SUCCESS when all apply, WARNING when only some apply, FAILED otherwise
        """
        patches = []
        if self._gr.spec_file:
            spec = RPMSpecAdapter(os.path.join(self._work_dir, self._gr.spec_file))
            patches = spec.patches

        rs = self._gr.apply_all_patches(*patches)
        if 0 == rs:
            return SUCCESS
        return WARNING if 1 == rs else FAILED

    def check_code_style(self):
        """
        Lint the code files changed by the last commit.
        :return: combined result of in-repo and in-patch checks
        """
        gp = GitProxy(self._work_dir)
        diff_files = gp.diff_files_between_commits("HEAD~1", "HEAD~0")
        logger.debug("diff files: {}".format(diff_files))

        diff_code_files = []            # code files changed directly in the repo
        diff_patch_code_files = []      # code files carried inside changed patches
        for diff_file in diff_files:
            if GiteeRepo.is_code_file(diff_file):
                diff_code_files.append(diff_file)
            elif GiteeRepo.is_patch_file(diff_file):
                # directory the patch applied at during check_patch; None if it never applied
                patch_dir = self._gr.patch_dir_mapping.get(diff_file)
                logger.debug("diff patch {} apply at dir {}".format(diff_file, patch_dir))
                if patch_dir is not None:
                    files_in_patch = gp.extract_files_path_of_patch(diff_file)
                    diff_patch_code_files.extend(
                        [os.path.join(patch_dir, file_in_patch)
                         for file_in_patch in files_in_patch
                         if GiteeRepo.is_code_file(file_in_patch)])

        logger.debug("diff code files: {}".format(diff_code_files))
        logger.debug("diff patch code files: {}".format(diff_patch_code_files))

        rs_1 = self.check_file_under_work_dir(diff_code_files)
        logger.debug("check_file_under_work_dir: {}".format(rs_1))
        rs_2 = self.check_files_inner_patch(diff_patch_code_files)
        logger.debug("check_files_inner_patch: {}".format(rs_2))

        return rs_1 + rs_2

    def check_file_under_work_dir(self, diff_code_files):
        """
        Check code files stored directly in the repo.
        :param diff_code_files: relative paths of changed code files
        :return: combined check result
        """
        rs = [self.__class__.check_code_file(filename) for filename in set(diff_code_files)]
        return sum(rs, SUCCESS) if rs else SUCCESS

    def check_files_inner_patch(self, diff_patch_code_files):
        """
        Check code files carried inside the repo's patches.
        :param diff_patch_code_files: paths (relative to the decompress dir) of patched code files
        :return: combined check result
        """
        rs = [self.__class__.check_code_file(os.path.join(self._work_tar_dir, filename))
              for filename in set(diff_patch_code_files)]
        return sum(rs, SUCCESS) if rs else SUCCESS

    @classmethod
    def check_code_file(cls, file_path):
        """
        Dispatch one file to the linter matching its language.
        :param file_path: path of the file to lint
        :return: SUCCESS/WARNING/FAILED
        """
        if GiteeRepo.is_py_file(file_path):
            rs = LinterCheck.check_python(file_path)
        elif GiteeRepo.is_go_file(file_path):
            rs = LinterCheck.check_golang(file_path)
        elif GiteeRepo.is_c_cplusplus_file(file_path):
            rs = LinterCheck.check_c_cplusplus(file_path)
        else:
            logger.error("error when arrive here, unsupport file {}".format(file_path))
            return SUCCESS

        logger.info("Linter: {:<40} {}".format(file_path, rs))
        # fatal findings fail the check; warnings/errors only warn
        if rs.get("F", 0) > 0:
            return FAILED
        if rs.get("W", 0) > 0 or rs.get("E", 0) > 0:
            return WARNING

        return SUCCESS

    def __call__(self, *args, **kwargs):
        """
        Entry point: run the checks in order, always cleaning up the
        decompress directory afterwards.
        """
        logger.info("check {} repo ...".format(self._repo))
        # explicit branch instead of the former "not exists and mkdir" side-effect expression
        if not os.path.exists(self._work_tar_dir):
            os.mkdir(self._work_tar_dir)
        try:
            return self.start_check_with_order("compressed_file", "patch", "code_style")
        finally:
            shutil.rmtree(self._work_tar_dir)
# -*- encoding=utf-8 -*-
import logging
import time
import yaml
from src.proxy.git_proxy import GitProxy
from src.proxy.requests_proxy import do_requests
from src.ac.framework.ac_result import FAILED, SUCCESS
from src.ac.framework.ac_base import BaseCheck
from src.ac.common.rpm_spec_adapter import RPMSpecAdapter
from src.ac.common.gitee_repo import GiteeRepo
logger = logging.getLogger("ac")
class CheckSpec(BaseCheck):
    """
    Gate checks against the repo's RPM spec file (version monotonicity,
    homepage reachability, patch consistency).
    """

    def __init__(self, workspace, repo, conf=None):
        super(CheckSpec, self).__init__(workspace, repo, conf)

        self._gp = GitProxy(self._work_dir)

        self._gr = GiteeRepo(self._work_dir, None)  # don't care about decompress
        fp = self._gp.get_content_of_file_with_commit(self._gr.spec_file)
        self._spec = RPMSpecAdapter(fp)

        self._latest_commit = self._gp.commit_id_of_reverse_head_index(0)

    def check_version(self):
        """
        Check that the current version is newer than the previous commit's.
        :return: SUCCESS/FAILED
        """
        # read the spec as it was one commit ago
        self._gp.checkout_to_commit("HEAD~1")
        try:
            gr = GiteeRepo(self._work_dir, None)  # don't care about decompress
            fp = self._gp.get_content_of_file_with_commit(gr.spec_file)
            if fp is None:
                # last commit has no spec file
                return SUCCESS
            spec_o = RPMSpecAdapter(fp)
        finally:
            self._gp.checkout_to_commit(self._latest_commit)  # recover whatever

        self._ex_pkgship(spec_o)

        if self._spec > spec_o:
            return SUCCESS
        elif self._spec < spec_o:
            if self._gp.is_revert_commit(depth=5):  # revert, version back, ignore
                logger.debug("revert commit")
                return SUCCESS

        # version went backwards without a revert, or did not change
        logger.error("current version: {}-r{}, last version: {}-r{}".format(
            self._spec.version, self._spec.release, spec_o.version, spec_o.release))
        return FAILED

    def check_homepage(self, timeout=30, retrying=3, interval=1):
        """
        Check that the homepage URL of the spec is reachable.
        :param timeout: per-request timeout in seconds
        :param retrying: number of attempts
        :param interval: seconds between attempts
        :return: SUCCESS/FAILED
        """
        homepage = self._spec.url
        logger.debug("homepage: {}".format(homepage))
        if not homepage:
            return SUCCESS

        # fixed: xrange does not exist on Python 3
        for _ in range(retrying):
            if 0 == do_requests("get", homepage, timeout=timeout):
                return SUCCESS
            time.sleep(interval)

        return FAILED

    def check_patches(self):
        """
        Check that every patch referenced by the spec exists in the repo.
        Redundant patch files (present but unreferenced) only warn.
        :return: SUCCESS/FAILED
        """
        patches_spec = set(self._spec.patches)
        patches_file = set(self._gr.patch_files_not_recursive())
        logger.debug("spec patches: {}".format(patches_spec))
        logger.debug("file patches: {}".format(patches_file))

        result = SUCCESS
        for patch in patches_spec - patches_file:
            logger.error("patch {} lost".format(patch))
            result = FAILED
        for patch in patches_file - patches_spec:
            logger.warning("patch {} redundant".format(patch))

        return result

    def _ex_exclusive_arch(self):
        """
        Save the spec's exclusive-arch info to the "exclusive_arch" file
        so downstream jobs know which architecture to build.
        :return:
        """
        aarch64 = self._spec.include_aarch64_arch()
        x86_64 = self._spec.include_x86_arch()

        content = None
        if aarch64 and not x86_64:      # only build aarch64
            content = "aarch64"
        elif not aarch64 and x86_64:    # only build x86_64
            content = "x86-64"

        if content is not None:
            logger.info("exclusive arch \"{}\"".format(content))
            try:
                with open("exclusive_arch", "w") as f:
                    f.write(content)
            except IOError:
                logger.exception("save exclusive arch exception")

    def _ex_pkgship(self, spec):
        """
        Special handling for the "pkgship" repo: dump a version-comparison
        notification to the "pkgship_notify" file.
        :param spec: RPMSpecAdapter of the previous version's spec
        :return:
        """
        if not self._repo == "pkgship":
            return

        logger.debug("special repo \"pkgship\"")
        compare_version = RPMSpecAdapter.compare_version(self._spec.version, spec.version)
        compare_release = RPMSpecAdapter.compare_version(self._spec.release, spec.release)
        compare = self._spec.compare(spec)

        rs = {"repo": "pkgship", "curr_version": self._spec.version, "curr_release": self._spec.release,
              "last_version": spec.version, "last_release": spec.release,
              "compare_version": compare_version, "compare_release": compare_release, "compare": compare}
        logger.info("{}".format(rs))
        try:
            with open("pkgship_notify", "w") as f:
                yaml.safe_dump(rs, f)
        except IOError:
            logger.exception("save pkgship exception")

    def __call__(self, *args, **kwargs):
        """
        Entry point: save exclusive-arch info, then run all spec checks.
        """
        logger.info("check {} spec ...".format(self._repo))
        self._ex_exclusive_arch()
        return self.start_check()
# -*- encoding=utf-8 -*-
import os
import logging
from src.proxy.git_proxy import GitProxy
from src.utils.shell_cmd import shell_cmd_live
logger = logging.getLogger("ac")
class GiteeRepo(object):
    """
    A local checkout of a Gitee package repo: locates spec/patch/archive
    files, decompresses archives and applies patches.
    """

    def __init__(self, work_dir, decompress_dir):
        self._work_dir = work_dir               # repo checkout directory
        self._decompress_dir = decompress_dir   # where archives get decompressed

        self._patch_files = []
        self._compress_files = []
        self.spec_file = None
        # patch file -> directory (under decompress_dir) it applied at
        self.patch_dir_mapping = {}

        self.find_file_path()

    def find_file_path(self):
        """
        Walk the repo and record compress files, patch files and the spec file.
        """
        for dirpath, dirnames, filenames in os.walk(self._work_dir):
            for filename in filenames:
                rel_file_path = os.path.join(dirpath, filename).replace(self._work_dir, "").lstrip("/")
                if self.is_compress_file(filename):
                    logger.debug("find compress file: {}".format(rel_file_path))
                    self._compress_files.append(rel_file_path)
                elif self.is_patch_file(filename):
                    logger.debug("find patch file: {}".format(rel_file_path))
                    self._patch_files.append(rel_file_path)
                elif self.is_spec_file(filename):
                    logger.debug("find spec file: {}".format(rel_file_path))
                    # NOTE(review): stores the bare filename, unlike the branches
                    # above which store the relative path — presumably spec files
                    # only live at the repo root; confirm against callers
                    self.spec_file = filename

    def patch_files_not_recursive(self):
        """
        Patch files directly under the work dir (no recursion).
        :return: list of file names
        """
        return [filename for filename in os.listdir(self._work_dir)
                if os.path.isfile(os.path.join(self._work_dir, filename)) and self.is_patch_file(filename)]

    def decompress_file(self, file_path):
        """
        Decompress one archive into the decompress dir.
        :param file_path: archive path relative to the work dir
        :return: True on success
        """
        if self._is_compress_zip_file(file_path):
            decompress_cmd = "cd {}; unzip -d {} {}".format(self._work_dir, self._decompress_dir, file_path)
        elif self._is_compress_tar_file(file_path):
            decompress_cmd = "cd {}; tar -C {} -xavf {}".format(self._work_dir, self._decompress_dir, file_path)
        else:
            logger.warning("unsupport compress file: {}".format(file_path))
            return False

        ret, _, _ = shell_cmd_live(decompress_cmd)
        if ret:
            logger.debug("decompress failed")
            return False

        return True

    def decompress_all(self):
        """
        Decompress every archive found in the repo.
        :return: 0 when all succeed, 1 when some succeed, -1 when all fail
        """
        if not self._compress_files:
            logger.warning("no compressed source file")

        rs = [self.decompress_file(filepath) for filepath in self._compress_files]
        return 0 if all(rs) else (1 if any(rs) else -1)

    def apply_patch(self, patch, max_leading=5):
        """
        Try to apply one patch, testing every decompressed directory and
        every leading-path strip depth.
        :param patch: patch file path relative to the work dir
        :param max_leading: maximum -p leading path depth to try
        :return: True when the patch applied somewhere
        """
        logger.debug("apply patch {}".format(patch))
        candidate_dirs = [filename for filename in os.listdir(self._decompress_dir)
                          if os.path.isdir(os.path.join(self._decompress_dir, filename))] + ["."]
        for patch_dir in candidate_dirs:
            if patch_dir.startswith(".git"):
                continue
            # fixed: xrange does not exist on Python 3
            for leading in range(max_leading + 1):
                logger.debug("try dir {} -p{}".format(patch_dir, leading))
                if GitProxy.apply_patch_at_dir(os.path.join(self._decompress_dir, patch_dir),
                                               os.path.join(self._work_dir, patch), leading):
                    # fixed: the old message format-called without a placeholder
                    logger.debug("patch success at dir {} -p{}".format(patch_dir, leading))
                    self.patch_dir_mapping[patch] = patch_dir
                    return True

        logger.info("apply patch {} failed".format(patch))
        return False

    def apply_all_patches(self, *patches):
        """
        Apply the given patches in order (order usually matters).
        :param patches: patches to apply
        :return: 0 when all succeed, 1 when some succeed, -1 when all fail
        """
        if not self._compress_files:
            logger.debug("no compress source file, not need apply patch")
            return 0

        rs = []
        for patch in patches:
            if patch in set(self._patch_files):
                rs.append(self.apply_patch(patch))
            else:
                logger.error("patch {} not exist".format(patch))
                rs.append(False)

        return 0 if all(rs) else (1 if any(rs) else -1)

    @staticmethod
    def is_py_file(filename):
        return filename.endswith((".py",))

    @staticmethod
    def is_go_file(filename):
        return filename.endswith((".go",))

    @staticmethod
    def is_c_cplusplus_file(filename):
        # fixed: ".hxx" was listed without its leading dot
        return filename.endswith((".c", ".cpp", ".cc", ".cxx", ".c++", ".h", ".hpp", ".hxx"))

    @staticmethod
    def is_code_file(filename):
        return GiteeRepo.is_py_file(filename) \
               or GiteeRepo.is_go_file(filename) \
               or GiteeRepo.is_c_cplusplus_file(filename)

    @staticmethod
    def is_patch_file(filename):
        return filename.endswith((".patch", ".diff"))

    @staticmethod
    def is_compress_file(filename):
        return GiteeRepo._is_compress_tar_file(filename) or GiteeRepo._is_compress_zip_file(filename)

    @staticmethod
    def _is_compress_zip_file(filename):
        return filename.endswith((".zip",))

    @staticmethod
    def _is_compress_tar_file(filename):
        # fixed: ".tgz" was listed without its leading dot
        return filename.endswith((".tar.gz", ".tar.bz", ".tar.bz2", ".tar.xz", ".tgz"))

    @staticmethod
    def is_spec_file(filename):
        return filename.endswith((".spec",))
# -*- encoding=utf-8 -*-
import re
import logging
from src.utils.shell_cmd import shell_cmd_live
logger = logging.getLogger("ac")
class LinterCheck(object):
    """
    Wrappers around the language linters (pylint/golint/splint) that reduce
    their output to a {severity-letter: count} summary.
    """

    # pylint message type prefixes: convention/refactor/warning/error/fatal
    PYLINTRESULTPREFIX = ["C", "R", "W", "E", "F"]

    @classmethod
    def get_summary_of_pylint(cls, message):
        """
        Count pylint findings per message type.
        :param message: list of pylint output lines
        :return: dict mapping every prefix in PYLINTRESULTPREFIX to its count
        """
        summary = {}
        for prefix in cls.PYLINTRESULTPREFIX:
            # fixed: raw string, "\." was an invalid escape in a plain string
            m = re.findall(r"{}: *[0-9]+, *[0-9]+:".format(prefix), "\n".join(message))
            summary[prefix] = len(m)

        return summary

    @classmethod
    def get_summary_of_golint(cls, message):
        """
        Count golint findings; every finding is treated as a WARNING.
        :param message: list of golint output lines
        :return: {"W": count}
        """
        m = re.findall(r"\.go:[0-9]+:[0-9]+:", "\n".join(message))
        return {"W": len(m)}

    @classmethod
    def get_summary_of_splint(cls, message):
        """
        Summarize splint output.
        NOTE: counting is currently disabled (commented out below), so the
        summary is always empty.
        :param message: list of splint output lines
        :return: empty dict
        """
        summary = {}
        # summary["W"] = summary["W"] + message.count("Use -preproc to inhibit warning")
        # summary["W"] = summary["W"] + message.count("Use -nestcomment to inhibit warning")
        return summary

    @classmethod
    def check_python(cls, filepath):
        """
        Check a python script with pylint.

        With the default text output the message format is:
            MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE
        with five message types:
            (C) convention, (R) refactor, (W) warning, (E) error, (F) fatal
        :param filepath: file to check
        :return: summary dict from get_summary_of_pylint
        """
        logger.debug("check python file: {}".format(filepath))
        pylint_cmd = "pylint3 {}".format(filepath)
        ret, out, _ = shell_cmd_live(pylint_cmd, cap_out=True, verbose=True)
        if ret:
            # pylint exits non-zero whenever it has findings; still parse them
            logger.debug("pylint ret, {}".format(ret))

        return cls.get_summary_of_pylint(out)

    @classmethod
    def check_golang(cls, filepath):
        """
        Check golang code with golint.
        :param filepath: file to check
        :return: summary dict; empty when golint itself failed
        """
        logger.debug("check go file: {}".format(filepath))
        golint_cmd = "golint {}".format(filepath)
        ret, out, _ = shell_cmd_live(golint_cmd, cap_out=True, verbose=True)
        if ret:
            logger.debug("golint error, {}".format(ret))
            return {}

        return cls.get_summary_of_golint(out)

    @classmethod
    def check_c_cplusplus(cls, filepath):
        """
        Check c/c++ code with splint.
        :param filepath: file to check
        :return: summary dict; empty when splint itself failed
        """
        logger.debug("check c/c++ file: {}".format(filepath))
        splint_cmd = "splint {}".format(filepath)
        ret, out, _ = shell_cmd_live(splint_cmd, cap_out=True, verbose=True)
        if ret:
            logger.debug("splint error, {}".format(ret))
            return {}

        return cls.get_summary_of_splint(out)
# -*- encoding=utf-8 -*-
"""Python module for parsing RPM spec files.
RPMs are build from a package's sources along with a spec file. The spec file controls how the RPM
is built. This module allows you to parse spec files and gives you simple access to various bits of
information that is contained in the spec file.
Current status: This module does not parse everything of a spec file. Only the pieces I needed. So
there is probably still plenty of stuff missing. However, it should not be terribly complicated to
add support for the missing pieces.
"""
import re
from abc import ABCMeta, abstractmethod
__all__ = ["Spec", "replace_macros", "Package"]
class _Tag(object):
__metaclass__ = ABCMeta
def __init__(self, name, pattern_obj, attr_type):
self.name = name
self.pattern_obj = pattern_obj
self.attr_type = attr_type
def test(self, line):
return re.search(self.pattern_obj, line)
def update(self, spec_obj, context, match_obj, line):
"""Update given spec object and parse context and return them again.
:param spec_obj: An instance of Spec class
:param context: The parse context
:param match_obj: The re.match object
:param line: The original line
:return: Given updated Spec instance and parse context dictionary.
"""
assert spec_obj
assert context
assert match_obj
assert line
return self.update_impl(spec_obj, context, match_obj, line)
@abstractmethod
def update_impl(self, spec_obj, context, match_obj, line):
pass
@staticmethod
def current_target(spec_obj, context):
target_obj = spec_obj
if context["current_subpackage"] is not None:
target_obj = context["current_subpackage"]
return target_obj
class _NameValue(_Tag):
    """Parse a simple name-to-value tag (Name, Version, Release, ...)."""

    def __init__(self, name, pattern_obj, attr_type=None):
        super(_NameValue, self).__init__(name, pattern_obj, attr_type if attr_type else str)

    def update_impl(self, spec_obj, context, match_obj, line):
        raw = match_obj.group(1)

        # The "name" tag introduces the base package: (re)start the package list.
        if self.name == "name":
            spec_obj.packages = [Package(raw)]

        target = _Tag.current_target(spec_obj, context)
        setattr(target, self.name, self.attr_type(raw))
        return spec_obj, context
class _MacroDef(_Tag):
    """Parse %define / %global macro definitions."""

    def __init__(self, name, pattern_obj):
        super(_MacroDef, self).__init__(name, pattern_obj, str)

    def update_impl(self, spec_obj, context, match_obj, line):
        macro_name, macro_value = match_obj.groups()
        # Macros always live on the spec itself, never on a subpackage.
        setattr(spec_obj, macro_name, str(macro_value))
        return spec_obj, context
class _List(_Tag):
    """Parse a tag that expands to a list (Requires, %package, ...)."""

    def __init__(self, name, pattern_obj):
        super(_List, self).__init__(name, pattern_obj, list)

    def update_impl(self, spec_obj, context, match_obj, line):
        target_obj = _Tag.current_target(spec_obj, context)

        if not hasattr(target_obj, self.name):
            setattr(target_obj, self.name, list())

        value = match_obj.group(1)
        if self.name == "packages":
            # A %package directive starts a new subpackage; with -n the
            # given name is used verbatim, otherwise it gets the base
            # package name as a prefix.
            if value == "-n":
                subpackage_name = line.rsplit(" ", 1)[-1].rstrip()
            else:
                subpackage_name = "{}-{}".format(spec_obj.name, value)
            package = Package(subpackage_name)
            context["current_subpackage"] = package
            package.is_subpackage = True
            spec_obj.packages.append(package)
        elif self.name in [
            "build_requires",
            "requires",
            "conflicts",
            "obsoletes",
            "provides",
        ]:
            # Macros are valid in requirements
            value = replace_macros(value, spec=spec_obj)

            # It's also legal to do:
            #   Requires: a b c
            #   Requires: b >= 3.1
            #   Requires: a, b >= 3.1, c
            # 1. Tokenize
            tokens = [val for val in re.split(r"[\t\n, ]", value) if val != ""]
            values = []

            # 2. Join operator tokens back to the name they qualify.
            # fixed: the operator list contained HTML-escaped entities
            # ("&gt;=", "&lt;", ...) instead of the literal characters
            add = False
            for val in tokens:
                if add:
                    add = False
                    val = values.pop() + " " + val
                elif val in [">=", "!=", ">", "<", "<=", "==", "="]:
                    add = True  # Add next value to this one
                    val = values.pop() + " " + val
                values.append(val)

            for val in values:
                requirement = Requirement(val)
                getattr(target_obj, self.name).append(requirement)
        else:
            getattr(target_obj, self.name).append(value)

        return spec_obj, context
class _ListAndDict(_Tag):
    """Parse a tag kept both as an ordered list and as a label->value dict
    (Sources and Patches)."""

    def __init__(self, name, pattern_obj):
        super(_ListAndDict, self).__init__(name, pattern_obj, list)

    def update_impl(self, spec_obj, context, match_obj, line):
        label, value = match_obj.groups()

        # record under the tag label (e.g. "Patch0") on the spec itself ...
        getattr(spec_obj, "{}_dict".format(self.name))[label] = value
        # ... and append to the plain list on the current target
        getattr(_Tag.current_target(spec_obj, context), self.name).append(value)

        return spec_obj, context
def re_tag_compile(tag):
    """Compile a tag pattern case-insensitively (spec tags such as "Name:"
    may appear in any capitalization)."""
    return re.compile(tag, re.IGNORECASE)
# Every tag the parser understands; the first tag whose regex matches a
# line wins (see _parse). Plain tags are case-insensitive, %-directives
# are matched case-sensitively.
_tags = [
_NameValue("name", re_tag_compile(r"^Name\s*:\s*(\S+)")),
_NameValue("version", re_tag_compile(r"^Version\s*:\s*(\S+)")),
_NameValue("epoch", re_tag_compile(r"^Epoch\s*:\s*(\S+)")),
_NameValue("release", re_tag_compile(r"^Release\s*:\s*(\S+)")),
_NameValue("summary", re_tag_compile(r"^Summary\s*:\s*(.+)")),
_NameValue("license", re_tag_compile(r"^License\s*:\s*(.+)")),
_NameValue("group", re_tag_compile(r"^Group\s*:\s*(\S+)")),
_NameValue("url", re_tag_compile(r"^URL\s*:\s*(\S+)")),
_NameValue("buildroot", re_tag_compile(r"^BuildRoot\s*:\s*(\S+)")),
# NOTE(review): the "buildarch" attribute is filled from the ExclusiveArch
# tag — looks intentional for the arch gating checks; confirm
_NameValue("buildarch", re_tag_compile(r"^ExclusiveArch\s*:\s*(\S+)")),
_ListAndDict("sources", re_tag_compile(r"^(Source\d*)\s*:\s*(\S+)")),
_ListAndDict("patches", re_tag_compile(r"^(Patch\d*)\s*:\s*(\S+)")),
_List("build_requires", re_tag_compile(r"^BuildRequires\s*:\s*(.+)")),
_List("requires", re_tag_compile(r"^Requires\s*:\s*(.+)")),
_List("conflicts", re_tag_compile(r"^Conflicts\s*:\s*(.+)")),
_List("obsoletes", re_tag_compile(r"^Obsoletes\s*:\s*(.+)")),
_List("provides", re_tag_compile(r"^Provides\s*:\s*(.+)")),
_List("packages", re.compile(r"^%package\s+(\S+)")),
_MacroDef("define", re.compile(r"^%define\s+(\S+)\s+(\S+)")),
_MacroDef("global", re.compile(r"^%global\s+(\S+)\s+(\S+)")),
]
# Matches a single %{...} macro reference (non-greedy body).
_macro_pattern = re.compile(r"%{(\S+?)\}")
def _parse(spec_obj, context, line):
    """Feed one spec-file line through the known tag parsers; the first
    tag that matches updates the spec. Unmatched lines are ignored."""
    for tag in _tags:
        match_obj = tag.test(line)
        if match_obj is None:
            continue
        return tag.update(spec_obj, context, match_obj, line)
    return spec_obj, context
class Requirement:
    """Represents a single requirement or build requirement in an RPM spec
    file, e.g. ``Requires: %{name}%{?_isa} >= %{version}-%{release}``.

    A requirement has a ``name`` and, when the line carries a comparison,
    an ``operator`` and a ``version``; both are None for a bare name.
    ``line`` keeps the original text.
    """

    # fixed: the operator character class was HTML-escaped ("[&lt;&gt;]"),
    # so versioned requirements never matched; use the literal < and >
    expr = re.compile(r"(.*?)\s+([<>]=?|=)\s+(\S+)")

    def __init__(self, name):
        assert isinstance(name, str)
        self.line = name
        match = Requirement.expr.match(name)
        if match:
            self.name = match.group(1)
            self.operator = match.group(2)
            self.version = match.group(3)
        else:
            self.name = name
            self.operator = None
            self.version = None

    def __repr__(self):
        return self.line
class Package:
    """One package produced by a spec file: either the base package or a
    subpackage declared by the %package directive.

    Subpackage names normally carry the base name as a prefix
    (foo -> foo-devel); with ``%package -n bar`` the given name is used
    verbatim. ``is_subpackage`` distinguishes the two kinds.
    """

    def __init__(self, name):
        assert isinstance(name, str)

        # Give every requirement-style list tag an empty list up front.
        requirement_tags = ("build_requires", "requires", "conflicts", "obsoletes", "provides")
        for tag in _tags:
            if tag.attr_type is list and tag.name in requirement_tags:
                setattr(self, tag.name, tag.attr_type())

        self.name = name
        self.is_subpackage = False

    def __repr__(self):
        return "Package('{}')".format(self.name)
class Spec:
    """Represents a single parsed spec file."""

    def __init__(self):
        # Pre-create one attribute per known tag: list tags start empty,
        # scalar tags start as None.
        for tag in _tags:
            initial = tag.attr_type() if tag.attr_type is list else None
            setattr(self, tag.name, initial)
        self.sources_dict = dict()
        self.patches_dict = dict()

    @property
    def packages_dict(self):
        """All packages of this spec keyed by package name, e.g.
        git_spec.packages_dict['git-doc']."""
        assert self.packages
        return {package.name: package for package in self.packages}

    @staticmethod
    def from_file(filename):
        """Create a new Spec object from the spec file at *filename*.

        :param filename: The path to the spec file.
        :return: A new Spec object.
        """
        spec = Spec()
        parse_context = {"current_subpackage": None}
        with open(filename, "r") as f:
            for line in f:
                spec, parse_context = _parse(spec, parse_context, line)
        return spec

    @staticmethod
    def from_string(string):
        """Create a new Spec object from spec file contents given as a string.

        :param string: The contents of a spec file.
        :return: A new Spec object.
        """
        spec = Spec()
        parse_context = {"current_subpackage": None}
        for line in string.splitlines():
            spec, parse_context = _parse(spec, parse_context, line)
        return spec
def replace_macros(string, spec=None):
    """Replace all %{...} macros in *string* with their values.

    For example '%{name}-%{version}.tar.gz' becomes 'foo-2.0.tar.gz'.
    Conditional macros (%{?x:y} / %{!x:y}) are honoured when a spec is
    given. Macros that cannot be resolved are left untouched.

    :param string: text containing macros to expand
    :param spec: optional Spec whose attributes supply the macro values
    :return: the string with every resolvable macro substituted
    """
    if spec:
        assert isinstance(spec, Spec)

    def _is_conditional(macro):
        return macro.startswith(("?", "!"))

    def _test_conditional(macro):
        if macro[0] == "?":
            return True
        if macro[0] == "!":
            return False
        raise Exception("Given string is not a conditional macro")

    def _macro_repl(match):
        macro_name = match.group(1)
        if _is_conditional(macro_name) and spec:
            # %{?attr:value} / %{!attr:value}: expand depending on whether
            # the spec defines the attribute.
            parts = macro_name[1:].split(":", 1)
            assert parts
            if _test_conditional(macro_name):       # %{?...}
                if hasattr(spec, parts[0]):
                    return parts[1] if len(parts) == 2 else getattr(spec, parts[0], None)
                return ""
            if not hasattr(spec, parts[0]):         # %{!...}
                return parts[1] if len(parts) == 2 else getattr(spec, parts[0], None)
            return ""

        if spec:
            value = getattr(spec, macro_name, None)
            if value:
                return str(value)

        # unresolvable macro: keep the original text
        return match.string[match.start():match.end()]

    # Expand repeatedly until a fixed point, so macros whose values contain
    # further macros are fully resolved.
    while True:
        expanded = re.sub(_macro_pattern, _macro_repl, string)
        if expanded == string:
            return expanded
        string = expanded
# -*- encoding=utf-8 -*-
import re
import logging
from src.ac.common.pyrpm import Spec, replace_macros
logger = logging.getLogger("ac")
class RPMSpecAdapter(object):
    """
    Thin wrapper over pyrpm.Spec: parses a spec from a path or file object
    and exposes macro-expanded attributes plus version comparison helpers.
    """

    def __init__(self, fp):
        """
        :param fp: path of a spec file, or an open file object (which is
                   read and then closed)
        """
        if isinstance(fp, str):
            # fixed: the file object no longer shadows the fp parameter
            with open(fp, "r") as f:
                self._adapter = Spec.from_string(f.read())
        else:
            self._adapter = Spec.from_string(fp.read())
            fp.close()

    def __getattr__(self, item):
        """
        Proxy attribute access to the parsed spec, expanding macros.
        :param item: attribute name
        :return: macro-expanded value; "" for falsy scalar values
        """
        value = getattr(self._adapter, item)
        if isinstance(value, list):
            # fixed: loop variable no longer shadows the item parameter
            return [replace_macros(elem, self._adapter) for elem in value]
        return replace_macros(value, self._adapter) if value else ""

    def include_x86_arch(self):
        """
        True when the spec's ExclusiveArch covers x86_64; a spec without
        the attribute is treated as buildable everywhere.
        """
        try:
            value = self.buildarch
            logger.debug("build arch: {}".format(value))
            if "x86_64" in value.lower():
                return True
            return False
        except AttributeError:
            return True

    def include_aarch64_arch(self):
        """
        True when the spec's ExclusiveArch covers aarch64; a spec without
        the attribute is treated as buildable everywhere.
        """
        try:
            value = self.buildarch
            logger.debug("build arch: {}".format(value))
            if "aarch64" in value.lower():
                return True
            return False
        except AttributeError:
            return True

    @staticmethod
    def compare_version(version_n, version_o):
        """
        Compare two version strings numerically, part by part.
        :param version_n: new version
        :param version_o: old version
        :return: 0 equal, 1 when version_n is greater, -1 when smaller
        """
        # replace runs of letters/underscores/dashes with a dot
        # (fixed: raw strings for the regex patterns)
        version_n = re.sub(r"[a-zA-Z_-]+", ".", version_n).strip().strip(".")
        version_o = re.sub(r"[a-zA-Z_-]+", ".", version_o).strip().strip(".")
        # collapse consecutive dots into one
        version_n = re.sub(r"\.+", ".", version_n)
        version_o = re.sub(r"\.+", ".", version_o)

        # pad the shorter version with ".0" parts ("..." * -n == "")
        version_n = "{}{}".format(version_n, '.0' * (len(version_o.split('.')) - len(version_n.split('.'))))
        version_o = "{}{}".format(version_o, '.0' * (len(version_n.split('.')) - len(version_o.split('.'))))
        logger.debug("compare versions: {} vs {}".format(version_n, version_o))

        z = zip(version_n.split("."), version_o.split("."))
        for p in z:
            try:
                if int(p[0]) < int(p[1]):
                    return -1
                elif int(p[0]) > int(p[1]):
                    return 1
            except ValueError as exc:
                # non-numeric part remnants: skip this component
                logger.debug("check version exception, {}".format(exc))
                continue

        return 0

    def compare(self, other):
        """
        Compare version, then release, of two specs.
        :param other: another RPMSpecAdapter
        :return: 0 equal, 1 when self is newer, -1 when older
        """
        if self.__class__.compare_version(self.version, other.version) == 1:
            return 1
        if self.__class__.compare_version(self.version, other.version) == -1:
            return -1

        if self.__class__.compare_version(self.release, other.release) == 1:
            return 1
        if self.__class__.compare_version(self.release, other.release) == -1:
            return -1

        return 0

    # NOTE(review): defining __eq__ without __hash__ makes instances
    # unhashable on Python 3 — confirm no caller puts adapters in sets/dicts
    def __lt__(self, other):
        return -1 == self.compare(other)

    def __eq__(self, other):
        return 0 == self.compare(other)

    def __gt__(self, other):
        return 1 == self.compare(other)
# -*- encoding=utf-8 -*-
import os
import sys
import yaml
import logging.config
import logging
import json
import argparse
from importlib import import_module
class AC(object):
    """Driver that discovers, loads and runs all AC (gate) check elements.

    Check elements are auto-discovered from the acl directory and then
    merged with / filtered by the yaml configuration file.
    """

    def __init__(self, conf):
        """
        :param conf: path of the yaml config file (e.g. ac.yaml)
        """
        self._ac_check_elements = {}   # gate check items: name -> conf dict
        self._ac_check_result = []     # accumulated gate check results
        acl_path = os.path.realpath(os.path.join(os.path.dirname(__file__), "../acl"))
        self._acl_package = "src.ac.acl"  # take attention about import module
        self.load_check_elements_from_acl_directory(acl_path)
        self.load_check_elements_from_conf(conf)
        logger.debug("check list: {}".format(self._ac_check_elements))

    def check_all(self, workspace, repo):
        """
        Run every configured gate check against the repo.
        :param workspace: directory containing the checked-out source
        :param repo: repository name
        :return:
        """
        for element in self._ac_check_elements:
            check_element = self._ac_check_elements[element]
            logger.debug("check {}".format(element))
            # import module
            module_path = check_element.get("module", "{}.check_{}".format(element, element))  # eg: spec.check_spec
            try:
                module = import_module("." + module_path, self._acl_package)
                logger.debug("load module {} succeed".format(module_path))
            except ImportError as exc:
                logger.exception("import module {} exception, {}".format(module_path, exc))
                continue
            # import entry
            entry_name = check_element.get("entry", "Check{}".format(element.capitalize()))
            try:
                entry = getattr(module, entry_name)
                logger.debug("load entry \"{}\" succeed".format(entry_name))
            except AttributeError as exc:
                logger.warning("entry \"{}\" not exist in module {}, {}".format(entry_name, module_path, exc))
                continue
            # new a instance
            if isinstance(entry, type):  # class object
                try:
                    entry = entry(workspace, repo, check_element)  # new a instance
                except Exception as exc:
                    logger.exception("new a instance of class {} exception, {}".format(entry_name, exc))
                    # NOTE(review): a failed instantiation aborts ALL remaining
                    # checks (bare return); kept as-is -- confirm this is intended.
                    return
            if not callable(entry):  # check callable
                logger.warning("entry {} not callable".format(entry_name))
                continue
            # do ac check
            try:
                result = entry()
                # FIX: original format string had one placeholder but two
                # arguments, so the result never appeared in the log line.
                logger.debug("check {} result {}".format(element, result))
            except Exception as exc:
                # FIX: same one-placeholder/two-arguments bug as above.
                logger.exception("check {} exception, {}".format(element, exc))
                continue
            # show in gitee, must starts with "check_"
            hint = check_element.get("hint", "check_{}".format(element))
            if not hint.startswith("check_"):
                hint = "check_{}".format(hint)
            self._ac_check_result.append({"name": hint, "result": result.val})
        logger.debug("ac result: {}".format(self._ac_check_result))

    def load_check_elements_from_acl_directory(self, acl_dir):
        """
        Auto-discover check elements: every sub-directory of the acl
        directory becomes a check element with default configuration.
        :return:
        """
        for filename in os.listdir(acl_dir):
            if os.path.isdir(os.path.join(acl_dir, filename)):
                self._ac_check_elements[filename] = {}  # don't worry, using default when checking

    def load_check_elements_from_conf(self, conf_file):
        """
        Merge check element configuration from a yaml file (yaml only).
        Elements marked "exclude" are dropped from the discovered set.
        :param conf_file: config file path
        :return:
        """
        try:
            with open(conf_file, "r") as f:
                elements = yaml.safe_load(f)
        except FileNotFoundError:
            logger.warning("ac conf file {} not exist".format(conf_file))
            return
        except Exception as exc:
            logger.warning("load conf file exception, {}".format(exc))
            return
        # FIX: an empty yaml file parses to None; guard before iterating.
        if not elements:
            return
        for name in elements:
            if name in self._ac_check_elements:
                if elements[name].get("exclude"):
                    logger.debug("exclude: {}".format(name))
                    self._ac_check_elements.pop(name)
                else:
                    self._ac_check_elements[name] = elements[name]

    def save(self, ac_file):
        """
        Save the accumulated results in KEY=VALUE form (consumed later
        through the "ACL" environment variable).
        :param ac_file: output file path
        :return:
        """
        logger.debug("save ac result to file {}".format(ac_file))
        with open(ac_file, "w") as f:
            f.write("ACL={}".format(json.dumps(self._ac_check_result)))
if "__main__" == __name__:
    # Command line entry: run all gate checks for one pull request and save
    # the results for the downstream comment job.
    args = argparse.ArgumentParser()
    args.add_argument("-w", type=str, dest="workspace", help="workspace where to find source")
    args.add_argument("-r", type=str, dest="repo", help="repo name")
    args.add_argument("-n", type=str, dest="owner", default="src-openeuler", help="gitee owner")
    args.add_argument("-o", type=str, dest="output", help="output file to save result")
    args.add_argument("-p", type=str, dest="pr", help="pull request number")
    args.add_argument("-t", type=str, dest="token", help="gitee api token")
    args = args.parse_args()
    # init logging
    not os.path.exists("log") and os.mkdir("log")
    logger_conf_path = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../../conf/logger.conf"))
    logging.config.fileConfig(logger_conf_path)
    logger = logging.getLogger("ac")
    logger.info("------------------AC START--------------")
    # notify gitee
    # TODO use replace all tags instead, state machine in future
    # project import kept down here -- presumably deferred until logging is
    # configured; confirm before moving it to the top of the file
    from src.proxy.gitee_proxy import GiteeProxy
    # swap the pr tags to mark the gate checks as running
    gp = GiteeProxy(args.owner, args.repo, args.token)
    gp.delete_tag_of_pr(args.pr, "ci_finished")
    gp.create_tags_of_pr(args.pr, "ci_processing")
    ac = AC(os.path.join(os.path.dirname(os.path.realpath(__file__)), "ac.yaml"))
    ac.check_all(workspace=args.workspace, repo=args.repo)
    ac.save(args.output)
# Gate (AC) check configuration: one top-level key per check element.
# Per-element keys (see the AC driver):
#   hint    - label shown in the gitee comment (forced to start with "check_")
#   module  - dotted module path under src.ac.acl implementing the check
#   entry   - class/callable inside the module implementing the check
#   ignored - sub-check names whose failure must not affect the overall result
#   exclude - when true, drop this auto-discovered check entirely
spec:
  hint: check_spec_file
  module: spec.check_spec
  entry: CheckSpec
  ignored: ["homepage"]
code:
  hint: check_code_style
  module: code.check_code_style
  entry: CheckCodeStyle
  #exclude: True
  ignored: []
# -*- encoding=utf-8 -*-
from abc import ABCMeta, abstractmethod
import inspect
import logging
import os
from src.ac.framework.ac_result import SUCCESS, WARNING, FAILED
logger = logging.getLogger("ac")
class BaseCheck(object):
    """Base class of all AC checks; subclasses implement check_* methods
    and are invoked through __call__."""
    __metaclass__ = ABCMeta  # python2-style abstract marker, kept as-is

    def __init__(self, workspace, repo, conf=None):
        """
        :param workspace: directory where the repo is checked out
        :param repo: repository name
        :param conf: per-check configuration dict (may contain "ignored")
        """
        self._repo = repo
        self._workspace = workspace
        self._conf = conf
        self._work_dir = os.path.join(workspace, repo)

    @abstractmethod
    def __call__(self, *args, **kwargs):
        raise NotImplementedError("subclasses must override __call__!")

    def start_check_with_order(self, *items):
        """
        Run the named check items in the given order.

        Each item maps to a "check_<item>" method. Internal errors inside a
        check are logged and skipped. Results of non-ignored items are folded
        together; ACResult.__add__ keeps the worst result.
        :return: combined ACResult
        """
        result = SUCCESS
        for name in items:
            try:
                logger.debug("check {}".format(name))
                method = getattr(self, "check_{}".format(name))
                rs = method()
                logger.debug("{} -> {}".format(name, rs))
            except Exception as e:
                # swallow coding errors in a single check so the gate keeps running
                logger.exception("internal error: {}".format(e))
                continue
            # idiom: bool(...) instead of "True if ... else False"
            ignored = bool(self._conf and name in self._conf.get("ignored", []))
            logger.debug("{} ignore: {}".format(name, ignored))
            if rs is SUCCESS:
                logger.info("check {:<30}pass".format(name))
            elif rs is WARNING:
                logger.warning("check {:<30}warning{}".format(name, " [ignored]" if ignored else ""))
            elif rs is FAILED:
                logger.error("check {:<30}fail{}".format(name, " [ignored]" if ignored else ""))
            else:
                # never here
                # FIX: logger.exception outside an except block logs a bogus
                # "NoneType: None" traceback; use logger.error instead.
                logger.error("check {:<30}exception{}".format(name, " [ignored]" if ignored else ""))
                continue
            if not ignored:
                result += rs
        return result

    def start_check(self):
        """
        Run every method whose name starts with "check_" (alphabetical order
        as returned by inspect.getmembers).
        """
        members = inspect.getmembers(self, inspect.ismethod)
        items = [member[0].replace("check_", "") for member in members if member[0].startswith("check_")]
        logger.debug("check items: {}".format(items))
        return self.start_check_with_order(*items)
# -*- encoding=utf-8 -*-
"""
Use this variables (FAILED, WARNING, SUCCESS) at most time,
and don't new ACResult unless you have specific needs.
"""
class ACResult(object):
    """Tri-state result of an AC (gate) check: 0=SUCCESS, 1=WARNING, 2=FAILED.

    Use the module-level singletons FAILED/WARNING/SUCCESS at most time,
    and don't new ACResult unless you have specific needs.
    """

    def __init__(self, val):
        # numeric severity; higher is worse
        self._val = val

    def __add__(self, other):
        # combining two results keeps the worse (higher) one
        return self if self.val >= other.val else other

    def __str__(self):
        return self.hint

    def __repr__(self):
        return self.__str__()

    @classmethod
    def get_instance(cls, val):
        """Map an int, bool or str onto the matching singleton.

        FIX: bool must be tested BEFORE int -- bool is a subclass of int, so
        the original int-first check sent True through the int table
        ({...}.get(True) hits key 1) and returned WARNING instead of SUCCESS,
        and False returned SUCCESS instead of FAILED.
        """
        if isinstance(val, bool):
            return {True: SUCCESS, False: FAILED}.get(val)
        if isinstance(val, int):
            return {0: SUCCESS, 1: WARNING, 2: FAILED}.get(val)
        try:
            val = int(val)
            return {0: SUCCESS, 1: WARNING, 2: FAILED}.get(val)
        except ValueError:
            # unknown strings default to FAILED
            return {"success": SUCCESS, "fail": FAILED, "failed": FAILED, "failure": FAILED,
                    "warn": WARNING, "warning": WARNING}.get(val.lower(), FAILED)

    @property
    def val(self):
        return self._val

    @property
    def hint(self):
        # human-readable name, indexed by severity
        return ["SUCCESS", "WARNING", "FAILED"][self.val]

    @property
    def emoji(self):
        # gitee markdown emoji, indexed by severity
        return [":white_check_mark:", ":bug:", ":x:"][self.val]


FAILED = ACResult(2)
WARNING = ACResult(1)
SUCCESS = ACResult(0)
# -*- encoding=utf-8 -*-
import os
import re
class BuildRPMPackage(object):
    """Index of rpm/srpm artifacts produced by an rpmbuild run, plus helpers
    computing their local paths and their paths in the remote rpm repo."""

    LINKMAGIC = "0X080480000XC0000000"  # must never collide with a gitee user name

    def __init__(self, repo, rpmbuild_dir):
        """
        :param repo: package (repo) name
        :param rpmbuild_dir: rpmbuild root directory
        """
        self._repo = repo
        self._rpmbuild_dir = rpmbuild_dir
        # {"srpm": {name: info}, "rpm": {name: info}}
        self._rpm_packages = {"srpm": {}, "rpm": {}}
        self._package_structure(rpmbuild_dir)

    def main_package_local(self):
        """
        Local path of the main rpm package.
        :return: path string, or None when the package was not built
        """
        package = self._rpm_packages["rpm"].get(self._repo)
        if not package:
            # not exist
            return None
        return os.path.join(self._rpmbuild_dir, "RPMS", package["arch"], package["fullname"])

    def main_package_in_repo(self, committer, arch, rpm_repo_url):
        """
        Path of the main package in the repo mirror.
        :param committer:
        :param arch:
        :param rpm_repo_url:
        :return:
        """
        return self.get_package_path(committer, arch, self._repo, rpm_repo_url)

    def last_main_package(self, arch, rpm_repo_url):
        """
        Link path of the previously built main rpm in the repo mirror.
        :param arch:
        :param rpm_repo_url: remote url where built rpms are stored
        :return:
        """
        return os.path.join(rpm_repo_url, self.LINKMAGIC, arch, self._repo)

    def debuginfo_package_local(self):
        """
        Local path of the debuginfo rpm.
        :return: path string, or None when it was not built
        """
        package = self._rpm_packages["rpm"].get("{}-debuginfo".format(self._repo))
        if not package:
            # not exist
            return None
        return os.path.join(self._rpmbuild_dir, "RPMS", package["arch"], package["fullname"])

    def debuginfo_package_in_repo(self, committer, arch, rpm_repo_url):
        """
        Path of the debuginfo package in the repo mirror.
        :param committer:
        :param arch:
        :param rpm_repo_url:
        :return:
        """
        return self.get_package_path(committer, arch, "{}-debuginfo".format(self._repo), rpm_repo_url)

    def last_debuginfo_package(self, arch, rpm_repo_url):
        """
        Link path of the previously built debuginfo rpm in the repo mirror.
        :param arch:
        :param rpm_repo_url:
        :return:
        """
        return os.path.join(rpm_repo_url, self.LINKMAGIC, arch, "{}-debuginfo".format(self._repo))

    @staticmethod
    def checkabi_md_in_repo(committer, repo, arch, md, rpm_repo_url):
        """
        Path of the check-abi markdown report in the repo mirror.
        :param committer:
        :param repo:
        :param arch:
        :param md: report file name
        :param rpm_repo_url:
        :return:
        """
        return os.path.join(rpm_repo_url, committer, repo, arch, md)

    def get_package_path(self, committer, arch, name, remote_url):
        """
        Path of the named package in the repo mirror.
        :param committer:
        :param arch:
        :param name: package name
        :param remote_url: remote repo base url
        :return: url string, or None when the package was not built
        """
        package = self._rpm_packages["rpm"].get(name)
        if not package:
            # not exist
            return None
        # NOTE(review): this branching looks inverted (an extra "noarch"
        # component is appended exactly when arch != "noarch"); kept as-is to
        # preserve the repo layout existing callers rely on -- confirm intent.
        if arch == "noarch":
            return os.path.join(remote_url, committer, name, arch, package["fullname"])
        else:
            return os.path.join(remote_url, committer, name, arch, "noarch", package["fullname"])

    def get_package_fullname(self, name):
        """
        Full file name of a built package.
        :param name: short package name
        :return: full rpm file name, or the input unchanged when not built
        """
        package = self._rpm_packages["rpm"].get(name)
        return package["fullname"] if package else name

    def get_srpm_path(self):
        """
        for future
        :return:
        """
        raise NotImplementedError

    @staticmethod
    def extract_rpm_name(rpm_fullname):
        """
        Extract the short name from a full rpm file name.
        :param rpm_fullname: e.g. "foo-1.0-1.x86_64.rpm"
        :return: "foo", or the input unchanged when it does not look like an rpm
        """
        # FIX: replaced a bare except (which hid real errors) with an explicit
        # no-match check; raw string avoids an escape-sequence ambiguity.
        m = re.match(r"(.*)-[0-9.]+-.*rpm", rpm_fullname)
        return m.group(1) if m else rpm_fullname

    def _package_structure(self, rpmbuild_dir):
        """
        Walk RPMS/ and SRPMS/ and index every built package by short name.
        :param rpmbuild_dir: rpmbuild root directory
        :return:
        """
        rpms_dir = os.path.join(rpmbuild_dir, "RPMS")
        for dirname, _, filenames in os.walk(rpms_dir):
            arch = dirname.split("/")[-1]
            # FIX(review): the original assigned "x86-64" to a misspelled,
            # never-read variable ("aarch") when arch == "i386", so the
            # remapping never took effect. The dead assignment is removed
            # here WITHOUT changing behavior -- confirm whether i386 results
            # should really be re-indexed as "x86-64".
            for filename in filenames:
                name = self.extract_rpm_name(filename)
                self._rpm_packages["rpm"][name] = {"name": name, "fullname": filename, "arch": arch}
        srpms = os.path.join(rpmbuild_dir, "SRPMS")
        for dirname, _, filenames in os.walk(srpms):
            for filename in filenames:
                name = self.extract_rpm_name(filename)
                self._rpm_packages["srpm"][name] = {"name": name, "fullname": filename}
# -*- encoding=utf-8 -*-
import os
import argparse
import logging.config
import logging
import yaml
class ExtraWork(object):
    """Post-build helper work: pkgship notification and rpm ABI checking."""

    def __init__(self, package, rpmbuild_dir="/home/jenkins/agent/buildroot/home/abuild/rpmbuild"):
        """
        :param package: obs package name
        :param rpmbuild_dir: rpmbuild root directory
        """
        self._repo = package
        self._rpm_package = BuildRPMPackage(package, rpmbuild_dir)

    def is_pkgship_need_notify(self, pkgship_meta_path):
        """
        Whether a pkgship notification must be sent.
        :param pkgship_meta_path: path of the yaml file holding the pkgship
            spec version meta collected during the gate checks
        :return: True when pkgship was version-upgraded (or the meta file is
            missing, treated as an upstream bug and notified to be safe)
        """
        if self._repo != "pkgship":  # only the pkgship package notifies
            return False
        try:
            with open(pkgship_meta_path, "r") as f:
                pkgship_meta = yaml.safe_load(f)
            logger.debug("pkgship meta: {}".format(pkgship_meta))
            # FIX: guard against an empty meta file parsing to None
            if pkgship_meta and pkgship_meta.get("compare_version") == 1:  # version upgrade
                logger.debug("pkgship: notify")
                return True
        except IOError:
            # file not exist, bug
            logger.warning("pkgship meta file not exist!")
            return True
        return False

    def pkgship_notify(self, notify_url, notify_token, package_url, package_arch, notify_jenkins_user, notify_jenkins_password):
        """
        Trigger the pkgship notify job over http.
        :param notify_url: notify url
        :param notify_token: notify token
        :param package_url: rpm repo base url
        :param package_arch: cpu arch
        :param notify_jenkins_user: jenkins user for basic auth
        :param notify_jenkins_password: jenkins password for basic auth
        :return:
        """
        package = self._rpm_package.last_main_package(package_arch, package_url)
        querystring = {"token": notify_token, "PACKAGE_URL": package, "arch": package_arch}
        ret = do_requests("get", notify_url, querystring=querystring,
                          auth={"user": notify_jenkins_user, "password": notify_jenkins_password}, timeout=1)
        if ret in [0, 2]:
            # send async, don't care about response, timeout will be ok
            # (presumably return code 2 means timeout -- confirm in do_requests)
            logger.info("notify ...ok")
        else:
            logger.error("notify ...fail")

    def check_rpm_abi(self, package_url, package_arch, output, committer, comment_file, related_rpm=None):
        """
        Diff the ABI between the newly built rpm and the previously released
        one and append the verdict to the comment yaml file.
        :param package_url: rpm repo base url holding the last build
        :param package_arch: cpu arch
        :param output: report file written by check_abi.py (existence of the
            file after the run means the ABI changed)
        :param committer: pr committer, used to build remote report paths
        :param comment_file: yaml file accumulating comment entries
        :param related_rpm: extra rpm location passed to check_abi.py (obs)
        :return:
        """
        cwd = os.getcwd()
        check_abi_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../../utils/check_abi.py"))
        curr_rpm = self._rpm_package.main_package_local()
        last_rpm = self._rpm_package.last_main_package(package_arch, package_url)
        logger.debug("curr_rpm: {}".format(curr_rpm))
        logger.debug("last_rpm: {}".format(last_rpm))
        if not curr_rpm or not last_rpm:
            # nothing to compare against
            logger.info("no rpms")
            return
        check_abi_cmd = "{} -o {}".format(check_abi_path, os.path.join(cwd, output))
        if related_rpm:
            # obs
            check_abi_cmd = "{} -i {}".format(check_abi_cmd, related_rpm)
        check_abi_cmd = "{} compare_rpm -r {} {}".format(check_abi_cmd, last_rpm, curr_rpm)
        curr_rpm_debug = self._rpm_package.debuginfo_package_local()
        last_rpm_debug = self._rpm_package.last_debuginfo_package(package_arch, package_url)
        logger.debug("curr_rpm_debug: {}".format(curr_rpm_debug))
        logger.debug("last_rpm_debug: {}".format(last_rpm_debug))
        if curr_rpm_debug and last_rpm_debug:
            # debuginfo (optional, improves the comparison)
            check_abi_cmd = "{} -d {} {}".format(check_abi_cmd, last_rpm_debug, curr_rpm_debug)
        logger.info("check cmd: {}".format(check_abi_cmd))
        ret, _, err = shell_cmd_live(check_abi_cmd, verbose=True)
        if ret == 1:
            logger.error("check abi error: {}".format(err))
        else:
            logger.debug("check abi ok: {}".format(ret))
        if os.path.exists(output):
            # report produced => change of abi
            comment = {"name": "check_abi/{}/{}".format(package_arch, self._repo), "result": "WARNING",
                       "link": self._rpm_package.checkabi_md_in_repo(committer, self._repo, package_arch, output, package_url)}
        else:
            comment = {"name": "check_abi/{}/{}".format(package_arch, self._repo), "result": "SUCCESS"}
        logger.debug("check abi comment: {}".format(comment))
        # FIX: the original read comment_file twice (first catching only
        # IOError, then again with a bare except) and crashed with
        # AttributeError when the yaml parsed to None; consolidated into a
        # single guarded read.
        comments = []
        if os.path.exists(comment_file):
            try:
                with open(comment_file, "r") as f:  # one repo with multi build package
                    comments = yaml.safe_load(f) or []
            except Exception:
                logger.exception("yaml load check abi comment file exception")
        comments.append(comment)
        logger.debug("check abi comments: {}".format(comments))
        try:
            with open(comment_file, "w") as f:
                yaml.safe_dump(comments, f)  # list
        except Exception:
            logger.exception("save check abi comment exception")
if "__main__" == __name__:
    # Command line entry: "-f notify" runs after rpms were copied to the rpm
    # repo; "-f checkabi" runs before they are copied.
    args = argparse.ArgumentParser()
    args.add_argument("-f", type=str, dest="func", choices=("notify", "checkabi"), help="function")
    args.add_argument("-p", type=str, dest="package", help="obs package")
    args.add_argument("-a", type=str, dest="arch", help="build arch")
    args.add_argument("-c", type=str, dest="committer", help="committer")
    args.add_argument("-d", type=str, dest="rpmbuild_dir", default="/home/jenkins/agent/buildroot/home/abuild/rpmbuild", help="rpmbuild dir")
    # FIX: help texts of -n and -t were copy-paste leftovers from another
    # script ("target branch that merged to", "obs workspace dir path").
    args.add_argument("-n", type=str, dest="notify_url", help="pkgship notify url")
    # NOTE(review): default=os.getcwd() for a token also looks like a
    # copy-paste leftover; kept for backward compatibility -- confirm.
    args.add_argument("-t", type=str, dest="token", default=os.getcwd(), help="pkgship notify token")
    args.add_argument("-u", type=str, dest="notify_user", default="trigger", help="notify trigger user")
    args.add_argument("-w", type=str, dest="notify_password", help="notify trigger password")
    args.add_argument("-l", type=str, dest="rpm_repo_url", help="rpm repo where rpm saved")
    args.add_argument("-m", type=str, dest="pkgship_meta", help="meta from pkgship spec")
    args.add_argument("-o", type=str, dest="output", help="checkabi result")
    args.add_argument("-e", type=str, dest="comment_file", help="checkabi result comment")
    args.add_argument("-b", type=str, dest="obs_repo_url", help="obs repo where rpm saved")
    args = args.parse_args()
    # init logging before anything else
    not os.path.exists("log") and os.mkdir("log")
    logger_conf_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../../conf/logger.conf"))
    logging.config.fileConfig(logger_conf_path)
    logger = logging.getLogger("build")
    # project imports after the logging config is in place (original ordering preserved)
    from src.utils.shell_cmd import shell_cmd_live
    from src.proxy.requests_proxy import do_requests
    from src.build.build_rpm_package import BuildRPMPackage
    ew = ExtraWork(args.package, args.rpmbuild_dir)
    if args.func == "notify":
        # run after copy rpm to rpm repo
        if ew.is_pkgship_need_notify(args.pkgship_meta):
            ew.pkgship_notify(args.notify_url, args.token, args.rpm_repo_url, args.arch, args.notify_user, args.notify_password)
    elif args.func == "checkabi":
        # run before copy rpm to rpm repo
        ew.check_rpm_abi(args.rpm_repo_url, args.arch, args.output, args.committer, args.comment_file, args.obs_repo_url)
#!/bin/bash
# A simple script to checkout or update a svn or git repo as source service
# Extracts the requested file(s) from an archive found in the output
# directory and moves them to the requested destination name.
# defaults
MYARCHIVE=""
MYFILES=""
OUTFILE="."
FILES=""
# parse "--archive / --file(s) / --outfilename / --outdir VALUE" parameters
while test $# -gt 0; do
  case $1 in
    *-archive)
      # keep only the basename of the archive
      MYARCHIVE="${2##*/}"
      shift
      ;;
    *-file|*-files)
      MYFILES="$MYFILES ${2}"
      # FILES keeps only the LAST value; it is used for the '*' check below
      FILES=${2}
      shift
      ;;
    *-outfilename)
      OUTFILE="${2}"
      shift
      ;;
    *-outdir)
      MYOUTDIR="$2"
      shift
      ;;
    *)
      echo Unknown parameter $1.
      echo 'Usage: extract_file --archive $ARCHIVE --file $FILE --outdir $OUT'
      exit 1
      ;;
  esac
  shift
done
# archive, files and outdir are all mandatory
if [ -z "$MYARCHIVE" ]; then
  echo "ERROR: no archive specified!"
  exit 1
fi
if [ -z "$MYFILES" ]; then
  echo "ERROR: no checkout URL is given via --file parameter!"
  exit 1
fi
if [ -z "$MYOUTDIR" ]; then
  echo "ERROR: no output directory is given via --outdir parameter!"
  exit 1
fi
set -x
# a literal '*' means "extract everything" (no file list passed to tar/unzip)
if [ "${FILES}" == '*' ];then
  MYFILES=" "
fi
existing_archive="$MYOUTDIR/$(echo $MYARCHIVE)"
cd "$MYOUTDIR"
# unquoted on purpose: ls expands glob characters in the archive name
existing_archive=`ls $existing_archive`
if [ -e "$existing_archive" ]; then
  # pick the extraction tool from the archive suffix
  if [ "${existing_archive%.tar.gz}" != "$existing_archive" ]; then
    tar xfz "$existing_archive" --wildcards $MYFILES || exit 1
  elif [ "${existing_archive%.tar.bz2}" != "$existing_archive" ]; then
    tar xfj "$existing_archive" --wildcards $MYFILES || exit 1
  elif [ "${existing_archive%.tar.xz}" != "$existing_archive" ]; then
    tar xfJ "$existing_archive" --wildcards $MYFILES || exit 1
  elif [ "${existing_archive%.tar}" != "$existing_archive" ]; then
    tar xf "$existing_archive" --wildcards $MYFILES || exit 1
  elif [ "${existing_archive%.zip}" != "$existing_archive" ]; then
    unzip "$existing_archive" $MYFILES || exit 1
  else
    echo "ERROR: unknown archive format $existing_archive"
    exit 1
  fi
  # move each extracted file to the requested output name/directory
  for i in $MYFILES; do
    mv "$i" "$OUTFILE"
  done
else
  echo "ERROR: archive not found: $existing_archive"
  exit 1
fi
exit 0
# -*- coding: utf-8 -*-
import os
import sys
import logging.config
import logging
import json
import yaml
import argparse
class Comment(object):
    """Assemble and post gitee pull-request comments for a CI run."""

    def __init__(self, pr, *check_abi_comment_files):
        """
        :param pr: pull request number
        :param check_abi_comment_files: yaml files produced by the per-arch
            check-abi stage (may be empty)
        """
        self._pr = pr
        self._check_abi_comment_files = check_abi_comment_files

    def comment_build(self, jenkins_proxy, gitee_proxy):
        """
        Comment the build result table onto the pull request.
        :param jenkins_proxy: used to query the related jenkins builds
        :param gitee_proxy: used to post the comment
        :return:
        """
        comments = self._comment_build_html_format(jenkins_proxy)
        gitee_proxy.comment_pr(self._pr, "\n".join(comments))

    def comment_at(self, committer, gitee_proxy):
        """
        Notify the committer by @-mentioning them on the pull request.
        :param committer: gitee user name
        :param gitee_proxy:
        :return:
        """
        gitee_proxy.comment_pr(self._pr, "@{}".format(committer))

    def _comment_build_html_format(self, jenkins_proxy):
        """
        Build the html comment lines (one <table>) for the current run.
        :param jenkins_proxy: JenkinsProxy object
        :return: list of html strings
        """
        comments = ["<table>", self._comment_html_table_th()]
        # current job/build identified through the Jenkins environment
        base_job_name = os.environ.get("JOB_NAME")
        base_build_id = os.environ.get("BUILD_ID")
        base_build_id = int(base_build_id)
        logger.debug("base_job_name: {}, base_build_id: {}".format(base_job_name, base_build_id))
        base_build = jenkins_proxy.get_build(base_job_name, base_build_id)
        logger.debug("get base build")
        # upstream builds: presumably the per-arch build jobs -- confirm
        up_builds = jenkins_proxy.get_upstream_builds(base_build)
        if up_builds:
            logger.debug("get up_builds")
            # upstream of upstream: presumably the gate-check job -- confirm
            up_up_builds = jenkins_proxy.get_upstream_builds(up_builds[0])
            if up_up_builds:
                logger.debug("get up_up_builds")
                comments.extend(self._comment_of_ac(up_up_builds[0]))
            comments.extend(self._comment_of_build(up_builds))
            comments.extend(self._comment_of_check_abi(up_builds))
        comments.append("</table>")
        return comments

    def _comment_of_ac(self, build):
        """
        Build the html rows for the gate-check (AC) results, read from the
        "ACL" environment variable (json list written by the AC stage).
        :param build: Jenkins Build object of the gate-check build
        :return: list of html strings
        """
        if "ACL" not in os.environ:
            # gate checks did not run or did not export results
            logger.debug("no ac check")
            return []
        try:
            acl = json.loads(os.environ["ACL"])
            logger.debug("ac result: {}".format(acl))
        except ValueError:
            logger.exception("invalid ac result format")
            return []
        comments = []
        try:
            for index, item in enumerate(acl):
                ac_result = ACResult.get_instance(item["result"])
                if index == 0:
                    # first row carries the link cell spanning all ac rows
                    build_url = build.get_build_url()
                    comments.append(self.__class__._comment_html_table_tr(
                        item["name"], ac_result.emoji, ac_result.hint, "{}{}".format(build_url, "console"), build.buildno, rowspan=len(acl)))
                else:
                    comments.append(self.__class__._comment_html_table_tr_rowspan(
                        item["name"], ac_result.emoji, ac_result.hint))
        except:
            # jenkins api maybe exception, who knows
            logger.exception("comment of ac result exception")
        logger.info("ac comment: {}".format(comments))
        return comments

    def _comment_of_build(self, builds):
        """
        Build the html rows for the per-arch build jobs.
        :param builds: Jenkins Build objects
        :return: list of html strings
        """
        comments = []
        try:
            for build in builds:
                name = build.job._data["fullName"]
                status = build.get_status()
                ac_result = ACResult.get_instance(status)
                build_url = build.get_build_url()
                comments.append(self.__class__._comment_html_table_tr(
                    name, ac_result.emoji, ac_result.hint, "{}{}".format(build_url, "console"), build.buildno))
        except:
            # jenkins api maybe exception, who knows
            logger.exception("comment of build exception")
        logger.info("build comment: {}".format(comments))
        return comments

    def _comment_of_check_abi(self, builds):
        """
        Build the html rows for the per-arch check-abi results.
        :param builds: Jenkins Build objects of the per-arch build jobs
        :return: list of html strings
        """
        comments = []

        def match(name, comment_file):
            # pair a build job with its comment file via the arch in both names
            if "aarch64" in name and "aarch64" in comment_file:
                return True
            if "x86-64" in name and "x86-64" in comment_file:
                return True
            return False

        try:
            for check_abi_comment_file in self._check_abi_comment_files:
                logger.debug("check abi comment file: {}".format(check_abi_comment_file))
                if os.path.exists(check_abi_comment_file):  # check abi comment file exists
                    for build in builds:
                        name = build.job._data["fullName"]
                        logger.debug("check build {}".format(name))
                        if match(name, check_abi_comment_file):  # found the matching jenkins build
                            logger.debug("build \"{}\" match".format(name))
                            status = build.get_status()
                            logger.debug("build state: {}".format(status))
                            if ACResult.get_instance(status) == SUCCESS:  # only when the build succeeded
                                with open(check_abi_comment_file, "r") as f:
                                    content = yaml.safe_load(f)
                                logger.debug("comment: {}".format(content))
                                for item in content:
                                    ac_result = ACResult.get_instance(item.get("result"))
                                    comments.append(self.__class__._comment_html_table_tr(
                                        item.get("name"), ac_result.emoji, ac_result.hint, item.get("link", ""),
                                        "markdown" if "link" in item else "", hashtag=False))
                            break
        except:
            # jenkins api or yaml maybe exception, who knows
            logger.exception("comment of build exception")
        logger.info("check abi comment: {}".format(comments))
        return comments

    @classmethod
    def _comment_html_table_th(cls):
        # html table header row
        return "<tr><th>Check Name</th> <th>Build Result</th> <th>Build Details</th></tr>"

    @classmethod
    def _comment_html_table_tr(cls, name, icon, status, href, build_no, hashtag=True, rowspan=1):
        # one result row with a link cell spanning *rowspan* rows
        return "<tr><td>{}</td> <td>{}<strong>{}</strong></td> <td rowspan={}><a href={}>{}{}</a></td></tr>".format(
            name, icon, status, rowspan, href, "#" if hashtag else "", build_no)

    @classmethod
    def _comment_html_table_tr_rowspan(cls, name, icon, status):
        # one result row without its own link cell (covered by a rowspan above)
        return "<tr><td>{}</td> <td>{}<strong>{}</strong></td></tr>".format(name, icon, status)
if "__main__" == __name__:
    # Command line entry: post the CI result comment and @ the committer.
    args = argparse.ArgumentParser()
    args.add_argument("-p", type=int, dest="pr", help="pull request number")
    # NOTE(review): "commiter" is a typo in the help text (runtime string, left unchanged here)
    args.add_argument("-c", type=str, dest="committer", help="commiter")
    args.add_argument("-o", type=str, dest="owner", help="gitee owner")
    args.add_argument("-r", type=str, dest="repo", help="repo name")
    args.add_argument("-t", type=str, dest="gitee_token", help="gitee api token")
    args.add_argument("-b", type=str, dest="jenkins_base_url", help="jenkins base url")
    # NOTE(review): help text "repo name" looks like a copy-paste slip; -u is the jenkins user
    args.add_argument("-u", type=str, dest="jenkins_user", help="repo name")
    args.add_argument("-j", type=str, dest="jenkins_api_token", help="jenkins api token")
    args.add_argument("-a", type=str, dest="check_abi_comment_files", nargs="*", help="check abi comment files")
    args.add_argument("--disable", dest="enable", default=True, action="store_false", help="comment to gitee switch")
    args = args.parse_args()
    if not args.enable:
        # commenting switched off: exit quietly
        sys.exit(0)
    # init logging
    not os.path.exists("log") and os.mkdir("log")
    logger_conf_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../../conf/logger.conf"))
    logging.config.fileConfig(logger_conf_path)
    logger = logging.getLogger("build")
    # project imports after the logging config is in place
    from src.ac.framework.ac_result import ACResult, SUCCESS
    from src.proxy.gitee_proxy import GiteeProxy
    from src.proxy.jenkins_proxy import JenkinsProxy
    # gitee notify
    gp = GiteeProxy(args.owner, args.repo, args.gitee_token)
    # swap the pr tags: gate finished
    gp.delete_tag_of_pr(args.pr, "ci_processing")
    gp.create_tags_of_pr(args.pr, "ci_finished")
    jp = JenkinsProxy(args.jenkins_base_url, args.jenkins_user, args.jenkins_api_token)
    if args.check_abi_comment_files:
        comment = Comment(args.pr, *args.check_abi_comment_files)
    else:
        comment = Comment(args.pr)
    logger.info("comment: build result......")
    comment.comment_build(jp, gp)
    logger.info("comment: at committer......")
    comment.comment_at(args.committer, gp)
# -*- encoding=utf-8 -*-
import os
import sys
import logging.config
import logging
import argparse
from xml.etree import ElementTree
class SinglePackageBuild(object):
    """Build one obs package for one architecture via osc, for every obs
    project mapped to the target gitee branch."""

    # gitee branch -> obs projects that must be built for it
    GITEEBRANCHPROJECTMAPPING = {
        "master": ["bringInRely", "openEuler:Extras", "openEuler:Factory", "openEuler:Mainline"],
        "openEuler-20.03-LTS": ["openEuler:20.03:LTS"],
        "openEuler-EPOL-LTS": ["bringInRely"],
        "openEuler-20.09": ["openEuler:20.09"],
        "mkopeneuler-20.03": ["openEuler:Extras"]
    }

    def __init__(self, package, arch, target_branch):
        """
        :param package: obs package name
        :param arch: build architecture
        :param target_branch: gitee branch the pull request merges into
        """
        self._package = package
        self._arch = arch
        self._branch = target_branch

    def get_need_build_obs_repos(self, project):
        """
        List the obs repos of this package that need building for the arch.
        :param project: obs project name
        :return: list<dict>
        """
        return OBSProxy.list_repos_of_arch(project, self._package, self._arch)

    def build_obs_repos(self, project, repos, work_dir, code_dir):
        """
        Check out the package, point its meta at the local code and build
        every given repo.
        :param project: obs project name
        :param repos: obs repos to build (dicts with a "repo" key)
        :param code_dir: local path of the gitee code
        :param work_dir: osc working directory
        :return: 0 on success; 1/2/3 for checkout/prepare/build failure
        """
        # osc co
        if not OBSProxy.checkout_package(project, self._package):
            logger.error("checkout ... failed")
            return 1
        logger.info("checkout ... ok")
        # update package meta file "_service"
        self._handle_package_meta(project, work_dir, code_dir)
        logger.debug("prepare \"_service\" ... ok")
        # process_service.pl
        if not self._prepare_build_environ(project, work_dir):
            logger.error("prepare environ ... failed")
            return 2
        logger.info("prepare environ ... ok")
        # osc build
        for repo in repos:
            if not OBSProxy.build_package(project, self._package, repo["repo"], self._arch):
                logger.error("build {} ... failed".format(repo["repo"]))
                return 3
            logger.info("build {} ... ok".format(repo["repo"]))
        logger.debug("build all repos ... finished")
        return 0

    def _handle_package_meta(self, project, obs_work_dir, code_path):
        """
        Rewrite the "_service" meta file so remote scm services become local
        ones pointing at the already-checked-out code, e.g.
        <services>
            <service name="tar_scm_kernel_repo">
                <param name="scm">repo</param>
                <param name="url">next/openEuler/perl-Archive-Zip</param>
            </service>
        </services>
        :param project: obs project
        :param obs_work_dir: obs working directory
        :param code_path: code directory
        :return:
        """
        _service_file_path = os.path.join(obs_work_dir, project, self._package, "_service")
        tree = ElementTree.parse(_service_file_path)
        logger.info("before update meta------")
        ElementTree.dump(tree)  # dump writes to stdout, hence the flush below
        sys.stdout.flush()
        services = tree.findall("service")
        for service in services:
            # map each remote tar service onto its local counterpart
            if service.get("name") == "tar_scm_repo_docker":
                service.set("name", "tar_local")
            elif service.get("name") == "tar_scm_repo":
                service.set("name", "tar_local")
            elif service.get("name") == "tar_scm_kernel_repo":
                service.set("name", "tar_local_kernel")
            elif service.get("name") == "tar_scm_kernels_repo":
                service.set("name", "tar_local_kernels")
            for param in service.findall("param"):
                if param.get("name") == "scm":
                    param.text = "local"
                elif param.get("name") == "tar_scm":
                    param.text = "tar_local"
                elif param.get("name") == "url":
                    if "openEuler_kernel" in param.text or "LTS_kernel" in param.text or "openEuler-20.09_kernel" in param.text:
                        param.text = "{}/{}".format(code_path, "code")  # kernel special logical
                    else:
                        # point the service at the local checkout of the repo
                        gitee_repo = param.text.split("/")[-1]
                        param.text = "{}/{}".format(code_path, gitee_repo)
        logger.info("after update meta------")
        ElementTree.dump(tree)
        sys.stdout.flush()
        tree.write(_service_file_path)

    def _prepare_build_environ(self, project, obs_work_dir):
        """
        Prepare the osc build environment by running process_service.pl
        (located next to this script) on the rewritten "_service" file.
        :param project: obs project
        :param obs_work_dir: obs working directory
        :return: True on success
        """
        _process_perl_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../process_service.pl"))
        _service_file_path = os.path.join(obs_work_dir, project, self._package, "_service")
        _obs_package_path = os.path.join(obs_work_dir, project, self._package)
        cmd = "perl {} -f {} -p {} -m {} -w {}".format(
            _process_perl_path, _service_file_path, project, self._package, _obs_package_path)
        ret, _, _ = shell_cmd_live(cmd, verbose=True)
        if ret:
            logger.error("prepare build environ error, {}".format(ret))
            return False
        return True

    def build(self, work_dir, code_dir):
        """
        Entry point: build the package for every obs project mapped to the
        target branch; exits the process on the first failure.
        :param work_dir: obs working directory
        :param code_dir: code directory
        :return:
        """
        if self._branch not in self.GITEEBRANCHPROJECTMAPPING:
            logger.error("branch \"{}\" not support yet".format(self._branch))
            sys.exit(1)
        for project in self.GITEEBRANCHPROJECTMAPPING.get(self._branch):
            logger.debug("start build project {}".format(project))
            obs_repos = self.get_need_build_obs_repos(project)
            if not obs_repos:
                logger.info("all repos ignored of project {}".format(project))
                continue
            logger.debug("build obs repos: {}".format(obs_repos))
            ret = self.build_obs_repos(project, obs_repos, work_dir, code_dir)
            if ret > 0:
                logger.debug("build run return {}".format(ret))
                logger.error("build {} {} {} ... {}".format(project, self._package, self._arch, "failed"))
                sys.exit(1)  # finish if any error
            else:
                logger.info("build {} {} {} ... {}".format(project, self._package, self._arch, "ok"))
if "__main__" == __name__:
    # Command line entry: build one obs package for one arch and branch.
    args = argparse.ArgumentParser()
    args.add_argument("-p", type=str, dest="package", help="obs package")
    args.add_argument("-a", type=str, dest="arch", help="build arch")
    args.add_argument("-b", type=str, dest="branch", help="target branch that merged to ")
    args.add_argument("-c", type=str, dest="code", help="code dir path")
    args.add_argument("-w", type=str, dest="workspace", default=os.getcwd(), help="obs workspace dir path")
    args = args.parse_args()
    # init logging
    not os.path.exists("log") and os.mkdir("log")
    logger_conf_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../../conf/logger.conf"))
    logging.config.fileConfig(logger_conf_path)
    logger = logging.getLogger("build")
    # project imports after the logging config is in place
    from src.proxy.obs_proxy import OBSProxy
    from src.utils.shell_cmd import shell_cmd_live
    spb = SinglePackageBuild(args.package, args.arch, args.branch)
    spb.build(args.workspace, args.code)
#!/usr/bin/perl -w
use File::Spec::Functions qw(rel2abs);
use File::Basename qw(dirname);
use Getopt::Std;
use POSIX;
use Data::Dumper;
use XML::Structured;
use strict;
# XML::Structured schema describing the obs "_service" file layout.
our $services = [
    'services' =>
     [[ 'service' =>
           'name',
           'mode', # "localonly" is skipping this service on server side, "trylocal" is trying to merge changes directly in local files, "disabled" is just skipping it
        [[ 'param' =>
            'name',
            '_content'
        ]],
     ]],
];
die " USAGE: $0 -f service_file -p product -c code_dir -m module -w workdir\n" if (@ARGV < 5);
our ($opt_f,$opt_p,$opt_c,$opt_m,$opt_w) =("","","","","");
&getopts("Hf:p:c:m:w:");
# command line parameters
my $service_file = $opt_f if ($opt_f);
my $product = $opt_p if ($opt_p);
my $code_dir = $opt_c if ($opt_c);
my $module = $opt_m if ($opt_m);
my $myworkdir = $opt_w if ($opt_w);
#open lg, ">/home/test.log";
# Parse the _service file and run each listed service executable (located
# next to this script) with its parameters adapted for a local build.
my $xml_file = readstr($service_file);
my $serviceinfo = XMLin($services, $xml_file);
for my $service (@{$serviceinfo->{'service'}}) {
    #print lg "Run for ".getcwd. "/$service->{'name'}"."\n";
    my @run;
    # the service executable lives in the same directory as this script
    push @run, dirname(rel2abs($0))."/$service->{'name'}";
    for my $param (@{$service->{'param'}}) {
        if ($service->{'name'} eq 'recompress') {
            push @run, "--$param->{'name'}";
            if ($param->{'name'} eq 'file') {
                # 'file' parameters are made absolute inside the work dir
                push @run, $myworkdir.'/'.$param->{'_content'};
                # print lg '--'. $param->{'name'} . " ".$myworkdir.'/'.$param->{'_content'}."\n";
            }
            else {
                push @run, $param->{'_content'};
                # print lg '--'. $param->{'name'}. " " .$param->{'_content'}."\n";
            }
            # print lg '--outdir '. $myworkdir."\n";
        } else {
            if ($param->{'name'} eq 'submodules'){
                # the submodules parameter is intentionally dropped
                print 'skip submodules para';
            }else{
                next if $param->{'name'} eq 'outdir';   # outdir is forced below
                next unless $param->{'_content'};       # skip empty parameters
                push @run, "--$param->{'name'}";
                push @run, $param->{'_content'};
            }
        }
    }
    # every service writes into the common work dir
    push @run, "--outdir";
    push @run, "$myworkdir";
    if ($service->{'name'} =~ /tar/) {
        # tar_local* services additionally need project/package context
        push @run, "--project";
        push @run, "$product";
        push @run, "--package";
        push @run, "$module";
    }
    print @run;
    system(@run);
}
# Slurp an entire file and return its contents as one string.
# When the second argument is true, a failed open returns undef
# instead of dying.
sub readstr {
my ($path, $soft) = @_;
local *F;
unless (open(F, '<', $path)) {
return undef if $soft;
die("$path: $!\n");
}
my $buf = '';
# Append 8K chunks until sysread reports EOF (returns 0).
1 while sysread(F, $buf, 8192, length($buf));
close F;
return $buf;
}
#!/bin/bash
# A simple script to checkout or update a svn or git repo as source service
#
# (C) 2010 by Adrian Schröter <adrian@suse.de>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# See http://www.gnu.org/licenses/gpl-2.0.html for full license text.
# defaults
MYCOMPRESSION=""
FILES=""
SRCDIR=""
# Parse service parameters; the leading `*` in each pattern accepts both
# `--file` and `-file` spellings.
while test $# -gt 0; do
case $1 in
*-compression)
MYCOMPRESSION="$2"
shift
;;
*-file)
# NOTE(review): SRCDIR is seeded with the previous $FILES value
# ("$FILES ${2%/*}/"), so with multiple --file args it mixes file
# names and directories -- confirm this is intended.
SRCDIR="$FILES ${2%/*}/"
FILES="$FILES ${2##*/}"
echo 'SRCDIR ' $SRCDIR
echo 'FILES ' $FILES
shift
;;
*-outdir)
MYOUTDIR="$2"
shift
;;
*)
echo Unknown parameter $1.
echo 'Usage: recompress --compression $COMPRESSION --file $FILE --outdir $OUT'
exit 1
;;
esac
shift
done
# Default to bzip2 when no compression format was requested.
if [ -z "$MYCOMPRESSION" ]; then
MYCOMPRESSION="bz2"
fi
if [ -z "$FILES" ]; then
echo "ERROR: no inputs files are given via --file parameter!"
exit 1
fi
if [ -z "$MYOUTDIR" ]; then
echo "ERROR: no output directory is given via --outdir parameter!"
exit 1
fi
# Work from the directory containing the input files.
cd $SRCDIR
echo `pwd`
echo `ls`
echo `ls $FILES`
# Re-compress each input file into the requested format.
for i in `ls $FILES`; do
#for i in "ls $SRCIDR"; do
# NOTE(review): the fallback pattern is quoted, so the `*` never globs;
# "_service:*:$i" is looked up as a literal file name -- confirm intent.
FILE=`ls -1 "$i" || ls -1 "_service:*:$i"`
#FILE=`ls -1 "$i" || ls -1 "$i"`
if [ ! -f "$FILE" ]; then
echo "Unknown file $i"
exit 1
fi
# Choose the decompressor from the current file extension.
UNCOMPRESS="cat"
BASENAME="$FILE"
if [ "${FILE%.gz}" != "$FILE" ]; then
UNCOMPRESS="gunzip -c"
BASENAME="${FILE%.gz}"
elif [ "${FILE%.tgz}" != "$FILE" ]; then
UNCOMPRESS="gunzip -c"
BASENAME="${FILE%.tgz}.tar"
elif [ "${FILE%.bz2}" != "$FILE" ]; then
UNCOMPRESS="bunzip2 -c"
BASENAME="${FILE%.bz2}"
elif [ "${FILE%.xz}" != "$FILE" ]; then
UNCOMPRESS="xz -dc"
BASENAME="${FILE%.xz}"
fi
# Choose the compressor for the target format; the `_service:` prefix
# is stripped from the resulting file name.
if [ "$MYCOMPRESSION" == "gz" ]; then
COMPRESS="gzip -c -n --rsyncable -"
NEWFILE="${BASENAME#_service:}.gz"
elif [ "$MYCOMPRESSION" == "bz2" ]; then
COMPRESS="bzip2 -c -"
NEWFILE="${BASENAME#_service:}.bz2"
elif [ "$MYCOMPRESSION" == "xz" ]; then
COMPRESS="xz -c -"
NEWFILE="${BASENAME#_service:}.xz"
elif [ "$MYCOMPRESSION" == "none" ]; then
COMPRESS="cat -"
NEWFILE="${BASENAME#_service:}"
else
echo "ERROR: Unknown compression"
exit 1
fi
echo "pwd: ". `pwd`;
# do the real work
echo "UnCompress". $UNCOMPRESS
echo "file ". $FILE
echo "Compress". $COMPRESS
echo "NEWFILE ". $NEWFILE
$UNCOMPRESS "$FILE" | $COMPRESS > "$MYOUTDIR/$NEWFILE" || exit 1
# Check if the (compressed) target file already exists in the directory where
# the service is invoked and drop the newly generated one. Avoids overwriting
# otherwise identical files which only have different timestamps. Note that
# zdiff and co all fail to do that properly...
echo "pwd: ". `pwd`;
if [ -f $NEWFILE ] ; then
DIFF_TMPDIR=$(mktemp -d)
SRC_DIR="$PWD"
echo "SRC_DIR ". $SRC_DIR
echo "MYOUTDIR ". $MYOUTDIR
cd $DIFF_TMPDIR
mkdir new old
# Unpack old and new archives (or move them in whole when not tarballs)
# and diff the trees.  NOTE(review): `tar -xxf` repeats the x flag --
# presumably a typo for `-xf`; harmless to tar but confirm.
$(cd new ; tar -xxf "$MYOUTDIR/$NEWFILE" 2> /dev/null || mv "$MYOUTDIR/$NEWFILE" .)
$(cd old ; tar -xxf "$SRC_DIR/$NEWFILE" 2> /dev/null || mv "$SRC_DIR/$NEWFILE" .)
if diff -r new old > /dev/null ; then
echo "Identical target file $NEWFILE already exists, skipping.."
#rm -r "$MYOUTDIR/$NEWFILE"
else
echo "Compressed $FILE to $NEWFILE"
fi
cd $SRC_DIR
rm -r $DIFF_TMPDIR
else
echo "Compressed $FILE to $NEWFILE"
fi
# we can remove service files, no need to store them twice
rm -f "$FILE"
done
exit 0
#!/bin/bash
# A simple script to checkout or update a svn or git repo as source service
#
# (C) 2010 by Adrian Schröter <adrian@suse.de>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# See http://www.gnu.org/licenses/gpl-2.0.html for full license text.
SERVICE='tar_scm'
# Reset every service parameter to its default before parsing the CLI.
set_default_params () {
MYSCM=""
MYURL=""
MYVERSION="_auto_"
MYFORMAT=""
MYPREFIX=""
MYFILENAME=""
MYREVISION=""
MYPACKAGEMETA=""
# MYHISTORYDEPTH=""
INCLUDES=""
}
# Source optional host-wide and per-user configuration overrides.
get_config_options () {
# config options for this host ?
if [ -f /etc/obs/services/$SERVICE ]; then
. /etc/obs/services/$SERVICE
fi
# config options for this user ?
if [ -f "$HOME"/.obs/$SERVICE ]; then
. "$HOME"/.obs/$SERVICE
fi
}
# Parse command-line parameters; the leading `*` in each pattern accepts
# both `--scm` and `-scm` spellings.
parse_params () {
while test $# -gt 0; do
case $1 in
*-scm)
MYSCM="$2"
shift
;;
*-url)
MYURL="$2"
# NOTE(review): CI_PRO_NAME/TEMP_URL are computed but the net effect
# leaves MYURL unchanged -- looks like leftover scaffolding; confirm.
CI_PRO_NAME=${MYURL%%/*}
TEMP_URL="$MYURL"
MYURL=$TEMP_URL
shift
;;
*-subdir)
MYSUBDIR="$2"
shift
;;
*-revision)
MYREVISION="$2"
shift
;;
*-version)
MYVERSION="$2"
shift
;;
*-include)
INCLUDES="$INCLUDES $2"
shift
;;
*-versionformat)
MYFORMAT="$2"
shift
;;
*-versionprefix)
MYPREFIX="$2"
shift
;;
*-exclude)
EXCLUDES="$EXCLUDES --exclude=${2#/}"
shift
;;
*-filename)
MYFILENAME="${2#/}"
shift
;;
*-package-meta)
MYPACKAGEMETA="${2#/}"
shift
;;
*-outdir)
MYOUTDIR="$2"
shift
;;
*-history-depth)
echo "history-depth parameter is obsolete and will be ignored"
shift
;;
*-project)
MYPROJECT="$2"
shift
;;
*-package)
MYPACKAGE="$2"
shift
;;
*)
echo "Unknown parameter: $1"
echo 'Usage: $SERVICE --scm $SCM --url $URL [--subdir $SUBDIR] [--revision $REVISION] [--version $VERSION] [--include $INCLUDE]* [--exclude $EXCLUDE]* [--versionformat $FORMAT] [--versionprefix $PREFIX] [--filename $FILENAME] [--package-meta $META] --outdir $OUT'
exit 1
;;
esac
shift
done
}
# Print an error message and abort the service.
error () {
echo "ERROR: $*"
exit 1
}
# Print only when DEBUG_TAR_SCM is set.
debug () {
[ -n "$DEBUG_TAR_SCM" ] && echo "$*"
}
# Run a command and abort via error() on any non-zero exit.
safe_run () {
if ! "$@"; then
error "$* failed; aborting!"
fi
}
# Validate mandatory parameters and derive FILE/WD_VERSION/EXCLUDES.
sanitise_params () {
TAR_VERSION="$MYVERSION"
if [ -z "$MYSCM" ]; then
error "no scm is given via --scm parameter (git/svn/hg/bzr)!"
fi
if [ -z "$MYURL" ]; then
error "no checkout URL is given via --url parameter!"
fi
if [ -z "$MYOUTDIR" ]; then
error "no output directory is given via --outdir parameter!"
fi
if [ -z "$MYPROJECT" ]; then
error "no project is given via --project parameter!"
fi
if [ -z "$MYPACKAGE" ]; then
error "no package is given via --package parameter!"
fi
FILE="$MYFILENAME"
WD_VERSION="$MYVERSION"
if [ -z "$MYPACKAGEMETA" ]; then
EXCLUDES="$EXCLUDES --exclude=.svn"
fi
# if [ "$MYHISTORYDEPTH" == "full" ]; then
# MYHISTORYDEPTH="999999999"
# fi
}
# Derive the output base name (FILE) from the URL when --filename is absent.
detect_default_filename_param () {
if [ -n "$FILE" ]; then
return
fi
case "$MYSCM" in
git)
FILE="${MYURL%/}"
FILE="${FILE##*/}"
FILE="${FILE%.git}"
FILE="${FILE#*@*:}"
;;
svn|hg|bzr)
FILE="${MYURL%/}"
FILE="${FILE##*/}"
;;
local)
FILE="temp_dir"
;;
*)
error "unknown SCM '$MYSCM'"
esac
}
# Fetch the sources, going through the local cache when one is configured.
# Sets REPOPATH and (when auto-versioning) TAR_VERSION.
fetch_upstream () {
TOHASH="$MYURL"
[ "$MYSCM" = 'svn' ] && TOHASH="$TOHASH/$MYSUBDIR"
HASH=`echo "$TOHASH" | sha256sum | cut -d\ -f 1`
REPOCACHE=
# NOTE(review): cache location is hard-coded here rather than taken
# from configuration -- confirm this is deliberate.
CACHEDIRECTORY=/tmp/local_code/xdf
if [ -n "$CACHEDIRECTORY" ]; then
REPOCACHEINCOMING="$CACHEDIRECTORY/incoming"
REPOCACHEROOT="$CACHEDIRECTORY/repo"
REPOCACHE="$REPOCACHEROOT/$MYPROJECT/$MYPACKAGE"
REPOURLCACHE="$CACHEDIRECTORY/repourl/$HASH"
fi
debug "check local cache if configured"
if [ -n "$CACHEDIRECTORY" -a -d "$REPOCACHE/" ]; then
debug "cache hit: $REPOCACHE"
check_cache
else
if [ -n "$CACHEDIRECTORY" ]; then
debug "cache miss: $REPOCACHE/"
else
debug "cache not enabled"
fi
calc_dir_to_clone_to
debug "new $MYSCM checkout to $CLONE_TO"
initial_clone
if [ -n "$CACHEDIRECTORY" ]; then
#cache_repo
REPOPATH="$REPOCACHE"
else
REPOPATH="$MYOUTDIR/$FILE"
fi
if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then
detect_version
fi
#exit 22
fi
}
# Decide which directory the SCM checkout is cloned into and, when the
# cache is enabled, create and enter that directory.
# Inputs : CACHEDIRECTORY, REPOCACHE, FILE
# Output : CLONE_TO (cache directory is created as a side effect)
calc_dir_to_clone_to () {
if [ -n "$CACHEDIRECTORY" ]; then
# Bug fix: the original tested the literal string `REPOCACHE`
# ([ ! -d REPOCACHE ]) instead of "$REPOCACHE", so the check was
# always true.  mkdir -p is idempotent, so observable behavior is
# unchanged, but the intended existence test is now performed.
if [ ! -d "$REPOCACHE" ]; then
mkdir -p "$REPOCACHE"
fi
safe_run cd "$REPOCACHE"
# Use dry-run mode because git/hg refuse to clone into
# an empty directory on SLES11
#debug mktemp -u -d "tmp.XXXXXXXXXX"
#CLONE_TO=`mktemp -u -d "tmp.XXXXXXXXXX"`
CLONE_TO="$REPOCACHE"
else
CLONE_TO="$FILE"
fi
}
# Perform the first checkout/clone of $MYURL into $CLONE_TO for each SCM.
initial_clone () {
echo "Fetching from $MYURL ..."
case "$MYSCM" in
git)
# Clone with full depth; so that the revision can be found if specified
safe_run git clone "$MYURL" "$CLONE_TO"
;;
svn)
args=
[ -n "$MYREVISION" ] && args="-r$MYREVISION"
if [[ $(svn --version --quiet) > "1.5.99" ]]; then
TRUST_SERVER_CERT="--trust-server-cert"
fi
safe_run svn checkout --non-interactive $TRUST_SERVER_CERT \
$args "$MYURL/$MYSUBDIR" "$CLONE_TO"
MYSUBDIR= # repo root is subdir
;;
local)
# Copy a local tree into the working dir, preserving .git/.svn metadata.
echo "xdffff: $MYURL ---- $CLONE_TO --- `pwd`"
safe_run ls -A $MYURL | grep -v .git | xargs -I {} cp -a $MYURL/{} .
if [ -e $MYURL/.git ]; then
safe_run rm -f $MYURL/.git/shallow
safe_run cp -aL $MYURL/.git .
fi
if [ -d "$MYURL/.svn" ]; then
safe_run cp -av $MYURL/.svn ./
fi
;;
hg)
safe_run hg clone "$MYURL" "$CLONE_TO"
;;
bzr)
args=
[ -n "$MYREVISION" ] && args="-r $MYREVISION"
safe_run bzr checkout $args "$MYURL" "$CLONE_TO"
;;
*)
error "unknown SCM '$MYSCM'"
esac
}
# Move a fresh checkout into the shared cache and record its URL.
cache_repo () {
if [ -e "$REPOCACHE" ]; then
error "Somebody else beat us to populating the cache for $MYURL ($REPOCACHE)"
else
# FIXME: small race window here; do source services need to be thread-safe?
if [ ! -d $REPOCACHE ]; then
mkdir -p $REPOCACHE
fi
debug mv2 "$CLONE_TO" "$REPOCACHE"
safe_run mv "$CLONE_TO" "$REPOCACHE"
echo "$MYURL" > "$REPOURLCACHE"
echo "Cached $MYURL at $REPOCACHE"
fi
}
# On a cache hit: compare svn revisions, then (unconditionally, since the
# comparison below is commented out) wipe the cache and re-clone.
check_cache () {
if [ -d "$MYURL/.svn" ]; then
new_version=`LC_ALL=C svn info "$MYURL" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'`
else
new_version="new_version"
fi
if echo "$MYURL" | grep '/$' &> /dev/null; then
new_version="new_version"
fi
if [ -d "$REPOCACHE/.svn" ]; then
old_version=`LC_ALL=C svn info "$REPOCACHE" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'`
else
old_version="old_version"
fi
#echo "xdf: $new_version $old_version"
#if [ "$new_version" != "$old_version" ]; then
echo "The code has changed for $MYPROJECT/$MYPACKAGE"
rm -rf "$REPOCACHE"
calc_dir_to_clone_to
debug "new $MYSCM checkout to $CLONE_TO"
initial_clone
if [ -n "$CACHEDIRECTORY" ]; then
#cache_repo
REPOPATH="$REPOCACHE"
else
REPOPATH="$MYOUTDIR/$FILE"
fi
safe_run cd "$REPOPATH"
switch_to_revision
if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then
detect_version
fi
}
# Refresh an existing cached checkout; exits 22 when svn has no update.
update_cache () {
safe_run cd "$REPOCACHE"
case "$MYSCM" in
git)
safe_run git fetch
;;
svn)
args=
[ -n "$MYREVISION" ] && args="-r$MYREVISION"
safe_run svn update $args > svnupdate_info
isupdate=`cat svnupdate_info | wc -l`
if [ $isupdate -eq 1 ]; then
rm -f svnupdate_info
echo "There is no code update, so exit 22"
exit 22
fi
MYSUBDIR= # repo root is subdir
;;
hg)
if ! out=`hg pull`; then
if [[ "$out" == *'no changes found'* ]]; then
# Contrary to the docs, hg pull returns exit code 1 when
# there are no changes to pull, but we don't want to treat
# this as an error.
:
else
error "hg pull failed; aborting!"
fi
fi
;;
bzr)
args=
[ -n "$MYREVISION" ] && args="-r$MYREVISION"
safe_run bzr update $args
;;
*)
error "unknown SCM '$MYSCM'"
esac
}
# Check out the requested revision (git/hg); other SCMs already did so.
switch_to_revision () {
case "$MYSCM" in
git)
safe_run git checkout "$MYREVISION"
if git branch | grep -q '^\* (no branch)$'; then
echo "$MYREVISION does not refer to a branch, not attempting git pull"
else
safe_run git pull
fi
;;
svn|bzr|local)
: # should have already happened via checkout or update
;;
hg)
safe_run hg update "$MYREVISION"
;;
# bzr)
# safe_run bzr update
# if [ -n "$MYREVISION" ]; then
# safe_run bzr revert -r "$MYREVISION"
# fi
# ;;
*)
error "unknown SCM '$MYSCM'"
esac
}
# Compute TAR_VERSION from the SCM state, honoring --versionformat.
detect_version () {
if [ -z "$MYFORMAT" ]; then
case "$MYSCM" in
git)
MYFORMAT="%at"
;;
hg)
MYFORMAT="{rev}"
;;
svn|bzr)
MYFORMAT="%r"
;;
*)
error "unknown SCM '$MYSCM'"
;;
esac
fi
safe_run cd "$REPOPATH"
# NOTE(review): when MYFORMAT is set (it always is after the block
# above), the format string is used directly as the prefix and
# get_version is never called -- confirm this short-circuit is intended.
if [ -n "$MYFORMAT" ];then
MYPREFIX="$MYFORMAT"
else
get_version
fi
TAR_VERSION="$MYPREFIX$version"
}
# Query the SCM for a version string using $MYFORMAT.
get_version () {
case "$MYSCM" in
git)
#version=`safe_run git show --pretty=format:"$MYFORMAT" | head -n 1`
version=`safe_run git log -n1 --pretty=format:"$MYFORMAT"`
;;
svn)
#rev=`LC_ALL=C safe_run svn info | awk '/^Revision:/ { print $2 }'`
rev=`LC_ALL=C safe_run svn info | sed -n 's,^Last Changed Rev: \(.*\),\1,p'`
version="${MYFORMAT//%r/$rev}"
;;
hg)
rev=`safe_run hg id -n`
version=`safe_run hg log -l1 -r$rev --template "$MYFORMAT"`
;;
bzr)
#safe_run bzr log -l1 ...
rev=`safe_run bzr revno`
version="${MYFORMAT//%r/$rev}"
;;
*)
error "unknown SCM '$MYSCM'"
esac
}
# Copy/move the checkout into $MYOUTDIR as $TAR_BASENAME and compute
# the include list for the tarball.
prep_tree_for_tar () {
if [ ! -e "$REPOPATH/$MYSUBDIR" ]; then
error "directory does not exist: $REPOPATH/$MYSUBDIR"
fi
if [ -z "$TAR_VERSION" ]; then
TAR_BASENAME="$FILE"
else
TAR_BASENAME="${FILE}-${TAR_VERSION}"
fi
MYINCLUDES=""
for INC in $INCLUDES; do
MYINCLUDES="$MYINCLUDES $INC"
done
#if [ -z "$MYINCLUDES" ]; then
# MYINCLUDES="*"
#fi
safe_run cd "$MYOUTDIR"
if [ -n "$CACHEDIRECTORY" ]; then
debug cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
safe_run cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
if [ -e $REPOPATH/$MYSUBDIR/.git ]; then
# amazing copy failed, ignore fail temporary
cp -a "$REPOPATH/$MYSUBDIR/.git" "$TAR_BASENAME"
safe_run pushd "$TAR_BASENAME";git reset --hard HEAD;popd
fi
else
debug mv3 "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
safe_run mv "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
fi
if [ -z "$MYINCLUDES" ]; then
MYINCLUDES=`ls -A $TAR_BASENAME`
fi
}
# Pack $TAR_BASENAME into ${TAR_BASENAME}.tar inside $MYOUTDIR.
create_tar () {
safe_run cd "$TAR_BASENAME"
# NOTE(review): $TARFILE is read here but only assigned further below,
# so these _service lookups use an empty/stale value -- confirm.
compression_array=(`cat $MYOUTDIR/$TARFILE/_service | egrep '"compression"' | awk -F'>' '{print $2}' | awk -F'<' '{print $1}'`)
file_array=`cat $MYOUTDIR/$TARFILE/_service | egrep '"file"' | awk -F'>' '{print $2}' | awk -F'<' '{print $1}' | tr -d '.tar'`
index=0
for file in $file_array
do
if echo "$TAR_BASENAME" | egrep "$file"; then
break
else
((index=index+1))
fi
done
compression_type=${compression_array[index]}
if [ -e .git ]; then
MYINCLUDES="$MYINCLUDES .git"
fi
TARFILE="${TAR_BASENAME}.tar"
TARPATH="$MYOUTDIR/$TARFILE"
debug tar Pcf "$TARPATH" $EXCLUDES $MYINCLUDES
safe_run tar Pcf "$TARPATH" $EXCLUDES $MYINCLUDES
echo "Created $TARFILE"
safe_run cd "$MYOUTDIR"
}
# Remove the working tree and any leftover checkout directory.
cleanup () {
debug rm -rf "$TAR_BASENAME" "$FILE"
rm -rf "$TAR_BASENAME" "$FILE"
}
# Service entry point: parse params, lock, fetch sources, build the tarball.
main () {
set_default_params
#xdf
# Debug mode is forced on, which also skips the config files below.
DEBUG_TAR_SCM=1
if [ -z "$DEBUG_TAR_SCM" ]; then
get_config_options
else
# We're in test-mode, so don't let any local site-wide
# or per-user config impact the test suite.
:
fi
parse_params "$@"
sanitise_params
SRCDIR=$(pwd)
cd "$MYOUTDIR"
#echo "$SRCDIR $MYOUTDIR"
detect_default_filename_param
#xdf
#LOGFILE=/srv/local_code/xdf/log/$MYPROJECT/$MYPACKAGE
#mkdir -p "/srv/local_code/xdf/log/$MYPROJECT"
# NOTE(review): LOGFILE is commented out above, so the lock file is the
# literal ".lock" in the current directory -- confirm this is intended.
lockfile=$LOGFILE".lock"
if [ -f $lockfile ]; then
# Wait for the previous holder's PID to disappear, then take over.
mypid=`cat $lockfile`
while ps -p $mypid -o comm= &> /dev/null
do
sleep 10
mypid=`cat $lockfile`
done
rm -f $lockfile
fi
touch $lockfile
echo "$$" > $lockfile
#exec 6>&1
#exec > $LOGFILE
echo "$@"
echo "myurl === $MYURL"
fetch_upstream
prep_tree_for_tar
create_tar
cleanup
rm -f $lockfile
}
main "$@"
exit 0
#!/bin/bash
# A simple script to checkout or update a svn or git repo as source service
#
# (C) 2010 by Adrian Schröter <adrian@suse.de>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# See http://www.gnu.org/licenses/gpl-2.0.html for full license text.
SERVICE='tar_scm'
# Reset every service parameter to its default before parsing the CLI.
set_default_params () {
MYSCM=""
MYURL=""
#MYVERSION="_auto_"
# NOTE(review): default version hard-coded to "222" (auto-detection
# disabled above) -- confirm this is intended and not debug leftover.
MYVERSION="222"
MYFORMAT=""
MYPREFIX=""
MYFILENAME=""
MYREVISION=""
MYPACKAGEMETA=""
# MYHISTORYDEPTH=""
INCLUDES=""
}
# Source optional host-wide and per-user configuration overrides.
get_config_options () {
# config options for this host ?
if [ -f /etc/obs/services/$SERVICE ]; then
. /etc/obs/services/$SERVICE
fi
# config options for this user ?
if [ -f "$HOME"/.obs/$SERVICE ]; then
. "$HOME"/.obs/$SERVICE
fi
}
# Parse command-line parameters; the leading `*` in each pattern accepts
# both `--scm` and `-scm` spellings.
parse_params () {
while test $# -gt 0; do
case $1 in
*-scm)
MYSCM="$2"
shift
;;
*-url)
MYURL="$2"
# NOTE(review): CI_PRO_NAME/TEMP_URL are computed but the net effect
# leaves MYURL unchanged -- looks like leftover scaffolding; confirm.
CI_PRO_NAME=${MYURL%%/*}
TEMP_URL="$MYURL"
MYURL=$TEMP_URL
shift
;;
*-subdir)
MYSUBDIR="$2"
shift
;;
*-revision)
MYREVISION="$2"
shift
;;
*-version)
MYVERSION="$2"
shift
;;
*-include)
INCLUDES="$INCLUDES $2"
shift
;;
*-versionformat)
MYFORMAT="$2"
shift
;;
*-versionprefix)
MYPREFIX="$2"
shift
;;
*-exclude)
EXCLUDES="$EXCLUDES --exclude=${2#/}"
shift
;;
*-filename)
MYFILENAME="${2#/}"
shift
;;
*-package-meta)
MYPACKAGEMETA="${2#/}"
shift
;;
*-outdir)
MYOUTDIR="$2"
shift
;;
*-history-depth)
echo "history-depth parameter is obsolete and will be ignored"
shift
;;
*-project)
MYPROJECT="$2"
shift
;;
*-package)
MYPACKAGE="$2"
shift
;;
*)
echo "Unknown parameter: $1"
echo 'Usage: $SERVICE --scm $SCM --url $URL [--subdir $SUBDIR] [--revision $REVISION] [--version $VERSION] [--include $INCLUDE]* [--exclude $EXCLUDE]* [--versionformat $FORMAT] [--versionprefix $PREFIX] [--filename $FILENAME] [--package-meta $META] --outdir $OUT'
exit 1
;;
esac
shift
done
}
# Print an error message and abort the service.
error () {
echo "ERROR: $*"
exit 1
}
# Print only when DEBUG_TAR_SCM is set.
debug () {
[ -n "$DEBUG_TAR_SCM" ] && echo "$*"
}
# Run a command and abort via error() on any non-zero exit.
safe_run () {
if ! "$@"; then
error "$* failed; aborting!"
fi
}
# Validate mandatory parameters and derive FILE/WD_VERSION/EXCLUDES.
sanitise_params () {
TAR_VERSION="$MYVERSION"
if [ -z "$MYSCM" ]; then
error "no scm is given via --scm parameter (git/svn/hg/bzr)!"
fi
if [ -z "$MYURL" ]; then
error "no checkout URL is given via --url parameter!"
fi
if [ -z "$MYOUTDIR" ]; then
error "no output directory is given via --outdir parameter!"
fi
if [ -z "$MYPROJECT" ]; then
error "no project is given via --project parameter!"
fi
if [ -z "$MYPACKAGE" ]; then
error "no package is given via --package parameter!"
fi
FILE="$MYFILENAME"
WD_VERSION="$MYVERSION"
if [ -z "$MYPACKAGEMETA" ]; then
EXCLUDES="$EXCLUDES --exclude=.svn"
fi
# if [ "$MYHISTORYDEPTH" == "full" ]; then
# MYHISTORYDEPTH="999999999"
# fi
}
# Derive the output base name (FILE) from the URL when --filename is absent.
detect_default_filename_param () {
if [ -n "$FILE" ]; then
return
fi
case "$MYSCM" in
git)
FILE="${MYURL%/}"
FILE="${FILE##*/}"
FILE="${FILE%.git}"
FILE="${FILE#*@*:}"
;;
svn|hg|bzr)
FILE="${MYURL%/}"
FILE="${FILE##*/}"
;;
local)
FILE="temp_dir"
;;
*)
error "unknown SCM '$MYSCM'"
esac
}
# Fetch the sources, going through the local cache when one is configured.
# Sets REPOPATH and (when auto-versioning) TAR_VERSION.
fetch_upstream () {
TOHASH="$MYURL"
[ "$MYSCM" = 'svn' ] && TOHASH="$TOHASH/$MYSUBDIR"
HASH=`echo "$TOHASH" | sha256sum | cut -d\ -f 1`
REPOCACHE=
# NOTE(review): cache location is hard-coded here rather than taken
# from configuration -- confirm this is deliberate.
CACHEDIRECTORY=/tmp/local_code/xdf
if [ -n "$CACHEDIRECTORY" ]; then
REPOCACHEINCOMING="$CACHEDIRECTORY/incoming"
REPOCACHEROOT="$CACHEDIRECTORY/repo"
REPOCACHE="$REPOCACHEROOT/$MYPROJECT/$MYPACKAGE"
REPOURLCACHE="$CACHEDIRECTORY/repourl/$HASH"
fi
debug "check local cache if configured"
if [ -n "$CACHEDIRECTORY" -a -d "$REPOCACHE/" ]; then
debug "cache hit: $REPOCACHE"
check_cache
else
if [ -n "$CACHEDIRECTORY" ]; then
debug "cache miss: $REPOCACHE/"
else
debug "cache not enabled"
fi
calc_dir_to_clone_to
debug "new $MYSCM checkout to $CLONE_TO"
initial_clone
if [ -n "$CACHEDIRECTORY" ]; then
#cache_repo
REPOPATH="$REPOCACHE"
else
REPOPATH="$MYOUTDIR/$FILE"
fi
if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then
detect_version
fi
#exit 22
fi
}
# Decide which directory the SCM checkout is cloned into and, when the
# cache is enabled, create and enter that directory.
# Inputs : CACHEDIRECTORY, REPOCACHE, FILE
# Output : CLONE_TO (cache directory is created as a side effect)
calc_dir_to_clone_to () {
if [ -n "$CACHEDIRECTORY" ]; then
# Bug fix: the original tested the literal string `REPOCACHE`
# ([ ! -d REPOCACHE ]) instead of "$REPOCACHE", so the check was
# always true.  mkdir -p is idempotent, so observable behavior is
# unchanged, but the intended existence test is now performed.
if [ ! -d "$REPOCACHE" ]; then
mkdir -p "$REPOCACHE"
fi
safe_run cd "$REPOCACHE"
# Use dry-run mode because git/hg refuse to clone into
# an empty directory on SLES11
#debug mktemp -u -d "tmp.XXXXXXXXXX"
#CLONE_TO=`mktemp -u -d "tmp.XXXXXXXXXX"`
CLONE_TO="$REPOCACHE"
else
CLONE_TO="$FILE"
fi
}
# Perform the first checkout/clone of $MYURL into $CLONE_TO for each SCM.
initial_clone () {
echo "Fetching from $MYURL ..."
case "$MYSCM" in
git)
# Clone with full depth; so that the revision can be found if specified
safe_run git clone "$MYURL" "$CLONE_TO"
;;
svn)
args=
[ -n "$MYREVISION" ] && args="-r$MYREVISION"
if [[ $(svn --version --quiet) > "1.5.99" ]]; then
TRUST_SERVER_CERT="--trust-server-cert"
fi
safe_run svn checkout --non-interactive $TRUST_SERVER_CERT \
$args "$MYURL/$MYSUBDIR" "$CLONE_TO"
MYSUBDIR= # repo root is subdir
;;
local)
# Copy a local tree into the working dir, preserving .svn metadata.
echo "xdffff: $MYURL ---- $CLONE_TO --- `pwd`"
safe_run cp -av $MYURL/* ./
if [ -d "$MYURL/.svn" ]; then
safe_run cp -av $MYURL/.svn ./
fi
;;
hg)
safe_run hg clone "$MYURL" "$CLONE_TO"
;;
bzr)
args=
[ -n "$MYREVISION" ] && args="-r $MYREVISION"
safe_run bzr checkout $args "$MYURL" "$CLONE_TO"
;;
*)
error "unknown SCM '$MYSCM'"
esac
}
# Move a fresh checkout into the shared cache and record its URL.
cache_repo () {
if [ -e "$REPOCACHE" ]; then
error "Somebody else beat us to populating the cache for $MYURL ($REPOCACHE)"
else
# FIXME: small race window here; do source services need to be thread-safe?
if [ ! -d $REPOCACHE ]; then
mkdir -p $REPOCACHE
fi
debug mv2 "$CLONE_TO" "$REPOCACHE"
safe_run mv "$CLONE_TO" "$REPOCACHE"
echo "$MYURL" > "$REPOURLCACHE"
echo "Cached $MYURL at $REPOCACHE"
fi
}
# On a cache hit: compare svn revisions, then (unconditionally, since the
# comparison below is commented out) wipe the cache and re-clone.
check_cache () {
if [ -d "$MYURL/.svn" ]; then
new_version=`LC_ALL=C svn info "$MYURL" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'`
else
new_version="new_version"
fi
if echo "$MYURL" | grep '/$' &> /dev/null; then
new_version="new_version"
fi
if [ -d "$REPOCACHE/.svn" ]; then
old_version=`LC_ALL=C svn info "$REPOCACHE" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'`
else
old_version="old_version"
fi
#echo "xdf: $new_version $old_version"
#if [ "$new_version" != "$old_version" ]; then
echo "The code has changed for $MYPROJECT/$MYPACKAGE"
rm -rf "$REPOCACHE"
calc_dir_to_clone_to
debug "new $MYSCM checkout to $CLONE_TO"
initial_clone
if [ -n "$CACHEDIRECTORY" ]; then
#cache_repo
REPOPATH="$REPOCACHE"
else
REPOPATH="$MYOUTDIR/$FILE"
fi
safe_run cd "$REPOPATH"
switch_to_revision
if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then
detect_version
fi
#else
# echo "No code is changed, so exit 22"
# exit 22
#fi
}
# Refresh an existing cached checkout; exits 22 when svn has no update.
update_cache () {
safe_run cd "$REPOCACHE"
case "$MYSCM" in
git)
safe_run git fetch
;;
svn)
args=
[ -n "$MYREVISION" ] && args="-r$MYREVISION"
safe_run svn update $args > svnupdate_info
isupdate=`cat svnupdate_info | wc -l`
if [ $isupdate -eq 1 ]; then
rm -f svnupdate_info
echo "There is no code update, so exit 22"
exit 22
fi
MYSUBDIR= # repo root is subdir
;;
hg)
if ! out=`hg pull`; then
if [[ "$out" == *'no changes found'* ]]; then
# Contrary to the docs, hg pull returns exit code 1 when
# there are no changes to pull, but we don't want to treat
# this as an error.
:
else
error "hg pull failed; aborting!"
fi
fi
;;
bzr)
args=
[ -n "$MYREVISION" ] && args="-r$MYREVISION"
safe_run bzr update $args
;;
*)
error "unknown SCM '$MYSCM'"
esac
}
# Check out the requested revision (git/hg); other SCMs already did so.
switch_to_revision () {
case "$MYSCM" in
git)
safe_run git checkout "$MYREVISION"
if git branch | grep -q '^\* (no branch)$'; then
echo "$MYREVISION does not refer to a branch, not attempting git pull"
else
safe_run git pull
fi
;;
svn|bzr|local)
: # should have already happened via checkout or update
;;
hg)
safe_run hg update "$MYREVISION"
;;
# bzr)
# safe_run bzr update
# if [ -n "$MYREVISION" ]; then
# safe_run bzr revert -r "$MYREVISION"
# fi
# ;;
*)
error "unknown SCM '$MYSCM'"
esac
}
# Compute TAR_VERSION from the SCM state, honoring --versionformat.
detect_version () {
if [ -z "$MYFORMAT" ]; then
case "$MYSCM" in
git)
MYFORMAT="%at"
;;
hg)
MYFORMAT="{rev}"
;;
svn|bzr)
MYFORMAT="%r"
;;
*)
error "unknown SCM '$MYSCM'"
;;
esac
fi
safe_run cd "$REPOPATH"
# NOTE(review): when MYFORMAT is set (it always is after the block
# above), the format string is used directly as the prefix and
# get_version is never called -- confirm this short-circuit is intended.
if [ -n "$MYFORMAT" ];then
MYPREFIX="$MYFORMAT"
else
get_version
fi
TAR_VERSION="$MYPREFIX$version"
}
# Query the SCM for a version string using $MYFORMAT.
get_version () {
case "$MYSCM" in
git)
#version=`safe_run git show --pretty=format:"$MYFORMAT" | head -n 1`
version=`safe_run git log -n1 --pretty=format:"$MYFORMAT"`
;;
svn)
#rev=`LC_ALL=C safe_run svn info | awk '/^Revision:/ { print $2 }'`
rev=`LC_ALL=C safe_run svn info | sed -n 's,^Last Changed Rev: \(.*\),\1,p'`
version="${MYFORMAT//%r/$rev}"
;;
hg)
rev=`safe_run hg id -n`
version=`safe_run hg log -l1 -r$rev --template "$MYFORMAT"`
;;
bzr)
#safe_run bzr log -l1 ...
rev=`safe_run bzr revno`
version="${MYFORMAT//%r/$rev}"
;;
*)
error "unknown SCM '$MYSCM'"
esac
}
# Copy/move the checkout into $MYOUTDIR as $TAR_BASENAME; include list
# defaults to `*` when no --include was given.
prep_tree_for_tar () {
if [ ! -e "$REPOPATH/$MYSUBDIR" ]; then
error "directory does not exist: $REPOPATH/$MYSUBDIR"
fi
if [ -z "$TAR_VERSION" ]; then
TAR_BASENAME="$FILE"
else
TAR_BASENAME="${FILE}-${TAR_VERSION}"
fi
MYINCLUDES=""
for INC in $INCLUDES; do
MYINCLUDES="$MYINCLUDES $INC"
done
if [ -z "$MYINCLUDES" ]; then
MYINCLUDES="*"
fi
safe_run cd "$MYOUTDIR"
if [ -n "$CACHEDIRECTORY" ]; then
debug cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
safe_run cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
else
debug mv3 "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
safe_run mv "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
fi
}
# Pack each included entry: directories become per-entry .tar files in
# $MYOUTDIR, plain files are copied; SCM metadata dirs are skipped.
create_tar () {
safe_run cd "$TAR_BASENAME"
# NOTE(review): TARFILE is announced as .tar.bz2 below but the loop
# actually creates plain per-directory .tar files -- confirm.
TARFILE="${TAR_BASENAME}.tar.bz2"
TARPATH="$MYOUTDIR/$TARFILE"
for INC in $MYINCLUDES; do
if [ "$INC" = ".$MYSCM" ]; then
continue
fi
if echo "$EXCLUDES" | grep -w $INC >/dev/null
then
continue
fi
if [ -d $INC ]; then
#safe_run tar jcf "$MYOUTDIR/$INC.tar.bz2" --exclude=.$MYSCM --exclude=.svn $INC
safe_run tar Pcf "$MYOUTDIR/$INC.tar" --exclude=.$MYSCM --exclude=.svn $INC
continue
fi
safe_run cp $INC "$MYOUTDIR/"
done
echo "Created $TARFILE"
safe_run cd "$MYOUTDIR"
}
# Remove the working tree (the checkout dir $FILE is deliberately kept).
cleanup () {
debug rm -rf "$TAR_BASENAME" "$FILE"
#rm -rf "$TAR_BASENAME" "$FILE"
rm -rf "$TAR_BASENAME"
}
# Service entry point: parse params, lock, fetch sources, build the tarball.
main () {
set_default_params
#xdf
# Debug mode is forced on, which also skips the config files below.
DEBUG_TAR_SCM=1
if [ -z "$DEBUG_TAR_SCM" ]; then
get_config_options
else
# We're in test-mode, so don't let any local site-wide
# or per-user config impact the test suite.
:
fi
parse_params "$@"
sanitise_params
SRCDIR=$(pwd)
cd "$MYOUTDIR"
#echo "$SRCDIR $MYOUTDIR"
detect_default_filename_param
#xdf
#LOGFILE=/srv/local_code/xdf/log/$MYPROJECT/$MYPACKAGE
#mkdir -p "/srv/local_code/xdf/log/$MYPROJECT"
# NOTE(review): LOGFILE is commented out above, so the lock file is the
# literal ".lock" in the current directory -- confirm this is intended.
lockfile=$LOGFILE".lock"
if [ -f $lockfile ]; then
# The wait-for-holder loop is disabled: a stale lock is removed at once.
mypid=`cat $lockfile`
# while ps -p $mypid -o comm= &> /dev/null
# do
# sleep 10
# mypid=`cat $lockfile`
# done
rm -f $lockfile
fi
touch $lockfile
echo "$$" > $lockfile
#exec 6>&1
#exec > $LOGFILE
echo "$@"
echo "myurl === $MYURL"
fetch_upstream
prep_tree_for_tar
create_tar
cleanup
rm -f $lockfile
}
main "$@"
exit 0
#!/bin/bash
# A simple script to checkout or update a svn or git repo as source service
#
# (C) 2010 by Adrian Schröter <adrian@suse.de>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# See http://www.gnu.org/licenses/gpl-2.0.html for full license text.
# Trace every command for debugging.
set -x
SERVICE='tar_scm'
# Reset every service parameter to its default before parsing the CLI.
set_default_params () {
MYSCM=""
MYURL=""
#MYVERSION="_auto_"
# NOTE(review): default version hard-coded to "222" (auto-detection
# disabled above) -- confirm this is intended and not debug leftover.
MYVERSION="222"
MYFORMAT=""
MYPREFIX=""
MYFILENAME=""
MYREVISION=""
MYPACKAGEMETA=""
# MYHISTORYDEPTH=""
INCLUDES=""
}
# Source optional host-wide and per-user configuration overrides.
get_config_options () {
# config options for this host ?
if [ -f /etc/obs/services/$SERVICE ]; then
. /etc/obs/services/$SERVICE
fi
# config options for this user ?
if [ -f "$HOME"/.obs/$SERVICE ]; then
. "$HOME"/.obs/$SERVICE
fi
}
# Parse command-line parameters; the leading `*` in each pattern accepts
# both `--scm` and `-scm` spellings.
parse_params () {
while test $# -gt 0; do
case $1 in
*-scm)
MYSCM="$2"
shift
;;
*-url)
MYURL="$2"
# NOTE(review): CI_PRO_NAME/TEMP_URL are computed but the net effect
# leaves MYURL unchanged -- looks like leftover scaffolding; confirm.
CI_PRO_NAME=${MYURL%%/*}
TEMP_URL="$MYURL"
MYURL=$TEMP_URL
shift
;;
*-subdir)
MYSUBDIR="$2"
shift
;;
*-revision)
MYREVISION="$2"
shift
;;
*-version)
MYVERSION="$2"
shift
;;
*-include)
INCLUDES="$INCLUDES $2"
shift
;;
*-versionformat)
MYFORMAT="$2"
shift
;;
*-versionprefix)
MYPREFIX="$2"
shift
;;
*-exclude)
EXCLUDES="$EXCLUDES --exclude=${2#/}"
shift
;;
*-filename)
MYFILENAME="${2#/}"
shift
;;
*-package-meta)
MYPACKAGEMETA="${2#/}"
shift
;;
*-outdir)
MYOUTDIR="$2"
shift
;;
*-history-depth)
echo "history-depth parameter is obsolete and will be ignored"
shift
;;
*-project)
MYPROJECT="$2"
shift
;;
*-package)
MYPACKAGE="$2"
shift
;;
*)
echo "Unknown parameter: $1"
echo 'Usage: $SERVICE --scm $SCM --url $URL [--subdir $SUBDIR] [--revision $REVISION] [--version $VERSION] [--include $INCLUDE]* [--exclude $EXCLUDE]* [--versionformat $FORMAT] [--versionprefix $PREFIX] [--filename $FILENAME] [--package-meta $META] --outdir $OUT'
exit 1
;;
esac
shift
done
}
# Print an error message and abort the service.
error () {
echo "ERROR: $*"
exit 1
}
# Print only when DEBUG_TAR_SCM is set.
debug () {
[ -n "$DEBUG_TAR_SCM" ] && echo "$*"
}
# Run a command and abort via error() on any non-zero exit.
safe_run () {
if ! "$@"; then
error "$* failed; aborting!"
fi
}
# Validate mandatory parameters and derive FILE/WD_VERSION/EXCLUDES.
sanitise_params () {
TAR_VERSION="$MYVERSION"
if [ -z "$MYSCM" ]; then
error "no scm is given via --scm parameter (git/svn/hg/bzr)!"
fi
if [ -z "$MYURL" ]; then
error "no checkout URL is given via --url parameter!"
fi
if [ -z "$MYOUTDIR" ]; then
error "no output directory is given via --outdir parameter!"
fi
if [ -z "$MYPROJECT" ]; then
error "no project is given via --project parameter!"
fi
if [ -z "$MYPACKAGE" ]; then
error "no package is given via --package parameter!"
fi
FILE="$MYFILENAME"
WD_VERSION="$MYVERSION"
if [ -z "$MYPACKAGEMETA" ]; then
EXCLUDES="$EXCLUDES --exclude=.svn"
fi
# if [ "$MYHISTORYDEPTH" == "full" ]; then
# MYHISTORYDEPTH="999999999"
# fi
}
# Derive the output base name (FILE) from the URL when --filename is absent.
detect_default_filename_param () {
if [ -n "$FILE" ]; then
return
fi
case "$MYSCM" in
git)
FILE="${MYURL%/}"
FILE="${FILE##*/}"
FILE="${FILE%.git}"
FILE="${FILE#*@*:}"
;;
svn|hg|bzr)
FILE="${MYURL%/}"
FILE="${FILE##*/}"
;;
local)
FILE="temp_dir"
;;
*)
error "unknown SCM '$MYSCM'"
esac
}
# Fetch the sources, going through the local cache when one is configured.
# Sets REPOPATH and (when auto-versioning) TAR_VERSION.
fetch_upstream () {
TOHASH="$MYURL"
[ "$MYSCM" = 'svn' ] && TOHASH="$TOHASH/$MYSUBDIR"
HASH=`echo "$TOHASH" | sha256sum | cut -d\ -f 1`
REPOCACHE=
# NOTE(review): cache location is hard-coded here rather than taken
# from configuration -- confirm this is deliberate.
CACHEDIRECTORY=/tmp/local_code/xdf
if [ -n "$CACHEDIRECTORY" ]; then
REPOCACHEINCOMING="$CACHEDIRECTORY/incoming"
REPOCACHEROOT="$CACHEDIRECTORY/repo"
REPOCACHE="$REPOCACHEROOT/$MYPROJECT/$MYPACKAGE"
REPOURLCACHE="$CACHEDIRECTORY/repourl/$HASH"
fi
debug "check local cache if configured"
if [ -n "$CACHEDIRECTORY" -a -d "$REPOCACHE/" ]; then
debug "cache hit: $REPOCACHE"
check_cache
else
if [ -n "$CACHEDIRECTORY" ]; then
debug "cache miss: $REPOCACHE/"
else
debug "cache not enabled"
fi
calc_dir_to_clone_to
debug "new $MYSCM checkout to $CLONE_TO"
initial_clone
if [ -n "$CACHEDIRECTORY" ]; then
#cache_repo
REPOPATH="$REPOCACHE"
else
REPOPATH="$MYOUTDIR/$FILE"
fi
if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then
detect_version
fi
#exit 22
fi
}
# Decide which directory the SCM checkout is cloned into and, when the
# cache is enabled, create and enter that directory.
# Inputs : CACHEDIRECTORY, REPOCACHE, FILE
# Output : CLONE_TO (cache directory is created as a side effect)
calc_dir_to_clone_to () {
if [ -n "$CACHEDIRECTORY" ]; then
# Bug fix: the original tested the literal string `REPOCACHE`
# ([ ! -d REPOCACHE ]) instead of "$REPOCACHE", so the check was
# always true.  mkdir -p is idempotent, so observable behavior is
# unchanged, but the intended existence test is now performed.
if [ ! -d "$REPOCACHE" ]; then
mkdir -p "$REPOCACHE"
fi
safe_run cd "$REPOCACHE"
# Use dry-run mode because git/hg refuse to clone into
# an empty directory on SLES11
#debug mktemp -u -d "tmp.XXXXXXXXXX"
#CLONE_TO=`mktemp -u -d "tmp.XXXXXXXXXX"`
CLONE_TO="$REPOCACHE"
else
CLONE_TO="$FILE"
fi
}
initial_clone () {
echo "Fetching from $MYURL ..."
case "$MYSCM" in
git)
# Clone with full depth; so that the revision can be found if specified
safe_run git clone "$MYURL" "$CLONE_TO"
;;
svn)
args=
[ -n "$MYREVISION" ] && args="-r$MYREVISION"
if [[ $(svn --version --quiet) > "1.5.99" ]]; then
TRUST_SERVER_CERT="--trust-server-cert"
fi
safe_run svn checkout --non-interactive $TRUST_SERVER_CERT \
$args "$MYURL/$MYSUBDIR" "$CLONE_TO"
MYSUBDIR= # repo root is subdir
;;
local)
echo "xdffff: $MYURL ---- $CLONE_TO --- `pwd`"
MYURL=`echo $MYURL | sed 's#\./##g' | sed 's/[ /]*$//g'`
pkgname=`basename $MYURL`
safe_run mkdir $pkgname
safe_run cp -av $MYURL/* $pkgname
safe_run mv $pkgname/*.spec .
if [ -f /usr/bin/rpmspec ]
then
version=`rpmspec -q --srpm --qf %{Version} *.spec`
else
version=`grep "^Version:*" *.spec | awk -F: '{print $2}' | sed 's/[ ]*//g'`
fi
pkg="${pkgname}-${version}"
safe_run mv $pkgname $pkg
if [ -d "$MYURL/.svn" ]; then
safe_run cp -av $MYURL/.svn ./
fi
;;
hg)
safe_run hg clone "$MYURL" "$CLONE_TO"
;;
bzr)
args=
[ -n "$MYREVISION" ] && args="-r $MYREVISION"
safe_run bzr checkout $args "$MYURL" "$CLONE_TO"
;;
*)
error "unknown SCM '$MYSCM'"
esac
}
cache_repo () {
if [ -e "$REPOCACHE" ]; then
error "Somebody else beat us to populating the cache for $MYURL ($REPOCACHE)"
else
# FIXME: small race window here; do source services need to be thread-safe?
if [ ! -d $REPOCACHE ]; then
mkdir -p $REPOCACHE
fi
debug mv2 "$CLONE_TO" "$REPOCACHE"
safe_run mv "$CLONE_TO" "$REPOCACHE"
echo "$MYURL" > "$REPOURLCACHE"
echo "Cached $MYURL at $REPOCACHE"
fi
}
check_cache () {
# Decide whether the cached checkout can be reused or must be re-created.
# NOTE(review): the new/old revision comparison below is commented out, so
# as written this function ALWAYS removes the cache and re-clones.
# For svn working copies, take the last-changed revision as the "version";
# anything else gets a constant marker (the disabled comparison would then
# always report a change).
if [ -d "$MYURL/.svn" ]; then
new_version=`LC_ALL=C svn info "$MYURL" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'`
else
new_version="new_version"
fi
# URLs ending in "/" are likewise treated as always-changed.
if echo "$MYURL" | grep '/$' &> /dev/null; then
new_version="new_version"
fi
if [ -d "$REPOCACHE/.svn" ]; then
old_version=`LC_ALL=C svn info "$REPOCACHE" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'`
else
old_version="old_version"
fi
#echo "xdf: $new_version $old_version"
#if [ "$new_version" != "$old_version" ]; then
echo "The code has changed for $MYPROJECT/$MYPACKAGE"
# Discard the (possibly stale) cache and start a fresh clone.
rm -rf "$REPOCACHE"
calc_dir_to_clone_to
debug "new $MYSCM checkout to $CLONE_TO"
initial_clone
# With a cache directory configured, later steps look at $REPOCACHE;
# otherwise the clone lives under the output directory.
# NOTE(review): cache_repo (which would move the clone to $REPOCACHE) is
# commented out, yet REPOPATH still points at $REPOCACHE — verify that
# calc_dir_to_clone_to sets CLONE_TO to $REPOCACHE in this configuration.
if [ -n "$CACHEDIRECTORY" ]; then
#cache_repo
REPOPATH="$REPOCACHE"
else
REPOPATH="$MYOUTDIR/$FILE"
fi
safe_run cd "$REPOPATH"
switch_to_revision
# "_auto_" (or an explicit --versionformat) means: derive the tarball
# version from the SCM metadata.
if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then
detect_version
fi
#else
# echo "No code is changed, so exit 22"
# exit 22
#fi
}
update_cache () {
  # Refresh an existing cached checkout in $REPOCACHE.
  # Exits with the special status 22 ("no change") when an svn update
  # brings nothing new.
  safe_run cd "$REPOCACHE"
  case "$MYSCM" in
    git)
      safe_run git fetch
      ;;
    svn)
      args=
      [ -n "$MYREVISION" ] && args="-r$MYREVISION"
      safe_run svn update $args > svnupdate_info
      # "svn update" prints one line per changed item plus a summary line;
      # a single line therefore means "nothing changed".
      isupdate=$(wc -l < svnupdate_info)
      # Always delete the scratch file so it can never end up inside the
      # cached tree (previously it was removed only on the no-change path
      # and could leak into the generated tarball).
      rm -f svnupdate_info
      if [ "$isupdate" -eq 1 ]; then
        echo "There is no code update, so exit 22"
        exit 22
      fi
      MYSUBDIR= # repo root is subdir
      ;;
    hg)
      if ! out=$(hg pull); then
        if [[ "$out" == *'no changes found'* ]]; then
          # Contrary to the docs, hg pull returns exit code 1 when
          # there are no changes to pull, but we don't want to treat
          # this as an error.
          :
        else
          error "hg pull failed; aborting!"
        fi
      fi
      ;;
    bzr)
      args=
      [ -n "$MYREVISION" ] && args="-r$MYREVISION"
      safe_run bzr update $args
      ;;
    *)
      error "unknown SCM '$MYSCM'"
  esac
}
switch_to_revision () {
  # Move the working copy to $MYREVISION for SCMs that need an explicit
  # step after clone/update; a no-op for svn/bzr/local, which already
  # landed on the right revision during checkout or update.
  case "$MYSCM" in
    git)
      safe_run git checkout "$MYREVISION"
      # Only pull when the checkout landed on a real branch: a detached
      # HEAD shows up as "(no branch)" and has no upstream to pull from.
      if git branch | grep -q '^\* (no branch)$'; then
        echo "$MYREVISION does not refer to a branch, not attempting git pull"
      else
        safe_run git pull
      fi
      ;;
    hg)
      safe_run hg update "$MYREVISION"
      ;;
    svn|bzr|local)
      : # should have already happened via checkout or update
      ;;
    *)
      error "unknown SCM '$MYSCM'"
  esac
}
detect_version () {
  # Derive TAR_VERSION from the checkout.  When no --versionformat was
  # given, fall back to a per-SCM default format string.
  if [ -z "$MYFORMAT" ]; then
    case "$MYSCM" in
      git)     MYFORMAT="%at"   ;;  # author timestamp
      hg)      MYFORMAT="{rev}" ;;  # local revision number
      svn|bzr) MYFORMAT="%r"    ;;  # revision, substituted by get_version
      *)       error "unknown SCM '$MYSCM'" ;;
    esac
  fi
  safe_run cd "$REPOPATH"
  # A user-supplied prefix is joined to the version with a dot.
  [ -n "$MYPREFIX" ] && MYPREFIX="$MYPREFIX."
  get_version
  TAR_VERSION="$MYPREFIX$version"
}
get_version () {
# Compute $version from the current working copy according to $MYFORMAT.
# NOTE(review): "safe_run" inside backticks executes in a subshell, so a
# failure there cannot abort this script — it only yields an empty result.
case "$MYSCM" in
git)
#version=`safe_run git show --pretty=format:"$MYFORMAT" | head -n 1`
# The format string is passed straight to git log (e.g. "%at").
version=`safe_run git log -n1 --pretty=format:"$MYFORMAT"`
;;
svn)
#rev=`LC_ALL=C safe_run svn info | awk '/^Revision:/ { print $2 }'`
# Use the last-changed revision, then substitute it for "%r" in the format.
rev=`LC_ALL=C safe_run svn info | sed -n 's,^Last Changed Rev: \(.*\),\1,p'`
version="${MYFORMAT//%r/$rev}"
;;
hg)
rev=`safe_run hg id -n`
version=`safe_run hg log -l1 -r$rev --template "$MYFORMAT"`
;;
bzr)
#safe_run bzr log -l1 ...
rev=`safe_run bzr revno`
version="${MYFORMAT//%r/$rev}"
;;
*)
error "unknown SCM '$MYSCM'"
esac
}
prep_tree_for_tar () {
  # Stage the checkout as "<name>[-<version>]" inside $MYOUTDIR, from
  # which create_tar will produce the archives.
  if [ ! -e "$REPOPATH/$MYSUBDIR" ]; then
    error "directory does not exist: $REPOPATH/$MYSUBDIR"
  fi

  # Staging directory name: bare "<file>" when no version was detected.
  if [ -z "$TAR_VERSION" ]; then
    TAR_BASENAME="$FILE"
  else
    TAR_BASENAME="${FILE}-${TAR_VERSION}"
  fi

  # Normalise the whitespace-separated include list; default to "*"
  # (everything) when nothing was requested.
  MYINCLUDES=""
  for item in $INCLUDES; do
    MYINCLUDES="$MYINCLUDES $item"
  done
  [ -z "$MYINCLUDES" ] && MYINCLUDES="*"

  safe_run cd "$MYOUTDIR"
  if [ -n "$CACHEDIRECTORY" ]; then
    # The cached checkout must survive for the next run: copy, don't move.
    debug cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
    safe_run cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
  else
    debug mv3 "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
    safe_run mv "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME"
  fi
}
create_tar () {
# Archive the prepared tree into $MYOUTDIR: each included top-level
# directory becomes its own "<dir>.tar", plain files are copied as-is.
safe_run cd "$TAR_BASENAME"
# NOTE(review): TARFILE/TARPATH describe a single "<base>.tar.bz2", but the
# loop below emits one uncompressed .tar per directory instead; the final
# "Created $TARFILE" message is therefore misleading — confirm intent.
TARFILE="${TAR_BASENAME}.tar.bz2"
TARPATH="$MYOUTDIR/$TARFILE"
for INC in $MYINCLUDES; do
# Never archive the SCM bookkeeping directory itself.
if [ "$INC" = ".$MYSCM" ]; then
continue
fi
# Skip entries listed (whole-word match) in $EXCLUDES.
if echo "$EXCLUDES" | grep -w $INC >/dev/null
then
continue
fi
if [ -d $INC ]; then
#safe_run tar jcf "$MYOUTDIR/$INC.tar.bz2" --exclude=.$MYSCM --exclude=.svn $INC
# Plain (uncompressed) tar; SCM metadata is excluded from the payload.
safe_run tar Pcf "$MYOUTDIR/$INC.tar" --exclude=.$MYSCM --exclude=.svn $INC
continue
fi
safe_run cp $INC "$MYOUTDIR/"
done
echo "Created $TARFILE"
safe_run cd "$MYOUTDIR"
}
cleanup () {
# Remove the staging directory and the raw checkout; the generated
# archives remain in $MYOUTDIR.
debug rm -rf "$TAR_BASENAME" "$FILE"
rm -rf "$TAR_BASENAME" "$FILE"
}
main () {
# Entry point: parse arguments, fetch the sources via the configured SCM
# and produce tarballs in $MYOUTDIR.
set_default_params
#xdf
# NOTE(review): DEBUG_TAR_SCM is hard-coded on, which permanently skips
# get_config_options below — confirm this is intended outside of testing.
DEBUG_TAR_SCM=1
if [ -z "$DEBUG_TAR_SCM" ]; then
get_config_options
else
# We're in test-mode, so don't let any local site-wide
# or per-user config impact the test suite.
:
fi
parse_params "$@"
sanitise_params
SRCDIR=$(pwd)
cd "$MYOUTDIR"
#echo "$SRCDIR $MYOUTDIR"
detect_default_filename_param
#xdf
#LOGFILE=/srv/local_code/xdf/log/$MYPROJECT/$MYPACKAGE
#mkdir -p "/srv/local_code/xdf/log/$MYPROJECT"
# Best-effort lock file keyed on $LOGFILE.
# NOTE(review): a stale lock is deleted unconditionally (the PID liveness
# wait is commented out) and touch/echo are not atomic, so this does NOT
# guarantee mutual exclusion between concurrent runs.
lockfile=$LOGFILE".lock"
if [ -f $lockfile ]; then
mypid=`cat $lockfile`
# while ps -p $mypid -o comm= &> /dev/null
# do
# sleep 10
# mypid=`cat $lockfile`
# done
rm -f $lockfile
fi
touch $lockfile
echo "$$" > $lockfile
#exec 6>&1
#exec > $LOGFILE
echo "$@"
echo "myurl === $MYURL"
fetch_upstream
prep_tree_for_tar
create_tar
cleanup
rm -f $lockfile
}
main "$@"
# NOTE(review): exit 0 masks main's return status; fatal paths rely on
# error()/safe_run()/exit 22 terminating the script directly.
exit 0
ignore:
- ci_check
- build
- obs_bin
- ci-bot
- website
- community
- docs
- infrastructure
- obs_meta
- euleros-latest-release
- image-slim
- risc-v-kernel
- opensbi
#logger.conf
###############################################
[loggers]
keys=root,jobs,build,ac,common,no_fmt
[logger_root]
level=INFO
handlers=hand01
qualname=root
[logger_jobs]
level=DEBUG
handlers=hand02
qualname=jobs
[logger_build]
level=DEBUG
handlers=hand03
qualname=build
[logger_ac]
level=DEBUG
handlers=hand04
qualname=ac
[logger_common]
level=DEBUG
handlers=hand100
qualname=common
[logger_no_fmt]
level=DEBUG
handlers=hand100,hand101
qualname=no_fmt
propagate=0
###############################################
[handlers]
keys=hand01,hand02,hand03,hand04,hand100,hand101
[handler_hand01]
class=StreamHandler
level=INFO
formatter=form01
args=(sys.stderr,)
[handler_hand02]
class=handlers.RotatingFileHandler
level=DEBUG
formatter=form02
args=('log/jobs.log', 'a', 10*1024*1024, 5)
[handler_hand03]
class=handlers.RotatingFileHandler
level=DEBUG
formatter=form02
args=('log/build.log', 'a', 10*1024*1024, 5)
[handler_hand04]
class=handlers.RotatingFileHandler
level=DEBUG
formatter=form02
args=('log/ac.log', 'a', 10*1024*1024, 5)
[handler_hand100]
class=handlers.RotatingFileHandler
level=DEBUG
formatter=form02
args=('log/common.log', 'a', 10*1024*1024, 5)
[handler_hand101]
class=StreamHandler
level=INFO
formatter=form04
args=(sys.stderr,)
###############################################
[formatters]
keys=form01,form02,form03,form04
[formatter_form01]
class=src.utils.color_log.CusColoredFormatter
format=%(log_color)s%(asctime)s [%(levelname)7s] : %(message)s
[formatter_form02]
format=%(asctime)s %(filename)20s[line:%(lineno)3d] %(levelname)7s : %(message)s
#datefmt=%a, %d %b %Y %H:%M:%S
[formatter_form03]
format=%(asctime)s [%(levelname)s] : %(message)s
datefmt=
[formatter_form04]
class=src.utils.color_log.CusColoredFormatter
format=%(log_color)s%(message)s
# Jenkins inbound (JNLP) agent image with the Docker CLI, based on OpenJDK 11.
FROM swr.cn-north-4.myhuaweicloud.com/openeuler/openjdk:11-jdk-stretch
# Jenkins remoting (agent.jar) version to download.
ARG VERSION=4.3
ARG user=jenkins
ARG group=jenkins
ARG uid=1000
ARG gid=1000
ARG AGENT_WORKDIR=/home/${user}/agent
RUN echo 'deb http://deb.debian.org/debian stretch-backports main' > /etc/apt/sources.list.d/stretch-backports.list
RUN apt-get update \
&& apt-get install -y curl vim sudo git git-lfs\
&& rm -rf /var/lib/apt/lists/*
# add docker
RUN curl -fsSL https://get.docker.com -o get-docker.sh \
&& sh get-docker.sh
# Fetch the remoting jar; keep the legacy "slave.jar" name as a symlink.
RUN curl --create-dirs -fsSLo /usr/share/jenkins/agent.jar https://repo.jenkins-ci.org/public/org/jenkins-ci/main/remoting/${VERSION}/remoting-${VERSION}.jar \
&& chmod 755 /usr/share/jenkins \
&& chmod 644 /usr/share/jenkins/agent.jar \
&& ln -sf /usr/share/jenkins/agent.jar /usr/share/jenkins/slave.jar
COPY jenkins-agent /usr/local/bin/jenkins-agent
RUN chmod +x /usr/local/bin/jenkins-agent \
&& ln -s /usr/local/bin/jenkins-agent /usr/local/bin/jenkins-slave
RUN groupadd -g ${gid} ${group}
RUN useradd -c "Jenkins user" -d /home/${user} -u ${uid} -g ${gid} -m ${user}
# Let the agent user drive the Docker daemon and use passwordless sudo.
RUN adduser ${user} docker
RUN echo "${user} ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers
USER ${user}
ENV AGENT_WORKDIR=${AGENT_WORKDIR}
RUN mkdir /home/${user}/.jenkins && mkdir -p ${AGENT_WORKDIR}
VOLUME /home/${user}/.jenkins
VOLUME ${AGENT_WORKDIR}
WORKDIR /home/${user}
ENTRYPOINT ["jenkins-agent"]
# swr.cn-north-4.myhuaweicloud.com/openeuler/jenkins/imbound-agent
# NOTE(review): "imbound" above looks like a typo for "inbound" — confirm
# against the actual SWR repository name before changing.
#!/usr/bin/env sh
# The MIT License
#
# Copyright (c) 2015-2020, CloudBees, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Usage jenkins-agent.sh [options] -url http://jenkins [SECRET] [AGENT_NAME]
# Optional environment variables :
# * JENKINS_TUNNEL : HOST:PORT for a tunnel to route TCP traffic to jenkins host, when jenkins can't be directly accessed over network
# * JENKINS_URL : alternate jenkins URL
# * JENKINS_SECRET : agent secret, if not set as an argument
# * JENKINS_AGENT_NAME : agent name, if not set as an argument
# * JENKINS_AGENT_WORKDIR : agent work directory, if not set by optional parameter -workDir
# * JENKINS_WEB_SOCKET: true if the connection should be made via WebSocket rather than TCP
# * JENKINS_DIRECT_CONNECTION: Connect directly to this TCP agent port, skipping the HTTP(S) connection parameter download.
# Value: "<HOST>:<PORT>"
# * JENKINS_INSTANCE_IDENTITY: The base64 encoded InstanceIdentity byte array of the Jenkins master. When this is set,
# the agent skips connecting to an HTTP(S) port for connection info.
# * JENKINS_PROTOCOLS: Specify the remoting protocols to attempt when instanceIdentity is provided.
if [ $# -eq 1 ]; then
# if `docker run` only has one arguments, we assume user is running alternate command like `bash` to inspect the image
exec "$@"
else
# if -tunnel is not provided, try env vars
case "$@" in
*"-tunnel "*) ;;
*)
if [ ! -z "$JENKINS_TUNNEL" ]; then
TUNNEL="-tunnel $JENKINS_TUNNEL"
fi ;;
esac
# if -workDir is not provided, try env vars
if [ ! -z "$JENKINS_AGENT_WORKDIR" ]; then
case "$@" in
*"-workDir"*) echo "Warning: Work directory is defined twice in command-line arguments and the environment variable" ;;
*)
WORKDIR="-workDir $JENKINS_AGENT_WORKDIR" ;;
esac
fi
if [ -n "$JENKINS_URL" ]; then
URL="-url $JENKINS_URL"
fi
if [ -n "$JENKINS_NAME" ]; then
JENKINS_AGENT_NAME="$JENKINS_NAME"
fi
if [ "$JENKINS_WEB_SOCKET" = true ]; then
WEB_SOCKET=-webSocket
fi
if [ -n "$JENKINS_PROTOCOLS" ]; then
PROTOCOLS="-protocols $JENKINS_PROTOCOLS"
fi
if [ -n "$JENKINS_DIRECT_CONNECTION" ]; then
DIRECT="-direct $JENKINS_DIRECT_CONNECTION"
fi
if [ -n "$JENKINS_INSTANCE_IDENTITY" ]; then
INSTANCE_IDENTITY="-instanceIdentity $JENKINS_INSTANCE_IDENTITY"
fi
# if java home is defined, use it
JAVA_BIN="java"
if [ "$JAVA_HOME" ]; then
JAVA_BIN="$JAVA_HOME/bin/java"
fi
# if both required options are defined, do not pass the parameters
OPT_JENKINS_SECRET=""
if [ -n "$JENKINS_SECRET" ]; then
case "$@" in
*"${JENKINS_SECRET}"*) echo "Warning: SECRET is defined twice in command-line arguments and the environment variable" ;;
*)
OPT_JENKINS_SECRET="${JENKINS_SECRET}" ;;
esac
fi
OPT_JENKINS_AGENT_NAME=""
if [ -n "$JENKINS_AGENT_NAME" ]; then
case "$@" in
*"${JENKINS_AGENT_NAME}"*) echo "Warning: AGENT_NAME is defined twice in command-line arguments and the environment variable" ;;
*)
OPT_JENKINS_AGENT_NAME="${JENKINS_AGENT_NAME}" ;;
esac
fi
#TODO: Handle the case when the command-line and Environment variable contain different values.
#It is fine it blows up for now since it should lead to an error anyway.
exec $JAVA_BIN $JAVA_OPTS -cp /usr/share/jenkins/agent.jar hudson.remoting.jnlp.Main -headless $TUNNEL $URL $WORKDIR $WEB_SOCKET $DIRECT $PROTOCOLS $INSTANCE_IDENTITY $OPT_JENKINS_SECRET $OPT_JENKINS_AGENT_NAME "$@"
fi
# build image manifest for multi arch
# usage:
# sh manifest.sh {name} {version}
# example: sh manifest.sh jenkins/obs 20200601
# Combines the per-arch images (x86-64 and aarch64 repositories) into one
# multi-arch manifest list under the arch-less repository name.
# NOTE(review): "docker manifest" requires the experimental Docker CLI —
# confirm it is enabled on the build host.
name=$1 # image name
version=$2 # image version (tag)
image=swr.cn-north-4.myhuaweicloud.com/openeuler/${name}:${version}
image_x86_64=swr.cn-north-4.myhuaweicloud.com/openeuler/x86-64/${name}:${version}
image_aarch64=swr.cn-north-4.myhuaweicloud.com/openeuler/aarch64/${name}:${version}
echo "create manifest"
docker manifest create -a ${image} ${image_x86_64} ${image_aarch64}
echo "annotate manifest of arch amd64"
docker manifest annotate ${image} ${image_x86_64} --os linux --arch amd64
echo "annotate manifest of arch aarch64"
docker manifest annotate ${image} ${image_aarch64} --os linux --arch arm64/v8
echo "push manifest"
# --purge drops the local manifest list after a successful push.
docker manifest push --purge ${image}
echo "build image manifest for multi arch ... pass"
# Jenkins OBS agent image: obs:base plus a pinned checkout of ci_check.
FROM swr.cn-north-4.myhuaweicloud.com/openeuler/jenkins/obs:base
ARG user=jenkins
# The /home/jenkins/agent directory is reset when the container starts, so
# ci_check is placed under /home/jenkins instead.
# replace GiteeCloneUserName and GiteeClonePassword and tag before docker build
# NOTE(review): cloning with credentials embedded in the URL leaves them in
# this image layer and in the checkout's .git/config — consider using a
# build secret or a read-only deploy token instead.
RUN cd /home/${user} \
&& git clone https://${GiteeCloneUserName}:${GiteeClonePassword}@gitee.com/src-openeuler/ci_check.git \
&& cd ci_check \
&& git checkout ${tag}
ENTRYPOINT ["jenkins-agent"]
# swr.cn-north-4.myhuaweicloud.com/openeuler/jenkins/obs:{tag}
# image dependency
# opensdk:11-jdk-stretch --> inbound:latest
# |
# --> obs:base --> obs:{tag}
FROM swr.cn-north-4.myhuaweicloud.com/openeuler/openjdk:11-jdk-stretch
ARG VERSION=4.3
ARG user=jenkins
ARG group=jenkins
ARG uid=1000
ARG gid=1000
ARG AGENT_WORKDIR=/home/${user}/agent
RUN echo 'deb http://deb.debian.org/debian stretch-backports main' > /etc/apt/sources.list.d/stretch-backports.list
RUN apt-get update \
&& apt-get install -y python3 python3-pip python python-pip \
&& apt-get install -y curl vim git git-lfs\
&& apt-get install -y sudo cpio bsdtar \
&& apt-get install -y sudo libxml-tokeparser-perl libxml-simpleobject-perl \
&& apt-get install -y sudo libxml-parser-easytree-perl libxml-sax-expat-perl \
&& apt-get install -y osc \
&& apt-get install -y golint splint pylint pylint3 \
&& apt-get install -y abigail-tools \
&& rm -rf /var/lib/apt/lists/* \
&& cpan install XML::Structured
RUN curl --create-dirs -fsSLo /usr/share/jenkins/agent.jar https://repo.jenkins-ci.org/public/org/jenkins-ci/main/remoting/${VERSION}/remoting-${VERSION}.jar \
&& chmod 755 /usr/share/jenkins \
&& chmod 644 /usr/share/jenkins/agent.jar \
&& ln -sf /usr/share/jenkins/agent.jar /usr/share/jenkins/slave.jar
COPY jenkins-agent /usr/local/bin/jenkins-agent
RUN chmod a+rx /usr/local/bin/jenkins-agent \
&& ln -s /usr/local/bin/jenkins-agent /usr/local/bin/jenkins-slave
RUN groupadd -g ${gid} ${group}
RUN useradd -c "Jenkins user" -d /home/${user} -u ${uid} -g ${gid} -m ${user}
RUN echo "${user} ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers
USER ${user}
ENV AGENT_WORKDIR=${AGENT_WORKDIR}
RUN mkdir /home/${user}/.jenkins && mkdir -p ${AGENT_WORKDIR}
RUN python -m pip install --upgrade pip
VOLUME /home/${user}/.jenkins
VOLUME ${AGENT_WORKDIR}
WORKDIR ${AGENT_WORKDIR}
ENTRYPOINT ["jenkins-agent"]
# swr.cn-north-4.myhuaweicloud.com/openeuler/jenkins/obs:base
FROM swr.cn-north-4.myhuaweicloud.com/openeuler/openjdk-openeuler:11-jdk-stretch
ARG VERSION=4.3
ARG user=jenkins
ARG group=jenkins
ARG uid=1000
ARG gid=1000
ARG AGENT_WORKDIR=/home/${user}/agent
RUN yum install -y shadow git
RUN curl --create-dirs -fsSLo /usr/share/jenkins/agent.jar https://repo.jenkins-ci.org/public/org/jenkins-ci/main/remoting/${VERSION}/remoting-${VERSION}.jar \
&& chmod 755 /usr/share/jenkins \
&& chmod 644 /usr/share/jenkins/agent.jar \
&& ln -sf /usr/share/jenkins/agent.jar /usr/share/jenkins/slave.jar
COPY jenkins-agent /usr/local/bin/jenkins-agent
RUN chmod a+rx /usr/local/openjdk-11 \
&& chmod a+rx /usr/local/bin/jenkins-agent \
&& ln -s /usr/local/bin/jenkins-agent /usr/local/bin/jenkins-slave
RUN groupadd -g ${gid} ${group}
RUN useradd -c "Jenkins user" -d /home/${user} -u ${uid} -g ${gid} -m ${user}
RUN echo "${user} ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers
USER ${user}
ENV AGENT_WORKDIR=${AGENT_WORKDIR}
RUN mkdir /home/${user}/.jenkins && mkdir -p ${AGENT_WORKDIR}
VOLUME /home/${user}/.jenkins
VOLUME ${AGENT_WORKDIR}
WORKDIR ${AGENT_WORKDIR}
ENTRYPOINT ["jenkins-agent"]
# swr.cn-north-4.myhuaweicloud.com/openeuler/jenkins/openeuler:base
FROM swr.cn-north-4.myhuaweicloud.com/openeuler/openeuler:20.03-lts-08-20
RUN set -eux; \
yum install -y tar wget
# Default to UTF-8 file.encoding
ENV LANG C.UTF-8
ENV JAVA_HOME /usr/local/openjdk-11
ENV PATH $JAVA_HOME/bin:$PATH
# backwards compatibility shim
RUN { echo '#/bin/sh'; echo 'echo "$JAVA_HOME"'; } > /usr/local/bin/docker-java-home && chmod +x /usr/local/bin/docker-java-home && [ "$JAVA_HOME" = "$(docker-java-home)" ]
# https://adoptopenjdk.net/upstream.html
# >
# > What are these binaries?
# >
# > These binaries are built by Red Hat on their infrastructure on behalf of the OpenJDK jdk8u and jdk11u projects. The binaries are created from the unmodified source code at OpenJDK. Although no formal support agreement is provided, please report any bugs you may find to https://bugs.java.com/.
# >
ENV JAVA_VERSION 11.0.8
# https://github.com/docker-library/openjdk/issues/320#issuecomment-494050246
# >
# > I am the OpenJDK 8 and 11 Updates OpenJDK project lead.
# > ...
# > While it is true that the OpenJDK Governing Board has not sanctioned those releases, they (or rather we, since I am a member) didn't sanction Oracle's OpenJDK releases either. As far as I am aware, the lead of an OpenJDK project is entitled to release binary builds, and there is clearly a need for them.
# >
RUN set -eux; \
\
arch="$(arch)"; \
# this "case" statement is generated via "update.sh"
case "$arch" in \
# arm64v8
arm64 | aarch64) downloadUrl=https://github.com/AdoptOpenJDK/openjdk11-upstream-binaries/releases/download/jdk-11.0.8%2B10/OpenJDK11U-jdk_aarch64_linux_11.0.8_10.tar.gz ;; \
# amd64
amd64 | i386:x86-64 | x86_64) downloadUrl=https://github.com/AdoptOpenJDK/openjdk11-upstream-binaries/releases/download/jdk-11.0.8%2B10/OpenJDK11U-jdk_x64_linux_11.0.8_10.tar.gz ;; \
# fallback
*) echo >&2 "error: unsupported architecture: '$arch'"; exit 1 ;; \
esac; \
\
wget -O openjdk.tgz.asc "$downloadUrl.sign"; \
#wget -O openjdk.tgz "$downloadUrl"; \
wget -O openjdk.tgz "$downloadUrl" --progress=dot:giga; \
\
#export GNUPGHOME="$(mktemp -d)"; \
#gpg --batch --keyserver ha.pool.sks-keyservers.net --keyserver-options no-self-sigs-only --recv-keys CA5F11C6CE22644D42C6AC4492EF8D39DC13168F; \
#gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys EAC843EBD3EFDB98CC772FADA5CD6035332FA671; \
#gpg --batch --list-sigs --keyid-format 0xLONG CA5F11C6CE22644D42C6AC4492EF8D39DC13168F \
# | tee /dev/stderr \
# | grep '0xA5CD6035332FA671' \
# | grep 'Andrew Haley'; \
#gpg --batch --verify openjdk.tgz.asc openjdk.tgz; \
#gpgconf --kill all; \
#rm -rf "$GNUPGHOME"; \
\
mkdir -p "$JAVA_HOME"; \
tar --extract \
--file openjdk.tgz \
--directory "$JAVA_HOME" \
--strip-components 1 \
--no-same-owner \
; \
rm openjdk.tgz*; \
\
# TODO strip "demo" and "man" folders?
\
# update "cacerts" bundle to use Debian's CA certificates (and make sure it stays up-to-date with changes to Debian's store)
# see https://github.com/docker-library/openjdk/issues/327
# http://rabexc.org/posts/certificates-not-working-java#comment-4099504075
# https://salsa.debian.org/java-team/ca-certificates-java/blob/3e51a84e9104823319abeb31f880580e46f45a98/debian/jks-keystore.hook.in
# https://git.alpinelinux.org/aports/tree/community/java-cacerts/APKBUILD?id=761af65f38b4570093461e6546dcf6b179d2b624#n29
mkdir -p /etc/ca-certificates/update.d; \
{ \
echo '#!/usr/bin/env bash'; \
echo 'set -Eeuo pipefail'; \
echo 'if ! [ -d "$JAVA_HOME" ]; then echo >&2 "error: missing JAVA_HOME environment variable"; exit 1; fi'; \
# 8-jdk uses "$JAVA_HOME/jre/lib/security/cacerts" and 8-jre and 11+ uses "$JAVA_HOME/lib/security/cacerts" directly (no "jre" directory)
echo 'cacertsFile=; for f in "$JAVA_HOME/lib/security/cacerts" "$JAVA_HOME/jre/lib/security/cacerts"; do if [ -e "$f" ]; then cacertsFile="$f"; break; fi; done'; \
echo 'if [ -z "$cacertsFile" ] || ! [ -f "$cacertsFile" ]; then echo >&2 "error: failed to find cacerts file in $JAVA_HOME"; exit 1; fi'; \
echo 'trust extract --overwrite --format=java-cacerts --filter=ca-anchors --purpose=server-auth "$cacertsFile"'; \
} > /etc/ca-certificates/update.d/docker-openjdk; \
chmod +x /etc/ca-certificates/update.d/docker-openjdk; \
/etc/ca-certificates/update.d/docker-openjdk; \
\
# https://github.com/docker-library/openjdk/issues/331#issuecomment-498834472
find "$JAVA_HOME/lib" -name '*.so' -exec dirname '{}' ';' | sort -u > /etc/ld.so.conf.d/docker-openjdk.conf; \
ldconfig; \
\
# basic smoke test
javac --version; \
java --version
# "jshell" is an interactive REPL for Java (see https://en.wikipedia.org/wiki/JShell)
CMD ["jshell"]
# openeuler源码仓jenkins构建脚本
> 各源码仓的编译方式存在差异,每个仓有各自的jenkins构建过程,对应的构建脚本统一存放在此处
>
> 推荐使用**sh**或者**python**编写构建过程,开头请带上**shebang**
>
> 脚本名称和仓库名保持相同,可带语言对应的后缀
>
> 当前支持**aarch64、x86-64、risc-v**架构
#!/bin/bash
sudo yum install -y \
gdb \
make \
audit-libs-devel \
augeas \
autoconf \
automake \
bash-completion \
cyrus-sasl-devel \
dbus-devel \
device-mapper-devel \
dnsmasq \
ebtables \
firewalld-filesystem \
gawk \
gcc \
gettext \
gettext-devel \
git \
glib2-devel \
glusterfs-api-devel \
glusterfs-devel \
gnutls-devel \
iptables \
iscsi-initiator-utils \
libacl-devel \
libattr-devel \
libblkid-devel \
libcap-ng-devel \
libiscsi-devel \
libnl3-devel \
libpcap-devel \
libpciaccess-devel \
librados-devel \
librbd-devel \
libselinux-devel \
libssh-devel \
libssh2-devel \
libtasn1-devel \
libtirpc-devel \
libtool \
libxml2-devel \
libxslt \
lvm2 \
module-init-tools \
ncurses-devel \
netcf-devel \
nfs-utils \
numactl-devel \
numad \
parted-devel \
perl-interpreter \
polkit \
python3 \
python3-docutils \
qemu-img \
radvd \
readline-devel \
rpcgen \
sanlock-devel \
scrub \
systemd-devel \
systemd-units \
systemtap-sdt-devel \
util-linux \
wireshark-devel \
xfsprogs-devel \
yajl-devel \
--downloadonly --downloaddir=./ --allowerasing --skip-broken --nobest
sudo rpm -ivh --force --nodeps *.rpm
cd ${repo}
git submodule update --init
autoreconf --verbose --force --install
mkdir aarch64-openEuler-linux-gnu
cd aarch64-openEuler-linux-gnu
../configure \
--build=aarch64-openEuler-linux-gnu \
--host=aarch64-openEuler-linux-gnu \
--program-prefix= \
--disable-dependency-tracking \
--prefix=/usr \
--exec-prefix=/usr \
--bindir=/usr/bin \
--sbindir=/usr/sbin \
--sysconfdir=/etc \
--datadir=/usr/share \
--includedir=/usr/include \
--libdir=/usr/lib64 \
--libexecdir=/usr/libexec \
--localstatedir=/var \
--sharedstatedir=/var/lib \
--mandir=/usr/share/man \
--infodir=/usr/share/info \
--enable-dependency-tracking \
--with-runstatedir=/run \
--with-qemu \
--without-openvz \
--without-lxc \
--without-vbox \
--without-libxl \
--with-sasl \
--with-polkit \
--with-libvirtd \
--without-esx \
--without-hyperv \
--without-vmware \
--without-vz \
--without-bhyve \
--with-remote-default-mode=legacy \
--with-interface \
--with-network \
--with-storage-fs \
--with-storage-lvm \
--with-storage-iscsi \
--with-storage-iscsi-direct \
--with-storage-scsi \
--with-storage-disk \
--with-storage-mpath \
--with-storage-rbd \
--without-storage-sheepdog \
--with-storage-gluster \
--without-storage-zfs \
--without-storage-vstorage \
--with-numactl \
--with-numad \
--with-capng \
--without-fuse \
--with-netcf \
--with-selinux \
--with-selinux-mount=/sys/fs/selinux \
--without-apparmor \
--without-hal \
--with-udev \
--with-yajl \
--with-sanlock \
--with-libpcap \
--with-macvtap \
--with-audit \
--with-dtrace \
--with-driver-modules \
--with-firewalld \
--with-firewalld-zone \
--with-wireshark-dissector \
--without-pm-utils \
--with-nss-plugin \
'--with-packager=http://openeuler.org, 2020-08-20-11:11:11, ' \
--with-packager-version=7.oe1 \
--with-qemu-user=qemu \
--with-qemu-group=qemu \
--with-tls-priority=@LIBVIRT,SYSTEM \
--with-loader-nvram=/usr/share/edk2.git/ovmf-x64/OVMF_CODE-pure-efi.fd:/usr/share/edk2.git/ovmf-x64/OVMF_VARS-pure-efi.fd:/usr/share/edk2.git/ovmf-ia32/OVMF_CODE-pure-efi.fd:/usr/share/edk2.git/ovmf-ia32/OVMF_VARS-pure-efi.fd:/usr/share/edk2.git/aarch64/QEMU_EFI-pflash.raw:/usr/share/edk2.git/aarch64/vars-template-pflash.raw:/usr/share/edk2.git/arm/QEMU_EFI-pflash.raw:/usr/share/edk2.git/arm/vars-template-pflash.raw:/usr/share/edk2/ovmf/OVMF_CODE.fd:/usr/share/edk2/ovmf/OVMF_VARS.fd:/usr/share/edk2/ovmf-ia32/OVMF_CODE.fd:/usr/share/edk2/ovmf-ia32/OVMF_VARS.fd:/usr/share/edk2/aarch64/QEMU_EFI-pflash.raw:/usr/share/edk2/aarch64/vars-template-pflash.raw:/usr/share/edk2/arm/QEMU_EFI-pflash.raw:/usr/share/edk2/arm/vars-template-pflash.raw \
--enable-werror \
--enable-expensive-tests \
--with-init-script=systemd \
--without-login-shell || (cat config.log; exit 1)
make -j$(getconf _NPROCESSORS_ONLN) V=1
sed -i 's/while (kill(pid, 0) != -1)/for (int i = 0; kill(pid, 0) != -1 \&\& i < 300; i++)/' ../tests/commandtest.c
sed -i 's/while (kill(pid, SIGINT) != -1)/for (int i = 0; kill(pid, SIGINT) != -1 \&\& i < 300; i++)/' ../tests/commandtest.c
(set +x; for((i=0;i<3;i++)); do sleep 30; ps -fC make &>/dev/null || break; ps ww -e f; ps ww -ef | awk '$9~"tests/.libs/lt-commandtest"{print$2}' | xargs -n 1 pstack; done) &
timeout 120 make -j$(getconf _NPROCESSORS_ONLN) check VIR_TEST_DEBUG=1 || (cat tests/test-suite.log; exit 1)
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
# -*- encoding=utf-8 -*-
"""Monkey-patch for jenkinsapi's JenkinsBase.resolve_job_folders.

Works around a jenkinsapi bug: a non-folder job directly under the base
URL is mishandled when using the jenkins.jenkins source host.
"""
try:
    # Python 3: quote moved into urllib.parse.
    from urllib.parse import quote as urlquote
except ImportError:
    # Python 2 fallback (the original, py2-only import location).
    from urllib import quote as urlquote
from jenkinsapi.jenkinsbase import JenkinsBase
def resolve_job_folders(self, jobs):
    """Expand folder entries in *jobs* in place and return the list.

    Entries without a ``color`` key are treated as folders: they are
    removed and replaced by the jobs found inside them.  Real jobs get a
    ``url`` built from ``self.baseurl`` and their (URL-quoted) name.
    """
    for entry in list(jobs):  # snapshot: the list is mutated while looping
        if 'color' in entry:
            entry["url"] = '%s/job/%s' % (self.baseurl, urlquote(entry['name']))
        else:
            jobs.remove(entry)
            jobs += self.process_job_folder(entry, self.baseurl)
    return jobs
# Keep a reference to the stock implementation, then install the patched
# resolve_job_folders on JenkinsBase at import time.
old = JenkinsBase.resolve_job_folders
JenkinsBase.resolve_job_folders = resolve_job_folders
此差异已折叠。
此差异已折叠。
此差异已折叠。
requests
jenkinsapi
colorlog
threadpool
PyYAML
gevent==1.2.2
jsonpath
此差异已折叠。
此差异已折叠。
此差异已折叠。
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册