提交 00dff354 编写于 作者: S Shinwell Hu

remove redundant files

上级 045a910e
#!/usr/bin/ruby
# check_inactive_repo.rb — flag openeuler repositories with little merge activity.
#
# Counts merged pull-request refs (".../pull/<n>/MERGE") reported by
# `git ls-remote`; a repository with fewer than INACTIVE_THRESHOLD merges is
# treated as inactive and, with --push, an advisory issue is filed via Advisor.
require 'yaml'
require 'set'
require 'optparse'
require './helper/download_spec'
require './gitee/advisor'

# Fewer merged PRs than this marks the repository as inactive.
INACTIVE_THRESHOLD = 3

options = {}
OptionParser.new do |opts|
  opts.banner = "Usage: check_inactive_repo.rb [options]"
  opts.on("-p", "--push", "Push the advise to gitee.com/openeuler") do |v|
    options[:push] = v
  end
  opts.on("-r", "--repo REPO_NAME", "Repo to check upstream info") do |n|
    puts "Checking #{n}"
    options[:repo] = n
  end
  opts.on("-h", "--help", "Prints this help") do
    puts opts
    exit
  end
end.parse!

# A repository name is mandatory.
if not options[:repo] then
  puts "Missing repo name\n"
  exit 1
end

# List all refs of the repo; merged PRs show up as refs/pull/<n>/MERGE.
cmd = "git ls-remote https://gitee.com/openeuler/#{options[:repo]}/"
refs = %x[#{cmd}]
merge_count = 0
refs.each_line { |line|
  if line.match(/\/pull\/(\d*)\/MERGE/) then
    merge_count = merge_count + 1
  end
  puts line
}

if merge_count < INACTIVE_THRESHOLD then
  if options[:push] then
    # File a bilingual advisory issue against the inactive repository.
    ad = Advisor.new
    ad.new_issue("openeuler", options[:repo],
    "Inactive repository",
    "Dear #{options[:repo]} developer:\n亲爱的 #{options[:repo]} 开发者:\n\n We found this repository has not fulfill what it prupose to be.\n我们发现这个代码仓并没有承载它被期望的功能。\n\n Long time no progress will discourge other developers to follow and participant this initiative.\n长期没有代码会使得关注这个项目的开发者失望。\n\n Please start submit something as soon as possible.\n建议您尽快向代码仓提交进展。\n\n This is a automatic advise from openEuler-Advisor. If you think the advise is not correct, please fill an issue at https\:\/\/gitee.com\/openeuler\/openEuler-Advisor to help us improve.\n这是一条由 openEuler-Advisor 自动生成的建议。如果您认为这个建议不对,请访问 https\:\/\/gitee.com\/openeuler\/openEuler-Advisor 来帮助我们改进。\n\n Yours openEuler Advisor.")
  else
    puts "#{options[:repo]} is not active. But we keep it between us"
  end
else
  puts "#{options[:repo]} is active and good."
end
#!/usr/bin/ruby
# check_missing_spec.rb — warn when a src-openeuler repository has no spec
# file; with --push an advisory issue is filed on gitee via Advisor.
require 'yaml'
require 'set'
require 'optparse'
require './helper/download_spec'
require './gitee/advisor'

options = {}
OptionParser.new do |opts|
  opts.banner = "Usage: check_missing_spec.rb [options]"
  opts.on("-p", "--push", "Push the advise to gitee.com/src-openeuler") do |v|
    options[:push] = v
  end
  opts.on("-r", "--repo REPO_NAME", "Repo to check upstream info") do |n|
    puts "Checking #{n}"
    options[:repo] = n
  end
  opts.on("-h", "--help", "Prints this help") do
    puts opts
    exit
  end
end.parse!

# A repository name is mandatory.
if not options[:repo] then
  puts "Missing repo name\n"
  exit 1
end

# download_spec returns "" when the repository has no spec file.
specfile = download_spec(options[:repo])
if specfile == "" then
  puts "no spec file found for #{options[:repo]} project\n"
  if options[:push] then
    puts "Push this advise to gitee\n"
    # File a bilingual advisory issue asking for a spec file.
    ad = Advisor.new
    ad.new_issue("src-openeuler", options[:repo],
    "Submit spec file into this repository",
    "Dear #{options[:repo]} maintainer:\n亲爱的 #{options[:repo]} 维护者:\n\n We found there is no spec file in this repository yet.\n我们发现这个代码仓中没有 spec 文件。\n\n Missing spec file implies that this components will not be integtaed into openEuler release, and your hardworking cannot help others.\n缺少 spec 文件意味着这个项目还不能被集成到 openEuler 项目中,而您的贡献还不能帮助到社区中的其他人。\n\n We courage you submit your spec file into this repository as soon as possible.\n我们鼓励您尽快提交 spec 文件到这个代码仓中\n\n This is a automatic advise from openEuler-Advisor. If you think the advise is not correct, please fill an issue at https\:\/\/gitee.com\/openeuler\/openEuler-Advisor to help us improve.\n这是一条由 openEuler-Advisor 自动生成的建议。如果您认为这个建议不对,请访问 https\:\/\/gitee.com\/openeuler\/openEuler-Advisor 来帮助我们改进。\n\n Yours openEuler Advisor.")
  else
    puts "Keep it between us\n"
  end
else
  puts "Everything's fine\n"
end
# Clean up the locally downloaded spec file.
File.delete(specfile) if specfile != ""
#!/usr/bin/ruby
# check_upgradable.rb — compare the version packaged in openEuler against
# upstream tags (github/git/hg/svn/metacpan/gnome/pypi) and optionally file
# an upgrade advisory issue on gitee.
require 'yaml'
require 'json'
require 'date'
require 'optparse'
require './check_upstream/github'
require './check_upstream/git'
require './check_upstream/hg'
require './check_upstream/svn'
require './check_upstream/metacpan'
require './check_upstream/gnome'
require './check_upstream/pypi'
require './helper/download_spec'
require './helper/rpmparser'
require './gitee/advisor'

options = {}
OptionParser.new do |opts|
  opts.banner = "Usage: check_upgradable.rb [options]"
  opts.on("-p", "--push", "Push the advise to gitee.com/src-openeuler") do |v|
    options[:push] = v
  end
  opts.on("-r", "--repo REPO_NAME", "Repo to check upstream info") do |n|
    puts "Checking #{n}"
    options[:repo] = n
  end
  opts.on("-h", "--help", "Prints this help") do
    puts opts
    exit
  end
end.parse!

# A repository name is mandatory.
if not options[:repo] then
  puts "Missing repo name\n"
  exit 1
end

Prj_name = options[:repo]
# Fetch the spec file to learn the currently packaged version.
specfile=download_spec(Prj_name)
if specfile == "" then
  puts "no specfile found for project\n"
  exit 1
end
spec_struct = Specfile.new(specfile)
Cur_ver = spec_struct.get_version
# Per-project upstream metadata (version_control, src_repo, tag_prefix, ...).
Prj_info = YAML.load(File.read "upstream-info/"+Prj_name+".yaml")
# Compare two dotted version strings component by component.
# Per component: numeric value wins first; if equal, the SHORTER string is
# considered greater (so "1" > "01"); finally the numeric value of the last
# character breaks ties.  When all shared components match, the tag with
# more components is greater.  Returns -1/0/1 like <=>.
def compare_tags (a, b)
  parts_a = a.split(".")
  parts_b = b.split(".")
  [parts_a.length, parts_b.length].min.times do |i|
    pa = parts_a[i]
    pb = parts_b[i]
    by_value = pa.to_i <=> pb.to_i
    return by_value unless by_value == 0
    # Shorter component sorts higher: "1" beats "01".
    by_width = pb.length <=> pa.length
    return by_width unless by_width == 0
    by_suffix = pa[-1].to_i <=> pb[-1].to_i
    return by_suffix unless by_suffix == 0
  end
  parts_a.length <=> parts_b.length
end
# Sort tags ascending (in place) using compare_tags ordering; returns tags.
def sort_tags (tags)
  tags.sort! { |left, right| compare_tags(left, right) }
  tags
end
# Normalize each raw tag line via clean_tag using the global Prj_info
# metadata; returns a new array of cleaned tag strings.
def clean_tags(tags)
  tags.map { |raw| clean_tag(raw, Prj_info) }
end
# Recommend an upgrade target from tags_param (sorted ascending):
#   "latest"        -> the newest tag
#   "latest-stable" -> heuristic: newest tag with >=3 (else >=2) non-zero
#                      dotted components; the higher major wins
#   "perfer-stable" -> newest tag sharing cur_tag (or its major.minor) prefix
# Any other policy returns cur_tag unchanged.  Does not mutate tags_param.
def upgrade_recommend(tags_param, cur_tag, policy)
  candidates = tags_param.reverse
  case policy
  when "latest"
    candidates[0]
  when "latest-stable"
    stable3 = candidates.find { |t| t.split(".").count { |f| f.to_i != 0 } >= 3 } || cur_tag
    stable2 = candidates.find { |t| t.split(".").count { |f| f.to_i != 0 } >= 2 } || cur_tag
    # Prefer the looser match when it has a higher major version.
    stable2[0].to_i > stable3[0].to_i ? stable2 : stable3
  when "perfer-stable"
    exact = candidates.find { |t| t.start_with?(cur_tag) }
    return exact if exact
    if cur_tag.split(".").length >= 3
      prefix = cur_tag.split(".")[0..1].join(".")
      near = candidates.find { |t| t.start_with?(prefix) }
      return near if near
    end
    cur_tag
  else
    cur_tag
  end
end
print Prj_name, ":\n"
# Dispatch on the upstream kind to collect its tag list.
# svn already returns an array; the others return newline-separated text
# that is split and normalized via clean_tags.
if Prj_info["version_control"] == "svn" then
  tags = check_upstream_svn(Prj_info)
elsif Prj_info["version_control"] == "github" then
  tags = check_upstream_github_by_api(Prj_info)
  # The REST API can come back empty; fall back to `git ls-remote`.
  if tags == nil or tags == "" then
    tags = check_upstream_github_by_git(Prj_info)
  end
  tags = clean_tags(tags.lines)
elsif Prj_info["version_control"] == "git" then
  tags = check_upstream_git(Prj_info)
  tags = clean_tags(tags.lines)
elsif Prj_info["version_control"] == "hg" then
  tags = check_upstream_hg(Prj_info)
  tags = clean_tags(tags.lines)
elsif Prj_info["version_control"] == "metacpan" then
  tags = check_upstream_metacpan(Prj_info)
  tags = clean_tags(tags.lines)
elsif Prj_info["version_control"] == "gitlab.gnome" then
  tags = check_upstream_gnome(Prj_info)
  tags = clean_tags(tags.lines)
elsif Prj_info["version_control"] == "pypi" then
  tags = check_upstream_pypi(Prj_info)
  tags = clean_tags(tags.lines)
end
tags = sort_tags(tags)
print "Latest upstream is ", tags[-1], "\n"
#print "Recommended is ", upgrade_recommend(tags, Cur_ver, "latest-stable"), "\n"
print "Current version is ", Cur_ver, "\n"
puts "This package has #{spec_struct.get_diverse} patches"
# No upstream tags, or upstream is behind the packaged version: the metadata
# looks broken, so move it to known-issues; otherwise write it back (the
# check_upstream_* helpers cached their raw query in Prj_info["last_query"]).
if tags.length == 0 or compare_tags(tags[-1], Cur_ver) < 0 then
  STDERR.puts "DEBUG #{Prj_name} > tags are #{tags}"
  File.delete("upstream-info/"+Prj_name+".yaml") if File.exist?("upstream-info/"+Prj_name+".yaml")
  File.open("known-issues/"+Prj_name+".yaml", "w") { |file| file.write(Prj_info.to_yaml) }
else
  File.open("upstream-info/"+Prj_name+".yaml", "w") { |file| file.write(Prj_info.to_yaml) }
end
# Clean up the downloaded spec file.
File.delete(specfile) if specfile != ""
if options[:push] then
  puts "Push to gitee\n"
  ad = Advisor.new
  ad.new_issue("src-openeuler", Prj_name, "Upgrade to Latest Release", "Dear #{Prj_name} maintainer:\n\n We found the latst version of #{Prj_name} is #{tags[-1]}, while the current version in openEuler is #{Cur_ver}.\n\n Please consider upgrading.\n\n\nYours openEuler Advisor.")
else
  puts "keep it to us\n"
end
#!/usr/bin/ruby
require 'yaml'
require 'json'
require 'date'
# Compare two dotted version strings (returns -1/0/1 like <=>).
# For each shared component: numeric value decides first; on a tie the
# shorter component string is greater ("1" > "01"); then the numeric value
# of the last character.  Otherwise the tag with more components wins.
def compare_tags (a, b)
  parts_a = a.split(".")
  parts_b = b.split(".")
  shared = [parts_a.length, parts_b.length].min
  (0...shared).each do |i|
    by_value = parts_a[i].to_i <=> parts_b[i].to_i
    return by_value if by_value != 0
    # Reversed length comparison: the shorter string sorts higher.
    by_width = parts_b[i].length <=> parts_a[i].length
    return by_width if by_width != 0
    by_last = parts_a[i][-1].to_i <=> parts_b[i][-1].to_i
    return by_last if by_last != 0
  end
  parts_a.length <=> parts_b.length
end
# Normalize one raw tag using the project metadata:
#   "tag_pattern"  -> regex whose first capture group is the version
#   "tag_prefix"   -> regex prefix stripped from the tag
#   "seperator"    -> (sic, key name fixed by the yaml files) character
#                     replaced with "." when it is not already "."
# The trailing newline from line-based input is removed.
def clean_tag(tag, prj_info)
  if prj_info.has_key?("tag_pattern")
    tag = tag.gsub(Regexp.new(prj_info["tag_pattern"]), "\\1")
  elsif prj_info.has_key?("tag_prefix")
    tag = tag.gsub(Regexp.new(prj_info["tag_prefix"]), "")
  end
  if prj_info.has_key?("seperator") and prj_info["seperator"] != "."
    tag = tag.gsub(Regexp.new(prj_info["seperator"]), ".")
  end
  tag.gsub("\n", "")
end
# In-place ascending sort of tags under compare_tags ordering; returns tags.
def sort_tags (tags)
  tags.sort! { |left, right| compare_tags(left, right) }
  tags
end
# Recommend an upgrade target from tags (sorted ascending):
#   "latest"        -> the newest tag
#   "latest-stable" -> heuristic: newest tag with >=3 (else >=2) non-zero
#                      dotted components; the higher major version wins
#   "perfer-stable" -> newest tag sharing cur_tag (or its major.minor) prefix
# Any other policy returns cur_tag unchanged.
#
# FIX: the original called tags.reverse!, silently reversing the CALLER's
# array as a side effect (the sibling copy of this function already uses a
# non-mutating .reverse).  Iterate a reversed copy instead.
def upgrade_recommend(tags, cur_tag, policy)
  candidates = tags.reverse
  case policy
  when "latest"
    candidates[0]
  when "latest-stable"
    stable3 = candidates.find { |t| t.split(".").count { |f| f.to_i != 0 } >= 3 } || cur_tag
    stable2 = candidates.find { |t| t.split(".").count { |f| f.to_i != 0 } >= 2 } || cur_tag
    # Prefer the looser match when it has a higher major version.
    stable2[0].to_i > stable3[0].to_i ? stable2 : stable3
  when "perfer-stable"
    exact = candidates.find { |t| t.start_with?(cur_tag) }
    return exact if exact
    if cur_tag.split(".").length >= 3
      prefix = cur_tag.split(".")[0..1].join(".")
      near = candidates.find { |t| t.start_with?(prefix) }
      return near if near
    end
    cur_tag
  else
    cur_tag
  end
end
# Return the cached raw query text stored in prj_info["last_query"], or ""
# when the cache is missing, older than 3 days, or force_reload is true.
# Stale/forced entries are deleted from prj_info as a side effect.
def load_last_query_result(prj_info, force_reload=false)
  if force_reload == true
    prj_info.delete("last_query")
    STDERR.puts "DEBUG: #{prj_info["src_repo"].gsub("\n", "")} > Force Reload\n"
    return ""
  end
  return "" unless prj_info.has_key?("last_query")
  cached = prj_info["last_query"]
  # Cached data stays valid for 3 days.
  if Time.now - cached["time_stamp"] < 60*60*24*3
    STDERR.puts "DEBUG: #{prj_info["src_repo"].gsub("\n", "")} > Reuse Last Query\n"
    cached["raw_data"].dup
  else
    prj_info.delete("last_query")
    STDERR.puts "DEBUG: #{prj_info["src_repo"].gsub("\n", "")} > Last Query Too Old.\n"
    ""
  end
end
# Parse `git ls-remote` output and return the tag names, one per line.
# Only lines of the form "<oid>\trefs/tags/<name>\n" contribute.
#
# FIX: the original guarded with `match != nil`, but String#scan never
# returns nil — it returns [] on no match — so a line mentioning refs/tags
# without matching the full pattern (e.g. no trailing newline) raised
# NoMethodError on match[0][1].  Guard on the empty array instead.
def resp_to_git_tags(resp)
  tags = ""
  resp.each_line { |line|
    next unless line.match(/refs\/tags/)
    match = line.scan(/^([^ \t]*)[ \t]*refs\/tags\/([^ \t]*)\n/)
    if match != [] then
      tags = tags + match[0][1].to_s + "\n"
    end
  }
  return tags
end
#!/usr/bin/ruby
require 'yaml'
require 'date'
require_relative 'common'
# List upstream tags of a plain git repository (prj_info["src_repo"]) via
# `git ls-remote --tags`, reusing a cached response when fresh; the raw
# output is stored back into prj_info["last_query"] for future reuse.
# Returns tag names, one per line.
def check_upstream_git (prj_info)
  resp = load_last_query_result(prj_info)
  cmd="git ls-remote --tags "+prj_info["src_repo"]
  if resp == "" then
    # No usable cache: query the remote and cache the raw response.
    resp=%x[#{cmd}]
    last_query={}
    last_query["time_stamp"] = Time.now
    last_query["raw_data"] = resp.dup
    prj_info["last_query"] = last_query
  end
  tags = resp_to_git_tags(resp)
  return tags
end
#!/usr/bin/ruby
require 'yaml'
require 'json'
require 'date'
require_relative 'common'
# Fetch upstream versions of a github project through the REST API.
# Tries /releases first and falls back to /tags when the release list is
# empty.  The raw JSON and the endpoint used are cached in
# prj_info["last_query"] / prj_info["query_type"].  Returns cleaned tag
# names one per line ("" when nothing could be fetched).
def check_upstream_github_by_api (prj_info)
  cmd="/usr/bin/curl -m 60 -s https://api.github.com/repos/"+prj_info["src_repo"]+"/releases"
  resp = load_last_query_result(prj_info)
  if resp == "" then
    STDERR.puts "DEBUG #{prj_info["src_repo"]} > Using api.github to get releases"
    begin
      retries ||= 0
      resp=%x[#{cmd}]
      release = JSON.parse(resp)
    rescue
      # Network hiccup or truncated JSON: retry up to 10 times.
      STDERR.puts "DEBUG #{prj_info["src_repo"]} > No Response or JSON Parse failed. Retry in 3 seconds.\n"
      sleep 3
      retry if (retries+=1) < 10
    end
    if release != [] and release != nil then
      last_query = {}
      last_query["time_stamp"] = Time.now
      last_query["raw_data"] = resp.dup
      prj_info["last_query"] = last_query
      prj_info["query_type"] = "api.github.releases"
    else
      # fall back to tags
      STDERR.puts "DEBUG #{prj_info["src_repo"]} > Using api.github to get tags"
      resp=""
      cmd="/usr/bin/curl -m 60 -s https://api.github.com/repos/"+prj_info["src_repo"]+"/tags"
      tags=[]
      begin
        retries ||= 0
        resp=%x[#{cmd}]
        tags=JSON.parse(resp)
      rescue
        STDERR.puts "DEBUG #{prj_info["src_repo"]} > No Response or JSON Parse failed. Retry in 3 seconds.\n"
        sleep 3
        retry if (retries += 1) < 10
      end
      if tags == [] or tags == nil then
        print "WARNING: #{prj_info["src_repo"]}'s upstream version not available~"
        return ""
      else
        last_query = {}
        last_query["time_stamp"] = Time.now
        last_query["raw_data"] = resp.dup
        prj_info["last_query"] = last_query
        prj_info["query_type"] = "api.github.tags"
      end
    end
  end
  # Convert whichever payload we ended up with into cleaned tag lines.
  if prj_info["query_type"] == "api.github.releases" then
    result = ""
    begin
      release = JSON.parse(resp)
      release.sort_by! { |e| e["created_at"]}
      release.each { |r|
        result = result + clean_tag(r["tag_name"], prj_info) + "\n"
      }
    rescue
      # Malformed cached/received JSON: fall through with what we have.
    end
    return result
  elsif prj_info["query_type"] == "api.github.tags" then
    result = ""
    begin
      tags = JSON.parse(resp)
      tags.each { |r|
        result = result + clean_tag(r["name"], prj_info) + "\n"
      }
    rescue
    end
    return result
  else
    return ""
  end
end
# Fallback when the github REST API yields nothing: list tags with
# `git ls-remote --tags` against github.com.  A cache produced by a
# different query type is discarded.  Returns tag names one per line.
def check_upstream_github_by_git(prj_info)
  resp = load_last_query_result(prj_info)
  # Cached data from the REST API endpoints is not git output; ignore it.
  if prj_info.has_key?("query_type") and prj_info["query_type"] != "git-ls" then
    resp = ""
  end
  cmd="git ls-remote --tags https://github.com/"+prj_info["src_repo"]+".git"
  if resp == "" then
    STDERR.puts "DEBUG #{prj_info["src_repo"]} > Using git ls-remote"
    resp=%x[#{cmd}]
    last_query = {}
    last_query["time_stamp"] = Time.now
    last_query["raw_data"] = resp.dup
    prj_info["last_query"] = last_query
    prj_info["query_type"] = "git-ls"
  end
  tags = resp_to_git_tags(resp)
  return tags
end
#!/usr/bin/ruby
require 'yaml'
require 'date'
require_relative 'common'
# List tags of a GNOME project hosted at gitlab.gnome.org/GNOME/<src_repo>
# via `git ls-remote --tags`, with the usual last_query caching.
# Returns tag names one per line.
def check_upstream_gnome (prj_info)
  resp = ""
  resp = load_last_query_result(prj_info)
  if resp == "" then
    cmd="git ls-remote --tags https://gitlab.gnome.org/GNOME/"+prj_info["src_repo"]+".git"
    resp = %x[#{cmd}]
    last_query={}
    last_query["time_stamp"] = Time.now
    last_query["raw_data"] = resp.dup
    prj_info["last_query"] = last_query
  end
  tags = resp_to_git_tags(resp)
  return tags
end
#!/usr/bin/ruby
require 'yaml'
require 'date'
require_relative 'common'
# Fetch mercurial tags from <src_repo>/raw-tags.  Some hgweb servers answer
# the first request with an HTML page that sets cookies via javascript; in
# that case the cookies are scraped and the request is resent with them.
# Responses are cached in prj_info["last_query"].  Returns tag names one
# per line; the synthetic "tip" tag is skipped.
def check_upstream_hg (prj_info)
  cookie = ""
  cmd="curl -s "+prj_info["src_repo"]+"/raw-tags"
  resp = load_last_query_result(prj_info)
  if resp == "" then
    resp = %x[#{cmd}]
    if resp.lines[0].match(/html/) then # we got html response, resend with cookie
      resp.each_line { |line|
        match = line.scan(/document\.cookie=\"(.*)\";/)
        if match != [] then
          cookie = cookie + match[0][0]
        end
      }
      cmd="curl -s --cookie \""+cookie+"\" "+prj_info["src_repo"]+"/raw-tags"
      resp = %x[#{cmd}]
    end
    last_query={}
    last_query["time_stamp"] = Time.now
    last_query["raw_data"] = resp.dup
    prj_info["last_query"] = last_query
  end
  tags = ""
  resp.each_line { |line|
    if line.match(/^tip/) then
      next
    end
    # raw-tags lines look like "<name><ws><rev-id>"; keep the first column.
    match = line.scan(/^([\w\d\-\.]*)[ \t]*([\w\d\-\.]*)/)
    if match != [] then
      tags = tags + match[0][0].to_s + "\n"
    end
  }
  return tags
end
#!/usr/bin/ruby
require 'yaml'
require 'json'
require 'date'
require './check_upstream/common'
# Query fastapi.metacpan.org for the latest release of a CPAN distribution.
# The release JSON carries a single "version" field, so the returned "tag
# list" contains at most one line.  The raw JSON is cached in
# prj_info["last_query"].
def check_upstream_metacpan (prj_info)
  resp = ""
  info={}
  tags = ""
  cmd="curl -m 60 -s https://fastapi.metacpan.org/release/"+prj_info["src_repo"]
  resp = load_last_query_result(prj_info)
  if resp == ""
    begin
      retries ||= 0
      resp=%x[#{cmd}]
      info=JSON.parse(resp)
    rescue
      # Network hiccup or truncated JSON: retry up to 10 times.
      STDERR.puts "DEBUG #{prj_info["src_repo"]} > No Respose or JSON parse failed\n"
      sleep 3
      retry if (retries += 1) < 10
    end
  else
    info = JSON.parse(resp)
  end
  if info != {} then
    if ! info.key?("version") then
      STDERR.puts "DEBUG #{prj_info["src_repo"]} > ERROR FOUND"
      return tags
    else
      tags = tags +info["version"].to_s+"\n"
    end
  else
    STDERR.puts "DEBUG #{prj_info["src_repo"]} > found unsorted on cpan.metacpan.org\n"
    return tags
  end
  # Only cache when we actually obtained a usable version.
  last_query = {}
  last_query["time_stamp"] = Time.now
  last_query["raw_data"] = resp.dup
  prj_info["last_query"] = last_query
  return tags
end
#!/usr/bin/ruby
require 'yaml'
require 'json'
require 'date'
require './check_upstream/common.rb'
# Query pypi.org's JSON API for the latest version of a python package.
# pypi only reports the current version, so the returned "tag list" has at
# most one line.  The raw JSON is cached in prj_info["last_query"].
def check_upstream_pypi (prj_info)
  resp = ""
  info={}
  tags = ""
  resp = load_last_query_result(prj_info)
  if resp == "" then
    last_query={}
    last_query["time_stamp"] = Time.now
    cmd="curl -m 60 -s -L https://pypi.org/pypi/"+prj_info["src_repo"]+"/json"
    begin
      retries ||= 0
      resp=%x[#{cmd}]
      info=JSON.parse(resp)
    rescue
      # Network hiccup or truncated JSON: retry up to 10 times.
      STDERR.puts "DEBUG: #{prj_info["src_repo"].gsub("\n", "")} > No Respose or JSON parse failed\n"
      sleep 3
      retry if (retries+=1)<10
    end
    # Only cache a successfully parsed response.
    if info != {} then
      last_query["raw_data"] = resp
      prj_info["last_query"] = last_query
    end
  else
    info=JSON.parse(resp)
  end
  if info != {} then
    tags = tags + info["info"]["version"].to_s+"\n"
  end
  return tags
end
#!/usr/bin/ruby
require 'yaml'
require 'json'
require 'date'
require_relative 'common'
# List upstream tags of an svn repository via `svn ls -v <src_repo>/tags`,
# with the usual last_query caching.  Each verbose listing line is expected
# to carry at least 6 word-ish tokens (rev, author, month, day, year/time,
# name); the name is filtered by tag_prefix, dated, sorted chronologically,
# and returned as an array of normalized tag strings.
#
# FIX: the original guarded with `match != nil`, which is always true
# because String#scan returns [] (never nil); any line with fewer than 6
# tokens then crashed with NoMethodError on match[5][0].  Skip short lines
# explicitly instead.
def check_upstream_svn (prj_info)
  cmd="/usr/bin/svn ls -v "+prj_info["src_repo"]+"/tags"
  resp = load_last_query_result(prj_info)
  if resp == "" then
    resp = %x[#{cmd}]
    last_query = {}
    last_query["time_stamp"] = Time.now
    last_query["raw_data"] = resp.dup
    prj_info["last_query"] = last_query
  end
  sorted_tags = []
  resp.each_line { |tag_line|
    match = tag_line.scan(/([.\w]+)/)
    # Need rev, author, month, day, year/time and the tag name itself.
    next if match.length < 6
    if match[5][0].include?(prj_info["tag_prefix"]) then
      new_tag = Hash.new
      new_tag["Date"] = Date.parse(match[2][0]+" "+match[3][0]+" "+match[4][0])
      tag = match[5][0]
      new_tag["Tag"] = tag.gsub(prj_info["tag_prefix"], "").gsub(prj_info["seperator"], ".")
      sorted_tags.append(new_tag)
    end
  }
  sorted_tags.sort_by! {|t| t["Date"] }
  result = []
  sorted_tags.each { |t|
    result.append(t["Tag"])
  }
  return result
end
#!/usr/bin/python3
"""
This is a command line tool for adding new repo
"""
import argparse
import yaml
import sys

if __name__ == "__main__":
    # Collect the new repo's metadata and the YAML databases to update.
    par = argparse.ArgumentParser()
    par.add_argument("-r", "--repo", help="YAML file for repositories", type=str, required=True)
    par.add_argument("-i", "--sigs", help="YAML file for sigs", type=str, required=True)
    par.add_argument("-s", "--sig", help="Sig manage this repo", type=str, required=True)
    par.add_argument("-n", "--name", help="Name for new repo", type=str, required=True)
    par.add_argument("-d", "--desc", help="Description for new repo", type=str, required=True)
    par.add_argument("-u", "--upstream", help="Upstream for new repo", type=str, required=True)
    args = par.parse_args()

    # Load the SIG database.
    f = open(args.sigs)
    sigs = yaml.load(f.read(), Loader=yaml.Loader)
    if not sigs:
        print("Failed to load {file}".format(file=args.sigs))
        sys.exit(1)
    f.close()

    # Load the repository database.
    f = open(args.repo)
    repo = yaml.load(f.read(), Loader=yaml.Loader)
    if not repo:
        print("Failed to load {file}".format(file=args.repo))
        sys.exit(1)
    f.close()

    # Build the new repository entry.
    nr = {}
    nr["name"] = args.name
    nr["description"] = args.desc
    nr["upstream"] = args.upstream
    nr["protected_branches"] = ["master"]
    nr["type"] = "public"

    # Refuse duplicates by name.
    exist = [x for x in repo["repositories"] if x["name"] == args.name]
    if exist != []:
        print("Repo already exist")
        sys.exit(1)

    # NOTE(review): a community other than openeuler/src-openeuler is
    # silently ignored here — the entry is never appended; confirm intended.
    if repo["community"] == "openeuler":
        repo["repositories"].append(nr)
    elif repo["community"] == "src-openeuler":
        nr["upstream"] = args.upstream
        repo["repositories"].append(nr)
    repo["repositories"].sort(key=lambda r: r["name"])

    # Register the repo under the requested SIG, keeping its list sorted.
    valid_sig = False
    for s in sigs["sigs"]:
        if s["name"] == args.sig:
            s["repositories"].append(repo["community"] + "/" + args.name)
            s["repositories"].sort()
            valid_sig=True
            continue

    # Persist both YAML files only when the SIG exists.
    if valid_sig:
        f = open(args.repo, "w")
        yaml.dump(repo, f)
        f.close()
        f = open(args.sigs, "w")
        yaml.dump(sigs, f)
        f.close()
    else:
        print("SIG name is not valid")
        sys.exit(1)
#!/usr/bin/python3
"""
This is a command line tool for adding new repo
"""
import argparse
import yaml
import sys
from os import path
import subprocess
# Shared accumulator for the new repository entry; filled by get_info()
# and completed/consumed in __main__.
nr = {}

def get_info(pkg):
    """
    Run `rpm -qpi <pkg>` and harvest Name/Summary/URL into the global nr.

    Returns the number of keys collected so far (3 when name, description
    and upstream were all found).
    """
    proc = subprocess.Popen(["rpm", "-qpi", pkg], stdout=subprocess.PIPE)
    while (True):
        line = proc.stdout.readline()
        if not line:
            break;
        # rpm prints "Field      : value"; URLs contain an extra ':'.
        info = str(line.strip().decode()).split(':')
        if (len(info) < 2):
            continue
        info[0] = info[0].strip()
        info[1] = info[1].strip()
        if (info[0] == "Name"):
            nr["name"] = info[1]
        elif (info[0] == "Summary"):
            nr["description"] = info[1]
        elif (info[0] == "URL"):
            # Re-join the scheme with the rest of the URL ("https" + "//host").
            if (len(info) >= 3):
                nr["upstream"] = info[1] + ":" + info[2]
            else:
                nr["upstream"] = info[1]
    proc.stdout.close()
    proc.wait()
    return len(nr)
if __name__ == "__main__":
    # Command line: the rpm package plus the YAML databases to update.
    par = argparse.ArgumentParser()
    par.add_argument("-r", "--repo", help="YAML file for repositories", type=str, required=True)
    par.add_argument("-i", "--sigs", help="YAML file for sigs", type=str, required=True)
    par.add_argument("-s", "--sig", help="The SIG which contains the package", type=str, required=True)
    par.add_argument("-p", "--pkg", help="Package for upoad", type=str, required=True)
    args = par.parse_args()

    # Harvest Name/Summary/URL from the rpm into the global nr.
    if (path.exists(args.pkg) and path.isfile(args.pkg)):
        ret = get_info(args.pkg)
        if (ret < 3):
            print("Somthing is wrong\n")
            sys.exit(1)
    else:
        # FIX: was `"..." & args.pkg` — '&' is bitwise AND and raises
        # TypeError for str; '%' is the printf-style formatting operator.
        print("%s does not exist\n" % args.pkg)
        sys.exit(1)

    # Load the SIG database.
    f = open(args.sigs)
    sigs = yaml.load(f.read(), Loader=yaml.Loader)
    if not sigs:
        print("Failed to load {file}".format(file=args.sigs))
        sys.exit(1)
    f.close()

    # Load the repository database.
    f = open(args.repo)
    repo = yaml.load(f.read(), Loader=yaml.Loader)
    if not repo:
        print("Failed to load {file}".format(file=args.repo))
        sys.exit(1)
    f.close()

    nr["protected_branches"] = ["master"]
    nr["type"] = "public"

    # Refuse duplicates by name.
    exist = [x for x in repo["repositories"] if x["name"] == nr["name"]]
    if exist != []:
        print("Repo already exist")
        sys.exit(1)

    # openeuler source repos carry no upstream; src-openeuler packages do.
    if repo["community"] == "openeuler":
        del nr["upstream"]
        repo["repositories"].append(nr)
    elif repo["community"] == "src-openeuler":
        repo["repositories"].append(nr)
    repo["repositories"].sort(key=lambda r: r["name"])

    # Register the repo under the requested SIG, keeping its list sorted.
    valid_sig = False
    for s in sigs["sigs"]:
        if s["name"] == args.sig:
            s["repositories"].append(repo["community"] + "/" + nr["name"])
            s["repositories"].sort()
            valid_sig=True
            continue

    # Persist both YAML files only when the SIG exists.
    if valid_sig:
        f = open(args.repo, "w")
        yaml.dump(repo, f)
        f.close()
        f = open(args.sigs, "w")
        yaml.dump(sigs, f)
        f.close()
    else:
        print("SIG name is not valid")
        sys.exit(1)

    print("create repo %s successfully\n" % nr["name"])
    sys.exit(0)
#!/usr/bin/python3
"""
This is a helper script for working with gitee.com
"""
import urllib
import urllib.request
import urllib.parse
import urllib.error
import argparse
import yaml
import re
import os.path
import json
import pprint
class Gitee(object):
    """
    Gitee is a helper class to abstract the gitee.com API.

    Reads the personal access token from ~/.gitee_personal_token.json and
    offers raw GET/POST helpers plus convenience wrappers for forking,
    creating PRs and fetching spec/yaml files.
    """
    def __init__(self):
        self.secret = open(os.path.expanduser("~/.gitee_personal_token.json"), "r")
        self.token = json.load(self.secret)
        # Browser-ish UA; some endpoints reject the default urllib agent.
        self.headers = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW 64; rv:50.0) Gecko/20100101 Firefox/50.0'}
        self.gitee_url = "https://gitee.com/"
        self.src_openeuler_url = self.gitee_url + "src-openeuler/{package}/raw/master/"
        self.advisor_url = self.gitee_url + "openeuler/openEuler-Advisor/raw/master/"
        self.specfile_url_template = self.src_openeuler_url + "{specfile}"
        self.yamlfile_url_template = self.src_openeuler_url + "{package}.yaml"
        self.advisor_url_template = self.advisor_url + "upstream-info/{package}.yaml"
        self.specfile_exception_url = self.advisor_url + "helper/specfile_exceptions.yaml"

    def post_gitee(self, url, values, headers=None):
        """
        POST into gitee API; returns the response body, or False on HTTP error.
        """
        if headers is None:
            headers = self.headers.copy()
        data = urllib.parse.urlencode(values).encode('utf-8')
        req = urllib.request.Request(url=url, data=data, headers=headers, method="POST")
        try:
            u = urllib.request.urlopen(req)
            return u.read().decode("utf-8")
        except urllib.error.HTTPError as err:
            print("WARNING:" + str(err.code))
            print("WARNING:" + str(err.headers))
            return False

    def fork_repo(self, repo):
        """
        Fork a src-openeuler repository into the token owner's namespace.
        """
        url = "https://gitee.com/api/v5/repos/src-openeuler/{repo}/forks".format(repo=repo)
        values = {}
        values["access_token"] = self.token["access_token"]
        return self.post_gitee(url, values)

    def create_pr(self, head, repo):
        """
        Create an upgrade PR from <head>:master into src-openeuler/<repo>.
        """
        url = "https://gitee.com/api/v5/repos/src-openeuler/{repo}/pulls".format(repo=repo)
        values = {}
        values["access_token"] = self.token["access_token"]
        values["title"] = "Upgrade to latest version of {repo}".format(repo=repo)
        values["head"] = "{head}:master".format(head=head)
        values["base"] = "master"
        values["body"] = """This is a (mostly) automatically created PR by openEuler-Advisor.
Please be noted that it's not throughly tested.
Review carefully before accept this PR.
Thanks.
Yours openEuler-Advisor.
"""
        return self.post_gitee(url, values)

    def get_gitee(self, url, headers=None):
        """
        GET from gitee api; returns the decoded response body.
        """
        if headers is None:
            req = urllib.request.Request(url=url, headers=self.headers)
        else:
            req = urllib.request.Request(url=url, headers=headers)
        u = urllib.request.urlopen(req)
        return u.read().decode("utf-8")

    def get_gitee_json(self, url):
        """
        GET and json-decode a gitee api response.
        """
        headers = {}
        headers["Content-Type"] = "application/json;charset=UTF-8"
        resp = self.get_gitee(url, headers)
        return json.loads(resp)

    def get_spec_exception(self):
        """
        Get the well-known spec file exceptions (non-standard dir/file names).
        """
        resp = self.get_gitee(self.specfile_exception_url)
        exps = yaml.load(resp, Loader=yaml.Loader)
        return exps

    def get_spec(self, pkg):
        """
        Get the openeuler spec file for a specific package.
        """
        exp = self.get_spec_exception()
        if pkg in exp:
            dir_name = exp[pkg]["dir"]
            file_name = exp[pkg]["file"]
            specurl = self.specfile_url_template.format(package=pkg, specfile=dir_name + "/" + file_name)
        else:
            specurl = self.specfile_url_template.format(package=pkg, specfile=pkg + ".spec")
        return self.get_gitee(specurl)

    def get_yaml(self, pkg):
        """
        Get upstream yaml metadata for a specific package, trying the
        openEuler-Advisor upstream-info first and then the package's own
        repository.  Returns the yaml text, or False when not found.
        """
        yamlurl = self.advisor_url_template.format(package=pkg)
        resp = self.get_gitee(yamlurl)
        if re.match("Not found", resp):
            # Fall back to the yaml kept in the package's own repository.
            yamlurl = self.yamlfile_url_template.format(package=pkg)
            resp = self.get_gitee(yamlurl)
            if re.match("Not found", resp):
                print("Cannot find upstream metadata")
                return False
            return resp
        # FIX: the advisor-repo yaml WAS found here; the original returned
        # False on this branch, discarding every valid advisor response.
        return resp
if __name__ == "__main__":
    # Library module: nothing to do when executed directly.
    pass
#!/usr/bin/ruby
require 'json'
# Thin wrapper around the gitee issues API: files new issues using the
# access token stored in ~/.gitee_token.json, shelling out to curl.
class Advisor
  def initialize
    @token = JSON.parse(File.read (File.expand_path "~/.gitee_token.json"))
    # Base curl invocation; per-call URL and payload are appended locally.
    @cmd = "curl -s -X POST --header 'Content-Type: application/json;charset=UTF-8'"
    @param = {}
  end

  # File a new issue titled `title` with markdown `body` against owner/repo.
  # Returns curl's stdout (the API's JSON response text).
  #
  # FIX: the original appended URL and payload to @cmd itself, so a second
  # new_issue call on the same Advisor produced a malformed command with
  # two URLs and two -d payloads.  Build the full command in a local.
  def new_issue(owner, repo, title, body)
    @param["access_token"] = @token["access_token"]
    @param["repo"] = repo
    @param["title"] = title
    @param["body"] = body
    cmd = @cmd.dup
    cmd += " 'https://gitee.com/api/v5/repos/#{owner}/issues'"
    cmd += " -d '" + @param.to_json + "'"
    resp = %x[#{cmd}]
  end
end
#ad = Advisor.new
#ad.new_issue("Shinwell_Hu", "openEuler-Toolbox")
#!/bin/bash
# Refresh the cached gitee OAuth token in ~/.gitee_token.json using the
# password grant flow.  Credentials ($username, $password, $client_id,
# $client_secret) are sourced from ~/.gitee_secret, which must exist.
# refer to gitee.com/api/v5/oauth_doc#/list-item-2
source ~/.gitee_secret
echo "Refreshing ~/.gitee_token.json"
curl -s -X POST --data-urlencode "grant_type=password" --data-urlencode "username=$username" --data-urlencode "password=$password" --data-urlencode "client_id=$client_id" --data-urlencode "client_secret=$client_secret" --data-urlencode "scope=projects issues" https://gitee.com/oauth/token > ~/.gitee_token.json
# Keep the token file private to the owner.
chmod 400 ~/.gitee_token.json
#!/usr/bin/ruby
require 'yaml'
# Download the spec file of src-openeuler/<name> from gitee into the current
# directory and return its local path, or "" when the repo has no spec file.
# Non-standard spec locations are looked up in specfile_exceptions.yaml.
#
# FIX: File.exists? was deprecated for years and removed in Ruby 3.2;
# use File.exist? instead.
def download_spec(name)
  output_dir = "."
  exception_load = YAML.load(File.read(File.dirname(__FILE__)+"/specfile_exceptions.yaml"))
  if exception_load.has_key?(name) then
    output_file = "#{output_dir}/#{exception_load[name]["file"]}"
    cmd = "curl -s https://gitee.com/src-openeuler/#{name}/raw/master/#{exception_load[name]["dir"]}/#{exception_load[name]["file"]} -o #{output_file}"
  else
    output_file = "#{output_dir}/#{name}.spec"
    cmd = "curl -s https://gitee.com/src-openeuler/#{name}/raw/master/#{name}.spec -o #{output_file}"
  end
  # Only hit the network when there is no local copy yet.
  %x[#{cmd}] if ! File.exist?(output_file)
  s = File.size(output_file)
  # 52 bytes matches gitee's "not found" response body — presumably; TODO confirm.
  if s == 52 then
    STDERR.puts "> No SPEC file found for #{name}"
    File.delete output_file
    return ""
  end
  return output_file
end
#!/usr/bin/ruby
require 'yaml'
require 'set'
# Return the value following a spec tag matching regex `prefix`
# (e.g. "Source\\d*:"), or nil when the line does not carry that tag.
def rpmspec_split_file (line, prefix)
  captured = line.scan(/#{prefix}\s*(.*)/)
  captured.empty? ? nil : captured[0][0]
end
# Split a multi-valued spec tag (e.g. "BuildRequires:") into an array of
# names.  Values may be comma- or whitespace-separated; returns nil when
# the tag is absent or carries only a single value.
def rpmspec_split_tags (line, prefix)
  captured = line.scan(/#{prefix}\s*(.*)/)
  return nil if captured.empty?
  value = captured[0][0]
  if value.index(',')
    value.split(',').map(&:strip)
  elsif value =~ /\w\s+\w/
    value.split(/\s+/)
  end
end
# Normalize a set of dependency strings:
#   - entries starting with a digit or comparator char are dropped,
#   - "name >= 1.2" entries are reduced to the bare name,
#   - %{macro} references are expanded via the `mac` table when known.
# Returns a NEW set; the input set is no longer mutated.
#
# FIX: the original deleted from `oset` while iterating it (mutating a
# collection during iteration), and left the caller's set partially
# modified even though the result is a fresh set anyway.  Collect the
# changes first and apply them afterwards.
def rpmspec_clean_tag (oset, mac)
  to_delete = Set.new
  to_add = Set.new
  oset.each { |br|
    if br[0] =~ /[\d<=>!]/ then
      # Version-only fragment such as ">= 1.0": drop it.
      to_delete << br
    elsif br =~ /[<=>!]/ then
      # "name >= ver": keep only the name.
      to_delete << br
      to_add << br.split("\s").map(&:strip)[0]
    elsif br.match(/%{/) then
      expanded = br.dup
      br.scan(/%{(.*?)}/).each { |m|
        if mac[m[0]] then
          expanded = expanded.gsub(/%{#{m[0]}}/, mac[m[0]])
        else
          # Some RPM macros need shell expansion; unknown ones stay as-is.
        end
      }
      to_delete << br
      to_add << expanded
    end
  }
  oset = oset - to_delete
  oset += to_add
  return oset
end
# Expand a single %{macro} reference in `tag` using the `macro` table;
# unknown macros are left untouched.
## This needs a fix — the greedy (.*) capture only handles one macro per tag.
def rpmspec_macro_expand(tag, macro)
  return tag unless tag.match(/%{/)
  captured = tag.scan(/%{(.*)}/)
  unless captured.empty?
    key = captured[0][0]
    tag = tag.gsub(/%{#{key}}/, macro[key]) if macro[key]
  end
  tag
end
# Minimal RPM spec parser: extracts name/version/release, %global/%define
# macros, BuildRequires/Requires/Provides sets and Source/Patch entries
# from a spec file on disk.
class Specfile
  def initialize(filepath)
    spec = File.open("#{filepath}")
    @macros = {}
    @macros["epoch"] = "1"
    # Hard-coded arch for %{?_isa} expansion — assumes aarch64; TODO confirm.
    @macros["?_isa"] = "aarch64"
    @name = ""
    @version = ""
    @release = ""
    @build_requires = Set.new
    @requires = Set.new
    @provides = Set.new
    @sources = Set.new
    @patches = Set.new
    # Single pass over the file, collecting headers, macros and deps.
    spec.each_line { |line|
      m = line.scan (/^[Nn]ame\s*:\s*([^\s]*)\s*/)
      if m != [] then
        @name = m[0][0]
      end
      m = line.scan (/^[Vv]ersion\s*:\s*([^\s]*)\s*/)
      if m != [] then
        @version = m[0][0]
      end
      m = line.scan (/^[Rr]elease\s*:\s*([^\s]*)\s*/)
      if m != [] then
        @release = m[0][0]
      end
      m = line.scan (/%global\s*([^\s]*)\s*(.*)/)
      if m != [] then
        @macros[m[0][0]] = m[0][1]
      end
      m = line.scan (/%define\s*([^\s]*)\s*(.*)/)
      if m != [] then
        @macros[m[0][0]] = m[0][1]
      end
      bra = rpmspec_split_tags(line, "BuildRequires:")
      if bra != nil then
        @build_requires += bra
      end
      ra = rpmspec_split_tags(line, "Requires:")
      if ra != nil then
        @requires += ra
      end
      po = rpmspec_split_tags(line, "Provides:")
      if po != nil then
        @provides += po
      end
      src = rpmspec_split_file(line, "Source\\d*:")
      if src != nil then
        @sources << src
      end
      pa = rpmspec_split_file(line, "Patch\\d*:")
      if pa != nil then
        @patches << pa
      end
    }
    # Expand macros in order: name first, since version/release may use it.
    @name = rpmspec_macro_expand(@name, @macros)
    @macros["name"] = @name
    @version = rpmspec_macro_expand(@version, @macros)
    @macros["version"] = @version
    @release = rpmspec_macro_expand(@release, @macros)
    @macros["release"] = @release
    @build_requires = rpmspec_clean_tag(@build_requires, @macros)
    @requires = rpmspec_clean_tag(@requires, @macros)
    @provides = rpmspec_clean_tag(@provides, @macros)
  end
  def get_name
    return @name
  end
  def get_version
    return @version
  end
  # Number of patches carried — a rough "divergence from upstream" metric.
  def get_diverse
    return @patches.length
  end
  def get_sources
    return @sources
  end
  # Normalize an external set of dependency strings with this spec's macros.
  def expand_macros(s)
    return rpmspec_clean_tag(s, @macros)
  end
end
---
libnetwork:
dir: script
file: docker-proxy.spec
authz:
dir: hack
file: authz.spec
lxcfs-tools:
dir: hack
file: lxcfs-tools.spec
libkae:
dir: .
file: kae.spec
autotune:
dir: .
file: atune.spec
dvdplusrw-tools:
dir: .
file: dvd+rw-tools.spec
gtk:
dir: .
file: gtk+.spec
docker:
dir: .
file: docker-engine-openeuler.spec
libsigcpp20:
dir: .
file: libsigc++20.spec
libwd:
dir: .
file: warpdrive.spec
kmod-kvdo:
dir: .
file: kvdo.spec
jboss-el:
dir: .
file: jboss-el-2.2-api.spec
openEuler-rpm-config:
dir: .
file: generic-rpm-config.spec
openEuler-release:
dir: .
file: generic-release.spec
openjdk-1.8.0:
dir: .
file: java-1.8.0-openjdk.spec
openjdk-11:
dir: .
file: java-11-openjdk.spec
A-Tune:
dir: .
file: atune.spec
runc:
dir: .
file: runc-openeuler.spec
\ No newline at end of file
#!/usr/bin/python3
"""
This is a packager bot for python modules from pypi.org
"""
#******************************************************************************
# Copyright (c) Huawei Technologies Co., Ltd. 2018-2019. All rights reserved.
# licensed under the Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
# http://license.coscl.org.cn/MulanPSL2
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
# PURPOSE.
# See the Mulan PSL v2 for more details.
# Author: Shinwell_Hu Myeuler
# Create: 2020-05-07
# Description: provide a tool to package python module automatically
# ******************************************************************************/
import urllib
import urllib.request
from pprint import pprint
from os import path
import json
import sys
import re
import datetime
import argparse
import subprocess
import os
from pathlib import Path
# python3-wget is not default available on openEuler yet.
# import wget
# Template strings used to fetch pypi metadata and to render the
# generated spec file tags.
url_template = 'https://pypi.org/pypi/{pkg_name}/json'
json_file_template = '{pkg_name}.json'
name_tag_template = 'Name:\t\tpython-{pkg_name}'
summary_tag_template = 'Summary:\t{pkg_sum}'
version_tag_template = 'Version:\t{pkg_ver}'
release_tag_template = 'Release:\t1'
license_tag_template = 'License:\t{pkg_lic}'
home_tag_template = 'URL:\t\t{pkg_home}'
source_tag_template = 'Source0:\t{pkg_source}'
buildreq_tag_template = 'BuildRequires:\t{req}'
# Cleared by get_buildarch() when a prebuilt amd64 wheel exists.
build_noarch = True # Usually python modules are arch independent
def get_license(j):
    """
    Return the license string for a pypi package.

    By default the license can be taken from json["info"]["license"].
    In rare cases that field is empty or missing (null on pypi), so we
    fall back to json["info"]["classifiers"], whose entries look like
    "License :: OSI Approved :: BSD License".
    Returns "" when no license info can be found.
    """
    # "license" may be null in the pypi metadata; test truthiness
    # instead of comparing against "" so None does not leak through.
    if j["info"]["license"]:
        return j["info"]["license"]
    for k in j["info"]["classifiers"]:
        if k.startswith("License"):
            ks = k.split("::")
            # Classifiers normally have three levels; fall back to the
            # last component for the rare shorter ones instead of
            # raising IndexError.
            return ks[2].strip() if len(ks) > 2 else ks[-1].strip()
    return ""
def get_source_url(j):
    """
    Return the download URL of the source tarball (sdist) for the
    latest version recorded in the pypi metadata.
    Return "" when no sdist entry exists.
    """
    latest = j["info"]["version"]
    candidates = j["releases"][latest]
    # Pick the first sdist entry, matching the original scan order.
    sdist_urls = [c["url"] for c in candidates if c["packagetype"] == "sdist"]
    return sdist_urls[0] if sdist_urls else ""
def transform_module_name(n):
    """
    Return the openEuler package name (with version restriction) for a
    pypi requirement string.

    Any name containing '.' or '/' is considered a file and ignored
    (returns "").  Names starting with "python-" are changed to
    "python3-" for consistency; all other names get a "python3-"
    prefix.  Version constraints such as "(>=1.0,<2.0)" are rendered
    as "python3-foo >= 1.0, python3-foo < 2.0".
    """
    # Split "name (constraints)" into the name and the constraint list.
    ns = re.split("[()]", n)
    ver_constrain = []
    ns[0] = ns[0].strip()
    if ns[0].startswith("python-"):
        ns[0] = ns[0].replace("python-", "python3-")
    else:
        ns[0] = "python3-" + ns[0]
    if ns[0].find("/") != -1 or ns[0].find(".") != -1:
        return ""
    if len(ns) > 1:
        vers = ns[1].split(",")
        for ver in vers:
            m = re.match(r"([!<>=]+)( *)(\d.*)", ver.strip())
            if m is None:
                # Unrecognized constraint (e.g. an environment marker);
                # skip it instead of crashing on m[1].
                continue
            ver_constrain.append(ns[0] + " " + m[1] + " " + m[3])
        return ", ".join(ver_constrain)
    else:
        return ns[0]
def get_requires(j):
    """
    Print a "Requires:" line for every entry in requires_dist,
    regardless of whether an "extra" environment marker is present.
    """
    rs = j["info"]["requires_dist"]
    if rs is None:
        return
    for r in rs:
        # Strip the environment marker (everything after ';') if one
        # exists.  Fix: str.find returns -1 when ';' is absent, and
        # r[:-1] silently chopped the last character of the name.
        idx = r.find(";")
        mod = transform_module_name(r[:idx] if idx != -1 else r)
        print("Requires:\t" + mod)
def refine_requires(req):
    """
    Return the transformed module name of a requirement, dropping the
    part after ';' (requirements with markers often carry complicated
    preconditions that would need more parsing).
    """
    base = req.split(";", 1)[0]
    return transform_module_name(base)
def get_build_requires(resp):
    """
    Collect build requirements from requires_dist.

    The names are intentionally NOT printed as BuildRequires: tags;
    they are returned so the caller can install them via pip, which
    helps build rpms without falling into a build-dependency
    nightmare.
    """
    req_list = []
    rds = resp["info"]["requires_dist"]
    if rds is None:
        return req_list
    for rp in rds:
        br = refine_requires(rp)
        if br == "":
            continue
        # Keep only the bare package name, dropping version constraints.
        req_list.append(br.lstrip().split(" ")[0])
    return req_list
def get_buildarch(j):
    """
    Print a "BuildArch: noarch" tag unless the module ships a prebuilt
    amd64 wheel, in which case it is arch dependent: the global
    build_noarch flag is cleared and nothing is printed.
    """
    latest = j["info"]["version"]
    for rel in j["releases"][latest]:
        if rel["packagetype"] == "bdist_wheel" and rel["url"].find("amd64") != -1:
            global build_noarch
            build_noarch = False
            return
    print("BuildArch:\tnoarch")
def get_description(j):
    """
    Return the description text for the spec file.

    Usually this is json["info"]["description"].  If it is rst styled,
    only the content of the first paragraph is kept, with tag lines
    (section underlines, ":"-options, ".." directives) removed.
    When nothing usable is found, fall back to the raw description
    (plain text) or the summary.
    """
    desc = j["info"]["description"].splitlines()
    res = []
    paragraph = 0
    for d in desc:
        stripped = d.strip()
        if len(stripped) == 0:
            continue
        ignore_line = False
        if stripped.startswith("===") or stripped.startswith("---"):
            # Section underline: marks the boundary of a paragraph.
            paragraph = paragraph + 1
            ignore_line = True
        elif stripped.startswith(":") or stripped.startswith(".."):
            ignore_line = True
        if ignore_line != True and paragraph == 1:
            res.append(d)
        if paragraph >= 2:
            # The line right before this underline is the next section
            # title, not first-paragraph content; drop it.  Guard the
            # delete so back-to-back underlines don't raise IndexError.
            if res:
                del res[-1]
            return "\n".join(res)
    if res != []:
        return "\n".join(res)
    elif paragraph == 0:
        # No rst structure at all: use the raw description.
        return j["info"]["description"]
    else:
        return j["info"]["summary"]
def store_json(j, pkg, spath):
    """
    Save the pypi JSON metadata of pkg under spath as <pkg>.json.

    If the file already exists it is left untouched.
    """
    fname = json_file_template.format(pkg_name=pkg)
    json_file = os.path.join(spath, fname)
    if path.exists(json_file) and path.isfile(json_file):
        # Already cached.  The original re-read and discarded the file
        # here for no reason; simply do nothing.
        return
    with open(json_file, 'w') as f:
        json.dump(j, f)
def get_pkg_json(pkg):
    """
    Receive the JSON metadata of pkg from pypi.org and return the
    parsed object.
    """
    url = url_template.format(pkg_name=pkg)
    # Close the HTTP response deterministically instead of leaking it.
    with urllib.request.urlopen(url) as u:
        return json.loads(u.read().decode('utf-8'))
def download_source(j, tgtpath):
    """
    Download the source tarball described by the pypi metadata j into
    tgtpath using wget.  Returns False when tgtpath does not exist,
    otherwise the wget exit status.
    """
    if not os.path.exists(tgtpath):
        # Fix: the path was passed as a second print() argument instead
        # of being formatted into the message.
        print("download path %s does not exist\n" % tgtpath)
        return False
    s_url = get_source_url(j)
    return subprocess.call(["wget", s_url, "-P", tgtpath])
def prepare_rpm_build_env(buildroot):
    """
    Prepare the standard rpmbuild directory layout (SPECS, BUILD,
    SOURCES, SRPMS, RPMS, BUILDROOT) under buildroot.
    Returns False when buildroot does not exist, True otherwise.
    """
    if not os.path.exists(buildroot):
        # Fix: format the path into the message instead of passing it
        # as a second argument to print().
        print("Build Root path %s does not exist\n" % buildroot)
        return False
    for sdir in ['SPECS', 'BUILD', 'SOURCES', 'SRPMS', 'RPMS', 'BUILDROOT']:
        bpath = os.path.join(buildroot, sdir)
        if not os.path.exists(bpath):
            os.mkdir(bpath)
    return True
def try_install_package(pkg):
    """
    Make sure pkg is available: first query rpm, then fall back to a
    user-level pip3 installation.  Always returns True so that the
    build is attempted even when installation failed (fix it later).
    """
    print(pkg)
    if subprocess.call(["rpm", "-qi", pkg]) == 0:
        return True
    # Not packaged as rpm yet; try pip with the part after "python3-".
    pip_name = pkg.split("-")
    target = pip_name[1] if len(pip_name) == 2 else pip_name[0]
    ret = subprocess.call(["pip3", "install", "--user", target])
    if ret != 0:
        print("%s can not be installed correctly, Fix it later, go ahead to do building..." % pip_name)
    # Try to build anyway, fix it later.
    return True
def prepare_dependencies(req_list):
    """
    Install every build requirement in req_list.  Returns the first
    requirement that could not be installed, or "" on full success.
    """
    for requirement in req_list:
        if not try_install_package(requirement):
            return requirement
    return ""
def build_package(specfile):
    """
    Build the rpm package from specfile with "rpmbuild -ba" and return
    the rpmbuild exit status.
    """
    return subprocess.call(["rpmbuild", "-ba", specfile])
def build_rpm(j, buildroot):
    """
    Full process to build the rpm: set up the build tree, generate the
    spec, install the build dependencies, fetch the source and build.
    Returns "" on success, False when buildroot is missing, or the name
    of the dependency that could not be installed.
    """
    if prepare_rpm_build_env(buildroot) == False:
        return False
    specfile = os.path.join(buildroot, "SPECS", "python-" + j["info"]["name"] + ".spec")
    req_list = build_spec(j, specfile)
    missing = prepare_dependencies(req_list)
    if missing != "":
        print("%s can not be installed automatically, Please handle it" % missing)
        return missing
    download_source(j, os.path.join(buildroot, "SOURCES"))
    build_package(specfile)
    return ""
def build_spec(resp, output):
    """
    Print out the spec file generated from the pypi metadata.

    resp:   pypi JSON metadata of the package.
    output: target file path; if it is a directory the spec is written
            to <output>/python3-<name>; if "" the spec goes to stdout.
    Returns the list of build requirements collected from the metadata.
    """
    if os.path.isdir(output):
        output = os.path.join(output, "python3-" + resp["info"]["name"])
    # Redirect stdout into the output file so every print() below lands
    # in the spec; restored at the end of the function.
    tmp = sys.stdout
    if (output == ""):
        print()
    else:
        sys.stdout = open(output, 'w+')
    # ---- preamble tags ----
    print(name_tag_template.format(pkg_name=resp["info"]["name"]))
    print(version_tag_template.format(pkg_ver=resp["info"]["version"]))
    print(release_tag_template)
    print(summary_tag_template.format(pkg_sum=resp["info"]["summary"]))
    print(license_tag_template.format(pkg_lic=get_license(resp)))
    print(home_tag_template.format(pkg_home=resp["info"]["project_urls"]["Homepage"]))
    print(source_tag_template.format(pkg_source=get_source_url(resp)))
    get_buildarch(resp)
    print("")
    get_requires(resp)
    print("")
    print("%description")
    print(get_description(resp))
    print("")
    print("%package -n python3-{name}".format(name=resp["info"]["name"]))
    print(summary_tag_template.format(pkg_sum=resp["info"]["summary"]))
    print("Provides:\tpython-" + resp["info"]["name"])
    print(buildreq_tag_template.format(req='python3-devel'))
    print(buildreq_tag_template.format(req='python3-setuptools'))
    if build_noarch == False:
        # Arch-dependent modules typically need cffi plus a toolchain.
        print(buildreq_tag_template.format(req='python3-cffi'))
        print(buildreq_tag_template.format(req='gcc'))
        print(buildreq_tag_template.format(req='gdb'))
    build_req_list=get_build_requires(resp)
    print("%description -n python3-" + resp["info"]["name"])
    print(get_description(resp))
    print("")
    print("%package help")
    print("Summary:\tDevelopment documents and examples for {name}".format(name=resp["info"]["name"]))
    print("Provides:\tpython3-{name}-doc".format(name=resp["info"]["name"]))
    print("%description help")
    print(get_description(resp))
    print("")
    # ---- build sections: standard setuptools build/install ----
    print("%prep")
    print("%autosetup -n {name}-{ver}".format(name=resp["info"]["name"], ver=resp["info"]["version"]))
    print("")
    print("%build")
    print("%py3_build")
    print("")
    print("%install")
    print("%py3_install")
    print("install -d -m755 %{buildroot}/%{_pkgdocdir}")
    print("if [ -d doc ]; then cp -arf doc %{buildroot}/%{_pkgdocdir}; fi")
    print("if [ -d docs ]; then cp -arf docs %{buildroot}/%{_pkgdocdir}; fi")
    print("if [ -d example ]; then cp -arf example %{buildroot}/%{_pkgdocdir}; fi")
    print("if [ -d examples ]; then cp -arf examples %{buildroot}/%{_pkgdocdir}; fi")
    # Collect everything installed under usr/{lib,lib64,bin,sbin} into
    # filelist.lst, consumed by the %files section below.
    print("pushd %{buildroot}")
    print("if [ -d usr/lib ]; then")
    print("\tfind usr/lib -type f -printf \"/%h/%f\\n\" >> filelist.lst")
    print("fi")
    print("if [ -d usr/lib64 ]; then")
    print("\tfind usr/lib64 -type f -printf \"/%h/%f\\n\" >> filelist.lst")
    print("fi")
    print("if [ -d usr/bin ]; then")
    print("\tfind usr/bin -type f -printf \"/%h/%f\\n\" >> filelist.lst")
    print("fi")
    print("if [ -d usr/sbin ]; then")
    print("\tfind usr/sbin -type f -printf \"/%h/%f\\n\" >> filelist.lst")
    print("fi")
    print("popd")
    print("mv %{buildroot}/filelist.lst .")
    print("")
    print("%files -n python3-{name} -f filelist.lst".format(name=resp["info"]["name"]))
    # print("%{python3_sitelib}/*.egg-info/")
    # print("%{python3_sitelib}/" + resp["info"]["name"])
    if build_noarch:
        print("%dir %{python3_sitelib}/*")
    else:
        print("%dir %{python3_sitearch}/*")
    print("")
    print("%files help")
    print("%{_pkgdocdir}")
    print("")
    print("%changelog")
    date_str = datetime.date.today().strftime("%a %b %d %Y")
    print("* {today} Python_Bot <Python_Bot@openeuler.org>".format(today=date_str))
    print("- Package Spec generated")
    sys.stdout = tmp
    return build_req_list
if __name__ == "__main__":
    # Default rpmbuild root is ~/rpmbuild, the standard per-user tree.
    dft_root_path=os.path.join(str(Path.home()), "rpmbuild")
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--spec", help="Create spec file", action="store_true")
    parser.add_argument("-b", "--build", help="Build rpm package", action="store_true")
    parser.add_argument("-r", "--rootpath", help="Build rpm package in root path", type=str, default=dft_root_path)
    parser.add_argument("-d", "--download", help="Download source file indicated path", action="store_true")
    parser.add_argument("-p", "--path", help="indicated path to store files", type=str, default=os.getcwd())
    parser.add_argument("-j", "--json", help="Get Package JSON info", action="store_true")
    parser.add_argument("-o", "--output", help="Output to file", type=str, default="")
    parser.add_argument("pkg", type=str, help="The Python Module Name")
    args = parser.parse_args()
    # Fetch the pypi metadata first; every action below depends on it.
    response = get_pkg_json(args.pkg)
    if (args.spec):
        build_spec(response, args.output)
    if (args.build):
        ret = build_rpm(response, args.rootpath)
        if ret != "":
            # A non-empty result names the dependency we failed to install.
            print("BuildRequire : %s" % ret)
    if (args.download):
        download_source(response, args.path)
    if (args.json):
        store_json(response, args.pkg, args.path)
#!/usr/bin/python3
"""
This is a robot to do package upgrade automation
Expected process:
1. get URL to download updated version
2. Change Version to new one
3. Change Source or Source0 if needed
4. Update %changelog
5. try rpmbuild -bb (not yet)
6. fork on gitee
7. git clone, git add, git commit, git push (manually now)
8. PR on gitee
"""
from pyrpm.spec import Spec, replace_macros
import yaml
import argparse
import gitee
import sys
import subprocess
import os.path
import re
import datetime
def download_source_url(spec, o_ver, n_ver):
    """
    Download the source tarball from the Source/Source0 URL of the
    spec, with the old version replaced by the new one.
    Returns the local file name, or False when the URL is unusable.
    """
    source = replace_macros(spec.sources[0], spec).replace(o_ver, n_ver)
    # Fix: re.match only anchors at the beginning of the string, so an
    # unexpanded %{...} macro in the middle of the URL was missed; use
    # re.search to detect it anywhere.
    if re.search(r"%{.*?}", source):
        print("Extra macros in URL which failed to be expanded")
        return False
    elif source.startswith("http") or source.startswith("ftp"):
        fn = os.path.basename(source)
        subprocess.call(["curl", "-L", source, "-o", fn])
        return fn
    else:
        print("Not valid URL for Source code")
        return False
def download_upstream_url(gt, repo, n_ver):
    """
    Download the new source tarball based on the upstream metadata YAML
    of repo (fetched via the gitee client gt).  Only github upstreams
    are handled for now.
    Returns the local file name, or False on failure.
    """
    upstream_yaml = gt.get_yaml(repo)
    if not upstream_yaml:
        return False
    # Fix: PyYAML has no yaml.loads(); yaml.load() parses a string.
    rp_yaml = yaml.load(upstream_yaml, Loader=yaml.Loader)
    if rp_yaml["version_control"] == "github":
        url = "https://github.com/{rp}/archive/{nv}.tar.gz".format(rp=rp_yaml["src_repo"], nv=n_ver)
        fn = "{rp}.{nv}.tar.gz".format(rp=repo, nv=n_ver)
        subprocess.call(["curl", "-L", url, "-o", fn])
        return fn
    else:
        print("Handling {vc} is still under developing".format(vc=rp_yaml["version_control"]))
        return False
def create_spec(repo, spec_str, o_ver, n_ver, src_fn=None):
    """
    Create the new spec file <repo>.spec for the upgraded package.

    Release is reset to 0, the old version is replaced by the new one
    everywhere before %changelog, Source/Source0 may be pointed at
    src_fn, and a fresh changelog entry is prepended.
    """
    out = open(repo + ".spec", "w")
    in_changelog = False
    for line in spec_str.splitlines():
        if line.startswith("Release:"):
            out.write("Release:\t0\n")
            continue
        if line.startswith("Source:") or line.startswith("Source0:"):
            if src_fn:
                out.write("Source: {src_fn}\n".format(src_fn=src_fn).replace(o_ver, n_ver))
            else:
                out.write(line.replace(o_ver, n_ver) + "\n")
            continue
        # Inside %changelog the old entries must stay untouched.
        new_line = line if in_changelog else line.replace(o_ver, n_ver)
        out.write(new_line + "\n")
        if new_line.startswith("%changelog"):
            # Prepend the upgrade entry right under the %changelog tag.
            in_changelog = True
            today = datetime.date.today()
            out.write(today.strftime("* %a %b %d %Y SimpleUpdate Robot <tc@openeuler.org> - {ver}-0\n").format(ver=n_ver))
            out.write("- Update to version {ver}\n".format(ver=n_ver))
            out.write("\n")
    out.close()
if __name__ == "__main__":
    pars = argparse.ArgumentParser()
    pars.add_argument("pkg", type=str, help="The package to be upgraded")
    pars.add_argument("-o", "--old_version", type=str, help="Current upstream version of package")
    pars.add_argument("-n", "--new_version", type=str, help="New upstream version of package will be upgrade to")
    pars.add_argument("-s", "--create_spec", help="Create spec file", action="store_true")
    pars.add_argument("-d", "--download", help="Download upstream source code", action="store_true")
    pars.add_argument("-f", "--fork", help="fork src-openeuler repo into users", action="store_true")
    pars.add_argument("-c", "--clone", help="clone privatge repo to local", action="store_true")
    pars.add_argument("-p", "--PR", help="Create upgrade PR", action="store_true")
    args = pars.parse_args()
    my_gitee = gitee.Gitee()
    # The current spec is always fetched: versions and sources are
    # needed by the download and create_spec actions below.
    spec_string= my_gitee.get_spec(args.pkg)
    s_spec = Spec.from_string(spec_string)
    if args.fork:
        my_gitee.fork_repo(args.pkg)
    if args.clone:
        user=my_gitee.token["user"]
        subprocess.call(["git", "clone", "git@gitee.com:{user}/{pkg}".format(user=user, pkg=args.pkg)])
        # Work inside the fresh clone from here on.
        os.chdir(args.pkg)
    if args.download:
        # Prefer the Source URL from the spec; fall back to the
        # upstream metadata (e.g. a github archive) when that fails.
        source_file = download_source_url(s_spec, args.old_version, args.new_version)
        if source_file:
            print(source_file)
        else:
            source_file = download_upstream_url(my_gitee, args.pkg, args.new_version)
            if source_file:
                print(source_file)
            else:
                print("Failed to download the latest source code.")
                sys.exit(1)
    if args.create_spec:
        # Packages carrying local patches need human attention; bail out.
        if len(s_spec.patches) >= 1:
            print("I'm too naive to handle complicated package.")
            print("This package has multiple in-house patches.")
            sys.exit(1)
        create_spec(args.pkg, spec_string, args.old_version, args.new_version)
    if args.PR:
        my_gitee.create_pr(my_gitee.token["user"], args.pkg)
#!/usr/bin/python3
"""
This is a command line tool to create reminder list for TC member
"""
import urllib
import urllib.request
import urllib.parse
import argparse
import json
import sys
import os
import yaml
from pprint import pprint
from datetime import datetime
class Advisor(object):
    """
    Robot that surveys openeuler/community pull requests and reminds
    TC (Technical Committee) members of pending reviews.  A thin
    wrapper around the gitee.com web and v5 APIs.
    """
    def __init__(self):
        # Personal access token, stored as JSON in the user's home.
        self.secret = open(os.path.expanduser("~/.gitee_personal_token.json"), "r")
        self.token = json.load(self.secret)
        self.header = {"User-Agent":"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:50.0) Gecko/20100101 Firefox/50.0"}
        # Cached TC member list; filled lazily by get_tc_members().
        self.tc_members = None
        # Timestamp format used by the gitee v5 API.
        self.time_format = "%Y-%m-%dT%H:%M:%S%z"
    def get_json(self, url):
        """
        Return the object parsed from the remote JSON at url.
        """
        headers = self.header.copy()
        headers["Content-Type"] = "application/json;charset=UTF-8"
        req = urllib.request.Request(url = url,
                headers = headers,
                method = "GET")
        with urllib.request.urlopen(req) as u:
            resp = json.loads(u.read().decode("utf-8"))
        return resp
    def get_file(self, repo, path):
        """
        Get a raw file from a gitee repository (master branch).
        """
        url = "https://gitee.com/{repo}/raw/master/{path}".format(repo=repo, path=path)
        req = urllib.request.Request(url = url,
                headers = self.header,
                method = "GET")
        with urllib.request.urlopen(req) as u:
            resp = u.read()
        return resp
    def get_prs(self):
        """
        Get the list of open PRs of openeuler/community
        (up to 100, newest created first).
        """
        pulls_url = "https://gitee.com/api/v5/repos/openeuler/community/pulls"
        list_url = pulls_url + "?access_token={token}&state=open&sort=created&direction=desc&page=1&per_page=100"
        url = list_url.format(token=self.token["access_token"])
        return self.get_json(url)
    def get_pr_comments(self, number):
        """
        Get the comments of a specific PR (up to 100).
        """
        pulls_url = "https://gitee.com/api/v5/repos/openeuler/community/pulls"
        desc_url = pulls_url + "/{number}/comments?access_token={token}&page=1&per_page=100"
        url = desc_url.format(number=number, token=self.token["access_token"])
        return self.get_json(url)
    def get_tc_members(self):
        """
        Get (and cache) the list of current TC members from the
        sig/TC/OWNERS file.
        """
        # Fix: the original called adv.get_file(), relying on the
        # module-level instance that only exists when this file is run
        # as a script; use self instead.
        m = yaml.load(self.get_file("openeuler/community", "sig/TC/OWNERS"), Loader=yaml.Loader)
        self.tc_members = m["maintainers"]
        return m["maintainers"]
    def filter_out_tc(self, users):
        """
        Pick TC members out of the given user list.
        """
        if not self.tc_members:
            self.get_tc_members()
        return [x for x in self.tc_members if x in users]
if __name__ == "__main__":
    par = argparse.ArgumentParser()
    args = par.parse_args()
    adv = Advisor()
    PRs = adv.get_prs()
    # Reversed so we report the oldest PR first.
    PRs.reverse()
    for pr in PRs:
        commenters = []
        commenters.append(pr["user"]["login"])
        last_update = pr["updated_at"]
        print("URL: https://gitee.com/openeuler/community/pulls/{number}".format(number=pr["number"]))
        print("Title: " + pr["title"])
        comments = adv.get_pr_comments(pr["number"])
        # comments[0] is used as the latest activity time — presumably
        # the API returns comments newest first; verify against gitee.
        last_update = datetime.strptime(comments[0]["updated_at"], adv.time_format)
        comments.reverse()
        current_lgtm = 0
        current_approve = False
        for comment in comments:
            commenters.append(comment["user"]["login"])
            if comment["body"].startswith("new changes are detected"):
                # A force-push resets the review state.
                last_update = datetime.strptime(comment["updated_at"], adv.time_format)
                break # older comments are ignored
            elif comment["body"].startswith("***lgtm*** is added in this pull request"):
                current_lgtm = current_lgtm + 1
            elif comment["body"].startswith("***approved*** is added in this pull request"):
                current_approve = True
        tc = adv.filter_out_tc(commenters)
        # NOTE(review): age is computed against naive local time;
        # confirm the gitee timestamp timezone matches the host's.
        age = datetime.now() - last_update.replace(tzinfo=None)
        age_days = max(age.days, 0)
        print("Currently {num} days old".format(num=age_days))
        print("Currently involved TC members: " + ", ".join(tc))
        print("Currently has {num} /lgtm".format(num=current_lgtm))
        if current_approve:
            print("Currently /approve")
        print("")
#!/usr/bin/python3
"""
This is a simple script to query that contact person for specific package
"""
import urllib
import urllib.request
import argparse
import yaml
import re
# Useful default setting
headers = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW 64; rv:23.0) Gecko/20100101 Firefox/23.0'}
# known important information
# sigs.yaml lists every SIG and the repositories it manages.
sigs_url = "https://gitee.com/openeuler/community/raw/master/sig/sigs.yaml"
sigs_owner_url_template = "https://gitee.com/openeuler/community/raw/master/sig/{signame}/OWNERS"
specfile_url_template = "https://gitee.com/src-openeuler/{package}/raw/master/{specfile}"
# Maps packages whose spec file lives at a non-default path.
specfile_exception_url = "https://gitee.com/shinwell_hu/openEuler-Advisor/raw/master/helper/specfile_exceptions.yaml"
def get_gitee(url):
    """
    Fetch a gitee URL and return the body decoded as UTF-8.
    """
    req = urllib.request.Request(url=url, headers=headers)
    # Close the HTTP response deterministically instead of leaking it.
    with urllib.request.urlopen(req) as resp:
        return resp.read().decode("utf-8")
def get_sigs():
    """
    Download and parse sig/sigs.yaml from the community repository.
    """
    req = urllib.request.Request(url=sigs_url, headers=headers)
    # Close the HTTP response deterministically instead of leaking it.
    with urllib.request.urlopen(req) as resp:
        return yaml.load(resp.read().decode("utf-8"), Loader=yaml.Loader)
def get_spec(pkg, specfile):
    """
    Fetch the raw spec file of pkg from src-openeuler and return it as
    a UTF-8 string.
    """
    url = specfile_url_template.format(package=pkg, specfile=specfile)
    req = urllib.request.Request(url=url, headers=headers)
    # Close the HTTP response deterministically instead of leaking it.
    with urllib.request.urlopen(req) as resp:
        return resp.read().decode("utf-8")
def get_spec_exception():
    """
    Download and parse the specfile-exceptions YAML, which maps
    packages to the non-default location of their spec file.
    """
    req = urllib.request.Request(url=specfile_exception_url, headers=headers)
    # Close the HTTP response deterministically instead of leaking it.
    with urllib.request.urlopen(req) as resp:
        return yaml.load(resp.read().decode("utf-8"), Loader=yaml.Loader)
def get_manager_sig(pkg):
    """
    Return the name of the SIG that manages pkg, or None when the
    package is not listed in sigs.yaml.
    """
    target = "src-openeuler/" + pkg
    for sig in get_sigs()["sigs"]:
        if target in sig["repositories"]:
            return sig["name"]
def get_sig_owners(sig_name):
    """
    Return the maintainer list from the SIG's OWNERS file.
    """
    owners_yaml = get_gitee(sigs_owner_url_template.format(signame=sig_name))
    return yaml.load(owners_yaml, Loader=yaml.Loader)["maintainers"]
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("pkg", type=str, help="The Package to be Queried")
    args = parser.parse_args()
    s = get_manager_sig(args.pkg)
    o = get_sig_owners(s)
    print("SIG Owner:")
    for owner in o:
        print("\t"+owner)
    # Some packages keep their spec at a non-default path; the
    # exceptions file maps package -> dir/file.
    exp = get_spec_exception()
    if args.pkg in exp:
        dir_name = exp[args.pkg]["dir"]
        file_name = exp[args.pkg]["file"]
        specurl = specfile_url_template.format(package=args.pkg, specfile=dir_name + "/" + file_name)
    else:
        specurl = specfile_url_template.format(package=args.pkg, specfile=args.pkg+".spec")
    spec = get_gitee(specurl)
    # Harvest contributor names/emails from %changelog entries, which
    # look like "* Mon Jan 01 2020 Name <mail> - version".
    in_changelog = False
    emails = set()
    for line in spec.splitlines():
        if line.startswith("%changelog"):
            in_changelog = True
        if line.startswith("*") and in_changelog:
            m = re.match(r".*\d\d\d\d (.*) .*", line)
            if m is None:
                # Unparsable entry: keep the raw line so no one is lost.
                emails.add(line)
            else:
                n = m[1].split("<")
                if len(n) == 1:
                    emails.add(n[0])
                else:
                    # Reattach the '<' consumed by the split above.
                    emails.add(n[0].strip() + " <" + n[1].strip())
    print("Package Contributor:")
    for email in emails:
        print("\t"+email)
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册