Commit 1bcac7a3 authored by Anthony Towns

support for ~ in versions; use _ as an internal separator instead

Parent 2dd958f4
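Why the swap: once "~" is accepted in version strings (see the re_valid_version change below), it can no longer serve as dak's internal cache-key separator, while "_" never appears in Debian package names or versions (as the updated Assumptions note at the end of this diff records) and so splits back unambiguously. A minimal illustration of the old collision, using made-up values rather than anything from this commit:

    # '~' is both a legal version character and the old separator, so the
    # joined key cannot be split back into its two fields; '_' can.
    old_key = "~".join(("dak", "1.0~rc1"))   # "dak~1.0~rc1"
    print(old_key.split("~"))                # ['dak', '1.0', 'rc1'] -- ambiguous
    new_key = "_".join(("dak", "1.0~rc1"))   # "dak_1.0~rc1"
    print(new_key.split("_"))                # ['dak', '1.0~rc1'] -- round-trips cleanly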
@@ -77,10 +77,10 @@ def get_id (package, version, architecture):
 ql = q.getresult()
 if not ql:
-daklib.utils.warn("Couldn't find '%s~%s~%s'." % (package, version, architecture))
+daklib.utils.warn("Couldn't find '%s_%s_%s'." % (package, version, architecture))
 return None
 if len(ql) > 1:
-daklib.utils.warn("Found more than one match for '%s~%s~%s'." % (package, version, architecture))
+daklib.utils.warn("Found more than one match for '%s_%s_%s'." % (package, version, architecture))
 return None
 id = ql[0][0]
 return id
@@ -178,13 +178,13 @@ def process_file (file, suite, action):
 # Take action
 if action == "add":
 if assoication_id:
-daklib.utils.warn("'%s~%s~%s' already exists in suite %s." % (package, version, architecture, suite))
+daklib.utils.warn("'%s_%s_%s' already exists in suite %s." % (package, version, architecture, suite))
 continue
 else:
 q = projectB.query("INSERT INTO src_associations (suite, source) VALUES (%s, %s)" % (suite_id, id))
 elif action == "remove":
 if assoication_id == None:
-daklib.utils.warn("'%s~%s~%s' doesn't exist in suite %s." % (package, version, architecture, suite))
+daklib.utils.warn("'%s_%s_%s' doesn't exist in suite %s." % (package, version, architecture, suite))
 continue
 else:
 q = projectB.query("DELETE FROM src_associations WHERE id = %s" % (assoication_id))
@@ -199,13 +199,13 @@ def process_file (file, suite, action):
 # Take action
 if action == "add":
 if assoication_id:
-daklib.utils.warn("'%s~%s~%s' already exists in suite %s." % (package, version, architecture, suite))
+daklib.utils.warn("'%s_%s_%s' already exists in suite %s." % (package, version, architecture, suite))
 continue
 else:
 q = projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%s, %s)" % (suite_id, id))
 elif action == "remove":
 if assoication_id == None:
-daklib.utils.warn("'%s~%s~%s' doesn't exist in suite %s." % (package, version, architecture, suite))
+daklib.utils.warn("'%s_%s_%s' doesn't exist in suite %s." % (package, version, architecture, suite))
 continue
 else:
 q = projectB.query("DELETE FROM bin_associations WHERE id = %s" % (assoication_id))
...
@@ -202,7 +202,7 @@ def do_dubious_nbs(dubious_nbs):
 def do_obsolete_source(duplicate_bins, bin2source):
 obsolete = {}
 for key in duplicate_bins.keys():
-(source_a, source_b) = key.split('~')
+(source_a, source_b) = key.split('_')
 for source in [ source_a, source_b ]:
 if not obsolete.has_key(source):
 if not source_binaries.has_key(source):
@@ -340,7 +340,7 @@ def main ():
 if bin_pkgs.has_key(binary):
 key_list = [ source, bin_pkgs[binary] ]
 key_list.sort()
-key = '~'.join(key_list)
+key = '_'.join(key_list)
 duplicate_bins.setdefault(key, [])
 duplicate_bins[key].append(binary)
 bin_pkgs[binary] = source
@@ -390,7 +390,7 @@ def main ():
 if previous_source != source:
 key_list = [ source, previous_source ]
 key_list.sort()
-key = '~'.join(key_list)
+key = '_'.join(key_list)
 duplicate_bins.setdefault(key, [])
 if package not in duplicate_bins[key]:
 duplicate_bins[key].append(package)
@@ -445,7 +445,7 @@ def main ():
 keys = duplicate_bins.keys()
 keys.sort()
 for key in keys:
-(source_a, source_b) = key.split("~")
+(source_a, source_b) = key.split("_")
 print " o %s & %s => %s" % (source_a, source_b, ", ".join(duplicate_bins[key]))
 print
...
@@ -307,7 +307,7 @@ def get_location_path(directory):
 def get_or_set_files_id (filename, size, md5sum, location_id):
 global files_id_cache, files_id_serial, files_query_cache
-cache_key = "~".join((filename, size, md5sum, repr(location_id)))
+cache_key = "_".join((filename, size, md5sum, repr(location_id)))
 if not files_id_cache.has_key(cache_key):
 files_id_serial += 1
 files_query_cache.write("%d\t%s\t%s\t%s\t%d\t\\N\n" % (files_id_serial, filename, size, md5sum, location_id))
@@ -363,7 +363,7 @@ def process_sources (filename, suite, component, archive):
 (md5sum, size, filename) = line.strip().split()
 # Don't duplicate .orig.tar.gz's
 if filename.endswith(".orig.tar.gz"):
-cache_key = "%s~%s~%s" % (filename, size, md5sum)
+cache_key = "%s_%s_%s" % (filename, size, md5sum)
 if orig_tar_gz_cache.has_key(cache_key):
 id = orig_tar_gz_cache[cache_key]
 else:
@@ -376,9 +376,9 @@ def process_sources (filename, suite, component, archive):
 if filename.endswith(".dsc"):
 files_id = id
 filename = directory + package + '_' + no_epoch_version + '.dsc'
-cache_key = "%s~%s" % (package, version)
+cache_key = "%s_%s" % (package, version)
 if not source_cache.has_key(cache_key):
-nasty_key = "%s~%s" % (package, version)
+nasty_key = "%s_%s" % (package, version)
 source_id_serial += 1
 if not source_cache_for_binaries.has_key(nasty_key):
 source_cache_for_binaries[nasty_key] = source_id_serial
@@ -438,16 +438,16 @@ def process_packages (filename, suite, component, archive):
 filename = poolify (filename, location)
 if architecture == "all":
 filename = re_arch_from_filename.sub("binary-all", filename)
-cache_key = "%s~%s" % (source, source_version)
+cache_key = "%s_%s" % (source, source_version)
 source_id = source_cache_for_binaries.get(cache_key, None)
 size = Scanner.Section["size"]
 md5sum = Scanner.Section["md5sum"]
 files_id = get_or_set_files_id (filename, size, md5sum, location_id)
 type = "deb"; # FIXME
-cache_key = "%s~%s~%s~%d~%d~%d~%d" % (package, version, repr(source_id), architecture_id, location_id, files_id, suite_id)
+cache_key = "%s_%s_%s_%d_%d_%d_%d" % (package, version, repr(source_id), architecture_id, location_id, files_id, suite_id)
 if not arch_all_cache.has_key(cache_key):
 arch_all_cache[cache_key] = 1
-cache_key = "%s~%s~%s~%d" % (package, version, repr(source_id), architecture_id)
+cache_key = "%s_%s_%s_%d" % (package, version, repr(source_id), architecture_id)
 if not binary_cache.has_key(cache_key):
 if not source_id:
 source_id = "\N"
...
@@ -139,7 +139,7 @@ def main():
 lhs = split[0]
 maintainer = fix_maintainer(" ".join(split[1:]))
 if lhs.find('~') != -1:
-(package, version) = lhs.split('~')
+(package, version) = lhs.split('~', 1)
 else:
 package = lhs
 version = '*'
...
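The hunk above keeps "~" as the separator between package and version in that particular input, so the fix is to split only on the first "~"; any tilde later in the version then stays intact. A quick sketch with a made-up value:

    lhs = "dak~1.0~rc1"
    print(lhs.split('~'))     # ['dak', '1.0', 'rc1'] -- old call, too many values to unpack
    print(lhs.split('~', 1))  # ['dak', '1.0~rc1']    -- split at the first '~' only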
@@ -39,7 +39,7 @@ from types import *
 ################################################################################
-re_valid_version = re.compile(r"^([0-9]+:)?[0-9A-Za-z\.\-\+:]+$")
+re_valid_version = re.compile(r"^([0-9]+:)?[0-9A-Za-z\.\-\+:~]+$")
 re_valid_pkg_name = re.compile(r"^[\dA-Za-z][\dA-Za-z\+\-\.]+$")
 re_changelog_versions = re.compile(r"^\w[-+0-9a-z.]+ \([^\(\) \t]+\)")
 re_strip_revision = re.compile(r"-([^-]+)$")
...
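The re_valid_version change simply adds "~" to the accepted character class, so tilde pre-release versions pass validation. A small before/after check, with the patterns copied from the hunk above and a made-up version string:

    import re

    old_re = re.compile(r"^([0-9]+:)?[0-9A-Za-z\.\-\+:]+$")
    new_re = re.compile(r"^([0-9]+:)?[0-9A-Za-z\.\-\+:~]+$")

    version = "1:2.6.18~rc4-1"
    print(bool(old_re.match(version)))  # False -- '~' was rejected before this commit
    print(bool(new_re.match(version)))  # True  -- '~' is now a valid version character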
@@ -73,7 +73,7 @@ def daily_install_stats():
 stats = {}
 file = daklib.utils.open_file("2001-11")
 for line in file.readlines():
-split = line.strip().split('~')
+split = line.strip().split('|')
 program = split[1]
 if program != "katie" and program != "process-accepted":
 continue
...
@@ -185,7 +185,7 @@ def get_component_id (component):
 def get_location_id (location, component, archive):
 global location_id_cache
-cache_key = location + '~' + component + '~' + location
+cache_key = location + '_' + component + '_' + location
 if location_id_cache.has_key(cache_key):
 return location_id_cache[cache_key]
@@ -208,7 +208,7 @@ def get_location_id (location, component, archive):
 def get_source_id (source, version):
 global source_id_cache
-cache_key = source + '~' + version + '~'
+cache_key = source + '_' + version + '_'
 if source_id_cache.has_key(cache_key):
 return source_id_cache[cache_key]
@@ -278,7 +278,7 @@ def get_or_set_fingerprint_id (fingerprint):
 def get_files_id (filename, size, md5sum, location_id):
 global files_id_cache
-cache_key = "%s~%d" % (filename, location_id)
+cache_key = "%s_%d" % (filename, location_id)
 if files_id_cache.has_key(cache_key):
 return files_id_cache[cache_key]
@@ -331,7 +331,7 @@ def set_files_id (filename, size, md5sum, location_id):
 ##
 ##q = projectB.query("SELECT id FROM files WHERE id = currval('files_id_seq')")
 ##ql = q.getresult()[0]
-##cache_key = "%s~%d" % (filename, location_id)
+##cache_key = "%s_%d" % (filename, location_id)
 ##files_id_cache[cache_key] = ql[0]
 ##return files_id_cache[cache_key]
...
@@ -2,7 +2,7 @@ Assumptions
 -----------
 o Usernames do not contain ",". [dak import-users-from-passwd]
-o Package names do not contain "~" [dak cruft-report]
+o Package names and versions do not contain "_" [dak cruft-report]
 o Suites are case-independent in conf files, but forced lower case in use. [dak make-suite-file-list]
 o Components are case-sensitive. [dak make-suite-file-list]
 o There's always source of some sort
...