Unverified commit 060ddb5f authored by Ansgar Burchardt

Merge remote-tracking branch 'waldi/byhash-symlink'

@@ -45,7 +45,7 @@ import commands
 from sqlalchemy.orm import object_session
 from daklib import utils, daklog
-from daklib.regexes import re_gensubrelease, re_includeinrelease
+from daklib.regexes import re_gensubrelease, re_includeinrelease_byhash, re_includeinrelease_plain
 from daklib.dak_exceptions import *
 from daklib.dbconn import *
 from daklib.config import Config
@@ -199,7 +199,7 @@ class ReleaseWriter(object):
         # Update the hashfile table with new or updated files
         for filename in fileinfo:
-            if not os.path.exists(filename):
+            if not os.path.lexists(filename):
                 # probably an uncompressed index we didn't generate
                 continue
             byhashdir = os.path.join(os.path.dirname(filename), 'by-hash')
@@ -227,20 +227,10 @@ class ReleaseWriter(object):
     def _make_byhash_links(self, fileinfo, hashes):
         # Create hardlinks in by-hash directories
         for filename in fileinfo:
-            if not os.path.exists(filename):
+            if not os.path.lexists(filename):
                 # probably an uncompressed index we didn't generate
                 continue
 
-            for h in hashes:
-                field = h.release_field
-                hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
-                # if the hash is known to exist, re-use the old file
-                if os.path.exists(hashfile):
-                    os.unlink(filename)
-                    os.link(hashfile, filename)
-                    break
-
             for h in hashes:
                 field = h.release_field
                 hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
@@ -255,6 +245,23 @@ class ReleaseWriter(object):
                 if exc.errno != errno.EEXIST:
                     raise
 
+    def _make_byhash_base_symlink(self, fileinfo, hashes):
+        # Create symlinks to files in by-hash directories
+        for filename in fileinfo:
+            if not os.path.lexists(filename):
+                # probably an uncompressed index we didn't generate
+                continue
+
+            besthash = hashes[-1]
+            field = besthash.release_field
+            hashfilebase = os.path.join('by-hash', field, fileinfo[filename][field])
+            hashfile = os.path.join(os.path.dirname(filename), hashfilebase)
+
+            assert os.path.exists(hashfile), 'by-hash file {} is missing'.format(hashfile)
+
+            os.unlink(filename)
+            os.symlink(hashfilebase, filename)
+
     def generate_release_files(self):
         """
         Generate Release files for the given suite
@@ -367,20 +374,25 @@ class ReleaseWriter(object):
         hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]
 
         fileinfo = {}
+        fileinfo_byhash = {}
 
         uncompnotseen = {}
 
         for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
             for entry in filenames:
-                # Skip things we don't want to include
-                if not re_includeinrelease.match(entry):
-                    continue
-
                 if dirpath == '.' and entry in ["Release", "Release.gpg", "InRelease"]:
                     continue
 
                 filename = os.path.join(dirpath.lstrip('./'), entry)
-                fileinfo[filename] = {}
+
+                if re_includeinrelease_byhash.match(entry):
+                    fileinfo[filename] = fileinfo_byhash[filename] = {}
+                elif re_includeinrelease_plain.match(entry):
+                    fileinfo[filename] = {}
+                # Skip things we don't want to include
+                else:
+                    continue
+
                 contents = open(filename, 'r').read()
 
                 # If we find a file for which we have a compressed version and
@@ -423,8 +435,9 @@ class ReleaseWriter(object):
         out.close()
         os.rename(outfile + '.new', outfile)
 
-        self._update_hashfile_table(session, fileinfo, hashes)
-        self._make_byhash_links(fileinfo, hashes)
+        self._update_hashfile_table(session, fileinfo_byhash, hashes)
+        self._make_byhash_links(fileinfo_byhash, hashes)
+        self._make_byhash_base_symlink(fileinfo_byhash, hashes)
 
         sign_release_dir(suite, os.path.dirname(outfile))
......
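
The two details the new method depends on are the switch from os.path.exists() to os.path.lexists() and the relative target passed to os.symlink(). A minimal standalone sketch of both (illustrative only, not dak code; the paths and digest below are made up):

# Standalone sketch: why lexists() replaces exists(), and how the relative
# by-hash symlink is laid out.  Paths and digest are placeholders.
import os
import tempfile

tree = tempfile.mkdtemp()
indexdir = os.path.join(tree, 'dists', 'unstable', 'main', 'binary-amd64')
byhashdir = os.path.join(indexdir, 'by-hash', 'SHA256')
os.makedirs(byhashdir)

# Pretend this digest-named file already holds the real index contents.
digest = '0123abcd'  # placeholder, not a real SHA256 digest
hashfile = os.path.join(byhashdir, digest)
open(hashfile, 'w').write('Package: example\n')

# The base name becomes a *relative* symlink into by-hash/, in the spirit of
# _make_byhash_base_symlink(), so the link keeps working no matter where the
# archive tree is mounted or mirrored.
packages = os.path.join(indexdir, 'Packages')
os.symlink(os.path.join('by-hash', 'SHA256', digest), packages)
print(os.path.realpath(packages) == os.path.realpath(hashfile))  # True

# exists() follows symlinks, lexists() does not.  Once the target is pruned,
# only lexists() still reports the (now dangling) symlink -- which is why the
# checks in the diff move from os.path.exists to os.path.lexists.
os.unlink(hashfile)
print(os.path.exists(packages))   # False: the target is gone
print(os.path.lexists(packages))  # True: the symlink itself is still there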
@@ -103,7 +103,8 @@ re_parse_lintian = re.compile(r"^(?P<level>W|E|O): (?P<package>.*?): (?P<tag>[^
 
 # in generate-releases
 re_gensubrelease = re.compile (r".*/(binary-[0-9a-z-]+|source)$")
-re_includeinrelease = re.compile (r"(Translation-[a-zA-Z_]+\.(?:bz2|xz)|Contents-[0-9a-z-]+.gz|Index|Packages(.gz|.bz2|.xz)?|Sources(.gz|.bz2|.xz)?|Components-[0-9a-z-]+.yml(.gz|.xz)|icons-[0-9x-]+.tar(.gz|.xz)|MD5SUMS|SHA256SUMS|Release)$")
+re_includeinrelease_byhash = re.compile (r"(Translation-[a-zA-Z_]+\.(?:bz2|xz)|Contents-[0-9a-z-]+.gz|Index|Packages(.gz|.bz2|.xz)?|Sources(.gz|.bz2|.xz)?|Components-[0-9a-z-]+.yml(.gz|.xz)|icons-[0-9x-]+.tar(.gz|.xz)|Release)$")
+re_includeinrelease_plain = re.compile (r"(MD5SUMS|SHA256SUMS)$")
 
 # in generate_index_diffs
 re_includeinpdiff = re.compile(r"(Translation-[a-zA-Z_]+\.(?:bz2|xz))")
......
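
For reference, a quick standalone check of what the two new patterns select: by-hash candidates versus the checksum files that stay as plain Release entries. The filenames are typical examples, not taken from a specific suite; this is not dak code.

# Standalone sketch: classify index filenames with the split regexes.
import re

re_includeinrelease_byhash = re.compile(r"(Translation-[a-zA-Z_]+\.(?:bz2|xz)|Contents-[0-9a-z-]+.gz|Index|Packages(.gz|.bz2|.xz)?|Sources(.gz|.bz2|.xz)?|Components-[0-9a-z-]+.yml(.gz|.xz)|icons-[0-9x-]+.tar(.gz|.xz)|Release)$")
re_includeinrelease_plain = re.compile(r"(MD5SUMS|SHA256SUMS)$")

for entry in ["Packages.xz", "Sources.gz", "Contents-amd64.gz",
              "Translation-en.bz2", "MD5SUMS", "SHA256SUMS", "Release.gpg"]:
    if re_includeinrelease_byhash.match(entry):
        kind = "listed in Release and stored under by-hash/"
    elif re_includeinrelease_plain.match(entry):
        kind = "listed in Release only (no by-hash entry)"
    else:
        kind = "skipped"
    print('%-22s %s' % (entry, kind))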