提交 c16123ef 编写于 作者: J Joerg Jaspert

Merge commit 'lamby/master' into merge

* commit 'lamby/master': (23 commits)
  'as' is a reserved word in Python 2.6.
  Don't shadow 'file' builtin in daklib.queue.
  Use 4 spaces for indentation.
  Correct code to match documentation of Binary.check_utf8_package
  Remove unused 'time' import.
  Correct isinstance call; DictType is not an actual type.
  dak's "Dict" is a synonym of the 'dict' builtin
  Use foo.iteritems() to avoid reentrancy with foo[k].
  Don't shadow 'map' builtin in daklib.utils.TemplateSubst
  Update comment about our monkeypatching of commands.getstatusoutput.
  Remove duplicated 'commands' import.
  Don't use deprecated string module.
  Remove unused imports from daklib.queue.
  Remove shadowing builtins in daklib.queue
  Remove daklib's now-unused extension functionality.
  Remove shadowing builtins in daklib.dbconn.
  Don't shadow 'dir' builtin in daklib.utils.get_changes_files
  We don't use re_is_orig_source in daklib.utils anymore.
  Don't shadow "dir" builtin.
  Fix uid references in DM upload error codepaths.
  ...
Signed-off-by: Joerg Jaspert <joerg@debian.org>
......@@ -54,10 +54,6 @@ Options = None #: Parsed CommandLine arguments
################################################################################
def Dict(**dict): return dict
################################################################################
def usage (exit_code=0):
print """Usage: dak make-suite-file-list [OPTION]
Write out file lists suitable for use with apt-ftparchive.
......@@ -359,7 +355,7 @@ SELECT s.id, s.source, 'source', s.version, l.path, f.filename, c.name, f.id,
# 'id' comes from either 'binaries' or 'source', so it's not unique
unique_id += 1
packages[unique_id] = Dict(sourceid=sourceid, pkg=pkg, arch=arch, version=version,
packages[unique_id] = dict(sourceid=sourceid, pkg=pkg, arch=arch, version=version,
path=path, filename=filename,
component=component, file_id=file_id,
suite=suite, filetype = filetype)
......
......@@ -152,9 +152,9 @@ def advisory_info():
svs = srcverarches.keys()
svs.sort()
for sv in svs:
as = srcverarches[sv].keys()
as.sort()
print " %s (%s)" % (sv, ", ".join(as))
as_ = srcverarches[sv].keys()
as_.sort()
print " %s (%s)" % (sv, ", ".join(as_))
def prompt(opts, default):
p = ""
......
......@@ -248,12 +248,16 @@ class Binary(object):
except:
print >> sys.stderr, "E: %s has non-unicode filename: %s" % (package,tarinfo.name)
result = True
except:
traceback.print_exc()
result = False
os.chdir(cwd)
return result
__all__.append('Binary')
def copy_temporary_contents(package, version, archname, deb, reject, session=None):
......
......@@ -29,7 +29,6 @@ Changes class for dak
import os
import stat
import time
import datetime
from cPickle import Unpickler, Pickler
......
......@@ -66,10 +66,10 @@ class DebVersion(sqltypes.Text):
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version == "0.5":
from sqlalchemy.databases import postgres
postgres.ischema_names['debversion'] = DebVersion
from sqlalchemy.databases import postgres
postgres.ischema_names['debversion'] = DebVersion
else:
raise Exception("dak isn't ported to SQLA versions != 0.5 yet. See daklib/dbconn.py")
raise Exception("dak isn't ported to SQLA versions != 0.5 yet. See daklib/dbconn.py")
################################################################################
......@@ -286,12 +286,12 @@ def get_suites_binary_in(package, session=None):
__all__.append('get_suites_binary_in')
@session_wrapper
def get_binary_from_id(id, session=None):
def get_binary_from_id(binary_id, session=None):
"""
Returns DBBinary object for given C{id}
@type id: int
@param id: Id of the required binary
@type binary_id: int
@param binary_id: Id of the required binary
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
......@@ -301,7 +301,7 @@ def get_binary_from_id(id, session=None):
@return: DBBinary object for the given binary (None if not present)
"""
q = session.query(DBBinary).filter_by(binary_id=id)
q = session.query(DBBinary).filter_by(binary_id=binary_id)
try:
return q.one()
......@@ -769,7 +769,7 @@ def check_poolfile(filename, filesize, md5sum, location_id, session=None):
ret = (False, None)
else:
obj = q.one()
if obj.md5sum != md5sum or obj.filesize != filesize:
if obj.md5sum != md5sum or obj.filesize != int(filesize):
ret = (False, obj)
if ret is None:
......@@ -944,8 +944,8 @@ class Keyring(object):
def __repr__(self):
return '<Keyring %s>' % self.keyring_name
def de_escape_gpg_str(self, str):
esclist = re.split(r'(\\x..)', str)
def de_escape_gpg_str(self, txt):
    """Decode GPG-style '\\xNN' escape sequences in txt back to characters."""
    # Splitting on a capturing group keeps the escape tokens themselves
    # at the odd indices of the resulting list.
    pieces = re.split(r'(\\x..)', txt)
    decoded = []
    for idx, piece in enumerate(pieces):
        if idx % 2:
            # '\xNN' -> the character whose code is the hex value NN.
            decoded.append("%c" % int(piece[2:], 16))
        else:
            decoded.append(piece)
    return "".join(decoded)
......@@ -1444,13 +1444,13 @@ def insert_pending_content_paths(package, fullpaths, session=None):
# Insert paths
pathcache = {}
for fullpath in fullpaths:
(path, file) = os.path.split(fullpath)
(path, filename) = os.path.split(fullpath)
if path.startswith( "./" ):
path = path[2:]
filepath_id = get_or_set_contents_path_id(path, session)
filename_id = get_or_set_contents_file_id(file, session)
filename_id = get_or_set_contents_file_id(filename, session)
pathcache[fullpath] = (filepath_id, filename_id)
......
#!/usr/bin/env python
"""
Utility functions for extensions
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2008 Anthony Towns <ajt@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Registry of wrapper functions, keyed by "module:function", that should
# replace the stock dak functions when an extension is initialised.
dak_functions_to_replace = {}

# Original dak functions that have been replaced, keyed by function name;
# filled in by init() so replacements can delegate to the original.
dak_replaced_functions = {}

def replace_dak_function(module, name):
    """
    Decorator to make a function replace a standard dak function
    in a given module.

    A wrapper around the decorated function is recorded in
    dak_functions_to_replace under "module:name"; the wrapper calls the
    decorated function with the original implementation (recorded later
    by init()) prepended to its arguments.  The decorated function
    itself is returned unchanged.

    @type module: string
    @param module: name of module where replaced function is in

    @type name: string
    @param name: name of the function to replace

    @raise Exception: if the same module:name pair is registered twice
    """
    def x(f):
        def myfunc(*a, **kw):
            # Delegate to the replacement, passing the original function
            # as the first argument.
            # NOTE(review): the replacement's return value is deliberately
            # not propagated, matching the original behaviour.
            f(dak_replaced_functions[name], *a, **kw)
        myfunc.__name__ = f.__name__
        myfunc.__doc__ = f.__doc__
        myfunc.__dict__.update(f.__dict__)

        fnname = "%s:%s" % (module, name)
        if fnname in dak_functions_to_replace:
            # raise via call form: valid on both Python 2 and 3 (the old
            # "raise Exception, ..." statement form is Python-2-only).
            raise Exception(
                "%s in %s already marked to be replaced" % (name, module))
        # Reuse fnname rather than rebuilding the key (was duplicated).
        dak_functions_to_replace[fnname] = myfunc
        return f
    return x
################################################################################
def init(name, module, userext):
    """
    Install every replacement registered for module *name* into *module*.

    The original function objects are stashed in dak_replaced_functions
    so the wrappers created by replace_dak_function() can delegate to
    them.  userext is accepted for interface compatibility but is not
    used here.
    """
    global dak_replaced_functions

    # This bit should be done automatically too
    dak_replaced_functions = {}
    for key, replacement in dak_functions_to_replace.iteritems():
        modname, funcname = key.split(":", 1)
        if funcname and modname == name:
            # Remember the original, then swap in the replacement.
            dak_replaced_functions[funcname] = module.__dict__[funcname]
            module.__dict__[funcname] = replacement
......@@ -28,7 +28,6 @@ Queue utility functions for dak
import errno
import os
import pg
import stat
import sys
import time
......@@ -38,7 +37,6 @@ import utils
import commands
import shutil
import textwrap
import tempfile
from types import *
import yaml
......@@ -298,7 +296,7 @@ class Upload(object):
# If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
if not self.pkg.changes.has_key("architecture") or not \
isinstance(self.pkg.changes["architecture"], DictType):
isinstance(self.pkg.changes["architecture"], dict):
self.pkg.changes["architecture"] = { "Unknown" : "" }
# and maintainer2047 may not exist.
......@@ -408,7 +406,7 @@ class Upload(object):
fix_maintainer (self.pkg.changes["maintainer"])
except ParseMaintError, msg:
self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
% (filename, changes["maintainer"], msg))
% (filename, self.pkg.changes["maintainer"], msg))
# ...likewise for the Changed-By: field if it exists.
try:
......@@ -755,7 +753,7 @@ class Upload(object):
# Validate the component
if not get_component(entry["component"], session):
self.rejects.append("file '%s' has unknown component '%s'." % (f, component))
self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
return
# See if the package is NEW
......@@ -770,7 +768,7 @@ class Upload(object):
location = cnf["Dir::Pool"]
l = get_location(location, entry["component"], archive, session)
if l is None:
self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
entry["location id"] = -1
else:
entry["location id"] = l.location_id
......@@ -1565,10 +1563,10 @@ class Upload(object):
rej = False
for f in self.pkg.files.keys():
if self.pkg.files[f].has_key("byhand"):
self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
rej = True
if self.pkg.files[f].has_key("new"):
self.rejects.append("%s may not upload NEW file %s" % (uid, f))
self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
rej = True
if rej:
......@@ -1978,14 +1976,14 @@ distribution."""
###########################################################################
def remove(self, dir=None):
def remove(self, from_dir=None):
"""
Used (for instance) in p-u to remove the package from unchecked
"""
if dir is None:
if from_dir is None:
os.chdir(self.pkg.directory)
else:
os.chdir(dir)
os.chdir(from_dir)
for f in self.pkg.files.keys():
os.unlink(f)
......@@ -2150,7 +2148,7 @@ distribution."""
return 0
################################################################################
def in_override_p(self, package, component, suite, binary_type, file, session):
def in_override_p(self, package, component, suite, binary_type, filename, session):
"""
Check if a package already has override entries in the DB
......@@ -2166,8 +2164,8 @@ distribution."""
@type binary_type: string
@param binary_type: type of the package
@type file: string
@param file: filename we check
@type filename: string
@param filename: filename we check
@return: the database result. But noone cares anyway.
......@@ -2193,8 +2191,8 @@ distribution."""
# Remember the section and priority so we can check them later if appropriate
if len(result) > 0:
result = result[0]
self.pkg.files[file]["override section"] = result.section.section
self.pkg.files[file]["override priority"] = result.priority.priority
self.pkg.files[filename]["override section"] = result.section.section
self.pkg.files[filename]["override priority"] = result.priority.priority
return result
return None
......@@ -2222,13 +2220,13 @@ distribution."""
################################################################################
def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
"""
@type sv_list: list
@param sv_list: list of (suite, version) tuples to check
@type file: string
@param file: XXX
@type filename: string
@param filename: XXX
@type new_version: string
@param new_version: XXX
......@@ -2253,7 +2251,7 @@ distribution."""
vercmp = apt_pkg.VersionCompare(new_version, existent_version)
if suite in must_be_newer_than and sourceful and vercmp < 1:
self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
if suite in must_be_older_than and vercmp > -1:
cansave = 0
......@@ -2286,7 +2284,7 @@ distribution."""
self.rejects.append("Won't propogate NEW packages.")
elif apt_pkg.VersionCompare(new_version, add_version) < 0:
# propogation would be redundant. no need to reject though.
self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
cansave = 1
elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
apt_pkg.VersionCompare(add_version, target_version) >= 0:
......@@ -2297,29 +2295,29 @@ distribution."""
cansave = 1
if not cansave:
self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
################################################################################
def check_binary_against_db(self, file, session):
def check_binary_against_db(self, filename, session):
# Ensure version is sane
q = session.query(BinAssociation)
q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
file, self.pkg.files[file]["version"], sourceful=False)
filename, self.pkg.files[filename]["version"], sourceful=False)
# Check for any existing copies of the file
q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
q = q.filter_by(version=self.pkg.files[file]["version"])
q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])
q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
q = q.filter_by(version=self.pkg.files[filename]["version"])
q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
if q.count() > 0:
self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
################################################################################
def check_source_against_db(self, file, session):
def check_source_against_db(self, filename, session):
"""
"""
source = self.pkg.dsc.get("source")
......@@ -2330,10 +2328,10 @@ distribution."""
q = q.join(DBSource).filter(DBSource.source==source)
self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
file, version, sourceful=True)
filename, version, sourceful=True)
################################################################################
def check_dsc_against_db(self, file, session):
def check_dsc_against_db(self, filename, session):
"""
@warning: NB: this function can remove entries from the 'files' index [if
......@@ -2459,15 +2457,15 @@ distribution."""
orig_files[dsc_name]["path"] = in_otherdir
if not found:
self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
continue
else:
self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
continue
if actual_md5 != dsc_entry["md5sum"]:
self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
if actual_size != int(dsc_entry["size"]):
self.rejects.append("size for %s doesn't match %s." % (found, file))
self.rejects.append("size for %s doesn't match %s." % (found, filename))
################################################################################
# This is used by process-new and process-holding to recheck a changes file
......
......@@ -36,7 +36,6 @@ import stat
import apt_pkg
import time
import re
import string
import email as modemail
import subprocess
......@@ -45,8 +44,7 @@ from dak_exceptions import *
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
re_multi_line_field, re_srchasver, re_taint_free, \
re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
re_is_orig_source
re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
from formats import parse_format, validate_changes_format
from srcformats import get_format_from_string
......@@ -64,9 +62,9 @@ key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
# all situations under lenny's Python.
import commands
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
# commands.getstatus.
def dak_getstatusoutput(cmd):
pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
......@@ -558,7 +556,7 @@ def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
(section, component) = extract_component_from_section(section)
files[name] = Dict(size=size, section=section,
files[name] = dict(size=size, section=section,
priority=priority, component=component)
files[name][hashname] = md5
......@@ -616,7 +614,7 @@ def send_mail (message, filename=""):
if len(match) == 0:
del message_raw[field]
else:
message_raw.replace_header(field, string.join(match, ", "))
message_raw.replace_header(field, ', '.join(match))
# Change message fields in order if we don't have a To header
if not message_raw.has_key("To"):
......@@ -757,12 +755,12 @@ def which_alias_file():
################################################################################
def TemplateSubst(map, filename):
def TemplateSubst(subst_map, filename):
""" Perform a substitution of template """
templatefile = open_file(filename)
template = templatefile.read()
for x in map.keys():
template = template.replace(x, str(map[x]))
for k, v in subst_map.iteritems():
template = template.replace(k, str(v))
templatefile.close()
return template
......@@ -1095,10 +1093,6 @@ def split_args (s, dwim=1):
################################################################################
def Dict(**dict): return dict
########################################
def gpgv_get_status_output(cmd, status_read, status_write):
"""
Our very own version of commands.getouputstatus(), hacked to support
......@@ -1366,7 +1360,7 @@ def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=No
rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
# Finally ensure there's not something we don't recognise
known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
......@@ -1488,7 +1482,7 @@ def is_email_alias(email):
################################################################################
def get_changes_files(dir):
def get_changes_files(from_dir):
"""
Takes a directory and lists all .changes files in it (as well as chdir'ing
to the directory; this is due to broken behaviour on the part of p-u/p-a
......@@ -1498,10 +1492,10 @@ def get_changes_files(dir):
"""
try:
# Much of the rest of p-u/p-a depends on being in the right place
os.chdir(dir)
changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
os.chdir(from_dir)
changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
except OSError, e:
fubar("Failed to read list from directory %s (%s)" % (dir, e))
fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
return changes_files
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册