提交 41742863 编写于 作者: J Joerg Jaspert

Merge remote-tracking branch 'ansgar/pu/multiarchive-1' into merge

* ansgar/pu/multiarchive-1:
  Drop column origin_server from archive table.
  Reference archive table from suite and add path to archive root
  typo: "pakcages" → "packages"
  Also import re_bin_only_nmu from daklib.regexes
  don't require all binaries to be included
  use discard instead of remove
  Add module to process policy queue uploads.
  Add module with pre-acceptance tests.
  Add module to handle archive manipulation.
  Add module to handle uploads not yet installed to the archive.
  process-unchecked.new: drop __SUMMARY__ for now
  remove two template variables
  utils.py: add call_editor function
  utils (send_message): add option to write mail to files
  utils.py (mail_addresses_for_upload): only try to use address from key if there is one
Signed-off-by: Joerg Jaspert <joerg@debian.org>
#!/usr/bin/env python
# coding=utf8
"""
Reference archive table from suite and add path to archive root
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2012 Ansgar Burchardt <ansgar@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import psycopg2
from daklib.dak_exceptions import DBUpdateError
from daklib.config import Config
################################################################################
def do_update(self):
print __doc__
try:
cnf = Config()
c = self.db.cursor()
archive_root = cnf["Dir::Root"]
c.execute("ALTER TABLE archive ADD COLUMN path TEXT NOT NULL DEFAULT %s", (archive_root,))
c.execute("ALTER TABLE archive ALTER COLUMN path DROP DEFAULT")
c.execute("ALTER TABLE archive ADD COLUMN mode CHAR(4) NOT NULL DEFAULT '0644' CHECK (mode SIMILAR TO '[0-7]{4}')")
c.execute("ALTER TABLE archive ADD COLUMN tainted BOOLEAN NOT NULL DEFAULT 'f'")
c.execute("ALTER TABLE archive ADD COLUMN use_morgue BOOLEAN NOT NULL DEFAULT 't'")
c.execute("SELECT id FROM archive")
(archive_id,) = c.fetchone()
if c.fetchone() is not None:
raise DBUpdateError("Cannot automatically upgrade form installation with multiple archives.")
c.execute("ALTER TABLE suite ADD COLUMN archive_id INT REFERENCES archive(id) NOT NULL DEFAULT %s", (archive_id,))
c.execute("ALTER TABLE suite ALTER COLUMN archive_id DROP DEFAULT")
c.execute("UPDATE config SET value = '73' WHERE name = 'db_revision'")
self.db.commit()
except psycopg2.ProgrammingError as msg:
self.db.rollback()
raise DBUpdateError('Unable to apply sick update 73, rollback issued. Error message : %s' % (str(msg)))
#!/usr/bin/env python
# coding=utf8
"""
Drop origin_server column from archive table
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2012 Ansgar Burchardt <ansgar@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import psycopg2
from daklib.dak_exceptions import DBUpdateError
from daklib.config import Config
################################################################################
def do_update(self):
print __doc__
try:
cnf = Config()
c = self.db.cursor()
c.execute("ALTER TABLE archive DROP COLUMN origin_server")
c.execute("UPDATE config SET value = '74' WHERE name = 'db_revision'")
self.db.commit()
except psycopg2.ProgrammingError as msg:
self.db.rollback()
raise DBUpdateError('Unable to apply sick update 74, rollback issued. Error message : %s' % (str(msg)))
......@@ -639,10 +639,8 @@ def main ():
Archive = get_archive(whereami, session)
if Archive is None:
utils.warn("Cannot find archive %s. Setting blank values for origin" % whereami)
Subst_close_rm["__MASTER_ARCHIVE__"] = ""
Subst_close_rm["__PRIMARY_MIRROR__"] = ""
else:
Subst_close_rm["__MASTER_ARCHIVE__"] = Archive.origin_server
Subst_close_rm["__PRIMARY_MIRROR__"] = Archive.primary_mirror
for bug in utils.split_args(Options["Done"]):
......@@ -673,7 +671,7 @@ def main ():
if len(sources) == 1:
source_pkg = source.split("_", 1)[0]
else:
utils.fubar("Closing bugs for multiple source pakcages is not supported. Do it yourself.")
utils.fubar("Closing bugs for multiple source packages is not supported. Do it yourself.")
Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
Subst_close_other["__SOURCE__"] = source_pkg
other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open')
......
......@@ -46,7 +46,7 @@ from daklib.daklog import Logger
################################################################################
Cnf = None
required_database_schema = 72
required_database_schema = 74
################################################################################
......
此差异已折叠。
此差异已折叠。
......@@ -3596,7 +3596,8 @@ class DBConn(object):
copy_queues = relation(BuildQueue,
secondary=self.tbl_suite_build_queue_copy),
srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
backref=backref('suites', lazy='dynamic'))),
backref=backref('suites', lazy='dynamic')),
archive = relation(Archive, backref='suites')),
extension = validator)
mapper(Uid, self.tbl_uid,
......
# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module to process policy queue uploads"""
import errno
import os
import shutil
import tempfile

from .config import Config
from .dbconn import BinaryMetadata, Component, MetadataKey, Override, OverrideType, Suite
from .fstransactions import FilesystemTransaction
from .regexes import re_file_changes, re_file_safe
class UploadCopy(object):
    """Provide a temporary copy of a policy queue upload.

    Use as a context manager:

       with UploadCopy(upload) as copy:
           ... work with files under copy.directory ...

    Inside the with-block the `directory` attribute points to a temporary
    directory holding (by default symlinked) copies of every file belonging
    to the upload; the directory is removed when the block is left.

    Args:
       upload (daklib.dbconn.PolicyQueueUpload)
    """
    def __init__(self, upload):
        self.directory = None
        self.upload = upload

    def export(self, directory, mode=None, symlink=True):
        """Export a copy of the upload to `directory`.

        Args:
           directory (str)

        Kwargs:
           mode (int): permissions to use for the copied files
           symlink (bool): use symlinks instead of copying the files (default: True)
        """
        with FilesystemTransaction() as fs:
            source = self.upload.source
            queue = self.upload.policy_queue

            # Source files (.dsc and friends) live in the pool.
            if source is not None:
                for dsc_file in source.srcfiles:
                    pool_file = dsc_file.poolfile
                    target = os.path.join(directory, os.path.basename(pool_file.filename))
                    fs.copy(pool_file.fullpath, target, mode=mode, symlink=symlink)

            # Binary packages live in the pool as well.
            for binary in self.upload.binaries:
                pool_file = binary.poolfile
                target = os.path.join(directory, os.path.basename(pool_file.filename))
                fs.copy(pool_file.fullpath, target, mode=mode, symlink=symlink)

            # Byhand files and the .changes itself live in the queue
            # directory, not in the pool.
            for byhand in self.upload.byhand:
                fs.copy(os.path.join(queue.path, byhand.filename),
                        os.path.join(directory, byhand.filename),
                        mode=mode, symlink=symlink)

            changesname = self.upload.changes.changesname
            fs.copy(os.path.join(queue.path, changesname),
                    os.path.join(directory, changesname),
                    mode=mode, symlink=symlink)

    def __enter__(self):
        assert self.directory is None

        self.directory = tempfile.mkdtemp(dir=Config().get('Dir::TempPath'))
        self.export(self.directory, symlink=True)
        return self

    def __exit__(self, *args):
        if self.directory is not None:
            shutil.rmtree(self.directory)
            self.directory = None
        return None
class PolicyQueueUploadHandler(object):
    """process uploads to policy queues

    This class allows to accept or reject uploads and to get a list of missing
    overrides (for NEW processing).
    """
    def __init__(self, upload, session):
        """initializer

        Args:
           upload (daklib.dbconn.PolicyQueueUpload): upload to process
           session: database session
        """
        self.upload = upload
        self.session = session

    @property
    def _overridesuite(self):
        # Suite whose override entries apply: the target suite itself,
        # unless it delegates via its `overridesuite` attribute.
        # NOTE(review): `Suite` is not imported in this module's header —
        # confirm `from .dbconn import Suite` (or equivalent) is in place.
        overridesuite = self.upload.target_suite
        if overridesuite.overridesuite is not None:
            overridesuite = self.session.query(Suite).filter_by(suite_name=overridesuite.overridesuite).one()
        return overridesuite

    def _source_override(self, component_name):
        # Override of type 'dsc' for the uploaded source package in the
        # given component, or None if no such entry exists.
        package = self.upload.source.source
        suite = self._overridesuite
        query = self.session.query(Override).filter_by(package=package, suite=suite) \
            .join(OverrideType).filter(OverrideType.overridetype == 'dsc') \
            .join(Component).filter(Component.component_name == component_name)
        return query.first()

    def _binary_override(self, binary, component_name):
        # Override matching the binary's own type ('deb' or 'udeb') in the
        # given component, or None if no such entry exists.
        package = binary.package
        suite = self._overridesuite
        overridetype = binary.binarytype
        query = self.session.query(Override).filter_by(package=package, suite=suite) \
            .join(OverrideType).filter(OverrideType.overridetype == overridetype) \
            .join(Component).filter(Component.component_name == component_name)
        return query.first()

    def _binary_metadata(self, binary, key):
        # Value of the metadata field `key` (e.g. 'Section', 'Priority')
        # for `binary`, or None when the key or value is unknown.
        metadata_key = self.session.query(MetadataKey).filter_by(key=key).first()
        if metadata_key is None:
            return None
        metadata = self.session.query(BinaryMetadata).filter_by(binary=binary, key=metadata_key).first()
        if metadata is None:
            return None
        return metadata.value

    @property
    def _changes_prefix(self):
        # .changes filename without its '.changes' extension; used to name
        # the ACCEPT/REJECT marker files in the COMMENTS directory.
        changesname = self.upload.changes.changesname
        assert changesname.endswith('.changes')
        assert re_file_changes.match(changesname)
        return changesname[0:-8]

    def accept(self):
        """mark upload as accepted"""
        assert len(self.missing_overrides()) == 0

        fn1 = 'ACCEPT.{0}'.format(self._changes_prefix)
        fn = os.path.join(self.upload.policy_queue.path, 'COMMENTS', fn1)
        try:
            # O_EXCL: never overwrite an existing marker file.
            fh = os.open(fn, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
            os.write(fh, 'OK\n')
            os.close(fh)
        except OSError as e:
            if e.errno == errno.EEXIST:
                # marker already present: upload was already accepted
                pass
            else:
                raise

    def reject(self, reason):
        """mark upload as rejected

        Args:
           reason (str): reason for the rejection
        """
        fn1 = 'REJECT.{0}'.format(self._changes_prefix)
        assert re_file_safe.match(fn1)

        fn = os.path.join(self.upload.policy_queue.path, 'COMMENTS', fn1)
        try:
            # O_EXCL: never overwrite an existing marker file.
            fh = os.open(fn, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
            os.write(fh, 'NOTOK\n')
            os.write(fh, reason)
            os.close(fh)
        except OSError as e:
            if e.errno == errno.EEXIST:
                # marker already present: upload was already rejected
                pass
            else:
                raise

    def get_action(self):
        """get current action

        Returns:
           string giving the current action, one of 'ACCEPT', 'ACCEPTED', 'REJECT'
        """
        changes_prefix = self._changes_prefix

        for action in ('ACCEPT', 'ACCEPTED', 'REJECT'):
            fn1 = '{0}.{1}'.format(action, changes_prefix)
            fn = os.path.join(self.upload.policy_queue.path, 'COMMENTS', fn1)
            if os.path.exists(fn):
                return action

        return None

    def missing_overrides(self, hints=None):
        """get missing override entries for the upload

        Kwargs:
           hints (list of dict): suggested hints for new overrides in the same
              format as the return value

        Returns:
           list of dicts with the following keys:
              package: package name
              priority: default priority (from upload)
              section: default section (from upload)
              component: default component (from upload)
              type: type of required override ('dsc', 'deb' or 'udeb')
           All values are strings.
        """
        # TODO: use Package-List field
        missing = []
        components = set()

        if hints is None:
            hints = []
        # index hints by (type, package) for quick lookup below
        hints_map = dict([ ((o['type'], o['package']), o) for o in hints ])

        for binary in self.upload.binaries:
            priority = self._binary_metadata(binary, 'Priority')
            section = self._binary_metadata(binary, 'Section')
            # component is the part of the section before '/', if any
            # NOTE(review): section may be None when the metadata key is
            # missing; `section.find` would then raise — confirm upstream
            # guarantees a Section field.
            component = 'main'
            if section.find('/') != -1:
                component = section.split('/', 1)[0]
            override = self._binary_override(binary, component)
            if override is None:
                hint = hints_map.get((binary.binarytype, binary.package))
                if hint is not None:
                    missing.append(hint)
                    component = hint['component']
                else:
                    missing.append(dict(
                            package = binary.package,
                            priority = priority,
                            section = section,
                            component = component,
                            type = binary.binarytype,
                            ))
            components.add(component)

        # Place the source in the "most official" component any of its
        # binaries landed in (main before contrib before non-free).
        source_component = '(unknown)'
        for component in ('main', 'contrib', 'non-free'):
            if component in components:
                source_component = component
                break

        source = self.upload.source
        if source is not None:
            override = self._source_override(source_component)
            if override is None:
                hint = hints_map.get(('dsc', source.source))
                if hint is not None:
                    missing.append(hint)
                else:
                    section = 'misc'
                    if component != 'main':
                        section = "{0}/{1}".format(component, section)
                    missing.append(dict(
                            package = source.source,
                            priority = 'extra',
                            section = section,
                            component = source_component,
                            type = 'dsc',
                            ))

        return missing
# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module to handle uploads not yet installed to the archive
This module provides classes to handle uploads not yet installed to the
archive. Central is the `Changes` class which represents a changes file.
It provides methods to access the included binary and source packages.
"""
import apt_inst
import apt_pkg
import os
import re
from .gpg import SignedFile
from .regexes import *
class InvalidChangesException(Exception):
    """raised when a .changes file is malformed or inconsistent"""
    pass

class InvalidBinaryException(Exception):
    """raised when a binary package (.deb) is malformed"""
    pass

class InvalidSourceException(Exception):
    """raised when a source package is malformed"""
    pass
class InvalidHashException(Exception):
    """raised when a file's size or checksum does not match its declaration"""
    def __init__(self, filename, hash_name, expected, actual):
        self.filename = filename
        self.hash_name = hash_name
        self.expected = expected
        self.actual = actual

    def __str__(self):
        message = "Invalid {hash} hash for {name}: expected {want}, but got {got}."
        return message.format(hash=self.hash_name, name=self.filename,
                              want=self.expected, got=self.actual)
class InvalidFilenameException(Exception):
    """raised when a filename contains unsafe characters"""
    def __init__(self, filename):
        self.filename = filename

    def __str__(self):
        return "Invalid filename '%s'." % (self.filename,)
class HashedFile(object):
    """file with checksums

    Attributes:
       filename (str): name of the file
       size (long): size in bytes
       md5sum (str): MD5 hash in hexdigits
       sha1sum (str): SHA1 hash in hexdigits
       sha256sum (str): SHA256 hash in hexdigits
       section (str): section or None
       priority (str): priority or None
    """
    def __init__(self, filename, size, md5sum, sha1sum, sha256sum, section=None, priority=None):
        self.filename = filename
        self.size = size
        self.md5sum = md5sum
        self.sha1sum = sha1sum
        self.sha256sum = sha256sum
        self.section = section
        self.priority = priority

    def check(self, directory):
        """Validate hashes

        Check if size and hashes match the expected value.

        Args:
           directory (str): directory the file is located in

        Raises:
           InvalidHashException: hash mismatch
        """
        path = os.path.join(directory, self.filename)
        fobj = open(path, 'r')

        actual_size = os.stat(path).st_size
        if actual_size != self.size:
            raise InvalidHashException(self.filename, 'size', self.size, actual_size)

        # apt_pkg's digest helpers consume the open file object, so rewind
        # between the individual checks. Order matters for which mismatch
        # is reported first: md5, then sha1, then sha256.
        digests = (
            ('md5sum', apt_pkg.md5sum, self.md5sum),
            ('sha1sum', apt_pkg.sha1sum, self.sha1sum),
            ('sha256sum', apt_pkg.sha256sum, self.sha256sum),
            )
        for hash_name, hash_func, expected in digests:
            actual = hash_func(fobj)
            if actual != expected:
                raise InvalidHashException(self.filename, hash_name, expected, actual)
            fobj.seek(0)
def parse_file_list(control, has_priority_and_section):
    """Parse Files and Checksums-* fields

    Args:
       control (dict-like): control file to take fields from
       has_priority_and_section (bool): Files include section and priority (as in .changes)

    Raises:
       InvalidChangesException: missing fields or other grave errors

    Returns:
       dictionary mapping filenames to `daklib.upload.HashedFile` objects
    """
    entries = {}

    # The Files field provides size and md5sum (plus section and priority
    # for .changes); the Checksums-* fields contribute the SHA hashes.
    for line in control["Files"].split('\n'):
        if len(line) == 0:
            continue

        if has_priority_and_section:
            (md5sum, size, section, priority, filename) = line.split()
            entry = dict(md5sum=md5sum, size=long(size), section=section, priority=priority, filename=filename)
        else:
            (md5sum, size, filename) = line.split()
            entry = dict(md5sum=md5sum, size=long(size), filename=filename)

        entries[filename] = entry

    for line in control["Checksums-Sha1"].split('\n'):
        if len(line) == 0:
            continue
        (sha1sum, size, filename) = line.split()
        entry = entries.get(filename, None)
        # Fix: the original dereferenced `entry` without this None check
        # (unlike the Sha256 loop below), raising AttributeError for a file
        # listed in Checksums-Sha1 but not in Files.
        if entry is None:
            raise InvalidChangesException('No sha1sum for {0}.'.format(filename))
        if entry.get('size', None) != long(size):
            raise InvalidChangesException('Size for {0} in Files and Checksum-Sha1 fields differ.'.format(filename))
        entry['sha1sum'] = sha1sum

    for line in control["Checksums-Sha256"].split('\n'):
        if len(line) == 0:
            continue
        (sha256sum, size, filename) = line.split()
        entry = entries.get(filename, None)
        if entry is None:
            raise InvalidChangesException('No sha256sum for {0}.'.format(filename))
        if entry.get('size', None) != long(size):
            raise InvalidChangesException('Size for {0} in Files and Checksum-Sha256 fields differ.'.format(filename))
        entry['sha256sum'] = sha256sum

    files = {}
    for entry in entries.itervalues():
        filename = entry['filename']

        # every file must carry size, md5 and both SHA hashes
        if 'size' not in entry:
            raise InvalidChangesException('No size for {0}.'.format(filename))
        if 'md5sum' not in entry:
            raise InvalidChangesException('No md5sum for {0}.'.format(filename))
        if 'sha1sum' not in entry:
            raise InvalidChangesException('No sha1sum for {0}.'.format(filename))
        if 'sha256sum' not in entry:
            raise InvalidChangesException('No sha256sum for {0}.'.format(filename))
        if not re_file_safe.match(filename):
            # Fix: the original referenced `self.filename` here, which is
            # undefined in a module-level function (NameError).
            raise InvalidChangesException("References file with unsafe filename {0}.".format(filename))

        f = files[filename] = HashedFile(**entry)

    return files
class Changes(object):
    """Representation of a .changes file

    Attributes:
       architectures (list of str): list of architectures included in the upload
       binaries (list of daklib.upload.Binary): included binary packages
       binary_names (list of str): names of included binary packages
       byhand_files (list of daklib.upload.HashedFile): included byhand files
       bytes (int): total size of files included in this upload in bytes
       changes (dict-like): dict to access fields of the .changes file
       closed_bugs (list of str): list of bugs closed by this upload
       directory (str): directory the .changes is located in
       distributions (list of str): list of target distributions for the upload
       filename (str): name of the .changes file
       files (dict): dict mapping filenames to daklib.upload.HashedFile objects
       path (str): path to the .changes files
       primary_fingerprint (str): fingerprint of the PGP key used for the signature
       source (daklib.upload.Source or None): included source
       valid_signature (bool): True if the changes has a valid signature
    """
    def __init__(self, directory, filename, keyrings, require_signature=True):
        if not re_file_safe.match(filename):
            raise InvalidChangesException('{0}: unsafe filename'.format(filename))
        self.directory = directory
        self.filename = filename
        data = open(self.path).read()
        self._signed_file = SignedFile(data, keyrings, require_signature)
        self.changes = apt_pkg.TagSection(self._signed_file.contents)
        # lazily-populated caches for the binaries/source/files properties
        self._binaries = None
        self._source = None
        self._files = None
        # kept to pass on to the Source object created on demand
        self._keyrings = keyrings
        self._require_signature = require_signature

    @property
    def path(self):
        # full path of the .changes file
        return os.path.join(self.directory, self.filename)

    @property
    def primary_fingerprint(self):
        return self._signed_file.primary_fingerprint

    @property
    def valid_signature(self):
        return self._signed_file.valid

    @property
    def architectures(self):
        return self.changes['Architecture'].split()

    @property
    def distributions(self):
        return self.changes['Distribution'].split()

    @property
    def source(self):
        # Source object for the included source package, or None if the
        # upload contains no source files; built on first access.
        if self._source is None:
            source_files = []
            for f in self.files.itervalues():
                if re_file_dsc.match(f.filename) or re_file_source.match(f.filename):
                    source_files.append(f)
            if len(source_files) > 0:
                self._source = Source(self.directory, source_files, self._keyrings, self._require_signature)
        return self._source

    @property
    def binaries(self):
        # Binary objects for all included .deb/.udeb files; built on first
        # access.
        if self._binaries is None:
            binaries = []
            for f in self.files.itervalues():
                if re_file_binary.match(f.filename):
                    binaries.append(Binary(self.directory, f))
            self._binaries = binaries
        return self._binaries

    @property
    def byhand_files(self):
        byhand = []

        for f in self.files.itervalues():
            # skip files already handled as source or binary packages
            if re_file_dsc.match(f.filename) or re_file_source.match(f.filename) or re_file_binary.match(f.filename):
                continue
            # anything else must be declared in section 'byhand' or 'raw-*'
            if f.section != 'byhand' and f.section[:4] != 'raw-':
                raise InvalidChangesException("{0}: {1} looks like a byhand package, but is in section {2}".format(self.filename, f.filename, f.section))
            byhand.append(f)

        return byhand

    @property
    def binary_names(self):
        return self.changes['Binary'].split()

    @property
    def closed_bugs(self):
        return self.changes.get('Closes', '').split()

    @property
    def files(self):
        # parsed Files/Checksums-* fields; built on first access
        if self._files is None:
            self._files = parse_file_list(self.changes, True)
        return self._files

    @property
    def bytes(self):
        # total size in bytes of all files listed in the .changes
        count = 0
        for f in self.files.itervalues():
            count += f.size
        return count

    def __cmp__(self, other):
        """Compare two changes packages

        We sort by source name and version first. If these are identical,
        we sort changes that include source before those without source (so
        that sourceful uploads get processed first), and finally fall back
        to the filename (this should really never happen).

        Returns:
           -1 if self < other, 0 if self == other, 1 if self > other
        """
        ret = cmp(self.changes.get('Source'), other.changes.get('Source'))

        if ret == 0:
            # compare version
            ret = apt_pkg.version_compare(self.changes.get('Version', ''), other.changes.get('Version', ''))

        if ret == 0:
            # sort changes with source before changes without source
            if 'source' in self.architectures and 'source' not in other.architectures:
                ret = -1
            elif 'source' not in self.architectures and 'source' in other.architectures:
                ret = 1
            else:
                ret = 0

        if ret == 0:
            # fall back to filename
            ret = cmp(self.filename, other.filename)

        return ret
class Binary(object):
    """Representation of a binary package

    Attributes:
       component (str): component name
       control (dict-like): dict to access fields in DEBIAN/control
       hashed_file (HashedFile): HashedFile object for the .deb
    """
    def __init__(self, directory, hashed_file):
        self.hashed_file = hashed_file

        deb_path = os.path.join(directory, hashed_file.filename)
        control_data = apt_inst.DebFile(deb_path).control.extractdata("control")
        self.control = apt_pkg.TagSection(control_data)

    @property
    def source(self):
        """Get source package name and version

        Returns:
           tuple containing source package name and version
        """
        source_field = self.control.get("Source", None)
        if source_field is None:
            # no Source field: source name and version equal the binary's
            return (self.control["Package"], self.control["Version"])

        match = re_field_source.match(source_field)
        if not match:
            raise InvalidBinaryException('{0}: Invalid Source field.'.format(self.hashed_file.filename))

        version = match.group('version')
        if version is None:
            # "Source: name" with no "(version)" suffix: use binary version
            version = self.control['Version']
        return (match.group('package'), version)

    @property
    def type(self):
        """Get package type

        Returns:
           String with the package type ('deb' or 'udeb')
        """
        match = re_file_binary.match(self.hashed_file.filename)
        if not match:
            raise InvalidBinaryException('{0}: Does not match re_file_binary'.format(self.hashed_file.filename))
        return match.group('type')

    @property
    def component(self):
        """component name taken from the Section field prefix, or "main"
        when the section carries no component prefix"""
        section_parts = self.control['Section'].split('/')
        if len(section_parts) > 1:
            return section_parts[0]
        return "main"
class Source(object):
    """Representation of a source package

    Attributes:
       component (str): guessed component name. Might be wrong!
       dsc (dict-like): dict to access fields in the .dsc file
       hashed_files (list of daklib.upload.HashedFile): list of source files (including .dsc)
       files (dict): dictionary mapping filenames to HashedFile objects for
          additional source files (not including .dsc)
       primary_fingerprint (str): fingerprint of the PGP key used for the signature
       valid_signature (bool): True if the dsc has a valid signature
    """
    def __init__(self, directory, hashed_files, keyrings, require_signature=True):
        self.hashed_files = hashed_files
        self._dsc_file = None
        for f in hashed_files:
            if re_file_dsc.match(f.filename):
                if self._dsc_file is not None:
                    raise InvalidSourceException("Multiple .dsc found ({0} and {1})".format(self._dsc_file.filename, f.filename))
                else:
                    self._dsc_file = f

        # Fix: raise a clear error instead of a TypeError from os.path.join
        # when the upload includes source files but no .dsc at all.
        if self._dsc_file is None:
            raise InvalidSourceException("No .dsc found.")

        dsc_file_path = os.path.join(directory, self._dsc_file.filename)
        data = open(dsc_file_path, 'r').read()
        self._signed_file = SignedFile(data, keyrings, require_signature)
        self.dsc = apt_pkg.TagSection(self._signed_file.contents)
        self._files = None

    @property
    def files(self):
        # parsed file list from the .dsc; built on first access
        if self._files is None:
            self._files = parse_file_list(self.dsc, False)
        return self._files

    @property
    def primary_fingerprint(self):
        return self._signed_file.primary_fingerprint

    @property
    def valid_signature(self):
        return self._signed_file.valid

    @property
    def component(self):
        # component guessed from the .dsc's Section field; "main" when no
        # Section or no component prefix is present
        if 'Section' not in self.dsc:
            return 'main'
        fields = self.dsc['Section'].split('/')
        if len(fields) > 1:
            return fields[0]
        return "main"
......@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import commands
import datetime
import email.Header
import os
import pwd
......@@ -608,6 +609,14 @@ def build_package_list(dsc, session = None):
def send_mail (message, filename=""):
"""sendmail wrapper, takes _either_ a message string or a file as arguments"""
maildir = Cnf.get('Dir::Mail')
if maildir:
path = os.path.join(maildir, datetime.datetime.now().isoformat())
path = find_next_free(path)
fh = open(path, 'w')
print >>fh, message,
fh.close()
# Check whether we're supposed to be sending mail
if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
return
......@@ -1581,8 +1590,30 @@ def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
addresses.append(changed_by)
fpr_addresses = gpg_get_key_addresses(fingerprint)
if fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses:
if len(fpr_addresses) > 0 and fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses:
addresses.append(fpr_addresses[0])
encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ]
return encoded_addresses
################################################################################
def call_editor(text="", suffix=".txt"):
"""Run editor and return the result as a string
Kwargs:
text (str): initial text
suffix (str): extension for temporary file
Returns:
string with the edited text
"""
editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
try:
print >>tmp, text,
tmp.close()
subprocess.check_call([editor, tmp.name])
return open(tmp.name, 'r').read()
finally:
os.unlink(tmp.name)
......@@ -29,8 +29,6 @@ Misc
o __BUG_NUMBER__
o __CONTROL_MESSAGE__
o __MANUAL_REJECT_MESSAGE__
o __SHORT_SUMMARY__
o __SUMMARY__
o __STABLE_WARNING__
o __SUITE__
......@@ -9,6 +9,3 @@ Content-Transfer-Encoding: 8bit
Subject: Accepted __SOURCE__ __VERSION__ (__ARCHITECTURE__)
__FILE_CONTENTS__
Accepted:
__SHORT_SUMMARY__
......@@ -12,10 +12,7 @@ Source: __SOURCE__
Source-Version: __VERSION__
We believe that the bug you reported is fixed in the latest version of
__SOURCE__, which is due to be installed in the __DISTRO__ FTP archive:
__SHORT_SUMMARY__
__STABLE_WARNING__
__SOURCE__, which is due to be installed in the __DISTRO__ FTP archive.
A summary of the changes between this version and the previous one is
attached.
......
......@@ -9,11 +9,6 @@ Content-Type: text/plain; charset="utf-8"
Content-Transfer-Encoding: 8bit
Subject: __CHANGES_FILENAME__ is NEW
__SUMMARY__
Your package contains new components which requires manual editing of
the override file. It is ok otherwise, so please be patient. New
packages are usually added to the override file about once a week.
You may have gotten the distribution wrong. You'll get warnings above
if files already exist in other distributions.
......@@ -17,9 +17,8 @@ database and may (or may not) still be in the pool; this is not a bug.
The package(s) will be physically removed automatically when no suite
references them (and in the case of source, when no binary references
it). Please also remember that the changes have been done on the
master archive (__MASTER_ARCHIVE__) and will not propagate to any
mirrors (__PRIMARY_MIRROR__ included) until the next cron.daily run at the
earliest.
master archive and will not propagate to any mirrors (__PRIMARY_MIRROR__
included) until the next dinstall run at the earliest.
Packages are usually not removed from testing by hand. Testing tracks
unstable and will automatically remove packages which were removed
......
......@@ -17,9 +17,8 @@ database and may (or may not) still be in the pool; this is not a bug.
The package(s) will be physically removed automatically when no suite
references them (and in the case of source, when no binary references
it). Please also remember that the changes have been done on the
master archive (__MASTER_ARCHIVE__) and will not propagate to any
mirrors (__PRIMARY_MIRROR__ included) until the next cron.daily run at the
earliest.
master archive and will not propagate to any mirrors (__PRIMARY_MIRROR__
included) until the next dinstall run at the earliest.
Packages are usually not removed from testing by hand. Testing tracks
unstable and will automatically remove packages which were removed
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册