Commit 41742863 authored by Joerg Jaspert

Merge remote-tracking branch 'ansgar/pu/multiarchive-1' into merge

* ansgar/pu/multiarchive-1:
  Drop column origin_server from archive table.
  Reference archive table from suite and add path to archive root
  typo: "pakcages" → "packages"
  Also import re_bin_only_nmu from daklib.regexes
  don't require all binaries to be included
  use discard instead of remove
  Add module to process policy queue uploads.
  Add module with pre-acceptance tests.
  Add module to handle archive manipulation.
  Add module to handle uploads not yet installed to the archive.
  process-unchecked.new: drop __SUMMARY__ for now
  remove two template variables
  utils.py: add call_editor function
  utils (send_message): add option to write mail to files
  utils.py (mail_addresses_for_upload): only try to use address from key if there is one
Signed-off-by: Joerg Jaspert <joerg@debian.org>
#!/usr/bin/env python
# coding=utf8
"""
Reference archive table from suite and add path to archive root
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2012 Ansgar Burchardt <ansgar@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import psycopg2
from daklib.dak_exceptions import DBUpdateError
from daklib.config import Config
################################################################################
def do_update(self):
    print __doc__
    try:
        cnf = Config()
        c = self.db.cursor()

        archive_root = cnf["Dir::Root"]
        c.execute("ALTER TABLE archive ADD COLUMN path TEXT NOT NULL DEFAULT %s", (archive_root,))
        c.execute("ALTER TABLE archive ALTER COLUMN path DROP DEFAULT")

        c.execute("ALTER TABLE archive ADD COLUMN mode CHAR(4) NOT NULL DEFAULT '0644' CHECK (mode SIMILAR TO '[0-7]{4}')")
        c.execute("ALTER TABLE archive ADD COLUMN tainted BOOLEAN NOT NULL DEFAULT 'f'")
        c.execute("ALTER TABLE archive ADD COLUMN use_morgue BOOLEAN NOT NULL DEFAULT 't'")

        c.execute("SELECT id FROM archive")
        (archive_id,) = c.fetchone()
        if c.fetchone() is not None:
            raise DBUpdateError("Cannot automatically upgrade from an installation with multiple archives.")

        c.execute("ALTER TABLE suite ADD COLUMN archive_id INT REFERENCES archive(id) NOT NULL DEFAULT %s", (archive_id,))
        c.execute("ALTER TABLE suite ALTER COLUMN archive_id DROP DEFAULT")

        c.execute("UPDATE config SET value = '73' WHERE name = 'db_revision'")
        self.db.commit()
    except psycopg2.ProgrammingError as msg:
        self.db.rollback()
        raise DBUpdateError('Unable to apply update 73, rollback issued. Error message: %s' % (str(msg)))
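# Aside: the ADD COLUMN ... DEFAULT / ALTER COLUMN ... DROP DEFAULT pair
# above is a common migration pattern: the DEFAULT backfills every existing
# row in one statement, and dropping it afterwards forces future INSERTs to
# supply the value explicitly. A minimal standalone sketch of the same
# pattern (connection string, table name and path are illustrative, not dak's):
import psycopg2

conn = psycopg2.connect("dbname=projectb")
c = conn.cursor()
# Backfill existing rows, then drop the default again.
c.execute("ALTER TABLE example ADD COLUMN path TEXT NOT NULL DEFAULT %s",
          ("/srv/ftp.example.org/ftp/",))
c.execute("ALTER TABLE example ALTER COLUMN path DROP DEFAULT")
conn.commit()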
#!/usr/bin/env python
# coding=utf8
"""
Drop origin_server column from archive table
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2012 Ansgar Burchardt <ansgar@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import psycopg2
from daklib.dak_exceptions import DBUpdateError
from daklib.config import Config
################################################################################
def do_update(self):
    print __doc__
    try:
        cnf = Config()
        c = self.db.cursor()

        c.execute("ALTER TABLE archive DROP COLUMN origin_server")

        c.execute("UPDATE config SET value = '74' WHERE name = 'db_revision'")
        self.db.commit()
    except psycopg2.ProgrammingError as msg:
        self.db.rollback()
        raise DBUpdateError('Unable to apply update 74, rollback issued. Error message: %s' % (str(msg)))
@@ -639,10 +639,8 @@ def main ():
     Archive = get_archive(whereami, session)
     if Archive is None:
         utils.warn("Cannot find archive %s. Setting blank values for origin" % whereami)
-        Subst_close_rm["__MASTER_ARCHIVE__"] = ""
         Subst_close_rm["__PRIMARY_MIRROR__"] = ""
     else:
-        Subst_close_rm["__MASTER_ARCHIVE__"] = Archive.origin_server
         Subst_close_rm["__PRIMARY_MIRROR__"] = Archive.primary_mirror

     for bug in utils.split_args(Options["Done"]):
@@ -673,7 +671,7 @@ def main ():
     if len(sources) == 1:
         source_pkg = source.split("_", 1)[0]
     else:
-        utils.fubar("Closing bugs for multiple source pakcages is not supported. Do it yourself.")
+        utils.fubar("Closing bugs for multiple source packages is not supported. Do it yourself.")
     Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
     Subst_close_other["__SOURCE__"] = source_pkg
     other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open')
@@ -46,7 +46,7 @@ from daklib.daklog import Logger
 ################################################################################
 Cnf = None
-required_database_schema = 72
+required_database_schema = 74
 ################################################################################
# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module to manipulate the archive
This module provides classes to manipulate the archive.
"""
from .dbconn import *
import daklib.checks as checks
from daklib.config import Config
import daklib.upload as upload
import daklib.utils as utils
from .fstransactions import FilesystemTransaction
from .regexes import re_changelog_versions, re_bin_only_nmu
import apt_pkg
from datetime import datetime
import os
import shutil
import subprocess
from sqlalchemy.orm.exc import NoResultFound
import tempfile
import traceback
class ArchiveException(Exception):
pass
class HashMismatchException(ArchiveException):
pass
class ArchiveTransaction(object):
"""manipulate the archive in a transaction
"""
def __init__(self):
self.fs = FilesystemTransaction()
self.session = DBConn().session()
def get_file(self, hashed_file, source_name):
"""Look for file `hashed_file` in database
Args:
hashed_file (daklib.upload.HashedFile): file to look for in the database
Raises:
KeyError: file was not found in the database
HashMismatchException: hash mismatch
Returns:
`daklib.dbconn.PoolFile` object for the database
"""
poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
try:
poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
if poolfile.filesize != hashed_file.size or poolfile.md5sum != hashed_file.md5sum or poolfile.sha1sum != hashed_file.sha1sum or poolfile.sha256sum != hashed_file.sha256sum:
raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
return poolfile
except NoResultFound:
raise KeyError('{0} not found in database.'.format(poolname))
def _install_file(self, directory, hashed_file, archive, component, source_name):
"""Install a file
Will not give an error when the file is already present.
Returns:
`daklib.dbconn.PoolFile` object for the new file
"""
session = self.session
poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
try:
poolfile = self.get_file(hashed_file, source_name)
except KeyError:
poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
poolfile.md5sum = hashed_file.md5sum
poolfile.sha1sum = hashed_file.sha1sum
poolfile.sha256sum = hashed_file.sha256sum
session.add(poolfile)
session.flush()
try:
session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
except NoResultFound:
archive_file = ArchiveFile(archive, component, poolfile)
session.add(archive_file)
session.flush()
path = os.path.join(archive.path, 'pool', component.component_name, poolname)
hashed_file_path = os.path.join(directory, hashed_file.filename)
self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)
return poolfile
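# The copy above follows the standard Debian pool layout:
# <archive.path>/pool/<component>/<poolify(source)>/<filename>. An
# illustrative sketch, assuming poolify('hello') yields 'h/hello/' (lib*
# sources use a four-letter prefix such as 'libh/libhello/'); the paths
# here are made up:
import os

archive_path = '/srv/ftp-master.example.org/ftp'
poolname = os.path.join('h/hello', 'hello_1.0-1.dsc')
print os.path.join(archive_path, 'pool', 'main', poolname)
# -> /srv/ftp-master.example.org/ftp/pool/main/h/hello/hello_1.0-1.dsc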
def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
"""Install a binary package
Args:
directory (str): directory the binary package is located in
binary (daklib.upload.Binary): binary package to install
suite (daklib.dbconn.Suite): target suite
component (daklib.dbconn.Component): target component
Kwargs:
allow_tainted (bool): allow copying additional files from tainted archives
fingerprint (daklib.dbconn.Fingerprint): optional fingerprint
source_suites (list of daklib.dbconn.Suite or True): suites to copy
the source from if it is not yet in `suite`, or True to allow
copying from any suite.
This can also be a SQLAlchemy (sub)query object.
extra_source_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from
Returns:
`daklib.dbconn.DBBinary` object for the new package
"""
session = self.session
control = binary.control
maintainer = get_or_set_maintainer(control['Maintainer'], session)
architecture = get_architecture(control['Architecture'], session)
(source_name, source_version) = binary.source
source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
source = source_query.filter(DBSource.suites.contains(suite)).first()
if source is None:
if source_suites != True:
source_query = source_query.filter(DBSource.suites.any(source_suites))
source = source_query.first()
if source is None:
raise ArchiveException('{0}: trying to install to {1}, but could not find source'.format(binary.hashed_file.filename, suite.suite_name))
self.copy_source(source, suite, component)
db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)
unique = dict(
package=control['Package'],
version=control['Version'],
architecture=architecture,
)
rest = dict(
source=source,
maintainer=maintainer,
poolfile=db_file,
binarytype=binary.type,
fingerprint=fingerprint,
)
try:
db_binary = session.query(DBBinary).filter_by(**unique).one()
for key, value in rest.iteritems():
if getattr(db_binary, key) != value:
raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
except NoResultFound:
db_binary = DBBinary(**unique)
for key, value in rest.iteritems():
setattr(db_binary, key, value)
session.add(db_binary)
session.flush()
import_metadata_into_db(db_binary, session)
self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)
if suite not in db_binary.suites:
db_binary.suites.append(suite)
session.flush()
return db_binary
def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
"""ensure source exists in the given archive
This is intended to be used to check that Built-Using sources exist.
Args:
filename (str): filename to use in error messages
source (daklib.dbconn.DBSource): source to look for
archive (daklib.dbconn.Archive): archive to look in
Kwargs:
extra_archives (list of daklib.dbconn.Archive): list of archives to copy
the source package from if it is not yet present in `archive`
"""
session = self.session
db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
if db_file is not None:
return True
# Try to copy file from one extra archive
if extra_archives is None:
extra_archives = []
db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([ a.archive_id for a in extra_archives])).first()
if db_file is None:
raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))
source_archive = db_file.archive
for dsc_file in source.srcfiles:
af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
# We were given an explicit list of archives so it is okay to copy from tainted archives.
self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
"""Add Built-Using sources to `db_binary.extra_sources`
"""
session = self.session
built_using = control.get('Built-Using', None)
if built_using is not None:
for dep in apt_pkg.parse_depends(built_using):
assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
bu_source_name, bu_source_version, comp = dep[0]
assert comp == '=', 'Built-Using must contain strict dependencies'
bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
if bu_source is None:
raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))
self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)
db_binary.extra_sources.append(bu_source)
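# Built-Using entries must be strict '=' relations without alternatives,
# which is what the two asserts above enforce. apt_pkg.parse_depends
# returns a list of or-groups, each a list of (package, version, operator)
# tuples; a quick illustration with a made-up field value:
import apt_pkg

for dep in apt_pkg.parse_depends('gcc-4.6 (= 4.6.3-1), binutils (= 2.22-5)'):
    assert len(dep) == 1          # no alternatives ('|') in Built-Using
    name, version, op = dep[0]
    assert op == '='              # exact-version relation
    print name, version
# -> gcc-4.6 4.6.3-1
#    binutils 2.22-5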
def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
"""Install a source package
Args:
directory (str): directory the source package is located in
source (daklib.upload.Source): source package to install
suite (daklib.dbconn.Suite): target suite
component (daklib.dbconn.Component): target component
changed_by (daklib.dbconn.Maintainer): person who prepared this version of the package
Kwargs:
allow_tainted (bool): allow copying additional files from tainted archives
fingerprint (daklib.dbconn.Fingerprint): optional fingerprint
Returns:
`daklib.dbconn.DBSource` object for the new source
"""
session = self.session
archive = suite.archive
control = source.dsc
maintainer = get_or_set_maintainer(control['Maintainer'], session)
source_name = control['Source']
### Add source package to database
# We need to install the .dsc first as the DBSource object refers to it.
db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)
unique = dict(
source=source_name,
version=control['Version'],
)
rest = dict(
maintainer=maintainer,
changedby=changed_by,
#install_date=datetime.now().date(),
poolfile=db_file_dsc,
fingerprint=fingerprint,
dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
)
created = False
try:
db_source = session.query(DBSource).filter_by(**unique).one()
for key, value in rest.iteritems():
if getattr(db_source, key) != value:
raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
except NoResultFound:
created = True
db_source = DBSource(**unique)
for key, value in rest.iteritems():
setattr(db_source, key, value)
# XXX: set as default in postgres?
db_source.install_date = datetime.now().date()
session.add(db_source)
session.flush()
# Add .dsc file. Other files will be added later.
db_dsc_file = DSCFile()
db_dsc_file.source = db_source
db_dsc_file.poolfile = db_file_dsc
session.add(db_dsc_file)
session.flush()
if suite in db_source.suites:
return db_source
db_source.suites.append(suite)
if not created:
return db_source
### Now add remaining files and copy them to the archive.
for hashed_file in source.files.itervalues():
hashed_file_path = os.path.join(directory, hashed_file.filename)
if os.path.exists(hashed_file_path):
db_file = self._install_file(directory, hashed_file, archive, component, source_name)
session.add(db_file)
else:
db_file = self.get_file(hashed_file, source_name)
self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)
db_dsc_file = DSCFile()
db_dsc_file.source = db_source
db_dsc_file.poolfile = db_file
session.add(db_dsc_file)
session.flush()
# Importing is safe as we only arrive here when we did not find the source already installed earlier.
import_metadata_into_db(db_source, session)
# Uploaders are the maintainer and co-maintainers from the Uploaders field
db_source.uploaders.append(maintainer)
if 'Uploaders' in control:
def split_uploaders(field):
import re
for u in re.sub(">[ ]*,", ">\t", field).split("\t"):
yield u.strip()
for u in split_uploaders(control['Uploaders']):
db_source.uploaders.append(get_or_set_maintainer(u, session))
session.flush()
return db_source
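# The nested split_uploaders above splits only on commas that follow a
# closing '>', so maintainer names that themselves contain commas stay
# intact. A standalone illustration (the addresses are made up):
import re

def _split_uploaders(field):
    for u in re.sub(">[ ]*,", ">\t", field).split("\t"):
        yield u.strip()

print list(_split_uploaders('Doe, John <john@example.org>, Jane Roe <jane@example.org>'))
# -> ['Doe, John <john@example.org>', 'Jane Roe <jane@example.org>']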
def _copy_file(self, db_file, archive, component, allow_tainted=False):
"""Copy a file to the given archive and component
Args:
db_file (daklib.dbconn.PoolFile): file to copy
archive (daklib.dbconn.Archive): target archive
component (daklib.dbconn.Component): target component
Kwargs:
allow_tainted (bool): allow copying from tainted archives (such as NEW)
"""
session = self.session
if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
query = session.query(ArchiveFile).filter_by(file=db_file, component=component)
if not allow_tainted:
query = query.join(Archive).filter(Archive.tainted == False)
source_af = query.first()
if source_af is None:
raise ArchiveException('cp: Could not find {0} in component {1} in any archive.'.format(db_file.filename, component.component_name))
target_af = ArchiveFile(archive, component, db_file)
session.add(target_af)
session.flush()
self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)
def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
"""Copy a binary package to the given suite and component
Args:
db_binary (daklib.dbconn.DBBinary): binary to copy
suite (daklib.dbconn.Suite): target suite
component (daklib.dbconn.Component): target component
Kwargs:
allow_tainted (bool): allow copying from tainted archives (such as NEW)
extra_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from
"""
session = self.session
archive = suite.archive
if archive.tainted:
allow_tainted = True
# make sure built-using packages are present in target archive
filename = db_binary.poolfile.filename
for db_source in db_binary.extra_sources:
self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)
# copy binary
db_file = db_binary.poolfile
self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
if suite not in db_binary.suites:
db_binary.suites.append(suite)
self.session.flush()
def copy_source(self, db_source, suite, component, allow_tainted=False):
"""Copy a source package to the given suite and component
Args:
db_source (daklib.dbconn.DBSource): source to copy
suite (daklib.dbconn.Suite): target suite
component (daklib.dbconn.Component): target component
Kwargs:
allow_tainted (bool): allow copying from tainted archives (such as NEW)
"""
archive = suite.archive
if archive.tainted:
allow_tainted = True
for db_dsc_file in db_source.srcfiles:
self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
if suite not in db_source.suites:
db_source.suites.append(suite)
self.session.flush()
def remove_file(self, db_file, archive, component):
"""Remove a file from a given archive and component
Args:
db_file (daklib.dbconn.PoolFile): file to remove
archive (daklib.dbconn.Archive): archive to remove the file from
component (daklib.dbconn.Component): component to remove the file from
"""
af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component).one()
self.fs.unlink(af.path)
self.session.delete(af)
def remove_binary(self, binary, suite):
"""Remove a binary from a given suite and component
Args:
binary (daklib.dbconn.DBBinary): binary to remove
suite (daklib.dbconn.Suite): suite to remove the package from
"""
binary.suites.remove(suite)
self.session.flush()
def remove_source(self, source, suite):
"""Remove a source from a given suite and component
Raises:
ArchiveException: source package is still referenced by other
binaries in the suite
Args:
binary (daklib.dbconn.DBSource): source to remove
suite (daklib.dbconn.Suite): suite to remove the package from
"""
session = self.session
query = session.query(DBBinary).filter_by(source=source) \
.filter(DBBinary.suites.contains(suite))
if query.first() is not None:
raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))
source.suites.remove(suite)
session.flush()
def commit(self):
"""commit changes"""
try:
self.session.commit()
self.fs.commit()
finally:
self.session.rollback()
self.fs.rollback()
def rollback(self):
"""rollback changes"""
self.session.rollback()
self.fs.rollback()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
if type is None:
self.commit()
else:
self.rollback()
return None
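# Because __exit__ commits on a clean exit and rolls back on an exception,
# callers can treat database and filesystem changes as a single unit. A
# hypothetical caller (the object lookups are illustrative):
with ArchiveTransaction() as transaction:
    session = transaction.session
    suite = session.query(Suite).filter_by(suite_name='unstable').one()
    component = session.query(Component).filter_by(component_name='main').one()
    db_source = session.query(DBSource).filter_by(source='hello', version='1.0-1').one()
    transaction.copy_source(db_source, suite, component)
    # Falling off the end of the block commits; an exception rolls back.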
class ArchiveUpload(object):
"""handle an upload
This class can be used in a with-statement:
with ArchiveUpload(...) as upload:
...
Doing so will automatically run any required cleanup and also roll back
the transaction if it was not committed.
Attributes:
changes (daklib.upload.Changes): upload to process
directory (str): directory with temporary copy of files. set by `prepare`
fingerprint (daklib.dbconn.Fingerprint): fingerprint used to sign the upload
new (bool): upload is NEW. set by `check`
reject_reasons (list of str): reasons why the upload cannot be accepted
session: database session
transaction (daklib.archive.ArchiveTransaction): transaction used to handle the upload
warnings (list of str): warnings (NOT USED YET)
"""
def __init__(self, directory, changes, keyrings):
self.transaction = ArchiveTransaction()
self.session = self.transaction.session
self.original_directory = directory
self.original_changes = changes
self.changes = None
self.directory = None
self.keyrings = keyrings
self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
self.reject_reasons = []
self.warnings = []
self.final_suites = None
self.new = False
self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
self._new = self._new_queue.suite
def prepare(self):
"""prepare upload for further processing
This copies the files involved to a temporary directory. If you use
this method directly, you have to remove the directory given by the
`directory` attribute later on your own.
Instead of using the method directly, you can also use a with-statement:
with ArchiveUpload(...) as upload:
...
This will automatically handle any required cleanup.
"""
assert self.directory is None
assert self.original_changes.valid_signature
cnf = Config()
session = self.transaction.session
self.directory = tempfile.mkdtemp(dir=cnf.get('Dir::TempPath'))
with FilesystemTransaction() as fs:
src = os.path.join(self.original_directory, self.original_changes.filename)
dst = os.path.join(self.directory, self.original_changes.filename)
fs.copy(src, dst)
self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
for f in self.changes.files.itervalues():
src = os.path.join(self.original_directory, f.filename)
dst = os.path.join(self.directory, f.filename)
fs.copy(src, dst)
source = self.changes.source
if source is not None:
for f in source.files.itervalues():
src = os.path.join(self.original_directory, f.filename)
dst = os.path.join(self.directory, f.filename)
if f.filename not in self.changes.files:
db_file = self.transaction.get_file(f, source.dsc['Source'])
db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
fs.copy(db_archive_file.path, dst, symlink=True)
def unpacked_source(self):
"""Path to unpacked source
Get path to the unpacked source. This method unpacks the source
into a temporary directory under `self.directory` if that has not
been done already.
Returns:
String giving the path to the unpacked source directory
or None if no source was included in the upload.
"""
assert self.directory is not None
source = self.changes.source
if source is None:
return None
dsc_path = os.path.join(self.directory, source._dsc_file.filename)
sourcedir = os.path.join(self.directory, 'source')
if not os.path.exists(sourcedir):
subprocess.check_call(["dpkg-source", "--no-copy", "-x", dsc_path, sourcedir], shell=False)
if not os.path.isdir(sourcedir):
raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
return sourcedir
def _map_suite(self, suite_name):
for rule in Config().value_list("SuiteMappings"):
fields = rule.split()
rtype = fields[0]
if rtype == "map" or rtype == "silent-map":
(src, dst) = fields[1:3]
if src == suite_name:
suite_name = dst
if rtype != "silent-map":
self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
elif rtype == "ignore":
ignored = fields[1]
if suite_name == ignored:
self.warnings.append('Ignoring target suite {0}.'.format(ignored))
suite_name = None
elif rtype == "reject":
rejected = fields[1]
if suite_name == rejected:
self.reject_reasons.append('Uploads to {0} are not accepted.'.format(rejected))
## XXX: propup-version and map-unreleased not yet implemented
return suite_name
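# SuiteMappings rules are whitespace-separated strings from the dak
# configuration. How the rule types above act on a target suite, with
# illustrative values (not necessarily from a real dak.conf):
#
#   "map stable proposed-updates"              redirect, with a warning
#   "silent-map oldstable-security oldstable"  redirect, no warning
#   "ignore testing"                           drop the target suite (None)
#   "reject unreleased"                        refuse the upload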
def _mapped_suites(self):
"""Get target suites after mappings
Returns:
list of daklib.dbconn.Suite giving the mapped target suites of this upload
"""
session = self.session
suite_names = []
for dist in self.changes.distributions:
suite_name = self._map_suite(dist)
if suite_name is not None:
suite_names.append(suite_name)
suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
return suites
def _check_new(self, suite):
"""Check if upload is NEW
An upload is NEW if it has binary or source packages that do not have
an override in `suite` OR if it references files ONLY in a tainted
archive (e.g. when it references files in NEW).
Returns:
True if the upload is NEW, False otherwise
"""
session = self.session
# Check for missing overrides
for b in self.changes.binaries:
override = self._binary_override(suite, b)
if override is None:
return True
if self.changes.source is not None:
override = self._source_override(suite, self.changes.source)
if override is None:
return True
# Check if we reference a file only in a tainted archive
files = self.changes.files.values()
if self.changes.source is not None:
files.extend(self.changes.source.files.values())
for f in files:
query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
query_untainted = query.join(Archive).filter(Archive.tainted == False)
in_archive = (query.first() is not None)
in_untainted_archive = (query_untainted.first() is not None)
if in_archive and not in_untainted_archive:
return True
def _final_suites(self):
session = self.session
mapped_suites = self._mapped_suites()
final_suites = set()
for suite in mapped_suites:
overridesuite = suite
if suite.overridesuite is not None:
overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
if self._check_new(overridesuite):
self.new = True
final_suites.add(suite)
return final_suites
def _binary_override(self, suite, binary):
"""Get override entry for a binary
Args:
suite (daklib.dbconn.Suite)
binary (daklib.upload.Binary)
Returns:
daklib.dbconn.Override or None
"""
if suite.overridesuite is not None:
suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
query = self.session.query(Override).filter_by(suite=suite, package=binary.control['Package']) \
.join(Component).filter(Component.component_name == binary.component) \
.join(OverrideType).filter(OverrideType.overridetype == binary.type)
try:
return query.one()
except NoResultFound:
return None
def _source_override(self, suite, source):
"""Get override entry for a source
Args:
suite (daklib.dbconn.Suite)
source (daklib.upload.Source)
Returns:
daklib.dbconn.Override or None
"""
if suite.overridesuite is not None:
suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
# XXX: component for source?
query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
.join(OverrideType).filter(OverrideType.overridetype == 'dsc')
try:
return query.one()
except NoResultFound:
return None
def check(self, force=False):
"""run checks against the upload
Args:
force (bool): ignore failing forcable checks
Returns:
True if all checks passed, False otherwise
"""
# XXX: needs to be better structured.
assert self.changes.valid_signature
try:
for chk in (
checks.SignatureCheck,
checks.ChangesCheck,
checks.HashesCheck,
checks.SourceCheck,
checks.BinaryCheck,
checks.ACLCheck,
checks.SingleDistributionCheck,
checks.NoSourceOnlyCheck,
checks.LintianCheck,
):
chk().check(self)
final_suites = self._final_suites()
if len(final_suites) == 0:
self.reject_reasons.append('Ended with no suite to install to.')
return False
for chk in (
checks.SourceFormatCheck,
checks.SuiteArchitectureCheck,
checks.VersionCheck,
):
for suite in final_suites:
chk().per_suite_check(self, suite)
if len(self.reject_reasons) != 0:
return False
self.final_suites = final_suites
return True
except checks.Reject as e:
self.reject_reasons.append(unicode(e))
except Exception as e:
self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
return False
def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
"""Install upload to the given suite
Args:
suite (daklib.dbconn.Suite): suite to install the package into.
This is the real suite, ie. after any redirection to NEW or a policy queue
source_component_func: function to get the `daklib.dbconn.Component`
for a `daklib.upload.Source` object
binary_component_func: function to get the `daklib.dbconn.Component`
for a `daklib.upload.Binary` object
Kwargs:
source_suites: see `daklib.archive.ArchiveTransaction.install_binary`
extra_source_archives: see `daklib.archive.ArchiveTransaction.install_binary`
Returns:
tuple with two elements. The first is a `daklib.dbconn.DBSource`
object for the install source or None if no source was included.
The second is a list of `daklib.dbconn.DBBinary` objects for the
installed binary packages.
"""
# XXX: move this function to ArchiveTransaction?
control = self.changes.changes
changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)
if source_suites is None:
source_suites = self.session.query(Suite).join(VersionCheck, VersionCheck.reference_id == Suite.suite_id).filter(VersionCheck.suite == suite).subquery()
source = self.changes.source
if source is not None:
component = source_component_func(source)
db_source = self.transaction.install_source(self.directory, source, suite, component, changed_by, fingerprint=self.fingerprint)
else:
db_source = None
db_binaries = []
for binary in self.changes.binaries:
component = binary_component_func(binary)
db_binary = self.transaction.install_binary(self.directory, binary, suite, component, fingerprint=self.fingerprint, source_suites=source_suites, extra_source_archives=extra_source_archives)
db_binaries.append(db_binary)
if suite.copychanges:
src = os.path.join(self.directory, self.changes.filename)
dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
self.transaction.fs.copy(src, dst)
return (db_source, db_binaries)
def _install_changes(self):
assert self.changes.valid_signature
control = self.changes.changes
session = self.transaction.session
config = Config()
changelog_id = None
# Only add changelog for sourceful uploads and binNMUs
if 'source' in self.changes.architectures or re_bin_only_nmu.search(control['Version']):
query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
assert changelog_id is not None
db_changes = DBChange()
db_changes.changesname = self.changes.filename
db_changes.source = control['Source']
db_changes.binaries = control.get('Binary', None)
db_changes.architecture = control['Architecture']
db_changes.version = control['Version']
db_changes.distribution = control['Distribution']
db_changes.urgency = control['Urgency']
db_changes.maintainer = control['Maintainer']
db_changes.changedby = control.get('Changed-By', control['Maintainer'])
db_changes.date = control['Date']
db_changes.fingerprint = self.fingerprint.fingerprint
db_changes.changelog_id = changelog_id
db_changes.closes = self.changes.closed_bugs
self.transaction.session.add(db_changes)
self.transaction.session.flush()
return db_changes
def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
u = PolicyQueueUpload()
u.policy_queue = policy_queue
u.target_suite = target_suite
u.changes = db_changes
u.source = db_source
u.binaries = db_binaries
self.transaction.session.add(u)
self.transaction.session.flush()
dst = os.path.join(policy_queue.path, self.changes.filename)
self.transaction.fs.copy(self.changes.path, dst)
return u
def try_autobyhand(self):
"""Try AUTOBYHAND
Try to handle byhand packages automatically.
Returns:
True if all byhand files were handled, False otherwise
"""
assert len(self.reject_reasons) == 0
assert self.changes.valid_signature
assert self.final_suites is not None
byhand = self.changes.byhand_files
if len(byhand) == 0:
return True
suites = list(self.final_suites)
assert len(suites) == 1, "BYHAND uploads must be to a single suite"
suite = suites[0]
cnf = Config()
control = self.changes.changes
automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")
remaining = []
for f in byhand:
package, version, archext = f.filename.split('_', 2)
arch, ext = archext.split('.', 1)
rule = automatic_byhand_packages.get(package)
if rule is None:
remaining.append(f)
continue
if rule['Source'] != control['Source'] or rule['Section'] != f.section or rule['Extension'] != ext:
remaining.append(f)
continue
script = rule['Script']
retcode = subprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename)], shell=False)
if retcode != 0:
print "W: error processing {0}.".format(f.filename)
remaining.append(f)
return len(remaining) == 0
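# AutomaticByHandPackages entries are matched on (Source, Section,
# Extension) and then run Script on the byhand file. An illustrative,
# hypothetical configuration entry:
#
#   AutomaticByHandPackages::"debian-installer-images" {
#     Source    "debian-installer";
#     Section   "raw-installer";
#     Extension "tar.gz";
#     Script    "/path/to/byhand-di";
#   };
#
# The filename split above expects package_version_archext, e.g.:
package, version, archext = 'foo_1.0_amd64.tar.gz'.split('_', 2)
arch, ext = archext.split('.', 1)
# package='foo', version='1.0', arch='amd64', ext='tar.gz'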
def _install_byhand(self, policy_queue_upload, hashed_file):
"""
Args:
policy_queue_upload (daklib.dbconn.PolicyQueueUpload): XXX
hashed_file (daklib.upload.HashedFile): XXX
"""
fs = self.transaction.fs
session = self.transaction.session
policy_queue = policy_queue_upload.policy_queue
byhand_file = PolicyQueueByhandFile()
byhand_file.upload = policy_queue_upload
byhand_file.filename = hashed_file.filename
session.add(byhand_file)
session.flush()
src = os.path.join(self.directory, hashed_file.filename)
dst = os.path.join(policy_queue.path, hashed_file.filename)
fs.copy(src, dst)
return byhand_file
def _do_bts_versiontracking(self):
cnf = Config()
fs = self.transaction.fs
btsdir = cnf.get('Dir::BTSVersionTrack')
if btsdir is None or btsdir == '':
return
base = os.path.join(btsdir, self.changes.filename[:-8])
# version history
sourcedir = self.unpacked_source()
if sourcedir is not None:
fh = open(os.path.join(sourcedir, 'debian', 'changelog'), 'r')
versions = fs.create("{0}.versions".format(base), mode=0o644)
for line in fh.readlines():
if re_changelog_versions.match(line):
versions.write(line)
fh.close()
versions.close()
# binary -> source mapping
debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
for binary in self.changes.binaries:
control = binary.control
source_package, source_version = binary.source
line = " ".join([control['Package'], control['Version'], source_package, source_version])
print >>debinfo, line
debinfo.close()
def install(self):
"""install upload
Install upload to a suite or policy queue. This method does *not*
handle uploads to NEW.
You need to have called the `check` method before calling this method.
"""
assert len(self.reject_reasons) == 0
assert self.changes.valid_signature
assert self.final_suites is not None
assert not self.new
db_changes = self._install_changes()
for suite in self.final_suites:
overridesuite = suite
if suite.overridesuite is not None:
overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
redirected_suite = suite
if suite.policy_queue is not None:
redirected_suite = suite.policy_queue.suite
source_component_func = lambda source: self._source_override(overridesuite, source).component
binary_component_func = lambda binary: self._binary_override(overridesuite, binary).component
(db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])
if suite.policy_queue is not None:
self._install_policy(suite.policy_queue, suite, db_changes, db_source, db_binaries)
# copy to build queues
if suite.policy_queue is None or suite.policy_queue.send_to_build_queues:
for build_queue in suite.copy_queues:
self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])
self._do_bts_versiontracking()
def install_to_new(self):
"""install upload to NEW
Install upload to NEW. This method does *not* handle regular uploads
to suites or policy queues.
You need to have called the `check` method before calling this method.
"""
# Uploads to NEW are special as we don't have overrides.
assert len(self.reject_reasons) == 0
assert self.changes.valid_signature
assert self.final_suites is not None
source = self.changes.source
binaries = self.changes.binaries
byhand = self.changes.byhand_files
new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
if len(byhand) > 0:
new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
new_suite = new_queue.suite
# we need a suite to guess components
suites = list(self.final_suites)
assert len(suites) == 1, "NEW uploads must be to a single suite"
suite = suites[0]
def binary_component_func(binary):
override = self._binary_override(suite, binary)
if override is not None:
return override.component
component_name = binary.component
component = self.session.query(Component).filter_by(component_name=component_name).one()
return component
# guess source component
# XXX: should be moved into an extra method
binary_component_names = set()
for binary in binaries:
component = binary_component_func(binary)
binary_component_names.add(component.component_name)
source_component_name = None
for guess in ('main', 'contrib', 'non-free'):
if guess in binary_component_names:
source_component_name = guess
break
if source_component_name is None:
raise Exception('Could not guess source component.')
source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
source_component_func = lambda source: source_component
db_changes = self._install_changes()
(db_source, db_binaries) = self._install_to_suite(new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive])
policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)
for f in byhand:
self._install_byhand(policy_upload, f)
self._do_bts_versiontracking()
def commit(self):
"""commit changes"""
self.transaction.commit()
def rollback(self):
"""rollback changes"""
self.transaction.rollback()
def __enter__(self):
self.prepare()
return self
def __exit__(self, type, value, traceback):
if self.directory is not None:
shutil.rmtree(self.directory)
self.directory = None
self.changes = None
self.transaction.rollback()
return None
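# Putting the pieces together, a driver would typically look like this
# hypothetical sketch (the real entry point lives in dak's upload
# processing, not shown here):
with ArchiveUpload(directory, changes, keyrings) as upload:
    if upload.check():
        if upload.new:
            upload.install_to_new()
        else:
            upload.install()
        upload.commit()
    else:
        print '\n'.join(upload.reject_reasons)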
# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# Parts based on code that is
# Copyright (C) 2001-2006, James Troup <james@nocrew.org>
# Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module provided pre-acceptance tests
Please read the documentation for the `Check` class for the interface.
"""
from daklib.config import Config
from .dbconn import *
import daklib.dbconn as dbconn
from .regexes import *
from .textutils import fix_maintainer, ParseMaintError
import daklib.lintian as lintian
import daklib.utils as utils
import apt_pkg
from apt_pkg import version_compare
import os
import yaml
# TODO: replace by subprocess
import commands
class Reject(Exception):
"""exception raised by failing checks"""
pass
class Check(object):
"""base class for checks
checks are called by daklib.archive.ArchiveUpload. Failing tests should
raise a `daklib.checks.Reject` exception including a human-readable
description of why the upload should be rejected.
"""
def check(self, upload):
"""do checks
Args:
upload (daklib.archive.ArchiveUpload): upload to check
Raises:
daklib.checks.Reject
"""
raise NotImplementedError
def per_suite_check(self, upload, suite):
"""do per-suite checks
Args:
upload (daklib.archive.ArchiveUpload): upload to check
suite (daklib.dbconn.Suite): suite to check
Raises:
daklib.checks.Reject
"""
raise NotImplementedError
@property
def forcable(self):
"""allow to force ignore failing test
True if it is acceptable to force ignoring a failing test,
False otherwise
"""
return False
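# A new pre-acceptance test only needs to subclass Check and raise Reject
# with a human-readable reason. A minimal, hypothetical example:
class NoEpochCheck(Check):
    """hypothetical check: reject uploads whose version carries an epoch"""
    def check(self, upload):
        if ':' in upload.changes.changes['Version']:
            raise Reject('{0}: versions with an epoch are not accepted here'.format(upload.changes.filename))
        return True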
class SignatureCheck(Check):
"""Check signature of changes and dsc file (if included in upload)
Make sure the signature is valid and done by a known user.
"""
def check(self, upload):
changes = upload.changes
if not changes.valid_signature:
raise Reject("Signature for .changes not valid.")
if changes.source is not None:
if not changes.source.valid_signature:
raise Reject("Signature for .dsc not valid.")
if changes.source.primary_fingerprint != changes.primary_fingerprint:
raise Reject(".changes and .dsc not signed by the same key.")
if upload.fingerprint is None or upload.fingerprint.uid is None:
raise Reject(".changes signed by unknown key.")
class ChangesCheck(Check):
"""Check changes file for syntax errors."""
def check(self, upload):
changes = upload.changes
control = changes.changes
fn = changes.filename
for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
if field not in control:
raise Reject('{0}: missing mandatory field {1}'.format(fn, field))
source_match = re_field_source.match(control['Source'])
if not source_match:
raise Reject('{0}: Invalid Source field'.format(fn))
version_match = re_field_version.match(control['Version'])
if not version_match:
raise Reject('{0}: Invalid Version field'.format(fn))
version_without_epoch = version_match.group('without_epoch')
match = re_file_changes.match(fn)
if not match:
raise Reject('{0}: Does not match re_file_changes'.format(fn))
if match.group('package') != source_match.group('package'):
raise Reject('{0}: Filename does not match Source field'.format(fn))
if match.group('version') != version_without_epoch:
raise Reject('{0}: Filename does not match Version field'.format(fn))
for bn in changes.binary_names:
if not re_field_package.match(bn):
raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))
if 'source' in changes.architectures and changes.source is None:
raise Reject("Changes has architecture source, but no source found.")
if changes.source is not None and 'source' not in changes.architectures:
raise Reject("Upload includes source, but changes does not say so.")
try:
fix_maintainer(changes.changes['Maintainer'])
except ParseMaintError as e:
raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))
try:
changed_by = changes.changes.get('Changed-By')
if changed_by is not None:
fix_maintainer(changed_by)
except ParseMaintError as e:
raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))
if len(changes.files) == 0:
raise Reject("Changes includes no files.")
for bugnum in changes.closed_bugs:
if not re_isanum.match(bugnum):
raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))
return True
class HashesCheck(Check):
"""Check hashes in .changes and .dsc are valid."""
def check(self, upload):
changes = upload.changes
for f in changes.files.itervalues():
f.check(upload.directory)
source = changes.source
if source is not None:
for f in source.files.itervalues():
f.check(upload.directory)
class BinaryCheck(Check):
"""Check binary packages for syntax errors."""
def check(self, upload):
for binary in upload.changes.binaries:
self.check_binary(upload, binary)
binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
for bn in binary_names:
if bn not in upload.changes.binary_names:
raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))
return True
def check_binary(self, upload, binary):
fn = binary.hashed_file.filename
control = binary.control
for field in ('Package', 'Architecture', 'Version', 'Description'):
if field not in control:
raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))
# check fields
package = control['Package']
if not re_field_package.match(package):
raise Reject('{0}: Invalid Package field'.format(fn))
version = control['Version']
version_match = re_field_version.match(version)
if not version_match:
raise Reject('{0}: Invalid Version field'.format(fn))
version_without_epoch = version_match.group('without_epoch')
architecture = control['Architecture']
if architecture not in upload.changes.architectures:
raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
if architecture == 'source':
raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))
source = control.get('Source')
if source is not None and not re_field_source.match(source):
raise Reject('{0}: Invalid Source field'.format(fn))
# check filename
match = re_file_binary.match(fn)
if package != match.group('package'):
raise Reject('{0}: filename does not match Package field'.format(fn))
if version_without_epoch != match.group('version'):
raise Reject('{0}: filename does not match Version field'.format(fn))
if architecture != match.group('architecture'):
raise Reject('{0}: filename does not match Architecture field'.format(fn))
# check dependency field syntax
for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
'Provides', 'Recommends', 'Replaces', 'Suggests'):
value = control.get(field)
if value is not None:
if value.strip() == '':
raise Reject('{0}: empty {1} field'.format(fn, field))
try:
apt_pkg.parse_depends(value)
except:
raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
for field in ('Built-Using',):
value = control.get(field)
if value is not None:
if value.strip() == '':
raise Reject('{0}: empty {1} field'.format(fn, field))
try:
apt_pkg.parse_src_depends(value)
except:
raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
class SourceCheck(Check):
"""Check source package for syntax errors."""
def check_filename(self, control, filename, regex):
# In case we have an .orig.tar.*, we have to strip the Debian revision
# from the version number. So handle this special case first.
is_orig = True
match = re_file_orig.match(filename)
if not match:
is_orig = False
match = regex.match(filename)
if not match:
raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
if match.group('package') != control['Source']:
raise Reject('{0}: filename does not match Source field'.format(filename))
version = control['Version']
if is_orig:
version = re_field_version_upstream.match(version).group('upstream')
version_match = re_field_version.match(version)
version_without_epoch = version_match.group('without_epoch')
if match.group('version') != version_without_epoch:
raise Reject('{0}: filename does not match Version field'.format(filename))
def check(self, upload):
if upload.changes.source is None:
return True
changes = upload.changes.changes
source = upload.changes.source
control = source.dsc
dsc_fn = source._dsc_file.filename
# check fields
if not re_field_package.match(control['Source']):
raise Reject('{0}: Invalid Source field'.format(dsc_fn))
if control['Source'] != changes['Source']:
raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
if control['Version'] != changes['Version']:
raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))
# check filenames
self.check_filename(control, dsc_fn, re_file_dsc)
for f in source.files.itervalues():
self.check_filename(control, f.filename, re_file_source)
# check dependency field syntax
for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
value = control.get(field)
if value is not None:
if value.strip() == '':
raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
try:
apt_pkg.parse_src_depends(value)
except Exception as e:
raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))
# TODO: check all expected files for given source format are included
class SingleDistributionCheck(Check):
"""Check that the .changes targets only a single distribution."""
def check(self, upload):
if len(upload.changes.distributions) != 1:
raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
"""Check the uploader is allowed to upload the packages in .changes"""
def _check_dm(self, upload):
# This code is not very nice, but hopefully works until we can replace
# DM-Upload-Allowed, cf. https://lists.debian.org/debian-project/2012/06/msg00029.html
session = upload.session
if 'source' not in upload.changes.architectures:
raise Reject('DM uploads must include source')
distributions = upload.changes.distributions
for dist in distributions:
if dist not in ('unstable', 'experimental', 'squeeze-backports'):
raise Reject("Uploading to {0} is not allowed for DMs.".format(dist))
for f in upload.changes.files.itervalues():
if f.section == 'byhand' or f.section[:4] == "raw-":
raise Reject("Uploading byhand packages is not allowed for DMs.")
# Reject NEW packages
assert len(distributions) == 1
suite = session.query(Suite).filter_by(suite_name=distributions[0]).one()
overridesuite = suite
if suite.overridesuite is not None:
overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
if upload._check_new(overridesuite):
raise Reject('Uploading NEW packages is not allowed for DMs.')
# Check DM-Upload-Allowed
last_suites = ['unstable', 'experimental']
if suite.suite_name.endswith('-backports'):
last_suites = [suite.suite_name]
last = session.query(DBSource).filter_by(source=upload.changes.changes['Source']) \
.join(DBSource.suites).filter(Suite.suite_name.in_(last_suites)) \
.order_by(DBSource.version.desc()).limit(1).first()
if last is None:
raise Reject('No existing source found in {0}'.format(' or '.join(last_suites)))
if not last.dm_upload_allowed:
raise Reject('DM-Upload-Allowed is not set in {0}={1}'.format(last.source, last.version))
# check current Changed-by is in last Maintainer or Uploaders
uploader_names = [ u.name for u in last.uploaders ]
changed_by_field = upload.changes.changes.get('Changed-By', upload.changes.changes['Maintainer'])
if changed_by_field not in uploader_names:
raise Reject('{0} is not an uploader for {1}={2}'.format(changed_by_field, last.source, last.version))
# check Changed-by is the DM
changed_by = fix_maintainer(changed_by_field)
uid = upload.fingerprint.uid
if uid is None:
raise Reject('Unknown uid for fingerprint {0}'.format(upload.fingerprint.fingerprint))
if uid.uid != changed_by[3] and uid.name != changed_by[2]:
raise Reject('DMs are not allowed to sponsor uploads (expected {0} <{1}> as maintainer, but got {2})'.format(uid.name, uid.uid, changed_by_field))
# Try to catch hijacks.
# This doesn't work correctly. Uploads to experimental can still
# "hijack" binaries from unstable. Also one can hijack packages
# via buildds (but people who try this should not be DMs).
for binary_name in upload.changes.binary_names:
binaries = session.query(DBBinary).join(DBBinary.source) \
.join(DBBinary.suites).filter(Suite.suite_name.in_(upload.changes.distributions)) \
.filter(DBBinary.package == binary_name)
for binary in binaries:
if binary.source.source != upload.changes.changes['Source']:
raise Reject('DMs must not hijack binaries (binary={0}, other-source={1})'.format(binary_name, binary.source.source))
return True
def check(self, upload):
fingerprint = upload.fingerprint
source_acl = fingerprint.source_acl
if source_acl is None:
if 'source' in upload.changes.architectures:
raise Reject('Fingerprint {0} must not upload source'.format(fingerprint.fingerprint))
elif source_acl.access_level == 'dm':
self._check_dm(upload)
elif source_acl.access_level != 'full':
raise Reject('Unknown source_acl access level {0} for fingerprint {1}'.format(source_acl.access_level, fingerprint.fingerprint))
bin_architectures = set(upload.changes.architectures)
bin_architectures.discard('source')
binary_acl = fingerprint.binary_acl
if binary_acl is None:
if len(bin_architectures) > 0:
raise Reject('Fingerprint {0} must not upload binary packages'.format(fingerprint.fingerprint))
elif binary_acl.access_level == 'map':
query = upload.session.query(BinaryACLMap).filter_by(fingerprint=fingerprint)
allowed_architectures = [ m.architecture.arch_string for m in query ]
for arch in upload.changes.architectures:
if arch not in allowed_architectures:
raise Reject('Fingerprint {0} must not upload binaries for architecture {1}'.format(fingerprint.fingerprint, arch))
elif binary_acl.access_level != 'full':
raise Reject('Unknown binary_acl access level {0} for fingerprint {1}'.format(binary_acl.access_level, fingerprint.fingerprint))
return True
class NoSourceOnlyCheck(Check):
"""Check for source-only upload
Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
set. Otherwise they are rejected.
"""
def check(self, upload):
if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
return True
changes = upload.changes
if changes.source is not None and len(changes.binaries) == 0:
raise Reject('Source-only uploads are not allowed.')
return True
class LintianCheck(Check):
"""Check package using lintian"""
def check(self, upload):
changes = upload.changes
# Only check sourceful uploads.
if changes.source is None:
return True
# Only check uploads to unstable or experimental.
if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
return True
cnf = Config()
if 'Dinstall::LintianTags' not in cnf:
return True
tagfile = cnf['Dinstall::LintianTags']
with open(tagfile, 'r') as sourcefile:
sourcecontent = sourcefile.read()
try:
lintiantags = yaml.load(sourcecontent)['lintian']
except yaml.YAMLError as msg:
raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))
fd, temp_filename = utils.temp_filename()
temptagfile = os.fdopen(fd, 'w')
for tags in lintiantags.itervalues():
for tag in tags:
print >>temptagfile, tag
temptagfile.close()
changespath = os.path.join(upload.directory, changes.filename)
try:
# FIXME: no shell
cmd = "lintian --show-overrides --tags-from-file {0} {1}".format(temp_filename, changespath)
result, output = commands.getstatusoutput(cmd)
finally:
os.unlink(temp_filename)
if result == 2:
utils.warn("lintian failed for %s [return code: %s]." % \
(changespath, result))
utils.warn(utils.prefix_multi_line_string(output, \
" [possible output:] "))
parsed_tags = lintian.parse_lintian_output(output)
rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
if len(rejects) != 0:
raise Reject('\n'.join(rejects))
return True
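# Note on the expected tag file layout (illustrative sketch, not taken from
# this repository): Dinstall::LintianTags is assumed to point to a YAML file
# with a top-level 'lintian' key mapping severity names to lists of tags,
# matching the yaml.load(...)['lintian'] access above, e.g.:
#
#   lintian:
#     error:
#       - debian-control-file-uses-obsolete-national-encoding
#     warning:
#       - debian-changelog-file-missing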
class SourceFormatCheck(Check):
"""Check source format is allowed in the target suite"""
def per_suite_check(self, upload, suite):
source = upload.changes.source
session = upload.session
if source is None:
return True
source_format = source.dsc['Format']
query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
if query.first() is None:
raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
def per_suite_check(self, upload, suite):
session = upload.session
for arch in upload.changes.architectures:
query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
if query.first() is None:
raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))
return True
class VersionCheck(Check):
"""Check version constraints"""
def _highest_source_version(self, session, source_name, suite):
db_source = session.query(DBSource).filter_by(source=source_name) \
.filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
if db_source is None:
return None
else:
return db_source.version
def _highest_binary_version(self, session, binary_name, suite, architecture):
# join the architecture relation so the arch_string filter below does
# not introduce an unconstrained cross join
db_binary = session.query(DBBinary).filter_by(package=binary_name) \
.filter(DBBinary.suites.contains(suite)) \
.join(DBBinary.architecture) \
.filter(Architecture.arch_string.in_(['all', architecture])) \
.order_by(DBBinary.version.desc()).first()
if db_binary is None:
return None
else:
return db_binary.version
def _version_checks(self, upload, suite, expected_result):
session = upload.session
if upload.changes.source is not None:
source_name = upload.changes.source.dsc['Source']
source_version = upload.changes.source.dsc['Version']
v = self._highest_source_version(session, source_name, suite)
if v is not None and version_compare(source_version, v) != expected_result:
raise Reject('Version check failed (source={0}, version={1}, suite={2})'.format(source_name, source_version, suite.suite_name))
for binary in upload.changes.binaries:
binary_name = binary.control['Package']
binary_version = binary.control['Version']
architecture = binary.control['Architecture']
v = self._highest_binary_version(session, binary_name, suite, architecture)
if v is not None and version_compare(binary_version, v) != expected_result:
raise Reject('Version check failed (binary={0}, version={1}, suite={2})'.format(binary_name, binary_version, suite.suite_name))
def per_suite_check(self, upload, suite):
session = upload.session
vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
.filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
must_be_newer_than = [ vc.reference for vc in vc_newer ]
# Must be newer than old versions in `suite`
must_be_newer_than.append(suite)
for s in must_be_newer_than:
self._version_checks(upload, s, 1)
vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
must_be_older_than = [ vc.reference for vc in vc_older ]
for s in must_be_older_than:
self._version_checks(upload, s, -1)
return True
@property
def forcable(self):
return True
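# Illustrative driver sketch (hypothetical; the actual dispatch lives
# elsewhere in daklib): checks are instantiated and run in sequence, a
# Reject aborts processing, and per_suite_check variants are assumed to be
# fanned out over the target suites by the Check base class.
#
#   for check in (NoSourceOnlyCheck(), LintianCheck(), VersionCheck()):
#       check.check(upload)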
@@ -3596,7 +3596,8 @@ class DBConn(object):
copy_queues = relation(BuildQueue,
secondary=self.tbl_suite_build_queue_copy),
srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
backref=backref('suites', lazy='dynamic')),
archive = relation(Archive, backref='suites')),
extension = validator)
mapper(Uid, self.tbl_uid,
......
# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module to process policy queue uploads"""
from .config import Config
from .dbconn import BinaryMetadata, Component, MetadataKey, Override, OverrideType
from .fstransactions import FilesystemTransaction
from .regexes import re_file_changes, re_file_safe
import errno
import os
import shutil
import tempfile
class UploadCopy(object):
"""export a policy queue upload
This class can be used in a with-statement:
with UploadCopy(...) as copy:
...
Doing so will provide a temporary copy of the upload in the directory
given by the `directory` attribute. The copy will be removed on leaving
the with-block.
Args:
upload (daklib.dbconn.PolicyQueueUpload)
"""
def __init__(self, upload):
self.directory = None
self.upload = upload
def export(self, directory, mode=None, symlink=True):
"""export a copy of the upload
Args:
directory (str)
Kwargs:
mode (int): permissions to use for the copied files
symlink (bool): use symlinks instead of copying the files (default: True)
"""
with FilesystemTransaction() as fs:
source = self.upload.source
queue = self.upload.policy_queue
if source is not None:
for dsc_file in source.srcfiles:
f = dsc_file.poolfile
dst = os.path.join(directory, os.path.basename(f.filename))
fs.copy(f.fullpath, dst, mode=mode, symlink=symlink)
for binary in self.upload.binaries:
f = binary.poolfile
dst = os.path.join(directory, os.path.basename(f.filename))
fs.copy(f.fullpath, dst, mode=mode, symlink=symlink)
# copy byhand files
for byhand in self.upload.byhand:
src = os.path.join(queue.path, byhand.filename)
dst = os.path.join(directory, byhand.filename)
fs.copy(src, dst, mode=mode, symlink=symlink)
# copy .changes
src = os.path.join(queue.path, self.upload.changes.changesname)
dst = os.path.join(directory, self.upload.changes.changesname)
fs.copy(src, dst, mode=mode, symlink=symlink)
def __enter__(self):
assert self.directory is None
cnf = Config()
self.directory = tempfile.mkdtemp(dir=cnf.get('Dir::TempPath'))
self.export(self.directory, symlink=True)
return self
def __exit__(self, *args):
if self.directory is not None:
shutil.rmtree(self.directory)
self.directory = None
return None
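# Usage sketch for UploadCopy (assuming `upload` is a
# daklib.dbconn.PolicyQueueUpload instance):
#
#   with UploadCopy(upload) as copy:
#       # work with the temporary copy; it is removed when the block exits
#       for entry in os.listdir(copy.directory):
#           print entry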
class PolicyQueueUploadHandler(object):
"""process uploads to policy queues
This class allows one to accept or reject uploads and to get a list of missing
overrides (for NEW processing).
"""
def __init__(self, upload, session):
"""initializer
Args:
upload (daklib.dbconn.PolicyQueueUpload): upload to process
session: database session
"""
self.upload = upload
self.session = session
@property
def _overridesuite(self):
overridesuite = self.upload.target_suite
if overridesuite.overridesuite is not None:
overridesuite = self.session.query(Suite).filter_by(suite_name=overridesuite.overridesuite).one()
return overridesuite
def _source_override(self, component_name):
package = self.upload.source.source
suite = self._overridesuite
query = self.session.query(Override).filter_by(package=package, suite=suite) \
.join(OverrideType).filter(OverrideType.overridetype == 'dsc') \
.join(Component).filter(Component.component_name == component_name)
return query.first()
def _binary_override(self, binary, component_name):
package = binary.package
suite = self._overridesuite
overridetype = binary.binarytype
query = self.session.query(Override).filter_by(package=package, suite=suite) \
.join(OverrideType).filter(OverrideType.overridetype == overridetype) \
.join(Component).filter(Component.component_name == component_name)
return query.first()
def _binary_metadata(self, binary, key):
metadata_key = self.session.query(MetadataKey).filter_by(key=key).first()
if metadata_key is None:
return None
metadata = self.session.query(BinaryMetadata).filter_by(binary=binary, key=metadata_key).first()
if metadata is None:
return None
return metadata.value
@property
def _changes_prefix(self):
changesname = self.upload.changes.changesname
assert changesname.endswith('.changes')
assert re_file_changes.match(changesname)
return changesname[0:-8]
def accept(self):
"""mark upload as accepted"""
assert len(self.missing_overrides()) == 0
fn1 = 'ACCEPT.{0}'.format(self._changes_prefix)
fn = os.path.join(self.upload.policy_queue.path, 'COMMENTS', fn1)
try:
fh = os.open(fn, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
os.write(fh, 'OK\n')
os.close(fh)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
raise
def reject(self, reason):
"""mark upload as rejected
Args:
reason (str): reason for the rejection
"""
fn1 = 'REJECT.{0}'.format(self._changes_prefix)
assert re_file_safe.match(fn1)
fn = os.path.join(self.upload.policy_queue.path, 'COMMENTS', fn1)
try:
fh = os.open(fn, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
os.write(fh, 'NOTOK\n')
os.write(fh, reason)
os.close(fh)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
raise
def get_action(self):
"""get current action
Returns:
string giving the current action, one of 'ACCEPT', 'ACCEPTED' or
'REJECT', or None if no action has been recorded yet
"""
changes_prefix = self._changes_prefix
for action in ('ACCEPT', 'ACCEPTED', 'REJECT'):
fn1 = '{0}.{1}'.format(action, changes_prefix)
fn = os.path.join(self.upload.policy_queue.path, 'COMMENTS', fn1)
if os.path.exists(fn):
return action
return None
def missing_overrides(self, hints=None):
"""get missing override entries for the upload
Kwargs:
hints (list of dict): suggested hints for new overrides in the same
format as the return value
Returns:
list of dicts with the following keys:
package: package name
priority: default priority (from upload)
section: default section (from upload)
component: default component (from upload)
type: type of required override ('dsc', 'deb' or 'udeb')
All values are strings.
"""
# TODO: use Package-List field
missing = []
components = set()
if hints is None:
hints = []
hints_map = dict([ ((o['type'], o['package']), o) for o in hints ])
for binary in self.upload.binaries:
priority = self._binary_metadata(binary, 'Priority')
section = self._binary_metadata(binary, 'Section')
component = 'main'
if section.find('/') != -1:
component = section.split('/', 1)[0]
override = self._binary_override(binary, component)
if override is None:
hint = hints_map.get((binary.binarytype, binary.package))
if hint is not None:
missing.append(hint)
component = hint['component']
else:
missing.append(dict(
package = binary.package,
priority = priority,
section = section,
component = component,
type = binary.binarytype,
))
components.add(component)
source_component = '(unknown)'
for component in ('main', 'contrib', 'non-free'):
if component in components:
source_component = component
break
source = self.upload.source
if source is not None:
override = self._source_override(source_component)
if override is None:
hint = hints_map.get(('dsc', source.source))
if hint is not None:
missing.append(hint)
else:
section = 'misc'
# use the component guessed for the source, not the stale loop
# variable left over from the search above
if source_component != 'main':
section = "{0}/{1}".format(source_component, section)
missing.append(dict(
package = source.source,
priority = 'extra',
section = section,
component = source_component,
type = 'dsc',
))
return missing
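# Illustrative call sketch for missing_overrides(); the hint values below
# are made up, but follow the documented dict format:
#
#   handler = PolicyQueueUploadHandler(upload, session)
#   hints = [dict(package='foo', priority='optional', section='utils',
#                 component='main', type='deb')]
#   for missing in handler.missing_overrides(hints):
#       print '%(component)s/%(section)s %(package)s (%(type)s)' % missing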
# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module to handle uploads not yet installed to the archive
This module provides classes to handle uploads not yet installed to the
archive. Central is the `Changes` class which represents a changes file.
It provides methods to access the included binary and source packages.
"""
import apt_inst
import apt_pkg
import os
import re
from .gpg import SignedFile
from .regexes import *
class InvalidChangesException(Exception):
pass
class InvalidBinaryException(Exception):
pass
class InvalidSourceException(Exception):
pass
class InvalidHashException(Exception):
def __init__(self, filename, hash_name, expected, actual):
self.filename = filename
self.hash_name = hash_name
self.expected = expected
self.actual = actual
def __str__(self):
return "Invalid {0} hash for {1}: expected {2}, but got {3}.".format(self.hash_name, self.filename, self.expected, self.actual)
class InvalidFilenameException(Exception):
def __init__(self, filename):
self.filename = filename
def __str__(self):
return "Invalid filename '{0}'.".format(self.filename)
class HashedFile(object):
"""file with checksums
Attributes:
filename (str): name of the file
size (long): size in bytes
md5sum (str): MD5 hash in hexdigits
sha1sum (str): SHA1 hash in hexdigits
sha256sum (str): SHA256 hash in hexdigits
section (str): section or None
priority (str): priority or None
"""
def __init__(self, filename, size, md5sum, sha1sum, sha256sum, section=None, priority=None):
self.filename = filename
self.size = size
self.md5sum = md5sum
self.sha1sum = sha1sum
self.sha256sum = sha256sum
self.section = section
self.priority = priority
def check(self, directory):
"""Validate hashes
Check if size and hashes match the expected value.
Args:
directory (str): directory the file is located in
Raises:
InvalidHashException: hash mismatch
"""
path = os.path.join(directory, self.filename)
fh = open(path, 'r')
size = os.stat(path).st_size
if size != self.size:
raise InvalidHashException(self.filename, 'size', self.size, size)
md5sum = apt_pkg.md5sum(fh)
if md5sum != self.md5sum:
raise InvalidHashException(self.filename, 'md5sum', self.md5sum, md5sum)
fh.seek(0)
sha1sum = apt_pkg.sha1sum(fh)
if sha1sum != self.sha1sum:
raise InvalidHashException(self.filename, 'sha1sum', self.sha1sum, sha1sum)
fh.seek(0)
sha256sum = apt_pkg.sha256sum(fh)
if sha256sum != self.sha256sum:
raise InvalidHashException(self.filename, 'sha256sum', self.sha256sum, sha256sum)
def parse_file_list(control, has_priority_and_section):
"""Parse Files and Checksums-* fields
Args:
control (dict-like): control file to take fields from
has_priority_and_section (bool): Files include section and priority (as in .changes)
Raises:
InvalidChangesException: missing fields or other grave errors
Returns:
dictionary mapping filenames to `daklib.upload.HashedFile` objects
"""
entries = {}
for line in control["Files"].split('\n'):
if len(line) == 0:
continue
if has_priority_and_section:
(md5sum, size, section, priority, filename) = line.split()
entry = dict(md5sum=md5sum, size=long(size), section=section, priority=priority, filename=filename)
else:
(md5sum, size, filename) = line.split()
entry = dict(md5sum=md5sum, size=long(size), filename=filename)
entries[filename] = entry
for line in control["Checksums-Sha1"].split('\n'):
if len(line) == 0:
continue
(sha1sum, size, filename) = line.split()
entry = entries.get(filename, None)
if entry is None:
raise InvalidChangesException('{0} is listed in Checksums-Sha1, but not in Files.'.format(filename))
if entry.get('size', None) != long(size):
raise InvalidChangesException('Size for {0} in Files and Checksums-Sha1 fields differ.'.format(filename))
entry['sha1sum'] = sha1sum
for line in control["Checksums-Sha256"].split('\n'):
if len(line) == 0:
continue
(sha256sum, size, filename) = line.split()
entry = entries.get(filename, None)
if entry is None:
raise InvalidChangesException('{0} is listed in Checksums-Sha256, but not in Files.'.format(filename))
if entry.get('size', None) != long(size):
raise InvalidChangesException('Size for {0} in Files and Checksums-Sha256 fields differ.'.format(filename))
entry['sha256sum'] = sha256sum
files = {}
for entry in entries.itervalues():
filename = entry['filename']
if 'size' not in entry:
raise InvalidChangesException('No size for {0}.'.format(filename))
if 'md5sum' not in entry:
raise InvalidChangesException('No md5sum for {0}.'.format(filename))
if 'sha1sum' not in entry:
raise InvalidChangesException('No sha1sum for {0}.'.format(filename))
if 'sha256sum' not in entry:
raise InvalidChangesException('No sha256sum for {0}.'.format(filename))
if not re_file_safe.match(filename):
raise InvalidChangesException("{0}: References file with unsafe filename {1}.".format(self.filename, filename))
f = files[filename] = HashedFile(**entry)
return files
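# Worked sketch for parse_file_list() (field contents are made up): given a
# .changes-style control mapping such as
#
#   Files:
#    9d41... 1234 utils optional foo_1.0_amd64.deb
#   Checksums-Sha1:
#    2fd4... 1234 foo_1.0_amd64.deb
#   Checksums-Sha256:
#    e3b0... 1234 foo_1.0_amd64.deb
#
# it returns {'foo_1.0_amd64.deb': HashedFile(...)} with size, md5sum,
# sha1sum, sha256sum, section and priority filled in.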
class Changes(object):
"""Representation of a .changes file
Attributes:
architectures (list of str): list of architectures included in the upload
binaries (list of daklib.upload.Binary): included binary packages
binary_names (list of str): names of included binary packages
byhand_files (list of daklib.upload.HashedFile): included byhand files
bytes (int): total size of files included in this upload in bytes
changes (dict-like): dict to access fields of the .changes file
closed_bugs (list of str): list of bugs closed by this upload
directory (str): directory the .changes is located in
distributions (list of str): list of target distributions for the upload
filename (str): name of the .changes file
files (dict): dict mapping filenames to daklib.upload.HashedFile objects
path (str): path to the .changes file
primary_fingerprint (str): fingerprint of the PGP key used for the signature
source (daklib.upload.Source or None): included source
valid_signature (bool): True if the changes has a valid signature
"""
def __init__(self, directory, filename, keyrings, require_signature=True):
if not re_file_safe.match(filename):
raise InvalidChangesException('{0}: unsafe filename'.format(filename))
self.directory = directory
self.filename = filename
data = open(self.path).read()
self._signed_file = SignedFile(data, keyrings, require_signature)
self.changes = apt_pkg.TagSection(self._signed_file.contents)
self._binaries = None
self._source = None
self._files = None
self._keyrings = keyrings
self._require_signature = require_signature
@property
def path(self):
return os.path.join(self.directory, self.filename)
@property
def primary_fingerprint(self):
return self._signed_file.primary_fingerprint
@property
def valid_signature(self):
return self._signed_file.valid
@property
def architectures(self):
return self.changes['Architecture'].split()
@property
def distributions(self):
return self.changes['Distribution'].split()
@property
def source(self):
if self._source is None:
source_files = []
for f in self.files.itervalues():
if re_file_dsc.match(f.filename) or re_file_source.match(f.filename):
source_files.append(f)
if len(source_files) > 0:
self._source = Source(self.directory, source_files, self._keyrings, self._require_signature)
return self._source
@property
def binaries(self):
if self._binaries is None:
binaries = []
for f in self.files.itervalues():
if re_file_binary.match(f.filename):
binaries.append(Binary(self.directory, f))
self._binaries = binaries
return self._binaries
@property
def byhand_files(self):
byhand = []
for f in self.files.itervalues():
if re_file_dsc.match(f.filename) or re_file_source.match(f.filename) or re_file_binary.match(f.filename):
continue
if f.section != 'byhand' and f.section[:4] != 'raw-':
raise InvalidChangesException("{0}: {1} looks like a byhand package, but is in section {2}".format(self.filename, f.filename, f.section))
byhand.append(f)
return byhand
@property
def binary_names(self):
return self.changes['Binary'].split()
@property
def closed_bugs(self):
return self.changes.get('Closes', '').split()
@property
def files(self):
if self._files is None:
self._files = parse_file_list(self.changes, True)
return self._files
@property
def bytes(self):
count = 0
for f in self.files.itervalues():
count += f.size
return count
def __cmp__(self, other):
"""Compare two changes packages
We sort by source name and version first. If these are identical,
we sort changes that include source before those without source (so
that sourceful uploads get processed first), and finally fall back
to the filename (this should really never happen).
Returns:
-1 if self < other, 0 if self == other, 1 if self > other
"""
ret = cmp(self.changes.get('Source'), other.changes.get('Source'))
if ret == 0:
# compare version
ret = apt_pkg.version_compare(self.changes.get('Version', ''), other.changes.get('Version', ''))
if ret == 0:
# sort changes with source before changes without source
if 'source' in self.architectures and 'source' not in other.architectures:
ret = -1
elif 'source' not in self.architectures and 'source' in other.architectures:
ret = 1
else:
ret = 0
if ret == 0:
# fall back to filename
ret = cmp(self.filename, other.filename)
return ret
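# Because Changes implements __cmp__, a batch of uploads can simply be
# sorted to get the processing order described above (sketch; `directory`
# and `keyrings` are assumed to be defined):
#
#   uploads = [Changes(directory, fn, keyrings) for fn in changes_files]
#   uploads.sort()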
class Binary(object):
"""Representation of a binary package
Attributes:
component (str): component name
control (dict-like): dict to access fields in DEBIAN/control
hashed_file (HashedFile): HashedFile object for the .deb
"""
def __init__(self, directory, hashed_file):
self.hashed_file = hashed_file
path = os.path.join(directory, hashed_file.filename)
data = apt_inst.DebFile(path).control.extractdata("control")
self.control = apt_pkg.TagSection(data)
@property
def source(self):
"""Get source package name and version
Returns:
tuple containing source package name and version
"""
source = self.control.get("Source", None)
if source is None:
return (self.control["Package"], self.control["Version"])
match = re_field_source.match(source)
if not match:
raise InvalidBinaryException('{0}: Invalid Source field.'.format(self.hashed_file.filename))
version = match.group('version')
if version is None:
version = self.control['Version']
return (match.group('package'), version)
@property
def type(self):
"""Get package type
Returns:
String with the package type ('deb' or 'udeb')
"""
match = re_file_binary.match(self.hashed_file.filename)
if not match:
raise InvalidBinaryException('{0}: Does not match re_file_binary'.format(self.hashed_file.filename))
return match.group('type')
@property
def component(self):
fields = self.control['Section'].split('/')
if len(fields) > 1:
return fields[0]
return "main"
class Source(object):
"""Representation of a source package
Attributes:
component (str): guessed component name. Might be wrong!
dsc (dict-like): dict to access fields in the .dsc file
hashed_files (list of daklib.upload.HashedFile): list of source files (including .dsc)
files (dict): dictionary mapping filenames to HashedFile objects for
additional source files (not including .dsc)
primary_fingerprint (str): fingerprint of the PGP key used for the signature
valid_signature (bool): True if the dsc has a valid signature
"""
def __init__(self, directory, hashed_files, keyrings, require_signature=True):
self.hashed_files = hashed_files
self._dsc_file = None
for f in hashed_files:
if re_file_dsc.match(f.filename):
if self._dsc_file is not None:
raise InvalidSourceException("Multiple .dsc found ({0} and {1})".format(self._dsc_file.filename, f.filename))
else:
self._dsc_file = f
if self._dsc_file is None:
raise InvalidSourceException("No .dsc file found in upload")
dsc_file_path = os.path.join(directory, self._dsc_file.filename)
data = open(dsc_file_path, 'r').read()
self._signed_file = SignedFile(data, keyrings, require_signature)
self.dsc = apt_pkg.TagSection(self._signed_file.contents)
self._files = None
@property
def files(self):
if self._files is None:
self._files = parse_file_list(self.dsc, False)
return self._files
@property
def primary_fingerprint(self):
return self._signed_file.primary_fingerprint
@property
def valid_signature(self):
return self._signed_file.valid
@property
def component(self):
if 'Section' not in self.dsc:
return 'main'
fields = self.dsc['Section'].split('/')
if len(fields) > 1:
return fields[0]
return "main"
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import commands
import datetime
import email.Header
import os
import pwd
@@ -608,6 +609,14 @@ def build_package_list(dsc, session = None):
def send_mail (message, filename=""):
"""sendmail wrapper, takes _either_ a message string or a file as arguments"""
maildir = Cnf.get('Dir::Mail')
if maildir:
path = os.path.join(maildir, datetime.datetime.now().isoformat())
path = find_next_free(path)
fh = open(path, 'w')
print >>fh, message,
fh.close()
# Check whether we're supposed to be sending mail
if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
return
@@ -1581,8 +1590,30 @@ def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
addresses.append(changed_by)
fpr_addresses = gpg_get_key_addresses(fingerprint)
if len(fpr_addresses) > 0 and fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses:
addresses.append(fpr_addresses[0])
encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ]
return encoded_addresses
################################################################################
def call_editor(text="", suffix=".txt"):
"""Run editor and return the result as a string
Kwargs:
text (str): initial text
suffix (str): extension for temporary file
Returns:
string with the edited text
"""
editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
try:
print >>tmp, text,
tmp.close()
subprocess.check_call([editor, tmp.name])
return open(tmp.name, 'r').read()
finally:
os.unlink(tmp.name)
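# Usage sketch for call_editor(), e.g. to let an operator edit a rejection
# message interactively (filename and text are made up):
#
#   reason = call_editor(text='Reason for rejection:\n', suffix='.txt')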
@@ -29,8 +29,6 @@ Misc
o __BUG_NUMBER__
o __CONTROL_MESSAGE__
o __MANUAL_REJECT_MESSAGE__
o __STABLE_WARNING__
o __SUITE__
@@ -9,6 +9,3 @@ Content-Transfer-Encoding: 8bit
Subject: Accepted __SOURCE__ __VERSION__ (__ARCHITECTURE__)
__FILE_CONTENTS__
@@ -12,10 +12,7 @@ Source: __SOURCE__
Source-Version: __VERSION__
We believe that the bug you reported is fixed in the latest version of
__SOURCE__, which is due to be installed in the __DISTRO__ FTP archive.
A summary of the changes between this version and the previous one is
attached.
......
@@ -9,11 +9,6 @@ Content-Type: text/plain; charset="utf-8"
Content-Transfer-Encoding: 8bit
Subject: __CHANGES_FILENAME__ is NEW
Your package contains new components which require manual editing of
the override file. It is ok otherwise, so please be patient. New
packages are usually added to the override file about once a week.
You may have gotten the distribution wrong. You'll get warnings above
if files already exist in other distributions.
@@ -17,9 +17,8 @@ database and may (or may not) still be in the pool; this is not a bug.
The package(s) will be physically removed automatically when no suite
references them (and in the case of source, when no binary references
it). Please also remember that the changes have been done on the
master archive and will not propagate to any mirrors (__PRIMARY_MIRROR__
included) until the next dinstall run at the earliest.
Packages are usually not removed from testing by hand. Testing tracks
unstable and will automatically remove packages which were removed
......
@@ -17,9 +17,8 @@ database and may (or may not) still be in the pool; this is not a bug.
The package(s) will be physically removed automatically when no suite
references them (and in the case of source, when no binary references
it). Please also remember that the changes have been done on the
master archive and will not propagate to any mirrors (__PRIMARY_MIRROR__
included) until the next dinstall run at the earliest.
Packages are usually not removed from testing by hand. Testing tracks
unstable and will automatically remove packages which were removed
......