checks.py 38.2 KB
Newer Older
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22
# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# Parts based on code that is
# Copyright (C) 2001-2006, James Troup <james@nocrew.org>
# Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

"""module provided pre-acceptance tests

A
Ansgar Burchardt 已提交
23
Please read the documentation for the L{Check} class for the interface.
24 25 26
"""

from daklib.config import Config
27
import daklib.daksubprocess
A
Ansgar Burchardt 已提交
28
from daklib.dbconn import *
29
import daklib.dbconn as dbconn
A
Ansgar Burchardt 已提交
30 31
from daklib.regexes import *
from daklib.textutils import fix_maintainer, ParseMaintError
32 33
import daklib.lintian as lintian
import daklib.utils as utils
34
import daklib.upload
35

A
Ansgar Burchardt 已提交
36
import apt_inst
37 38
import apt_pkg
from apt_pkg import version_compare
39
import datetime
40
import errno
41
import os
42
import subprocess
43
import textwrap
A
Ansgar Burchardt 已提交
44
import time
45 46
import yaml

47 48 49 50 51 52 53 54 55
def check_fields_for_valid_utf8(filename, control):
    """Ensure every field name and value of a control file decodes as UTF-8.

    Raises Reject naming the offending field otherwise.
    """
    for name in control.keys():
        try:
            name.decode('utf-8')
            control[name].decode('utf-8')
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, name))

56 57 58 59
class Reject(Exception):
    """exception raised by failing checks"""

A
Ansgar Burchardt 已提交
60
class RejectExternalFilesMismatch(Reject):
    """exception raised by failing the external hashes check"""

    def __str__(self):
        # args are (filename, hash name, our value, external value)
        filename, hash_name, ours, external = self.args[:4]
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % (filename, hash_name, ours, external)

66 67 68 69 70 71 72 73 74
class RejectACL(Reject):
    """exception raised by failing ACL checks"""

    def __init__(self, acl, reason):
        self.acl = acl
        self.reason = reason

    def __str__(self):
        return "ACL {a.name}: {r}".format(a=self.acl, r=self.reason)

75 76 77
class Check(object):
    """base class for checks

    checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
    raise a L{daklib.checks.Reject} exception including a human-readable
    description why the upload should be rejected.
    """
    def check(self, upload):
        """do checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        # Fixed: was `raise NotImplemented` — NotImplemented is a singleton
        # used by binary operators, not an exception; raising it produces a
        # TypeError instead of the intended NotImplementedError.
        raise NotImplementedError

    def per_suite_check(self, upload, suite):
        """do per-suite checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        # Fixed: see check() above.
        raise NotImplementedError

    @property
    def forcable(self):
        """allow to force ignore failing test

        C{True} if it is acceptable to force ignoring a failing test,
        C{False} otherwise
        """
        return False

112
class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    # NOTE: the class and method docstrings below used to be bare string
    # literals placed *before* the defs, i.e. no-op statements that never
    # became docstrings; they have been moved to their proper positions.

    def check_replay(self, upload):
        """Reject the upload if this exact .changes signature was seen before.

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: signature was already seen
        """
        # Use private session as we want to remember having seen the .changes
        # in all cases.
        session = upload.session
        history = SignatureHistory.from_signed_file(upload.changes)
        r = history.query(session)
        if r is not None:
            raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
        return True

    def check(self, upload):
        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self.check_replay(upload)
        self._check_hashes(upload, changes.filename, changes.files.itervalues())

        source = None
        try:
            source = changes.source
        except Exception as e:
            # Parsing the .dsc may fail; surface that as a rejection rather
            # than an internal error.
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())

        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

    def _check_hashes(self, upload, filename, files):
        """Make sure hashes match existing files

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload we are processing

        @type  filename: str
        @param filename: name of the file the expected hash values are taken from

        @type  files: sequence of L{daklib.upload.HashedFile}
        @param files: files to check the hashes for
        """
        try:
            for f in files:
                f.check(upload.directory)
        except daklib.upload.FileDoesNotExist as e:
            raise Reject('{0}: {1}\n'
                         'Perhaps you need to include the file in your upload?'
                         .format(filename, unicode(e)))
        except daklib.upload.UploadException as e:
            raise Reject('{0}: {1}'.format(filename, unicode(e)))
170

171 172 173 174 175 176 177 178 179 180 181 182 183 184
class WeakSignatureCheck(Check):
    """Check that .changes and .dsc are not signed using a weak algorithm"""
    def check(self, upload):
        changes = upload.changes
        if changes.weak_signature:
            raise Reject("The .changes was signed using a weak algorithm (such as SHA-1)")

        # The .dsc (if any) must not use a weak digest either.
        source = changes.source
        if source is not None and source.weak_signature:
            raise Reject("The source package was signed using a weak algorithm (such as SHA-1)")

        return True

185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203
class SignatureTimestampCheck(Check):
    """Check timestamp of .changes signature"""
    def check(self, upload):
        changes = upload.changes

        # Tolerate signatures up to a year old and up to a week in the future.
        oldest_accepted = datetime.timedelta(days=365)
        newest_accepted = datetime.timedelta(days=-7)

        timestamp = changes.signature_timestamp
        age = datetime.datetime.utcnow() - timestamp

        if age > oldest_accepted:
            raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, oldest_accepted.days))
        if age < newest_accepted:
            raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(newest_accepted.days)))

        return True

204 205 206 207 208 209 210 211 212 213 214
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        # A .changes must always carry these fields.
        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # Source and Version must be syntactically valid; the version without
        # its epoch is needed below to validate the filename.
        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        # The filename itself must parse and agree with the Source and
        # Version fields (epochs never appear in filenames).
        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        # Every name in the Binary field must be a well-formed package name.
        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # Architecture field and actual upload contents must agree on whether
        # source is included.
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        # Maintainer (mandatory) and Changed-By (optional) must be parseable
        # name/address pairs.
        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        # Closes may only reference bug numbers.
        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True

263 264 265 266 267 268 269 270 271 272
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        """Compare size and checksums of C{f} with the external_files table.

        @raise RejectExternalFilesMismatch: a recorded value disagrees
        """
        # Use a bound parameter instead of interpolating the filename into
        # the SQL string: the filename comes from the upload and must not be
        # able to alter the query (SQL injection).
        q = session.execute(
            "SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern",
            {'pattern': '%/' + f.filename})
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        # No external record for this file: nothing to compare against.
        if not ext_size:
            return

        if ext_size != f.size:
            raise RejectExternalFilesMismatch(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectExternalFilesMismatch(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        cnf = Config()

        # The whole check is opt-in via configuration.
        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)

300 301 302
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        debug_deb_name_postfix = "-dbgsym"
        # XXX: Handle dynamic debug section name here

        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        binaries = {binary.control['Package']: binary
                        for binary in upload.changes.binaries}

        for name, binary in binaries.items():
            if name in upload.changes.binary_names:
                # Package is listed in Binary field. Everything is good.
                pass
            elif daklib.utils.is_in_debug_section(binary.control):
                # If we have a binary package in the debug section, we
                # can allow it to not be present in the Binary field
                # in the .changes file, so long as its name (without
                # -dbgsym) is present in the Binary list.
                if not name.endswith(debug_deb_name_postfix):
                    raise Reject('Package {0} is in the debug section, but '
                                 'does not end in {1}.'.format(name, debug_deb_name_postfix))

                # Right, so, it's named properly, let's check that
                # the corresponding package is in the Binary list
                origin_package_name = name[:-len(debug_deb_name_postfix)]
                if origin_package_name not in upload.changes.binary_names:
                    raise Reject(
                        "Debug package {debug}'s corresponding binary package "
                        "{origin} is not present in the Binary field.".format(
                            debug=name, origin=origin_package_name))
            else:
                # Someone was a nasty little hacker and put a package
                # into the .changes that isn't in debian/control. Bad,
                # bad person.
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(name))

        return True

    def check_binary(self, upload, binary):
        """Validate a single binary package's control fields and filename.

        @raise daklib.checks.Reject: the package is malformed
        """
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
            if field not in control:
                # Fixed: the message previously formatted the filename twice
                # ({0} {0}), so the missing field name never appeared.
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                # Fixed: was a bare `except:` which would also swallow
                # KeyboardInterrupt/SystemExit.
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                # Fixed: was a bare `except:`, see above.
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        # "Multi-Arch: no" breaks wanna-build, #768353
        multi_arch = control.get("Multi-Arch")
        if multi_arch == 'no':
            raise Reject('{0}: Multi-Arch: no support in Debian is broken (#768353)'.format(fn))

A
Ansgar Burchardt 已提交
411 412 413 414 415 416 417 418 419
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        # Allow a configurable grace period into the future (default: one
        # day) and reject anything before the configured cutoff year
        # (default: 1975).
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        # Collects out-of-range member timestamps while apt walks the
        # archive; closes over the cutoffs computed above.
        class TarTime(object):
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            # Build the human-readable rejection message listing each
            # offending member with its timestamp.
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += "  {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            # NOTE: only the control member of the .deb is scanned here, not
            # the data tarball.
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))

449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        """Validate that C{filename} agrees with the Source/Version fields.

        @raise daklib.checks.Reject: the filename is malformed or inconsistent
        """
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                # Fixed: the message previously repeated the filename ({0})
                # where the version was meant; the second format argument
                # went unused.
                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        if version_match is None:
            # Guard against an unparseable version: reject cleanly instead
            # of crashing with AttributeError on the next line.
            raise Reject('{0}: Invalid Version field'.format(filename))
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        # Binary-only uploads carry no source to validate.
        if upload.changes.source is None:
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
516 517 518 519 520 521 522 523 524

class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        distributions = upload.changes.distributions
        if len(distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")

class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Returns (True, binary, other_source) if some uploaded binary name
        # already belongs to a different source package in `suite`,
        # (False, None, None) otherwise.
        #
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        # Evaluate one ACL. Tri-state result:
        #   (None, None)     - the ACL does not apply to this upload
        #   (False, reason)  - the ACL applies and rejects the upload
        #   (True, None)     - the ACL applies and allows the upload
        # Callers distinguish None from False, so do not collapse them.
        source_name = upload.changes.source_name

        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                # Only explicitly-listed architectures may be uploaded;
                # 'source' is not an architecture for this purpose.
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        # Per-source entries can either whitelist (allow_per_source) or
        # blacklist (deny_per_source) a specific source package for this
        # fingerprint.
        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        # The primary ACL (fingerprint-specific, falling back to the
        # keyring's) must both apply and allow: `not result` rejects on
        # False *and* on None (ACL did not match).
        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if not result:
            raise RejectACL(acl, reason)

        # Global ACLs only reject on an explicit False; None (does not
        # apply) is fine here, hence `== False` rather than `not result`.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        # A suite with ACLs requires at least one of them to explicitly
        # accept the upload; any explicit False rejects immediately.
        acls = suite.acls
        if len(acls) != 0:
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result == False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True

621 622 623 624 625 626 627 628 629 630
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        """Reject source uploads of packages blocked by an ongoing transition.

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: the package is part of a transition
        """
        # Only sourceful uploads are subject to transition freezes.
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}).  This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance.  You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """Load the transition definitions from the configured YAML file.

        @rtype:  dict or C{None}
        @return: parsed transitions, or C{None} if unconfigured, missing
                 or unparseable
        """
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        # Fixed: was `file(path, 'r').read()`, which leaked the file handle
        # (and `file` does not exist in Python 3); use a context manager.
        with open(path, 'r') as fd:
            contents = fd.read()
        try:
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None

690
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.

    Source-only uploads are only accepted for source packages having a
    Package-List field that also lists architectures per package. This
    check can be disabled via
    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.

    Source-only uploads to NEW are only allowed if
    Dinstall::AllowSourceOnlyNew is set.

    Uploads not including architecture-independent packages are only
    allowed if Dinstall::AllowNoArchIndepUploads is set.
    """
    def is_source_only_upload(self, upload):
        """Return True if the upload has a source package and no binaries."""
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            return True
        return False

    def check(self, upload):
        if not self.is_source_only_upload(upload):
            return True

        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads', True)
        changes = upload.changes

        if not allow_source_only_uploads:
            raise Reject('Source-only uploads are not allowed.')
        if not allow_source_only_uploads_without_package_list \
           and changes.source.package_list.fallback:
            raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
        if not allow_source_only_new and upload.new:
            raise Reject('Source-only uploads to NEW are not allowed.')

        if 'all' not in changes.architectures and changes.source.package_list.has_arch_indep_packages():
            if not allow_no_arch_indep_uploads:
                raise Reject('Uploads must include architecture-independent packages.')
            for suite in ('oldstable', 'oldstable-proposed-updates', 'oldstable-security',
                          'jessie', 'jessie-proposed-updates', 'jessie-security',
                          'oldstable-backports', 'oldstable-backports-sloppy',
                          'jessie-backports', 'jessie-backports-sloppy',
                          'oldoldstable', 'oldoldstable-security',
                          'wheezy', 'wheezy-security',
                          'oldoldstable-backports', 'oldoldstable-backports-sloppy',
                          'wheezy-backports', 'wheezy-backports-sloppy'):
                if suite in changes.distributions:
                    # Bug fix: the suite name placeholder was never filled in;
                    # the literal '{}' used to appear in the reject message.
                    raise Reject('Suite {} is not configured to build arch:all packages. Please include them in your upload'.format(suite))

        return True


class ArchAllBinNMUCheck(Check):
    """Reject binary-only uploads (binNMUs) that ship arch:all packages"""
    def check(self, upload):
        changes = upload.changes

        # binNMUs are flagged by Binary-Only: yes in the .changes file.
        is_binary_only = changes.changes.get('Binary-Only') == 'yes'
        includes_arch_all = 'all' in changes.architectures

        if includes_arch_all and is_binary_only:
            raise Reject('arch:all binNMUs are not allowed.')

        return True


class LintianCheck(Check):
    """Check package using lintian

    Runs lintian on sourceful uploads to unstable/experimental and rejects
    the upload if it emits any of the tags listed in Dinstall::LintianTags.
    """
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        fd, temp_filename = utils.temp_filename(mode=0o644)
        try:
            # Write one tag per line. The with-block closes the fd and the
            # surrounding try/finally unlinks the temporary file even if
            # writing fails; previously a write error leaked both.
            with os.fdopen(fd, 'w') as temptagfile:
                for tags in lintiantags.itervalues():
                    for tag in tags:
                        temptagfile.write('%s\n' % tag)

            changespath = os.path.join(upload.directory, changes.filename)

            cmd = []
            result = 0

            # Optionally drop privileges before invoking lintian.
            user = cnf.get('Dinstall::UnprivUser') or None
            if user is not None:
                cmd.extend(['sudo', '-H', '-u', user])

            cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
            try:
                output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError as e:
                result = e.returncode
                output = e.output
        finally:
            os.unlink(temp_filename)

        # Exit code 2 means lintian itself failed; warn but keep going with
        # whatever output it produced.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True


class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        # Binary-only uploads carry no source format to validate.
        if source is None:
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
        # Signal success explicitly, consistent with the other per-suite
        # checks (this used to fall through and return None).
        return True

class SuiteCheck(Check):
    """Check that the target suite accepts this kind of upload"""
    def per_suite_check(self, upload, suite):
        changes = upload.changes
        has_source = changes.source is not None
        has_binaries = len(changes.binaries) != 0

        if has_source and not suite.accept_source_uploads:
            raise Reject('The suite "{0}" does not accept source uploads.'.format(suite.suite_name))
        if has_binaries and not suite.accept_binary_uploads:
            raise Reject('The suite "{0}" does not accept binary uploads.'.format(suite.suite_name))

        return True

class SuiteArchitectureCheck(Check):
    """Check that every architecture in the upload is enabled for the suite"""
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            known = session.query(Architecture) \
                .filter_by(arch_string=arch) \
                .filter(Architecture.suites.contains(suite)) \
                .first()
            if known is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True

class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        # Newest version of source_name present in suite, or None.
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        return None if db_source is None else db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        # Newest version of binary_name in suite for the given architecture
        # (arch:all binaries count too), or None.
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        return None if db_binary is None else db_binary.version

    def _version_checks(self, upload, suite, other_suite, op, op_name):
        # Reject unless op(version_compare(uploaded, existing)) holds for
        # every source and binary package, compared against other_suite.
        session = upload.session

        if upload.changes.source is not None:
            dsc = upload.changes.source.dsc
            source_name = dsc['Source']
            source_version = dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))

        for binary in upload.changes.binaries:
            control = binary.control
            binary_name = control['Package']
            binary_version = control['Version']
            architecture = control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        # Uploads must be newer than anything referenced by a
        # MustBeNewerThan/Enhances version check ...
        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        newer_than = [vc.reference for vc in vc_newer]
        # ... and newer than old versions in `suite` itself.
        newer_than.append(suite)

        for reference in newer_than:
            self._version_checks(upload, suite, reference, lambda result: result > 0, 'higher')

        # Conversely for MustBeOlderThan references.
        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        older_than = [vc.reference for vc in vc_older]

        for reference in older_than:
            self._version_checks(upload, suite, reference, lambda result: result < 0, 'lower')

        return True

    @property
    def forcable(self):
        # Version constraint violations may be overridden by an operator.
        return True