From e8eacb00ee41d7ab974ba913fff40e5c082b8e1a Mon Sep 17 00:00:00 2001 From: James Troup Date: Wed, 13 Dec 2000 03:18:50 +0000 Subject: [PATCH] auric implementation --- Contents.top | 32 ++++++++ THANKS | 2 + TODO | 1 - apt.conf | 67 +++++++++++++++ cron.daily | 45 +++++++---- cron.weekly | 32 ++++++++ cron.weekly-non-US | 33 ++++++++ debian/rules | 14 ++-- katie | 61 ++++++++------ katie.conf | 47 +++++++---- katie.conf-non-US | 23 +++++- rhona | 21 +++-- shania | 198 +++++++++++++++++++++++++++++++++++++++++++++ update-bugdoctxt | 27 +++++++ update-mirrorlists | 24 ++++++ utils.py | 5 +- vars | 16 ++++ 17 files changed, 574 insertions(+), 74 deletions(-) create mode 100644 Contents.top create mode 100644 apt.conf create mode 100755 cron.weekly create mode 100755 cron.weekly-non-US create mode 100755 shania create mode 100755 update-bugdoctxt create mode 100755 update-mirrorlists create mode 100644 vars diff --git a/Contents.top b/Contents.top new file mode 100644 index 00000000..1fd59187 --- /dev/null +++ b/Contents.top @@ -0,0 +1,32 @@ +This file maps each file available in the Debian GNU/Linux system to +the package from which it originates. It includes packages from the +DIST distribution for the ARCH architecture. + +You can use this list to determine which package contains a specific +file, or whether or not a specific file is available. The list is +updated weekly, each architecture on a different day. + +When a file is contained in more than one package, all packages are +listed. When a directory is contained in more than one package, only +the first is listed. + +The best way to search quickly for a file is with the Unix `grep' +utility, as in `grep CONTENTS': + + $ grep nose Contents + etc/nosendfile net/sendfile + usr/X11R6/bin/noseguy x11/xscreensaver + usr/X11R6/man/man1/noseguy.1x.gz x11/xscreensaver + usr/doc/examples/ucbmpeg/mpeg_encode/nosearch.param graphics/ucbmpeg + usr/lib/cfengine/bin/noseyparker admin/cfengine + +This list contains files in all packages, even though not all of the +packages are installed on an actual system at once. If you want to +find out which packages on an installed Debian system provide a +particular file, you can use `dpkg --search ': + + $ dpkg --search /usr/bin/dselect + dpkg: /usr/bin/dselect + + +FILE LOCATION diff --git a/THANKS b/THANKS index b190663b..e8ef9ef5 100644 --- a/THANKS +++ b/THANKS @@ -5,8 +5,10 @@ Anthony Towns Antti-Juhani Kaijanaho Brendan O'Dea Chris Leishman +Daniel Jacobwitz Drake Diedrich Guy Maor Jason Gunthorpe +Joey Hess Michael Beattie Robert Bihlmeyer diff --git a/TODO b/TODO index acfc5c26..9022c3c8 100644 --- a/TODO +++ b/TODO @@ -1,7 +1,6 @@ Show Stopper ------------ - o finish new cron.daily file for auric o finish rhona o claire needs to know about sections on non-non-US diff --git a/apt.conf b/apt.conf new file mode 100644 index 00000000..86c7b17c --- /dev/null +++ b/apt.conf @@ -0,0 +1,67 @@ +Dir +{ + ArchiveDir "/org/ftp.debian.org/ftp/"; + OverrideDir "/org/ftp.debian.org/scripts/override/"; + CacheDir "/org/ftp.debian.org/database/"; +}; + +Default +{ + Packages::Compress ". 
gzip"; + Sources::Compress "gzip"; + Contents::Compress "gzip"; + DeLinkLimit 0; + MaxContentsChange 6000; + FileMode 0664; +} + +TreeDefault +{ + Contents::Header "/org/ftp.debian.org/katie/Contents.top"; +}; + +tree "dists/stable" +{ + FileList "/org/ftp.debian.org/database/dists/stable_$(SECTION)_binary-$(ARCH).list"; + SourceFileList "/org/ftp.debian.org/database/dists/stable_$(SECTION)_source.list"; + Sections "main contrib non-free"; + Architectures "alpha arm i386 m68k powerpc sparc source"; + BinOverride "override.potato.$(SECTION)"; + SrcOverride "override.potato.$(SECTION).src"; +}; + +tree "dists/unstable" +{ + FileList "/org/ftp.debian.org/database/dists/unstable_$(SECTION)_binary-$(ARCH).list"; + SourceFileList "/org/ftp.debian.org/database/dists/unstable_$(SECTION)_source.list"; + Sections "main contrib non-free"; + Architectures "alpha arm hppa hurd-i386 i386 mips mipsel m68k powerpc sh sparc source"; + BinOverride "override.woody.$(SECTION)"; + SrcOverride "override.woody.$(SECTION).src"; +}; + +tree "dists/unstable/main" +{ + FileList "/org/ftp.debian.org/database/dists/unstable_main_$(SECTION)_binary-$(ARCH).list"; + Sections "debian-installer"; + Architectures "alpha arm hppa hurd-i386 i386 mips mipsel m68k powerpc sh sparc"; + BinOverride "override.woody.$(SECTION)"; + BinCacheDB "packages-debian-installer-$(ARCH).db"; + Packages::Extensions ".udeb"; + Contents ""; +}; + +bindirectory "dists/proposed-updates" +{ + + FileList "/org/ftp.debian.org/database/dists/proposed-updates_-_binary.list"; + SourceFileList "/org/ftp.debian.org/database/proposed-updates_-_source.list"; + Packages "dists/proposed-updates/Packages"; + Sources "dists/proposed-updates/Sources"; + Contents ""; + + BinOverride "override.potato.all3"; + BinCacheDB "packages-proposed-updates.db"; + + PathPrefix ""; +}; diff --git a/cron.daily b/cron.daily index 59bf12a8..be42fa0d 100755 --- a/cron.daily +++ b/cron.daily @@ -3,10 +3,10 @@ # Executed daily via cron, out of troup's crontab. set -e -export SCRIPTVARS=/org/ftp.debian.org/scripts/masterfiles/vars +export SCRIPTVARS=/org/ftp.debian.org/katie/vars . $SCRIPTVARS -##### +################################################################################ echo Archive maintenance started at $(date +%X) @@ -26,24 +26,26 @@ ending at about 15:30. This file is then removed. You should not mirror the archive during this period. EOF -##### +################################################################################ -TERM=vt100 update-bugdoctxt +echo "Creating pre-daily-cron-job backup of projectb database..." 
+pg_dump projectb > /org/ftp.debian.org/backup/dump_$(date +%Y.%m.%d-%H:%M:%S) -##### +################################################################################ -pg_dump > FIXME +update-bugdoctxt +update-mirrorlists -##### +################################################################################ -# temporary hack to work around the lack of an apt-utils package +# temporary hack to work around the lack of an apt-utils & python-apt package export PYTHONPATH=$PYTHONPATH:/org/ftp.debian.org/scripts/apt/build/bin/ export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/org/ftp.debian.org/scripts/apt/build/bin/ export PATH=$PATH:/org/ftp.debian.org/scripts/apt/build/bin/ cd $incoming rm -f REPORT -dak-install -pak *.changes | direport | tee REPORT | \ +katie -pak *.changes | direport | tee REPORT | \ mail -s "Install for $(date +%D)" ftpmaster@ftp-master.debian.org chgrp debadmin REPORT chmod 664 REPORT @@ -51,21 +53,34 @@ chmod 664 REPORT cd $masterdir symlinks -d -r $ftpdir +cd $masterdir +jenna +# FIXME +cd /org/ftp.debian.org/database/dists/ +for i in proposed-updates_-_binary-*; do cat $i >> proposed-updates_-_binary.list; done cd $masterdir apt-ftparchive generate apt.conf -dak-mkmaintainers +rhona +cd $indices +charisma > .new-maintainers +mv -f .new-maintainers Maintainers +gzip -9v .new-maintainers.gz +mv -f .new-maintainers.gz Maintainers.gz +cd $masterdir copyoverrides mklslar mkchecksums -# [JT] temporary hack to make the buildd daemons and proposed-updates get along -pushd /org/ftp.debian.org/ftp/dists/proposed-updates -/home/troup/katie/drow *.dsc -popd - rm -f $NOTICE echo Archive maintenance finished at $(date +%X) +################################################################################ + +echo "Creating post-daily-cron-job backup of projectb database..." +pg_dump projectb > /org/ftp.debian.org/backup/dump_$(date +%Y.%m.%d-%H:%M:%S) + +################################################################################ + ulimit -m 90000 -d 90000 -s 10000 -v 90000 run-parts --report /org/ftp.debian.org/scripts/distmnt diff --git a/cron.weekly b/cron.weekly new file mode 100755 index 00000000..bafca02e --- /dev/null +++ b/cron.weekly @@ -0,0 +1,32 @@ +#!/bin/sh +# +# Run once a week via cron, out of troup's crontab. + +set -e +export SCRIPTVARS=/org/ftp.debian.org/katie/vars +. $SCRIPTVARS + +################################################################################ + +echo "Creating pre-weekly-cron-job backup of projectb database..." +pg_dump projectb > /org/ftp.debian.org/backup/dump_$(date +%Y.%m.%d-%H:%M:%S) + +################################################################################ + +# temporary hack to work around the lack of an apt-utils & python-apt package +export PYTHONPATH=$PYTHONPATH:/org/ftp.debian.org/scripts/apt/build/bin/ +export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/org/ftp.debian.org/scripts/apt/build/bin/ +export PATH=$PATH:/org/ftp.debian.org/scripts/apt/build/bin/ + +cd $masterdir +shania -v +shania -v -m -i $incoming + +# TODO: weekly postins to d-c (?), backup of report (?), backup of changes.tgz (?) + +################################################################################ + +echo "Creating post-weekly-cron-job backup of projectb database..." 
+pg_dump projectb > /org/ftp.debian.org/backup/dump_$(date +%Y.%m.%d-%H:%M:%S) + +################################################################################ diff --git a/cron.weekly-non-US b/cron.weekly-non-US new file mode 100755 index 00000000..02e3bf25 --- /dev/null +++ b/cron.weekly-non-US @@ -0,0 +1,33 @@ +#!/bin/sh +# +# Run once a week via cron, out of troup's crontab. + +set -e +export SCRIPTVARS=/org/non-us.debian.org/katie/vars-non-US +. $SCRIPTVARS + +################################################################################ + +echo "Creating pre-weekly-cron-job backup of projectb database..." +pg_dump projectb > /org/non-us.debian.org/backup/dump_$(date +%Y.%m.%d-%H:%M:%S) + +################################################################################ + +# temporary hack to work around the lack of an apt-utils & python-apt package +export PYTHONPATH=$PYTHONPATH:/org/non-us.debian.org/scripts/apt/build/bin/ +export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/org/non-us.debian.org/scripts/apt/build/bin/ +export PATH=$PATH:/org/non-us.debian.org/scripts/apt/build/bin/ +export PATH=$PATH:/org/non-us.debian.org/scripts/apt/build/bin/ + +cd $masterdir +shania -v +shania -v -m -i $incoming + +# TODO: weekly postins to d-c (?), backup of report (?), backup of changes.tgz (?) + +################################################################################ + +echo "Creating post-weekly-cron-job backup of projectb database..." +pg_dump projectb > /org/non-us.debian.org/backup/dump_$(date +%Y.%m.%d-%H:%M:%S) + +################################################################################ diff --git a/debian/rules b/debian/rules index 74c4a05b..57a2eeb7 100755 --- a/debian/rules +++ b/debian/rules @@ -26,13 +26,13 @@ binary-indep: checkroot install -m 755 heidi debian/tmp/usr/bin/da_tags install -m 755 charisma debian/tmp/usr/bin/da_mkmaintainers install -m 755 neve debian/tmp/usr/bin/da_populate - install -m 755 leon debian/tmp/usr/bin/da_clean - install -d debian/tmp/usr/share/doc/gnupg/ - install -m 644 debian/changelog debian/tmp/usr/share/doc/gnupg/changelog.Debian - install -m 644 README NEWS THANKS TODO debian/tmp/usr/share/doc/gnupg/ - install -m 644 ChangeLog debian/tmp/usr/share/doc/gnupg/changelog - gzip -9v debian/tmp/usr/share/doc/gnupg/* - install -m 644 debian/copyright debian/tmp/usr/share/doc/gnupg/ + install -m 755 rhona debian/tmp/usr/bin/da_clean + install -d debian/tmp/usr/share/doc/katie/ + install -m 644 debian/changelog debian/tmp/usr/share/doc/katie/changelog.Debian + install -m 644 README NEWS THANKS TODO debian/tmp/usr/share/doc/katie/ + install -m 644 ChangeLog debian/tmp/usr/share/doc/katie/changelog + gzip -9v debian/tmp/usr/share/doc/katie/* + install -m 644 debian/copyright debian/tmp/usr/share/doc/katie/ dpkg-gencontrol -isp chown -R root.root debian/tmp chmod -R go=rX debian/tmp diff --git a/katie b/katie index 3656f428..80a06445 100755 --- a/katie +++ b/katie @@ -2,7 +2,7 @@ # Installs Debian packaes # Copyright (C) 2000 James Troup -# $Id: katie,v 1.9 2000-12-05 04:27:48 troup Exp $ +# $Id: katie,v 1.10 2000-12-13 03:18:50 troup Exp $ # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -130,7 +130,7 @@ def read_override_file (filename, suite, component): # See if a given package is in the override file. Caches and only loads override files on demand. 
-def in_override_p (package, component, suite): +def in_override_p (package, component, suite, binary_type): global overrides; # Avoid on unknown distributions @@ -149,9 +149,13 @@ def in_override_p (package, component, suite): override_filename = Cnf["Dir::OverrideDir"] + 'override.' + Cnf["Suite::%s::OverrideCodeName" % (suite)]; read_override_file (override_filename, suite, component); else: # all others. - for src in ("", ".src"): - override_filename = Cnf["Dir::OverrideDir"] + 'override.' + Cnf["Suite::%s::OverrideCodeName" % (suite)] + '.' + component + src; + if binary_type == "udeb": + override_filename = Cnf["Dir::OverrideDir"] + 'override.' + Cnf["Suite::%s::OverrideCodeName" % (suite)] + '.debian-installer.' + component; read_override_file (override_filename, suite, component); + else: + for src in ("", ".src"): + override_filename = Cnf["Dir::OverrideDir"] + 'override.' + Cnf["Suite::%s::OverrideCodeName" % (suite)] + '.' + component + src; + read_override_file (override_filename, suite, component); return overrides[suite][component].get(package, None); @@ -365,7 +369,7 @@ def check_files(): continue # See if the package is NEW - if not in_override_p(files[file]["package"], files[file]["component"], suite): + if not in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype","")): files[file]["new"] = 1 # Find any old binary packages @@ -427,7 +431,7 @@ def check_files(): def check_dsc (): global dsc, dsc_files, reject_message, reprocess, orig_tar_id; - + for file in files.keys(): if files[file]["type"] == "dsc": try: @@ -458,13 +462,14 @@ def check_dsc (): reject_message = reject_message + "Rejected: can not overwrite existing copy of '%s' already in the archive.\n" % (dsc_file) elif dsc_file[-12:] == ".orig.tar.gz": # Check in the pool - q = projectB.query("SELECT l.path, f.filename, l.type, f.id FROM files f, location l WHERE f.filename ~ '%s' AND l.id = f.location" % (utils.regex_safe(dsc_file))); + q = projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename ~ '%s' AND l.id = f.location" % (utils.regex_safe(dsc_file))); ql = q.getresult(); if len(ql) > 0: old_file = ql[0][0] + ql[0][1]; actual_md5 = apt_pkg.md5sum(utils.open_file(old_file,"r")); found = old_file; suite_type = ql[0][2]; + dsc_files[dsc_file]["location id"] = ql[0][4]; # need this for updating dsc_files in install() # See install()... 
if suite_type == "legacy" or suite_type == "legacy-mixed": orig_tar_id = ql[0][3]; @@ -641,13 +646,17 @@ def install (changes_filename, summary, short_summary): projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files[file]["files id"])); for dsc_file in dsc_files.keys(): filename = files[file]["pool name"] + dsc_file; - files_id = db_access.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[file]["location id"]); + # use location id from dsc_files first if it exists as + # the .orig.tar.gz may still be in a legacy location + location_id = dsc_files[dsc_file].get("location id", None); + if location_id == None: + location_id = files[file]["location id"]; + files_id = db_access.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], location_id); # FIXME: needs to check for -1/-2 and or handle exception if files_id == None: - files_id = db_access.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[file]["location id"]); + files_id = db_access.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], location_id); projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files_id)); - # Add the .deb files to the DB for file in files.keys(): if files[file]["type"] == "deb": @@ -682,20 +691,6 @@ def install (changes_filename, summary, short_summary): suite_id = db_access.get_suite_id(suite); projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id)); - # Install the files into the pool - for file in files.keys(): - if files[file].has_key("byhand"): - continue - destination = Cnf["Dir::PoolDir"] + files[file]["pool name"] + file - destdir = os.path.dirname(destination) - utils.move (file, destination) - install_bytes = install_bytes + float(files[file]["size"]) - - # Copy the .changes file across for suite which need it. - for suite in changes["distribution"].keys(): - if Cnf.has_key("Suite::%s::CopyChanges" % (suite)): - utils.copy (changes_filename, Cnf["Dir::RootDir"] + Cnf["Suite::%s::CopyChanges" % (suite)]); - # If the .orig.tar.gz is in a legacy directory we need to poolify # it, so that apt-get source (and anything else that goes by the # "Directory:" field in the Sources.gz file) works. @@ -723,6 +718,20 @@ def install (changes_filename, summary, short_summary): # Remove old data from the DB: files table projectB.query("DELETE FROM files WHERE id = %s" % (qid["files_id"])); + # Install the files into the pool + for file in files.keys(): + if files[file].has_key("byhand"): + continue + destination = Cnf["Dir::PoolDir"] + files[file]["pool name"] + file + destdir = os.path.dirname(destination) + utils.move (file, destination) + install_bytes = install_bytes + float(files[file]["size"]) + + # Copy the .changes file across for suite which need it. 
+ for suite in changes["distribution"].keys(): + if Cnf.has_key("Suite::%s::CopyChanges" % (suite)): + utils.copy (changes_filename, Cnf["Dir::RootDir"] + Cnf["Suite::%s::CopyChanges" % (suite)]); + projectB.query("COMMIT WORK"); utils.move (changes_filename, Cnf["Dir::IncomingDir"] + 'DONE/' + os.path.basename(changes_filename)) @@ -864,7 +873,7 @@ Subject: %s REJECTED %s === -%s""" % (Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], changes_filename, reject_message, reject_footer); +%s""" % (Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], os.path.basename(changes_filename), reject_message, reject_footer); else: # Have a manual rejection file to use reject_mail_message = ""; # avoid 's @@ -889,7 +898,7 @@ Subject: %s REJECTED %s %s === -%s""" % (user_email_address, Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], changes_filename, manual_reject_message, reject_message, reject_footer) +%s""" % (user_email_address, Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], os.path.basename(changes_filename), manual_reject_message, reject_message, reject_footer) # Write the rejection email out as the .reason file reason_filename = re_changes.sub("reason", os.path.basename(changes_filename)); diff --git a/katie.conf b/katie.conf index 4a9af230..0d436a41 100644 --- a/katie.conf +++ b/katie.conf @@ -20,8 +20,8 @@ Dinstall SendmailCommand "/usr/sbin/sendmail -oi -t"; MyEmailAddress "Debian Installer "; MyHost "debian.org"; // used for generating user@my_host addresses in e.g. manual_reject() - NewAckList "/home/troup/katie/log"; // !!FIXME!! - LockFile "/home/troup/katie/lock"; // !!FIXME!! + NewAckList "/org/ftp.debian.org/katie/log"; + LockFile "/org/ftp.debian.org/katie/lock"; }; @@ -57,10 +57,28 @@ Jenna }; +Shania +{ + + Options + { + Debug ""; + Help ""; + Version ""; + Days 14; + Incoming ""; + Mail ""; + No-Action ""; + Verbose ""; + }; + + MorgueSubDir "shania"; +}; + Neve { - ExportDir "/home/troup/katie/neve-files/"; + ExportDir "/org/ftp.debian.org/katie/neve-files/"; }; @@ -75,10 +93,9 @@ Rhona Version ""; }; - //Morgue "/org/ftp.debian.org/morgue/"; - Morgue "/org/scratch/troup/morgue/"; - // How long (in seconds) dead packages are left before being killed - StayOfExecution 172800; // 2 days + // How long (in seconds) dead packages are left before being killed + StayOfExecution 172800; // 2 days + MorgueSubDir "rhona"; }; Suite @@ -131,11 +148,12 @@ Suite Announce "debian-changes@lists.debian.org"; Version "2.2r0"; Origin "Debian"; - Description "Debian 2.2 Released 14th August 2000"; + Description "Debian 2.2r2 Released 5th December 2000"; CodeName "potato"; OverrideCodeName "potato"; Priority "1"; Untouchable "1"; + ChangeLogBase "dists/stable/non-US/"; }; Proposed-Updates @@ -153,9 +171,9 @@ Suite }; Announce "debian-changes@lists.debian.org"; CopyChanges "dists/proposed-updates/"; - Version "2.2r1"; + Version "2.2r3"; Origin "Debian"; - Description "Proposed Updates for Debian 2.2r1 - Not Released"; + Description "Proposed Updates for Debian 2.2r3 - Not Released"; CodeName "proposed-updates"; OverrideCodeName "potato"; Priority "2"; @@ -224,11 +242,12 @@ Suite Dir { RootDir "/org/ftp.debian.org/ftp/"; - PoolDir "/org/scratch/troup/pool/"; + PoolDir "/org/ftp.debian.org/ftp/pool/"; PoolRoot "pool/"; IncomingDir "/org/ftp.debian.org/incoming/"; OverrideDir "/org/ftp.debian.org/scripts/override/"; ListsDir "/org/ftp.debian.org/database/dists/"; + Morgue "/org/ftp.debian.org/morgue/"; }; DB @@ -236,7 +255,7 @@ DB Host "ftp-master.debian.org"; Port -1; 
ROUser "nobody"; -} +}; Architectures { @@ -322,10 +341,10 @@ Location }; // New pool locations on ftp-master.debian.org - /org/scratch/troup/pool/ + /org/ftp.debian.org/ftp/pool/ { Archive "ftp-master"; Type "pool" }; -} +}; diff --git a/katie.conf-non-US b/katie.conf-non-US index d2cf41e9..c5374d41 100644 --- a/katie.conf-non-US +++ b/katie.conf-non-US @@ -57,6 +57,24 @@ Jenna }; +Shania +{ + + Options + { + Debug ""; + Help ""; + Version ""; + Days 14; + Incoming ""; + Mail ""; + No-Action ""; + Verbose ""; + }; + + MorgueSubDir "shania"; +}; + Neve { @@ -75,9 +93,9 @@ Rhona Version ""; }; - Morgue "/org/non-us.debian.org/morgue/"; // How long (in seconds) dead packages are left before being killed - StayOfExecution 172800; // 2 days + StayOfExecution 0; // 0 days + MorgueSubDir "rhona"; }; Suite @@ -229,6 +247,7 @@ Dir IncomingDir "/org/non-us.debian.org/incoming/"; OverrideDir "/org/non-us.debian.org/scripts/override/"; ListsDir "/org/non-us.debian.org/database/dists/"; + Morgue "/org/non-us.debian.org/morgue/"; }; DB diff --git a/rhona b/rhona index 7b0d9310..877cdb01 100755 --- a/rhona +++ b/rhona @@ -2,7 +2,7 @@ # rhona, cleans up unassociated binary (and source) packages # Copyright (C) 2000 James Troup -# $Id: rhona,v 1.2 2000-11-24 04:04:23 troup Exp $ +# $Id: rhona,v 1.3 2000-12-13 03:18:50 troup Exp $ # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -144,20 +144,23 @@ def check_sources(): projectB.query("UPDATE files SET last_used = NULL WHERE id = %s" % (i[0])); projectB.query("COMMIT WORK"); +def clean_binaries(): + date = time.strftime("%Y-%m-%d %H:%M", time.localtime(time.time()-int(Cnf["Rhona::StayOfExecution"]))); + print projectB.query("DELETE FROM binaries WHERE file IN (SELECT id FROM files WHERE last_used < '%s')" % (date)); + def clean(): date = time.strftime("%Y-%m-%d %H:%M", time.localtime(time.time()-int(Cnf["Rhona::StayOfExecution"]))); - q = projectB.query("SELECT l.path, f.filename FROM location l, files f WHERE f.last_used < '%s' AND l.id = f.location" % (date)) - ql = q.getresult(); - for i in ql: + # Delete from source + dsc_files + q = projectB.query("SELECT l.path, f.filename FROM location l, files f WHERE f.last_used < '%s' AND l.id = f.location" % (date)); + for i in q.getresult(): filename = i[0] + i[1]; - dest = Cnf["Rhona::Morgue"]+os.path.basename(filename); if not os.path.exists(filename): sys.stderr.write("E: can not find %s.\n" % (filename)); continue; - print "Cleaning %s to %s..." % (filename, dest) + dest = Cnf["Dir::Morgue"] + '/' + Cnf["Rhona::MorgueSubDir"]; + print "Cleaning %s to %s..." % (filename, dest); #utils.move(filename, dest); - #projectB.query("DELETE FROM binaries WHERE id = %s" % (i[0])); - #FIXME: need to remove from "or source" + files + dsc_files.. etc. + # delete from files def main(): global Cnf, projectB; @@ -171,6 +174,8 @@ def main(): print "Checking for orphaned binary packages..." check_binaries(); + print "Cleaning binaries from the DB..." + clean_binaries(); print "Checking for orphaned source packages..." check_sources(); print "Cleaning orphaned packages..." 
diff --git a/shania b/shania new file mode 100755 index 00000000..b12fb0ab --- /dev/null +++ b/shania @@ -0,0 +1,198 @@ +#!/usr/bin/env python + +# Clean incoming of old unused files +# Copyright (C) 2000 James Troup +# $Id: shania,v 1.1 2000-12-13 03:18:50 troup Exp $ + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +# Caveat Emptor: If run with -m/--mail argument, expects to be in +# Incoming/REJECT or somewhere with .reason files. + +################################################################################ + +import os, re, stat, string, sys, time +import utils +import apt_pkg; + +################################################################################ + +re_ischanges = re.compile(r"\.changes$") +re_isdsc = re.compile(r"\.dsc$") + +################################################################################ + +def file_to_string (filename): + try: + file = utils.open_file(filename, 'r'); + except utils.cant_open_exc: + return ""; + + contents = ""; + for line in file.readlines(): + contents = contents + line; + return contents; + +################################################################################ + +def main (): + Cnf = None; + all_files = {}; + changes_files = []; + + apt_pkg.init(); + + Cnf = apt_pkg.newConfiguration(); + apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file()); + + Arguments = [('D',"debug","Shania::Options::Debug", "IntVal"), + ('h',"help","Shania::Options::Help"), + ('V',"version","Shania::Options::Version"), + ('d',"days","Shania::Options::Days", "IntVal"), + ('i',"incoming","Shania::Options::Incoming", "HasArg"), + ('m',"mail","Shania::Options::Mail"), + ('n',"no-action","Shania::Options::No-Action"), + ('v',"verbose","Shania::Options::Verbose")]; + + apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv); + + delete_date = int(time.time())-(int(Cnf["Shania::Options::Days"])*84600); + del_dir = Cnf["Dir::Morgue"] + '/' + Cnf["Shania::MorgueSubDir"]; + + if not os.path.exists(del_dir): + sys.stderr.write("W: Creating morgue directory '%s'.\n" % (del_dir)); + os.mkdir(del_dir); + elif not os.path.isdir(del_dir): + sys.stderr.write("E: %s must be a directory.\n" % (del_dir)); + + incoming = Cnf["Shania::Options::Incoming"]; + if incoming == "": + incoming = Cnf["Dir::IncomingDir"]; + + os.chdir(incoming); + + # Build up the list of all files in the directory + for i in os.listdir('.'): + if os.path.isfile(i): + all_files[i] = 1; + if re_ischanges.search(i) != None: + changes_files.append(i); + + # Proces all .changes and .dsc files. 
+ for changes_filename in changes_files: + try: + changes = utils.parse_changes(changes_filename) + except: + continue; + try: + files = utils.build_file_list(changes, ""); + except: + continue; + + dsc_files = {}; + for file in files.keys(): + if re_isdsc.search(file) != None: + try: + dsc = utils.parse_changes(file) + except: + continue; + try: + dsc_files = utils.build_file_list(dsc, 1) + except: + continue; + + # If passed -m/--mail, assume in REJECT/ and send appropriate mails + if Cnf["Shania::Options::Mail"]: + reason_filename = re_ischanges.sub('.reason', changes_filename); + if not os.access(reason_filename, os.R_OK): + sys.stderr.write("W: %s lacks a (readable) reason file ('%s').\n" % (changes_filename, reason_filename)); + continue; + + if os.stat(reason_filename)[stat.ST_MTIME] > delete_date: + # Ensure the files aren't later deleted. + for keys in (files.keys(), dsc_files.keys(), changes_filename, reason_filename): + for i in keys: + if all_files.has_key(i): + del all_files[i]; + + # Grab a copy of the .changes and .reason files for inclusion in the mail + try: + changes_contents = file_to_string(changes_filename); + except utils.cant_open_exc: + sys.stderr.write("W: %s lacks a (readable) changes file ('%s').\n" % (changes_filename, changes_filename)); + continue; + + reason_contents = file_to_string(reason_filename); + + # Fix the maintainer address to be RFC-822 compatible + (changes["maintainer822"], changes["maintainername"], changes["maintaineremail"]) = utils.fix_maintainer (changes["maintainer"]) + + if Cnf["Shania::Options::No-Action"]: + print "Would send a reminder email to %s." % (changes["maintainer822"]); + else: # FIXME: need msg to be configurable + mail_message = """Return-Path: %s +From: %s +To: %s +Bcc: troup@auric.debian.org +Subject: Reminder: %s was rejected + +This is an automated reminder. Your Debian upload was rejected. Its +files are in %s/REJECT on %s. + +If the upload has been superceded, please delete it. If not, please +correct the error. You do not have to reupload good files; simply +move them from incoming/REJECT to incoming. Do erase any bad files. +This reminder is sent on Monday mornings. After two reminders, the +upload is deleted. + +---------------------------------------------------------------------- +%s +---------------------------------------------------------------------- + +---------------------------------------------------------------------- +%s +---------------------------------------------------------------------- + +-- +Debian distribution maintenance software +""" % (Cnf["Dinstall::MyEmailAddress"], Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], changes_filename, Cnf["Dir::IncomingDir"], Cnf["Archive::%s::OriginServer" % (utils.where_am_i())], changes_contents, reason_contents) + utils.send_mail(mail_message, ""); + if Cnf["Shania::Options::Verbose"]: + print "Sent reminder email to %s." % (changes["maintainer822"]); + else: + # Ensure the files aren't deleted + for keys in (files.keys(), dsc_files.keys(), changes_filename): + for i in keys: + if all_files.has_key(i): + del all_files[i]; + + # Anthing left at this stage is not referenced by a .changes or + # .dsc and should be deleted if old enough. + for file in all_files.keys(): + if os.stat(file)[stat.ST_MTIME] < delete_date: + if Cnf["Shania::Options::No-Action"]: + print "Would delete '%s'." % (os.path.basename(file)); + else: + if Cnf["Shania::Options::Verbose"]: + print "Removing '%s' (to '%s')." 
% (os.path.basename(file), del_dir); + utils.move(file, del_dir); + else: + if Cnf["Shania::Options::Verbose"]: + print "Skipping, too new, '%s'." % (os.path.basename(file)); + +####################################################################################### + +if __name__ == '__main__': + main() diff --git a/update-bugdoctxt b/update-bugdoctxt new file mode 100755 index 00000000..d3bc086c --- /dev/null +++ b/update-bugdoctxt @@ -0,0 +1,27 @@ +#!/bin/sh -e + +. vars + +export TERM=linux + +destdir=$ftpdir/doc +urlbase=http://www.debian.org/Bugs/ + +cd $destdir + +convert () { + src=$1; dst=$2 + rm -f .new-$dst + echo Generating $dst from http://www.debian.org/Bugs/$src ... + lynx -nolist -dump $urlbase$src | sed -e 's/^ *$//' | perl -00 -ne 'exit if /Back to the Debian Project homepage/; print unless ($.==1 || /^\s*Other pages:$/m)' >.new-$dst + if cmp -s .new-$dst $dst ; then rm -f .new-$dst + else mv -f .new-$dst $dst + fi +} + +convert Reporting.html bug-reporting.txt +convert Access.html bug-log-access.txt +convert server-request.html bug-log-mailserver.txt +convert Developer.html bug-maint-info.txt +convert server-control.html bug-maint-mailcontrol.txt +convert server-refcard.html bug-mailserver-refcard.txt diff --git a/update-mirrorlists b/update-mirrorlists new file mode 100755 index 00000000..864fbb39 --- /dev/null +++ b/update-mirrorlists @@ -0,0 +1,24 @@ +#!/bin/sh +# +# Very Very hackish script... dont laugh. +# Michael Beattie + +. vars + +prog=$scriptdir/mirrorlist/mirror_list.pl +masterlist=$scriptdir/mirrorlist/Mirrors.masterlist + +test ! -f $HOME/.cvspass && \ + echo ":pserver:anonymous@cvs.debian.org:/cvs/webwml A" > $HOME/.cvspass +grep -q "cvs.debian.org:/cvs/webwml" ~/.cvspass || \ + echo ":pserver:anonymous@cvs.debian.org:/cvs/webwml A" >> $HOME/.cvspass + +cd $(dirname $masterlist) +cvs update + +if [ ! -f $ftpdir/README.mirrors.html -o $masterlist -nt $ftpdir/README.mirrors.html ] ; then + rm -f $ftpdir/README.mirrors.html $ftpdir/README.mirrors.txt + $prog -m $masterlist -t html > $ftpdir/README.mirrors.html + $prog -m $masterlist -t text > $ftpdir/README.mirrors.txt + echo Updated archive version of mirrors file +fi diff --git a/utils.py b/utils.py index 6326806f..54ab406b 100644 --- a/utils.py +++ b/utils.py @@ -1,6 +1,6 @@ # Utility functions # Copyright (C) 2000 James Troup -# $Id: utils.py,v 1.7 2000-12-05 04:27:48 troup Exp $ +# $Id: utils.py,v 1.8 2000-12-13 03:18:50 troup Exp $ # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -188,6 +188,9 @@ def send_mail (message, filename): def poolify (source, component): if component != "": component = component + '/'; + # FIXME: this is nasty + component = string.lower(component); + component = string.replace(component, 'non-us/', 'non-US/'); if source[:3] == "lib": return component + source[:4] + '/' + source + '/' else: diff --git a/vars b/vars new file mode 100644 index 00000000..cdbf61dd --- /dev/null +++ b/vars @@ -0,0 +1,16 @@ +# locations used by many scripts + +ftpdir=/org/ftp.debian.org/ftp +indices=$ftpdir/indices +archs="alpha arm hppa hurd-i386 i386 m68k powerpc sparc mips mipsel sh" + +scriptdir=/org/ftp.debian.org/scripts +masterdir=/org/ftp.debian.org/katie/ +overridedir=$scriptdir/override + +incoming=/org/ftp.debian.org/incoming + +copyoverrides="potato potato.contrib potato.non-free woody woody.contrib woody.non-free" + +PATH=$masterdir:$PATH +umask 022 -- GitLab
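
For reference, the retention rule shania applies to files left in incoming reduces to an mtime check against a day-based cutoff. Below is a self-contained sketch of that rule; the directory is a placeholder, the day count matches the Shania::Options::Days default of 14 set in katie.conf above, and the sketch uses 86400 seconds per day (the patch itself multiplies by 84600, which looks like a transposition of that constant).

    import os, stat, time

    DAYS = 14                      # matches Shania::Options::Days in katie.conf
    INCOMING = "/tmp/incoming"     # placeholder directory, not the real incoming path

    def is_expendable(path, days=DAYS, now=None):
        # A file is old enough to move to the morgue once its mtime is more
        # than "days" days in the past; newer files are left for the next run.
        if now is None:
            now = time.time()
        delete_date = now - days * 86400
        return os.stat(path)[stat.ST_MTIME] < delete_date

    if __name__ == "__main__":
        if os.path.isdir(INCOMING):
            for name in sorted(os.listdir(INCOMING)):
                path = os.path.join(INCOMING, name)
                if os.path.isfile(path):
                    verdict = "would move to morgue" if is_expendable(path) else "too new, keeping"
                    print("%s: %s" % (name, verdict))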