[calm - Cygwin server-side packaging maintenance script] branch master, updated. 20181020-6-gf119196
jturney@sourceware.org
jturney@sourceware.org
Fri Mar 15 17:25:00 GMT 2019
https://sourceware.org/git/gitweb.cgi?p=cygwin-apps/calm.git;h=f119196ce2b61a1cedec15dba83d49b022bfe639
commit f119196ce2b61a1cedec15dba83d49b022bfe639
Author: Jon Turney <jon.turney@dronecode.org.uk>
Date: Thu Mar 14 20:16:25 2019 +0000
Fix some over-indentation reported by latest pycodestyle
https://sourceware.org/git/gitweb.cgi?p=cygwin-apps/calm.git;h=9961193ebad96cb0a454e2095aef4ad87250e241
commit 9961193ebad96cb0a454e2095aef4ad87250e241
Author: Jon Turney <jon.turney@dronecode.org.uk>
Date: Thu Mar 14 19:52:05 2019 +0000
Be a bit more verbose to IRC about starting and stopping requested work
https://sourceware.org/git/gitweb.cgi?p=cygwin-apps/calm.git;h=8febef34efba3b6b1312d490e3bb031b1c15b567
commit 8febef34efba3b6b1312d490e3bb031b1c15b567
Author: Jon Turney <jon.turney@dronecode.org.uk>
Date: Thu Oct 25 15:32:46 2018 +0100
Fix some issues reported by lgtm.com analysis
https://sourceware.org/git/gitweb.cgi?p=cygwin-apps/calm.git;h=f4a2d0c6435a90ab9d647da69518bbb43ec09287
commit f4a2d0c6435a90ab9d647da69518bbb43ec09287
Author: Jon Turney <jon.turney@dronecode.org.uk>
Date: Thu Oct 25 15:26:52 2018 +0100
Don't warn about non-maintainer upload attempts on every run
Don't warn about non-maintainer package upload attempts on every run, only
when something is ready to move
Diff:
---
calm/calm.py | 291 +++++++++++++++++++++++++-----------------------
calm/compare-arches | 1 -
calm/hint-migrate.py | 1 -
calm/mkgitoliteconf.py | 4 +-
calm/spelling.py | 8 +-
calm/uploads.py | 41 ++++---
6 files changed, 182 insertions(+), 164 deletions(-)
diff --git a/calm/calm.py b/calm/calm.py
index fa89a0a..b5eeaca 100755
--- a/calm/calm.py
+++ b/calm/calm.py
@@ -90,37 +90,37 @@ class CalmState(object):
#
def process_relarea(args):
- packages = {}
- error = False
+ packages = {}
+ error = False
- # for each arch
- for arch in common_constants.ARCHES:
- logging.debug("reading existing packages for arch %s" % (arch))
+ # for each arch
+ for arch in common_constants.ARCHES:
+ logging.debug("reading existing packages for arch %s" % (arch))
- # build package list
- packages[arch] = package.read_packages(args.rel_area, arch)
+ # build package list
+ packages[arch] = package.read_packages(args.rel_area, arch)
- # validate the package set
- if not package.validate_packages(args, packages[arch]):
- logging.error("existing %s package set has errors" % (arch))
- error = True
+ # validate the package set
+ if not package.validate_packages(args, packages[arch]):
+ logging.error("existing %s package set has errors" % (arch))
+ error = True
- if error:
- return None
+ if error:
+ return None
- # packages can be stale due to changes made directly in the release
- # area, so first check here if there are any stale packages to vault
- if args.stale:
- stale_to_vault = remove_stale_packages(args, packages)
- if stale_to_vault:
- for arch in common_constants.ARCHES + ['noarch', 'src']:
- logging.info("vaulting %d old package(s) for arch %s" % (len(stale_to_vault[arch]), arch))
- uploads.move_to_vault(args, stale_to_vault[arch])
- else:
- logging.error("error while evaluating stale packages")
- return None
+ # packages can be stale due to changes made directly in the release
+ # area, so first check here if there are any stale packages to vault
+ if args.stale:
+ stale_to_vault = remove_stale_packages(args, packages)
+ if stale_to_vault:
+ for arch in common_constants.ARCHES + ['noarch', 'src']:
+ logging.info("vaulting %d old package(s) for arch %s" % (len(stale_to_vault[arch]), arch))
+ uploads.move_to_vault(args, stale_to_vault[arch])
+ else:
+ logging.error("error while evaluating stale packages")
+ return None
- return packages
+ return packages
#
@@ -128,138 +128,138 @@ def process_relarea(args):
#
def process_uploads(args, state):
- # read maintainer list
- mlist = maintainers.Maintainer.read(args)
-
- # make the list of all packages
- all_packages = maintainers.Maintainer.all_packages(mlist)
+ # read maintainer list
+ mlist = maintainers.Maintainer.read(args)
- # for each maintainer
- for name in sorted(mlist.keys()):
- m = mlist[name]
+ # make the list of all packages
+ all_packages = maintainers.Maintainer.all_packages(mlist)
- # also send a mail to each maintainer about their packages
- with mail_logs(args.email, toaddrs=m.email, subject='%s for %s' % (state.subject, name), thresholdLevel=logging.INFO) as maint_email:
+ # for each maintainer
+ for name in sorted(mlist.keys()):
+ m = mlist[name]
- # for each arch and noarch
- scan_result = {}
- skip_maintainer = False
- for arch in common_constants.ARCHES + ['noarch', 'src']:
- logging.debug("reading uploaded arch %s packages from maintainer %s" % (arch, name))
-
- # read uploads
- scan_result[arch] = uploads.scan(m, all_packages, arch, args)
+ # also send a mail to each maintainer about their packages
+ with mail_logs(args.email, toaddrs=m.email, subject='%s for %s' % (state.subject, name), thresholdLevel=logging.INFO) as maint_email:
- # remove triggers
- uploads.remove(args, scan_result[arch].remove_always)
+ # for each arch and noarch
+ scan_result = {}
+ skip_maintainer = False
+ for arch in common_constants.ARCHES + ['noarch', 'src']:
+ logging.debug("reading uploaded arch %s packages from maintainer %s" % (arch, name))
- if scan_result[arch].error:
- logging.error("error while reading uploaded arch %s packages from maintainer %s" % (arch, name))
- skip_maintainer = True
- continue
+ # read uploads
+ scan_result[arch] = uploads.scan(m, all_packages, arch, args)
- # queue for source package validator
- queue.add(args, scan_result[arch].to_relarea, os.path.join(m.homedir()))
+ # remove triggers
+ uploads.remove(args, scan_result[arch].remove_always)
- # if there are no uploaded or removed packages for this
- # maintainer, we don't have anything to do
- if not any([scan_result[a].packages or scan_result[a].to_vault for a in scan_result]):
- logging.debug("nothing to do for maintainer %s" % (name))
+ if scan_result[arch].error:
+ logging.error("error while reading uploaded arch %s packages from maintainer %s" % (arch, name))
skip_maintainer = True
-
- if skip_maintainer:
continue
- # for each arch
- merged_packages = {}
- valid = True
- for arch in common_constants.ARCHES:
- logging.debug("merging %s package set with uploads from maintainer %s" % (arch, name))
-
- # merge package sets
- merged_packages[arch] = package.merge(state.packages[arch], scan_result[arch].packages, scan_result['noarch'].packages, scan_result['src'].packages)
- if not merged_packages[arch]:
- logging.error("error while merging uploaded %s packages for %s" % (arch, name))
- valid = False
- break
-
- # remove files which are to be removed
- for p in scan_result[arch].to_vault:
- for f in scan_result[arch].to_vault[p]:
- package.delete(merged_packages[arch], p, f)
-
- # validate the package set
- logging.debug("validating merged %s package set for maintainer %s" % (arch, name))
- if not package.validate_packages(args, merged_packages[arch]):
- logging.error("error while validating merged %s packages for %s" % (arch, name))
- valid = False
+ # queue for source package validator
+ queue.add(args, scan_result[arch].to_relarea, os.path.join(m.homedir()))
+
+ # if there are no uploaded or removed packages for this
+ # maintainer, we don't have anything to do
+ if not any([scan_result[a].packages or scan_result[a].to_vault for a in scan_result]):
+ logging.debug("nothing to do for maintainer %s" % (name))
+ skip_maintainer = True
+
+ if skip_maintainer:
+ continue
+
+ # for each arch
+ merged_packages = {}
+ valid = True
+ for arch in common_constants.ARCHES:
+ logging.debug("merging %s package set with uploads from maintainer %s" % (arch, name))
+
+ # merge package sets
+ merged_packages[arch] = package.merge(state.packages[arch], scan_result[arch].packages, scan_result['noarch'].packages, scan_result['src'].packages)
+ if not merged_packages[arch]:
+ logging.error("error while merging uploaded %s packages for %s" % (arch, name))
+ valid = False
+ break
+
+ # remove files which are to be removed
+ for p in scan_result[arch].to_vault:
+ for f in scan_result[arch].to_vault[p]:
+ package.delete(merged_packages[arch], p, f)
+
+ # validate the package set
+ logging.debug("validating merged %s package set for maintainer %s" % (arch, name))
+ if not package.validate_packages(args, merged_packages[arch]):
+ logging.error("error while validating merged %s packages for %s" % (arch, name))
+ valid = False
+
+ # if an error occurred ...
+ if not valid:
+ # ... discard move list and merged_packages
+ continue
+
+ # check for packages which are stale as a result of this upload,
+ # which we will want in the same report
+ if args.stale:
+ stale_to_vault = remove_stale_packages(args, merged_packages)
# if an error occurred ...
- if not valid:
+ if not stale_to_vault:
# ... discard move list and merged_packages
+ logging.error("error while evaluating stale packages for %s" % (name))
continue
- # check for packages which are stale as a result of this upload,
- # which we will want in the same report
+ # check for conflicting movelists
+ conflicts = False
+ for arch in common_constants.ARCHES + ['noarch', 'src']:
+ conflicts = conflicts or report_movelist_conflicts(scan_result[arch].to_relarea, scan_result[arch].to_vault, "manually")
if args.stale:
- stale_to_vault = remove_stale_packages(args, merged_packages)
-
- # if an error occurred ...
- if not stale_to_vault:
- # ... discard move list and merged_packages
- logging.error("error while evaluating stale packages for %s" % (name))
- continue
-
- # check for conflicting movelists
- conflicts = False
+ conflicts = conflicts or report_movelist_conflicts(scan_result[arch].to_relarea, stale_to_vault[arch], "automatically")
+
+ # if an error occurred ...
+ if conflicts:
+ # ... discard move list and merged_packages
+ logging.error("error while validating movelists for %s" % (name))
+ continue
+
+ # for each arch and noarch
+ for arch in common_constants.ARCHES + ['noarch', 'src']:
+ logging.debug("moving %s packages for maintainer %s" % (arch, name))
+
+ # process the move lists
+ if scan_result[arch].to_vault:
+ logging.info("vaulting %d package(s) for arch %s, by request" % (len(scan_result[arch].to_vault), arch))
+ uploads.move_to_vault(args, scan_result[arch].to_vault)
+ uploads.remove(args, scan_result[arch].remove_success)
+ if scan_result[arch].to_relarea:
+ logging.info("adding %d package(s) for arch %s" % (len(scan_result[arch].to_relarea), arch))
+ uploads.move_to_relarea(m, args, scan_result[arch].to_relarea)
+
+ # for each arch
+ if args.stale:
for arch in common_constants.ARCHES + ['noarch', 'src']:
- conflicts = conflicts or report_movelist_conflicts(scan_result[arch].to_relarea, scan_result[arch].to_vault, "manually")
- if args.stale:
- conflicts = conflicts or report_movelist_conflicts(scan_result[arch].to_relarea, stale_to_vault[arch], "automatically")
+ if stale_to_vault[arch]:
+ logging.info("vaulting %d old package(s) for arch %s" % (len(stale_to_vault[arch]), arch))
+ uploads.move_to_vault(args, stale_to_vault[arch])
+
+ # for each arch
+ for arch in common_constants.ARCHES:
+ # use merged package list
+ state.packages[arch] = merged_packages[arch]
+
+ # report what we've done
+ added = []
+ for arch in common_constants.ARCHES + ['noarch', 'src']:
+ added.append('%d (%s)' % (len(scan_result[arch].packages), arch))
+ msg = "added %s packages from maintainer %s" % (' + '.join(added), name)
+ logging.debug(msg)
+ irk.irk("calm %s" % msg)
+
+ # record updated reminder times for maintainers
+ maintainers.Maintainer.update_reminder_times(mlist)
- # if an error occurred ...
- if conflicts:
- # ... discard move list and merged_packages
- logging.error("error while validating movelists for %s" % (name))
- continue
-
- # for each arch and noarch
- for arch in common_constants.ARCHES + ['noarch', 'src']:
- logging.debug("moving %s packages for maintainer %s" % (arch, name))
-
- # process the move lists
- if scan_result[arch].to_vault:
- logging.info("vaulting %d package(s) for arch %s, by request" % (len(scan_result[arch].to_vault), arch))
- uploads.move_to_vault(args, scan_result[arch].to_vault)
- uploads.remove(args, scan_result[arch].remove_success)
- if scan_result[arch].to_relarea:
- logging.info("adding %d package(s) for arch %s" % (len(scan_result[arch].to_relarea), arch))
- uploads.move_to_relarea(m, args, scan_result[arch].to_relarea)
-
- # for each arch
- if args.stale:
- for arch in common_constants.ARCHES + ['noarch', 'src']:
- if stale_to_vault[arch]:
- logging.info("vaulting %d old package(s) for arch %s" % (len(stale_to_vault[arch]), arch))
- uploads.move_to_vault(args, stale_to_vault[arch])
-
- # for each arch
- for arch in common_constants.ARCHES:
- # use merged package list
- state.packages[arch] = merged_packages[arch]
-
- # report what we've done
- added = []
- for arch in common_constants.ARCHES + ['noarch', 'src']:
- added.append('%d (%s)' % (len(scan_result[arch].packages), arch))
- msg = "added %s packages from maintainer %s" % (' + '.join(added), name)
- logging.debug(msg)
- irk.irk(msg)
-
- # record updated reminder times for maintainers
- maintainers.Maintainer.update_reminder_times(mlist)
-
- return state.packages
+ return state.packages
#
@@ -428,6 +428,7 @@ def do_output(args, state):
# replace setup.ini
logging.info("moving %s to %s" % (tmpfile.name, inifile))
shutil.move(tmpfile.name, inifile)
+ irk.irk("calm updated setup.ini for arch '%s'" % (arch))
# compress and re-sign
for ext in ['.ini', '.bz2', '.xz']:
@@ -470,20 +471,27 @@ def do_daemon(args, state):
running = True
read_relarea = True
read_uploads = True
+ last_signal = None
# signals! the first, and best, interprocess communications mechanism! :)
def sigusr1(signum, frame):
logging.debug("SIGUSR1")
+ nonlocal last_signal
+ last_signal = signum
nonlocal read_uploads
read_uploads = True
def sigusr2(signum, frame):
logging.debug("SIGUSR2")
+ nonlocal last_signal
+ last_signal = signum
nonlocal read_relarea
read_relarea = True
def sigalrm(signum, frame):
logging.debug("SIGALRM")
+ nonlocal last_signal
+ last_signal = signum
nonlocal read_relarea
read_relarea = True
nonlocal read_uploads
@@ -513,6 +521,8 @@ def do_daemon(args, state):
with mail_logs(args.email, toaddrs=args.email, subject='%s' % (state.subject), thresholdLevel=logging.ERROR) as leads_email:
# re-read relarea on SIGALRM or SIGUSR2
if read_relarea:
+ if last_signal != signal.SIGALRM:
+ irk.irk("calm processing release area")
read_relarea = False
state.packages = process_relarea(args)
@@ -520,6 +530,7 @@ def do_daemon(args, state):
logging.error("not processing uploads or writing setup.ini")
else:
if read_uploads:
+ irk.irk("calm processing uploads")
# read uploads on SIGUSR1
read_uploads = False
state.packages = process_uploads(args, state)
@@ -544,6 +555,8 @@ def do_daemon(args, state):
signal.alarm(int(delay))
# wait until interrupted by a signal
+ if last_signal != signal.SIGALRM:
+ irk.irk("calm processing done")
logging.info("sleeping for %d seconds" % (delay))
signal.pause()
logging.info("woken")
diff --git a/calm/compare-arches b/calm/compare-arches
index f5ffe76..feb31be 100755
--- a/calm/compare-arches
+++ b/calm/compare-arches
@@ -33,7 +33,6 @@ import sys
import common_constants
import maintainers
import package
-import uploads
#
diff --git a/calm/hint-migrate.py b/calm/hint-migrate.py
index 858e68b..d4c166e 100644
--- a/calm/hint-migrate.py
+++ b/calm/hint-migrate.py
@@ -43,7 +43,6 @@ def hint_migrate(args):
basedir = os.path.join(args.rel_area, arch, 'release')
for (dirpath, subdirs, files) in os.walk(basedir):
- relpath = os.path.relpath(dirpath, basedir)
if 'setup.hint' not in files:
continue
diff --git a/calm/mkgitoliteconf.py b/calm/mkgitoliteconf.py
index 42508b1..f5560af 100755
--- a/calm/mkgitoliteconf.py
+++ b/calm/mkgitoliteconf.py
@@ -54,7 +54,7 @@ def do_main(args):
mlist = maintainers.Maintainer.add_packages(mlist, args.pkglist, getattr(args, 'orphanmaint', None))
# make the list of all packages
- all_packages = maintainers.Maintainer.all_packages(mlist)
+ maintainers.Maintainer.all_packages(mlist)
# invert to a per-package list of maintainers
pkgs = defaultdict(list)
@@ -95,6 +95,8 @@ def main():
do_main(args)
+ return 0
+
#
#
diff --git a/calm/spelling.py b/calm/spelling.py
index 793edd1..d3d53f4 100644
--- a/calm/spelling.py
+++ b/calm/spelling.py
@@ -105,10 +105,10 @@ def spellcheck_hints(args, packages):
for w1 in [w, re.sub(r'^lib', '', w)]:
# add the package name unless it exists in the list above, which
# will give a canonical capitalization
- if w.lower() not in wordlist:
- spelldict.add(w.lower())
- spelldict.add(w)
- spelldict.add(w.capitalize())
+ if w1.lower() not in wordlist:
+ spelldict.add(w1.lower())
+ spelldict.add(w1)
+ spelldict.add(w1.capitalize())
# for each package
for p in sorted(packages.keys()):
diff --git a/calm/uploads.py b/calm/uploads.py
index 0861388..1d8c1bf 100644
--- a/calm/uploads.py
+++ b/calm/uploads.py
@@ -107,6 +107,29 @@ def scan(m, all_packages, arch, args):
else:
mtimes.pop()
+ # only process files newer than !ready
+ for f in sorted(files):
+ fn = os.path.join(dirpath, f)
+ file_mtime = os.path.getmtime(fn)
+ if file_mtime > mtime:
+ if mtime == 0:
+ m.reminders_timestamp_checked = True
+
+ logging.debug("ignoring %s as there is no !ready" % fn)
+ ignored += 1
+
+ # don't warn until file is at least REMINDER_GRACE old
+ if (file_mtime < (time.time() - REMINDER_GRACE)):
+ if not args.dryrun:
+ m.reminders_issued = True
+ else:
+ logging.warning("ignoring %s as it is newer than !ready" % fn)
+ files.remove(f)
+
+ # any file remaining?
+ if not files:
+ continue
+
# package doesn't appear in package list at all
if not package.is_in_package_list(relpath, all_packages):
logging.error("package '%s' is not in the package list" % dirpath)
@@ -166,24 +189,6 @@ def scan(m, all_packages, arch, args):
files.remove(f)
continue
- # only process files newer than !ready
- file_mtime = os.path.getmtime(fn)
- if file_mtime > mtime:
- if mtime == 0:
- m.reminders_timestamp_checked = True
-
- logging.debug("ignoring %s as there is no !ready" % fn)
- ignored += 1
-
- # don't warn until file is at least REMINDER_GRACE old
- if (file_mtime < (time.time() - REMINDER_GRACE)):
- if not args.dryrun:
- m.reminders_issued = True
- else:
- logging.warning("ignoring %s as it is newer than !ready" % fn)
- files.remove(f)
- continue
-
# a remove file, which indicates some other file should be removed
if f.startswith('-'):
if ('*' in f) or ('?' in f):
More information about the Cygwin-apps-cvs
mailing list