1#!/usr/bin/env python
2# python: coding=utf-8
3#
4# Licensed to the Apache Software Foundation (ASF) under one
5# or more contributor license agreements.  See the NOTICE file
6# distributed with this work for additional information
7# regarding copyright ownership.  The ASF licenses this file
8# to you under the Apache License, Version 2.0 (the
9# "License"); you may not use this file except in compliance
10# with the License.  You may obtain a copy of the License at
11#
12#   http://www.apache.org/licenses/LICENSE-2.0
13#
14# Unless required by applicable law or agreed to in writing,
15# software distributed under the License is distributed on an
16# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
17# KIND, either express or implied.  See the License for the
18# specific language governing permissions and limitations
19# under the License.
20#
21
22
23# About this script:
24#   This script is intended to simplify creating Subversion releases for
25#   any of the supported release lines of Subversion.
26#   It works well with our Apache infrastructure, and should make rolling,
27#   posting, and announcing releases dirt simple.
28#
29#   This script may be run on a number of platforms, but it is intended to
30#   be run on people.apache.org.  As such, it may have dependencies (such
31#   as Python version) which may not be common, but are guaranteed to be
32#   available on people.apache.org.
33
34# It'd be kind of nice to use the Subversion python bindings in this script,
35# but people.apache.org doesn't currently have them installed
36
37# Stuff we need
38import os
39import re
40import sys
41import glob
42import fnmatch
43import shutil
44try:
45  from urllib.request import urlopen  # Python 3
46except:
47  from urllib2 import urlopen  # Python 2
48import hashlib
49import tarfile
50import logging
51import datetime
52import tempfile
53import operator
54import itertools
55import subprocess
56import argparse       # standard in Python 2.7
57import io
58import yaml
59
60import backport.status
61
62# Find ezt, using Subversion's copy, if there isn't one on the system.
63try:
64    import ezt
65except ImportError:
66    ezt_path = os.path.dirname(os.path.dirname(os.path.abspath(sys.path[0])))
67    ezt_path = os.path.join(ezt_path, 'build', 'generator')
68    sys.path.append(ezt_path)
69
70    import ezt
71    sys.path.remove(ezt_path)
72
73
def get_dist_metadata_file_path():
    """Absolute path of 'release-lines.yaml', which lives next to this script."""
    script_dir = os.path.abspath(sys.path[0])
    return os.path.join(script_dir, 'release-lines.yaml')
76
# Read the dist metadata (about release lines) from the YAML file that
# ships alongside this script.
with open(get_dist_metadata_file_path(), 'r') as stream:
    dist_metadata = yaml.safe_load(stream)

# Our required / recommended release tool versions by release branch
tool_versions = dist_metadata['tool_versions']

# The version that is our current recommended release
recommended_release = dist_metadata['recommended_release']
# For clean-dist, a whitelist of artifacts to keep, by version.
supported_release_lines = frozenset(dist_metadata['supported_release_lines'])
# Long-Term Support (LTS) versions
lts_release_lines = frozenset(dist_metadata['lts_release_lines'])

# Some constants.  The SVN_RELEASE_* environment variables allow
# pointing the script at alternative repositories (e.g. for testing).
svn_repos = os.getenv('SVN_RELEASE_SVN_REPOS',
                      'https://svn.apache.org/repos/asf/subversion')
dist_repos = os.getenv('SVN_RELEASE_DIST_REPOS',
                       'https://dist.apache.org/repos/dist')
dist_dev_url = dist_repos + '/dev/subversion'
dist_release_url = dist_repos + '/release/subversion'
dist_archive_url = 'https://archive.apache.org/dist/subversion'
buildbot_repos = os.getenv('SVN_RELEASE_BUILDBOT_REPOS',
                           'https://svn.apache.org/repos/infra/infrastructure/buildbot/aegis/buildmaster')
# Combined PGP keys of the release signers.
KEYS = 'https://people.apache.org/keys/group/subversion.asc'
# Archive formats rolled for each release.
extns = ['zip', 'tar.gz', 'tar.bz2']
103
104
105#----------------------------------------------------------------------
106# Utility functions
107
class Version(object):
    """A parsed Subversion version number.

    Accepts 'X.Y.Z' optionally followed by '-rcN', '-alphaN' or '-betaN',
    plus the special string 'trunk-nightly'.  Instances are comparable,
    hashable, and render back to an SVN_VER_NUMBER-style string.
    """

    # Dots are escaped so only literal 'X.Y.Z' forms parse; previously
    # '.' matched any character, accepting strings like '1x2x3'.
    regex = re.compile(r'(\d+)\.(\d+)\.(\d+)(?:-(?:(rc|alpha|beta)(\d+)))?')

    def __init__(self, ver_str):
        # Special case the 'trunk-nightly' version
        if ver_str == 'trunk-nightly':
            self.major = None
            self.minor = None
            self.patch = None
            self.pre = 'nightly'
            self.pre_num = None
            self.base = 'nightly'
            self.branch = 'trunk'
            return

        match = self.regex.search(ver_str)

        if not match:
            raise RuntimeError("Bad version string '%s'" % ver_str)

        self.major = int(match.group(1))
        self.minor = int(match.group(2))
        self.patch = int(match.group(3))

        if match.group(4):
            # A pre-release: 'rc', 'alpha' or 'beta', plus its number.
            self.pre = match.group(4)
            self.pre_num = int(match.group(5))
        else:
            self.pre = None
            self.pre_num = None

        self.base = '%d.%d.%d' % (self.major, self.minor, self.patch)
        self.branch = '%d.%d' % (self.major, self.minor)

    def is_prerelease(self):
        """True for alpha/beta/rc/nightly versions."""
        return self.pre is not None

    def get_ver_tags(self, revnum):
        """Return (SVN_VER_TAG, SVN_VER_NUMTAG) literals, including their
        quotes, for substitution into svn_version.h."""
        ver_tag = ''
        ver_numtag = ''
        if self.pre == 'alpha':
            ver_tag = '" (Alpha %d)"' % self.pre_num
            ver_numtag = '"-alpha%d"' % self.pre_num
        elif self.pre == 'beta':
            ver_tag = '" (Beta %d)"' % self.pre_num
            ver_numtag = '"-beta%d"' % self.pre_num
        elif self.pre == 'rc':
            ver_tag = '" (Release Candidate %d)"' % self.pre_num
            ver_numtag = '"-rc%d"' % self.pre_num
        elif self.pre == 'nightly':
            ver_tag = '" (Nightly Build r%d)"' % revnum
            ver_numtag = '"-nightly-r%d"' % revnum
        else:
            ver_tag = '" (r%d)"' % revnum
            ver_numtag = '""'
        return (ver_tag, ver_numtag)

    def __serialize(self):
        # Canonical tuple used for equality and hashing.
        return (self.major, self.minor, self.patch, self.pre, self.pre_num)

    def __eq__(self, that):
        return self.__serialize() == that.__serialize()

    def __ne__(self, that):
        return self.__serialize() != that.__serialize()

    def __hash__(self):
        return hash(self.__serialize())

    def __lt__(self, that):
        # NOTE(review): comparing a 'trunk-nightly' version (major is None)
        # with a numbered one would raise TypeError on Python 3 — presumably
        # callers never mix the two; confirm before relying on it.
        if self.major < that.major: return True
        if self.major > that.major: return False

        if self.minor < that.minor: return True
        if self.minor > that.minor: return False

        if self.patch < that.patch: return True
        if self.patch > that.patch: return False

        # A release sorts after any of its own pre-releases.
        if not self.pre and not that.pre: return False
        if not self.pre and that.pre: return False
        if self.pre and not that.pre: return True

        # We are both pre-releases.
        if self.pre != that.pre:
            # 'alpha' < 'beta' < 'rc', conveniently in lexical order.
            return self.pre < that.pre
        else:
            return self.pre_num < that.pre_num

    def __str__(self):
        "Return an SVN_VER_NUMBER-formatted string, or 'nightly'."
        if self.pre:
            if self.pre == 'nightly':
                return 'nightly'
            else:
                extra = '-%s%d' % (self.pre, self.pre_num)
        else:
            extra = ''

        return self.base + extra

    def __repr__(self):
        return "Version(%s)" % repr(str(self))
213
def get_prefix(base_dir):
    """Install prefix for the rolled dependencies, under BASE_DIR."""
    prefix_dir = os.path.join(base_dir, 'prefix')
    return prefix_dir
216
def get_tempdir(base_dir):
    """Scratch directory for downloads and builds, under BASE_DIR."""
    temp_dir = os.path.join(base_dir, 'tempdir')
    return temp_dir
219
def get_workdir(base_dir):
    """Working-copy checkout location, inside the scratch directory."""
    return os.path.join(get_tempdir(base_dir), 'working')
222
223# The name of this directory is also used to name the tarball and for
224# the root of paths within the tarball, e.g. subversion-1.9.5 or
225# subversion-nightly-r1800000
def get_exportdir(base_dir, version, revnum):
    """Export directory; its basename also names the tarball and the
    root of paths within it (e.g. subversion-1.9.5 or
    subversion-nightly-r1800000)."""
    if version.pre == 'nightly':
        dirname = 'subversion-%s-r%d' % (version, revnum)
    else:
        dirname = 'subversion-' + str(version)
    return os.path.join(get_tempdir(base_dir), dirname)
231
def get_target(args):
    "Return the location of the artifacts"
    # An explicit --target wins; otherwise default to <base_dir>/deploy.
    return args.target if args.target else os.path.join(args.base_dir, 'deploy')
238
def get_branch_path(args):
    """Return the release branch path ('branches/X.Y.x'), derived from the
    version when --branch-path was not given; canonicalized (no trailing
    slash) for later comparisons."""
    if not args.branch:
        try:
            args.branch = 'branches/%d.%d.x' % (args.version.major, args.version.minor)
        except AttributeError:
            raise RuntimeError("Please specify the release version label or --branch-path")

    canonical = args.branch.rstrip('/')
    return canonical
247
def get_tmpldir():
    """Directory of the ezt templates, next to this script."""
    script_dir = os.path.abspath(sys.path[0])
    return os.path.join(script_dir, 'templates')
250
def get_tmplfile(filename):
    """Open template FILENAME, preferring the local copy and falling back
    to the one in the repository."""
    local_path = os.path.join(get_tmpldir(), filename)
    try:
        return open(local_path)
    except IOError:
        # Hmm, we had a problem with the local version, let's try the repo
        return urlopen(svn_repos + '/trunk/tools/dist/templates/' + filename)
257
def get_nullfile():
    """Return a writable file object that discards everything (the null
    device).  Callers are responsible for closing it."""
    # os.devnull is the documented spelling; os.path.devnull is an
    # implementation detail of the path modules.
    return open(os.devnull, 'w')
260
def run_command(cmd, verbose=True, hide_stderr=False, dry_run=False):
    """Run CMD (a list of argv strings) via subprocess.check_call().

    When VERBOSE, echo the command and let output flow to our own
    stdout/stderr; otherwise discard stdout (and stderr too when
    HIDE_STDERR).  When DRY_RUN, print a note instead of executing.
    Raises subprocess.CalledProcessError on a non-zero exit.
    """
    if verbose:
        print("+ " + ' '.join(cmd))
    stderr = None
    stdout = None
    devnull = None
    if not verbose:
        # One null-device handle serves both streams; close it afterwards
        # instead of leaking a file descriptor per invocation.
        devnull = get_nullfile()
        stdout = devnull
        if hide_stderr:
            stderr = devnull

    try:
        if not dry_run:
            subprocess.check_call(cmd, stdout=stdout, stderr=stderr)
        else:
            print('  ## dry-run; not executed')
    finally:
        if devnull is not None:
            devnull.close()
276
def run_script(verbose, script, hide_stderr=False):
    """Run each line of the multi-line string SCRIPT as its own command."""
    for command_line in script.split('\n'):
        run_command(command_line.split(), verbose, hide_stderr)
280
def download_file(url, target, checksum):
    """Download the file at URL to the local path TARGET.
    If CHECKSUM is a string, verify the sha256 checksum of the downloaded
    file and raise RuntimeError if it does not match.  If CHECKSUM
    is None, do not verify the downloaded file.
    """
    assert checksum is None or isinstance(checksum, str)

    response = urlopen(url)
    data = response.read()
    with open(target, 'wb') as target_file:
        target_file.write(data)
    checksum2 = hashlib.sha256(data).hexdigest()
    if checksum is not None and checksum != checksum2:
        # Report the computed digest as "downloaded" and the caller's as
        # "expected" (the original message had these two swapped).
        raise RuntimeError("Checksum mismatch for '%s': "\
                           "downloaded: '%s'; expected: '%s'" % \
                           (target, checksum2, checksum))
301
def run_svn(cmd, verbose=True, dry_run=False, username=None):
    """Run the 'svn' client with the argument list CMD.

    USERNAME, when given, is spliced (in place) onto the front of CMD as
    '--username USERNAME'.
    """
    if username:
        cmd[:0] = ['--username', username]
    run_command(['svn'] + cmd, verbose=verbose, dry_run=dry_run)
306
def run_svnmucc(cmd, verbose=True, dry_run=False, username=None):
    """Run 'svnmucc' with the argument list CMD.

    USERNAME, when given, is spliced (in place) onto the front of CMD as
    '--username USERNAME'.
    """
    if username:
        cmd[:0] = ['--username', username]
    run_command(['svnmucc'] + cmd, verbose=verbose, dry_run=dry_run)
311
312#----------------------------------------------------------------------
def is_lts(version):
    """True iff VERSION belongs to a Long-Term Support release line."""
    return version.branch in lts_release_lines
315
def is_recommended(version):
    """True iff VERSION is on the currently recommended release line."""
    return version.branch == recommended_release
318
def get_download_anchor(version):
    """Name of the www download-page anchor appropriate for VERSION."""
    if version.is_prerelease():
        return 'pre-releases'
    if is_recommended(version):
        return 'recommended-release'
    return 'supported-releases'
327
328#----------------------------------------------------------------------
329# ezt helpers
330
331# In ezt, «[if-any foo]» is true when «data['foo'] == False»,
332# hence, provide this constant for readability.
333ezt_False = ""
334
335# And this constant for symmetry.
336ezt_True = True
337
338# And this for convenience.
def ezt_bool(boolean_value):
    """Map a Python truth value onto its ezt counterpart."""
    if boolean_value:
        return ezt_True
    return ezt_False
341
342#----------------------------------------------------------------------
343# Cleaning up the environment
344
def cleanup(args):
    'Remove generated files and folders.'
    logging.info('Cleaning')

    # ignore_errors=True: missing directories are fine.
    for directory in (get_prefix(args.base_dir),
                      get_tempdir(args.base_dir),
                      get_target(args)):
        shutil.rmtree(directory, True)
352
353
354#----------------------------------------------------------------------
355# Creating an environment to roll the release
356
class RollDep(object):
    'The super class for each of the build dependencies.'

    def __init__(self, base_dir, use_existing, verbose):
        # base_dir: root containing the 'prefix' and 'tempdir' trees
        # use_existing: reuse already-downloaded tarballs / system tools
        self._base_dir = base_dir
        self._use_existing = use_existing
        self._verbose = verbose

    def _test_version(self, cmd):
        """Run CMD, returning its combined output as a list of lines, or
        '' if it exits non-zero."""
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                universal_newlines=True)
        (stdout, stderr) = proc.communicate()
        rc = proc.wait()
        if rc: return ''

        return stdout.split('\n')

    def build(self):
        """Fetch (or reuse) this dependency's tarball, then configure,
        build and install it into the shared prefix."""
        if not hasattr(self, '_extra_configure_flags'):
            self._extra_configure_flags = ''
        cwd = os.getcwd()
        tempdir = get_tempdir(self._base_dir)
        tarball = os.path.join(tempdir, self._filebase + '.tar.gz')

        if os.path.exists(tarball):
            if not self._use_existing:
                # Name the actual dependency; the old message always said
                # 'autoconf' even for libtool and swig.
                raise RuntimeError('%s tarball "%s" already exists'
                                                    % (self.label, tarball))
            logging.info('Using existing %s.tar.gz' % self._filebase)
        else:
            logging.info('Fetching %s' % self._filebase)
            download_file(self._url, tarball, self._checksum)

        # Extract tarball; close the archive instead of leaking the handle.
        with tarfile.open(tarball) as archive:
            archive.extractall(tempdir)

        logging.info('Building ' + self.label)
        os.chdir(os.path.join(tempdir, self._filebase))
        try:
            run_script(self._verbose,
                       '''./configure --prefix=%s %s
                          make
                          make install''' % (get_prefix(self._base_dir),
                                             self._extra_configure_flags))
        finally:
            # Restore the original working directory even if the build fails.
            os.chdir(cwd)
402
403
class AutoconfDep(RollDep):
    """Rolling dependency for GNU autoconf."""

    def __init__(self, base_dir, use_existing, verbose, autoconf_ver, checksum):
        RollDep.__init__(self, base_dir, use_existing, verbose)
        self.label = 'autoconf'
        self._filebase = 'autoconf-' + autoconf_ver
        self._autoconf_ver = autoconf_ver
        self._url = 'https://ftp.gnu.org/gnu/autoconf/%s.tar.gz' % self._filebase
        self._checksum = checksum

    def have_usable(self):
        """True iff the system autoconf reports exactly the pinned version."""
        output = self._test_version(['autoconf', '-V'])
        if not output:
            return False
        # The version is the last token of the first output line.
        reported = output[0].split()[-1:][0]
        return reported == self._autoconf_ver

    def use_system(self):
        """Use the system autoconf only when reuse is allowed and the
        version matches."""
        if not self._use_existing:
            return False
        return self.have_usable()
423
424
class LibtoolDep(RollDep):
    """Rolling dependency for GNU libtool."""

    def __init__(self, base_dir, use_existing, verbose, libtool_ver, checksum):
        RollDep.__init__(self, base_dir, use_existing, verbose)
        self.label = 'libtool'
        self._filebase = 'libtool-' + libtool_ver
        self._libtool_ver = libtool_ver
        self._url = 'https://ftp.gnu.org/gnu/libtool/%s.tar.gz' % self._filebase
        self._checksum = checksum

    def have_usable(self):
        """True iff the system libtool reports the pinned version."""
        output = self._test_version(['libtool', '--version'])
        if not output:
            return False
        return self._libtool_ver in output[0]

    def use_system(self):
        # We unconditionally return False here, to avoid using a borked
        # system libtool (I'm looking at you, Debian).
        return False

    def build(self):
        """Build libtool, then add the 'g'-prefixed aliases that
        autogen.sh looks for before the plain names."""
        RollDep.build(self)
        bin_dir = os.path.join(get_prefix(self._base_dir), "bin")
        for tool in ("libtoolize", "libtool"):
            os.symlink(tool, os.path.join(bin_dir, "g" + tool))
451
452
class SwigDep(RollDep):
    """Rolling dependency for SWIG, fetched via a SourceForge mirror."""

    def __init__(self, base_dir, use_existing, verbose, swig_ver, checksum,
        sf_mirror):
        RollDep.__init__(self, base_dir, use_existing, verbose)
        self.label = 'swig'
        self._filebase = 'swig-' + swig_ver
        self._swig_ver = swig_ver
        self._url = 'https://sourceforge.net/projects/swig/files/swig/%(swig)s/%(swig)s.tar.gz/download?use_mirror=%(sf_mirror)s' % \
            { 'swig' : self._filebase,
              'sf_mirror' : sf_mirror }
        self._checksum = checksum
        # SWIG's optional PCRE support is not needed here.
        self._extra_configure_flags = '--without-pcre'

    def have_usable(self):
        """True iff the system swig reports exactly the pinned version."""
        output = self._test_version(['swig', '-version'])
        if not output:
            return False
        # 'swig -version' reports the version on its second line.
        reported = output[1].split()[-1:][0]
        return reported == self._swig_ver

    def use_system(self):
        """Use the system swig only when reuse is allowed and the version
        matches."""
        if not self._use_existing:
            return False
        return self.have_usable()
476
477
def build_env(args):
    'Download prerequisites for a release and prepare the environment.'
    logging.info('Creating release environment')

    try:
        os.mkdir(get_prefix(args.base_dir))
        os.mkdir(get_tempdir(args.base_dir))
    except OSError:
        # Pre-existing directories are fine when reusing an environment.
        if not args.use_existing:
            raise

    # Pinned (version, checksum) pairs for this release branch.
    versions = tool_versions[args.version.branch]
    autoconf = AutoconfDep(args.base_dir, args.use_existing, args.verbose,
                           versions['autoconf'][0], versions['autoconf'][1])
    libtool = LibtoolDep(args.base_dir, args.use_existing, args.verbose,
                         versions['libtool'][0], versions['libtool'][1])
    swig = SwigDep(args.base_dir, args.use_existing, args.verbose,
                   versions['swig'][0], versions['swig'][1],
                   args.sf_mirror)

    # iterate over our rolling deps, and build them if needed
    for dep in (autoconf, libtool, swig):
        if dep.use_system():
            logging.info('Using system %s' % dep.label)
        else:
            dep.build()
506
507
508#----------------------------------------------------------------------
509# Create a new minor release branch
510
def get_trunk_wc_path(base_dir, path=None):
    """Path to the trunk working copy, or to PATH within it."""
    wc_root = os.path.join(get_tempdir(base_dir), 'svn-trunk')
    if path is None:
        return wc_root
    return os.path.join(wc_root, path)
515
def get_buildbot_wc_path(base_dir, path=None):
    """Path to the buildbot-master working copy, or to PATH within it."""
    wc_root = os.path.join(get_tempdir(base_dir), 'svn-buildmaster')
    if path is None:
        return wc_root
    return os.path.join(wc_root, path)
520
def get_trunk_url(revnum=None):
    """Trunk URL, pegged at REVNUM when given (bare trailing '@' otherwise)."""
    peg = str(revnum) if revnum else ''
    return '%s/trunk@%s' % (svn_repos, peg)
523
def get_branch_url(ver):
    """Repository URL of VER's release branch."""
    return '%s/branches/%s.x' % (svn_repos, ver.branch)
526
def get_tag_url(ver):
    """Repository URL of VER's release tag."""
    return '%s/tags/%s' % (svn_repos, ver.base)
529
def edit_file(path, pattern, replacement):
    """Replace every match of regex PATTERN in the file at PATH with
    REPLACEMENT, in place.  Assert that the edit changed something."""
    print("Editing '%s'" % (path,))
    print("  pattern='%s'" % (pattern,))
    print("  replace='%s'" % (replacement,))
    # Context managers close the handles deterministically instead of
    # relying on garbage collection.
    with open(path, 'r') as f:
        old_text = f.read()
    new_text = re.sub(pattern, replacement, old_text)
    # A no-op edit means the pattern did not match; fail loudly.
    assert new_text != old_text
    with open(path, 'w') as f:
        f.write(new_text)
538
def edit_changes_file(path, newtext):
    """Insert NEWTEXT in the 'CHANGES' file found at PATH,
       just before the first line that starts with 'Version '.
       If no such line exists, the file is left untouched.
    """
    print("Prepending to '%s'" % (path,))
    print("  text='%s'" % (newtext,))
    # Close the read handle before rewriting instead of leaking it.
    with open(path, 'r') as oldfile:
        lines = oldfile.readlines()
    for i, line in enumerate(lines):
        if line.startswith('Version '):
            with open(path, 'w') as newfile:
                newfile.writelines(lines[:i])
                newfile.write(newtext)
                newfile.writelines(lines[i:])
            break
553
554#----------------------------------------------------------------------
def make_release_branch(args):
    """Copy trunk@REVNUM to the new release branch in the repository."""
    ver = args.version
    log_message = 'Create the ' + ver.branch + '.x release branch.'
    run_svn(['copy',
             get_trunk_url(args.revnum),
             get_branch_url(ver),
             '-m', log_message],
            dry_run=args.dry_run)
562
563#----------------------------------------------------------------------
def update_minor_ver_in_trunk(args):
    """Change the minor version in trunk to the next (future) minor version.

    Checks out trunk, bumps SVN_VER_MINOR in svn_version.h,
    svntest/main.py and NativeResources.java, prepends a new section to
    CHANGES, and commits the lot.
    """
    ver = args.version
    trunk_wc = get_trunk_wc_path(args.base_dir)
    run_svn(['checkout',
             get_trunk_url(args.revnum),
             trunk_wc])

    next_ver = Version('1.%d.0' % (ver.minor + 1,))
    relpaths = []

    relpath = 'subversion/include/svn_version.h'
    relpaths.append(relpath)
    edit_file(get_trunk_wc_path(args.base_dir, relpath),
              r'(#define SVN_VER_MINOR *)%s' % (ver.minor,),
              r'\g<1>%s' % (next_ver.minor,))

    relpath = 'subversion/tests/cmdline/svntest/main.py'
    relpaths.append(relpath)
    edit_file(get_trunk_wc_path(args.base_dir, relpath),
              r'(SVN_VER_MINOR = )%s' % (ver.minor,),
              r'\g<1>%s' % (next_ver.minor,))

    relpath = 'subversion/bindings/javahl/src/org/apache/subversion/javahl/NativeResources.java'
    relpaths.append(relpath)
    try:
        # since r1817921 (just after branching 1.10)
        edit_file(get_trunk_wc_path(args.base_dir, relpath),
                  r'SVN_VER_MINOR = %s;' % (ver.minor,),
                  r'SVN_VER_MINOR = %s;' % (next_ver.minor,))
    except Exception:
        # before r1817921: two separate places.  (edit_file asserts when
        # its pattern does not match; don't swallow KeyboardInterrupt etc.)
        edit_file(get_trunk_wc_path(args.base_dir, relpath),
                  r'version.isAtLeast\(1, %s, 0\)' % (ver.minor,),
                  r'version.isAtLeast\(1, %s, 0\)' % (next_ver.minor,))
        edit_file(get_trunk_wc_path(args.base_dir, relpath),
                  r'1.%s.0, but' % (ver.minor,),
                  r'1.%s.0, but' % (next_ver.minor,))

    relpath = 'CHANGES'
    relpaths.append(relpath)
    # insert at beginning of CHANGES file
    edit_changes_file(get_trunk_wc_path(args.base_dir, relpath),
                 'Version ' + next_ver.base + '\n'
                 + '(?? ??? 20XX, from /branches/' + next_ver.branch + '.x)\n'
                 + get_tag_url(next_ver) + '\n'
                 + '\n')

    log_msg = '''\
Increment the trunk version number to %s, and introduce a new CHANGES
section, following the creation of the %s.x release branch.

* subversion/include/svn_version.h,
  subversion/bindings/javahl/src/org/apache/subversion/javahl/NativeResources.java,
  subversion/tests/cmdline/svntest/main.py
    (SVN_VER_MINOR): Increment to %s.

* CHANGES: New section for %s.0.
''' % (next_ver.branch, ver.branch, next_ver.minor, next_ver.branch)
    commit_paths = [get_trunk_wc_path(args.base_dir, p) for p in relpaths]
    run_svn(['commit'] + commit_paths + ['-m', log_msg],
            dry_run=args.dry_run)
628
629#----------------------------------------------------------------------
def create_status_file_on_branch(args):
    """Generate a STATUS file from the STATUS.ezt template, then add and
    commit it on the new release branch."""
    ver = args.version
    branch_wc = get_workdir(args.base_dir)
    branch_url = get_branch_url(ver)
    run_svn(['checkout', branch_url, branch_wc, '--depth=immediates'])

    status_local_path = os.path.join(branch_wc, 'STATUS')
    template_filename = 'STATUS.ezt'
    data = { 'major-minor'          : ver.branch,
             'major-minor-patch'    : ver.base,
           }

    template = ezt.Template(compress_whitespace=False)
    template.parse(get_tmplfile(template_filename).read())

    # Exclusive creation ('x'): fail rather than clobber an existing STATUS.
    # Python 3 rejects the old Python-2 'wx' spelling with ValueError.
    with open(status_local_path, 'x') as g:
        template.generate(g, data)
    run_svn(['add', status_local_path])
    run_svn(['commit', status_local_path,
             '-m', '* branches/' + ver.branch + '.x/STATUS: New file.'],
            dry_run=args.dry_run)
651
652#----------------------------------------------------------------------
def update_backport_bot(args):
    """Print a reminder to register the new branch with the backport merge
    bot; that registration needs special access and remains manual."""
    ver = args.version
    notice = """\

*** MANUAL STEP REQUIRED ***

  Ask someone with appropriate access to add the %s.x branch
  to the backport merge bot.  See
  http://subversion.apache.org/docs/community-guide/releasing.html#backport-merge-bot

***

""" % (ver.branch,)
    print(notice)
666
667#----------------------------------------------------------------------
def update_buildbot_config(args):
    """Add the new branch to the list of branches monitored by the buildbot
       master.
    """
    ver = args.version
    buildbot_wc = get_buildbot_wc_path(args.base_dir)
    run_svn(['checkout', buildbot_repos, buildbot_wc])

    # The previous minor release anchors where the new number is inserted.
    prev_ver = Version('1.%d.0' % (ver.minor - 1,))

    relpath = 'master1/projects/subversion.conf'
    # Append ', <minor>' after the previous minor inside MINOR_LINES=[...].
    edit_file(get_buildbot_wc_path(args.base_dir, relpath),
              r'(MINOR_LINES=\[.*%s)(\])' % (prev_ver.minor,),
              r'\1, %s\2' % (ver.minor,))

    log_msg = '''\
Subversion: start monitoring the %s branch.
''' % (ver.branch)
    commit_paths = [get_buildbot_wc_path(args.base_dir, relpath)]
    run_svn(['commit'] + commit_paths + ['-m', log_msg],
            dry_run=args.dry_run)
690
691#----------------------------------------------------------------------
def create_release_branch(args):
    """Run every step involved in opening a new release branch, in order."""
    steps = (make_release_branch,
             update_minor_ver_in_trunk,
             create_status_file_on_branch,
             update_backport_bot,
             update_buildbot_config)
    for step in steps:
        step(args)
698
699
700#----------------------------------------------------------------------
def write_release_notes(args):
    """Create a skeleton release notes file from the template and, when
    writing to a file, add an 'in progress' entry to the release notes
    index next to it."""

    template_filename = \
        'release-notes-lts.ezt' if is_lts(args.version) else 'release-notes.ezt'

    prev_ver = Version('%d.%d.0' % (args.version.major, args.version.minor - 1))
    data = { 'major-minor'          : args.version.branch,
             'previous-major-minor' : prev_ver.branch,
           }

    template = ezt.Template(compress_whitespace=False)
    template.parse(get_tmplfile(template_filename).read())

    if not args.edit_html_file:
        # No output file: dump the skeleton to stdout and stop, since there
        # is no index location to update.  (Previously this fell through
        # and crashed on 'None + str' below.)
        template.generate(sys.stdout, data)
        return

    with open(args.edit_html_file, 'w') as g:
        template.generate(g, data)

    # Add an "in progress" entry in the release notes index
    #
    index_file = os.path.normpath(args.edit_html_file + '/../index.html')
    marker = '<ul id="release-notes-list">\n'
    new_item = '<li><a href="%s.html">Subversion %s</a> – <i>in progress</i></li>\n' % (args.version.branch, args.version.branch)
    edit_file(index_file,
              re.escape(marker),
              (marker + new_item).replace('\\', r'\\'))
730
731#----------------------------------------------------------------------
732# Create release artifacts
733
def compare_changes(repos, branch, revision):
    """Warn when the branch's CHANGES file still has revisions eligible for
    merge from trunk's CHANGES."""
    trunk_changes = repos + '/trunk/CHANGES'
    branch_changes = repos + '/' + branch + '/' + 'CHANGES'
    mergeinfo_cmd = ['svn', 'mergeinfo', '--show-revs=eligible',
                     trunk_changes, branch_changes]
    stdout = subprocess.check_output(mergeinfo_cmd, universal_newlines=True)
    if stdout:
        # Treat this as a warning since we are now putting entries for future
        # minor releases in CHANGES on trunk.
        logging.warning('CHANGES has unmerged revisions: %s' %
                        stdout.replace("\n", " "))
744
745
# Year we expect to find in copyright notices, captured at import time.
_current_year = str(datetime.datetime.now().year)
# Matches the ASF copyright line (with or without '(C)'); the 'year'
# group captures the four-digit year for comparison against _current_year.
_copyright_re = re.compile(r'Copyright (?:\(C\) )?(?P<year>[0-9]+)'
                           r' The Apache Software Foundation',
                           re.MULTILINE)
750
def check_copyright_year(repos, branch, revision):
    """Warn if NOTICE or version.c on BRANCH@REVISION carries a copyright
    year other than the current one."""
    def check_file(branch_relpath):
        # Fetch the file straight from the repository and inspect it.
        file_url = (repos + '/' + branch + '/'
                    + branch_relpath + '@' + str(revision))
        stdout = subprocess.check_output(['svn', 'cat', file_url],
                                         universal_newlines=True)
        m = _copyright_re.search(stdout)
        year = m.group('year') if m else None
        if year != _current_year:
            logging.warning('Copyright year in ' + branch_relpath
                            + ' is not the current year')
    check_file('NOTICE')
    check_file('subversion/libsvn_subr/version.c')
767
def replace_lines(path, actions):
    """Rewrite the file at PATH line by line.

    ACTIONS is an iterable of (START, PATTERN, REPL) triples: on each line
    beginning with START, substitute regex PATTERN with REPL.  Multiple
    actions may apply to the same line, in order.
    """
    with open(path, 'r') as src:
        original = src.readlines()
    rewritten = []
    for line in original:
        for prefix, regex, replacement in actions:
            if line.startswith(prefix):
                line = re.sub(regex, replacement, line)
        rewritten.append(line)
    with open(path, 'w') as dst:
        dst.writelines(rewritten)
777
778def roll_tarballs(args):
779    'Create the release artifacts.'
780
781    branch = get_branch_path(args)
782
783    logging.info('Rolling release %s from branch %s@%d' % (args.version,
784                                                           branch, args.revnum))
785
786    check_copyright_year(svn_repos, branch, args.revnum)
787
788    # Ensure we've got the appropriate rolling dependencies available
789    autoconf = AutoconfDep(args.base_dir, False, args.verbose,
790                         tool_versions[args.version.branch]['autoconf'][0],
791                         tool_versions[args.version.branch]['autoconf'][1])
792    libtool = LibtoolDep(args.base_dir, False, args.verbose,
793                         tool_versions[args.version.branch]['libtool'][0],
794                         tool_versions[args.version.branch]['libtool'][1])
795    swig = SwigDep(args.base_dir, False, args.verbose,
796                   tool_versions[args.version.branch]['swig'][0],
797                   tool_versions[args.version.branch]['swig'][1], None)
798
799    for dep in [autoconf, libtool, swig]:
800        if not dep.have_usable():
801           raise RuntimeError('Cannot find usable %s' % dep.label)
802
803    if branch != 'trunk':
804        # Make sure CHANGES is sync'd.
805        compare_changes(svn_repos, branch, args.revnum)
806
807    # Ensure the output directory doesn't already exist
808    if os.path.exists(get_target(args)):
809        raise RuntimeError('output directory \'%s\' already exists'
810                                            % get_target(args))
811
812    os.mkdir(get_target(args))
813
814    logging.info('Preparing working copy source')
815    shutil.rmtree(get_workdir(args.base_dir), True)
816    run_svn(['checkout',
817             svn_repos + '/' + branch + '@' + str(args.revnum),
818             get_workdir(args.base_dir)],
819            verbose=args.verbose)
820
821    # Exclude stuff we don't want in the tarball, it will not be present
822    # in the exported tree.
823    exclude = ['contrib', 'notes']
824    if branch != 'trunk':
825        exclude += ['STATUS']
826        if args.version.minor < 7:
827            exclude += ['packages', 'www']
828    cwd = os.getcwd()
829    os.chdir(get_workdir(args.base_dir))
830    run_svn(['update', '--set-depth=exclude'] + exclude,
831            verbose=args.verbose)
832    os.chdir(cwd)
833
834    if args.patches:
835        # Assume patches are independent and can be applied in any
836        # order, no need to sort.
837        majmin = '%d.%d' % (args.version.major, args.version.minor)
838        for name in os.listdir(args.patches):
839            if name.find(majmin) != -1 and name.endswith('patch'):
840                logging.info('Applying patch %s' % name)
841                run_svn(['patch',
842                         os.path.join(args.patches, name),
843                         get_workdir(args.base_dir)],
844                        verbose=args.verbose)
845
846    # Massage the new version number into svn_version.h.
847    ver_tag, ver_numtag = args.version.get_ver_tags(args.revnum)
848    replacements = [('#define SVN_VER_TAG',
849                     '".*"', ver_tag),
850                    ('#define SVN_VER_NUMTAG',
851                     '".*"', ver_numtag),
852                    ('#define SVN_VER_REVISION',
853                     '[0-9][0-9]*', str(args.revnum))]
854    if args.version.pre != 'nightly':
855        # SVN_VER_PATCH might change for security releases, e.g., when
856        # releasing 1.9.7 from the magic revision of 1.9.6.
857        #
858        # ### Would SVN_VER_MAJOR / SVN_VER_MINOR ever change?
859        # ### Note that SVN_VER_MINOR is duplicated in some places, see
860        # ### <https://subversion.apache.org/docs/community-guide/releasing.html#release-branches>
861        replacements += [('#define SVN_VER_MAJOR',
862                          '[0-9][0-9]*', str(args.version.major)),
863                         ('#define SVN_VER_MINOR',
864                          '[0-9][0-9]*', str(args.version.minor)),
865                         ('#define SVN_VER_PATCH',
866                          '[0-9][0-9]*', str(args.version.patch))]
867    replace_lines(os.path.join(get_workdir(args.base_dir),
868                               'subversion', 'include', 'svn_version.h'),
869                  replacements)
870
871    # Basename for export and tarballs, e.g. subversion-1.9.5 or
872    # subversion-nightly-r1800000
873    exportdir = get_exportdir(args.base_dir, args.version, args.revnum)
874    basename = os.path.basename(exportdir)
875
876    def export(windows):
877        shutil.rmtree(exportdir, True)
878        if windows:
879            eol_style = "--native-eol=CRLF"
880        else:
881            eol_style = "--native-eol=LF"
882        run_svn(['export',
883                 eol_style, get_workdir(args.base_dir), exportdir],
884                verbose=args.verbose)
885
886    def transform_sql():
887        for root, dirs, files in os.walk(exportdir):
888            for fname in files:
889                if fname.endswith('.sql'):
890                    run_script(args.verbose,
891                               'python build/transform_sql.py %s/%s %s/%s'
892                               % (root, fname, root, fname[:-4] + '.h'))
893
894    def clean_autom4te():
895        for root, dirs, files in os.walk(get_workdir(args.base_dir)):
896            for dname in dirs:
897                if dname.startswith('autom4te') and dname.endswith('.cache'):
898                    shutil.rmtree(os.path.join(root, dname))
899
900    logging.info('Building Windows tarballs')
901    export(windows=True)
902    os.chdir(exportdir)
903    transform_sql()
904    # Can't use the po-update.sh in the Windows export since it has CRLF
905    # line endings and won't run, so use the one in the working copy.
906    run_script(args.verbose,
907               '%s/tools/po/po-update.sh pot' % get_workdir(args.base_dir))
908    os.chdir(cwd)
909    clean_autom4te() # dist.sh does it but pointless on Windows?
910    os.chdir(get_tempdir(args.base_dir))
911    run_script(args.verbose,
912               'zip -q -r %s %s' % (basename + '.zip', basename))
913    os.chdir(cwd)
914
915    logging.info('Building Unix tarballs')
916    export(windows=False)
917    os.chdir(exportdir)
918    transform_sql()
919    run_script(args.verbose,
920               '''tools/po/po-update.sh pot
921                  ./autogen.sh --release''',
922               hide_stderr=True) # SWIG is noisy
923    os.chdir(cwd)
924    clean_autom4te() # dist.sh does it but probably pointless
925
926    # Do not use tar, it's probably GNU tar which produces tar files
927    # that are not compliant with POSIX.1 when including filenames
928    # longer than 100 chars.  Platforms without a tar that understands
929    # the GNU tar extension will not be able to extract the resulting
930    # tar file.  Use pax to produce POSIX.1 tar files.
931    #
932    # Use the gzip -n flag - this prevents it from storing the
933    # original name of the .tar file, and far more importantly, the
934    # mtime of the .tar file, in the produced .tar.gz file. This is
    # important, because it makes the gzip encoding reproducible by
    # anyone else who has a similar version of gzip, and also uses
937    # "gzip -9n". This means that committers who want to GPG-sign both
938    # the .tar.gz and the .tar.bz2 can download the .tar.bz2 (which is
939    # smaller), and locally generate an exact duplicate of the
940    # official .tar.gz file. This metadata is data on the temporary
941    # uncompressed tarball itself, not any of its contents, so there
942    # will be no effect on end-users.
943    os.chdir(get_tempdir(args.base_dir))
944    run_script(args.verbose,
945               '''pax -x ustar -w -f %s %s
946                  bzip2 -9fk %s
947                  gzip -9nf %s'''
948               % (basename + '.tar', basename,
949                  basename + '.tar',
950                  basename + '.tar'))
951    os.chdir(cwd)
952
953    # Move the results to the deploy directory
954    logging.info('Moving artifacts and calculating checksums')
955    for e in extns:
956        filename = basename + '.' + e
957        filepath = os.path.join(get_tempdir(args.base_dir), filename)
958        shutil.move(filepath, get_target(args))
959        filepath = os.path.join(get_target(args), filename)
960        if args.version < Version("1.11.0-alpha1"):
961            # 1.10 and earlier generate *.sha1 files for compatibility reasons.
962            # They are deprecated, however, so we don't publicly link them in
963            # the announcements any more.
964            m = hashlib.sha1()
965            m.update(open(filepath, 'rb').read())
966            open(filepath + '.sha1', 'w').write(m.hexdigest())
967        m = hashlib.sha512()
968        m.update(open(filepath, 'rb').read())
969        open(filepath + '.sha512', 'w').write(m.hexdigest())
970
971    # Nightlies do not get tagged so do not need the header
972    if args.version.pre != 'nightly':
973        shutil.copy(os.path.join(get_workdir(args.base_dir),
974                                 'subversion', 'include', 'svn_version.h'),
975                    os.path.join(get_target(args),
976                                 'svn_version.h.dist-%s'
977                                   % (str(args.version),)))
978
979        # Download and "tag" the KEYS file (in case a signing key is removed
980        # from a committer's LDAP profile down the road)
981        basename = 'subversion-%s.KEYS' % (str(args.version),)
982        filepath = os.path.join(get_tempdir(args.base_dir), basename)
983        download_file(KEYS, filepath, None)
984        shutil.move(filepath, get_target(args))
985
986    # And we're done!
987
988#----------------------------------------------------------------------
989# Sign the candidate release artifacts
990
def sign_candidates(args):
    'Sign candidate artifacts in the dist development directory.'

    def sign_file(filename):
        """Append a detached armored signature for FILENAME to FILENAME.asc.

        Opened in append mode so several release managers can each add
        their own signature to the same .asc file."""
        logging.info("Signing %s" % filename)
        # Use a context manager so the .asc file is closed even if gpg
        # fails and check_call raises.  (check_call returns nothing useful,
        # so no point binding its result.)
        with open(filename + '.asc', 'a') as asc_file:
            if args.userid:
                subprocess.check_call(['gpg', '-ba', '-u', args.userid,
                                       '-o', '-', filename], stdout=asc_file)
            else:
                subprocess.check_call(['gpg', '-ba', '-o', '-', filename],
                                      stdout=asc_file)

    target = get_target(args)

    for e in extns:
        filename = os.path.join(target, 'subversion-%s.%s' % (args.version, e))
        sign_file(filename)
        # 1.6.x and earlier also shipped separate -deps tarballs.
        if args.version.major == 1 and args.version.minor <= 6:
            filename = os.path.join(target,
                                   'subversion-deps-%s.%s' % (args.version, e))
            sign_file(filename)
1014
1015
1016#----------------------------------------------------------------------
1017# Post the candidate release artifacts
1018
def post_candidates(args):
    'Post candidate artifacts to the dist development directory.'

    target = get_target(args)
    version = str(args.version)

    logging.info('Importing tarballs to %s' % dist_dev_url)
    # Auto-props ensure the .asc signature files get sane svn properties.
    run_svn(['import', '-m',
             'Add Subversion %s candidate release artifacts' % version,
             '--auto-props', '--config-option',
             'config:auto-props:*.asc=svn:eol-style=native;svn:mime-type=text/plain',
             target, dist_dev_url],
            verbose=args.verbose, username=args.username)
1032
1033#----------------------------------------------------------------------
1034# Create tag
1035# Bump versions on branch
1036
def create_tag_only(args):
    'Create tag in the repository'

    target = get_target(args)
    version = str(args.version)

    logging.info('Creating tag for %s' % version)

    branch_url = svn_repos + '/' + get_branch_path(args)
    tag_url = svn_repos + '/tags/' + version

    # Copy the branch to the tag, and overwrite svn_version.h with the
    # exact header that was rolled into the tarballs.
    header = os.path.join(target, 'svn_version.h.dist' + '-' + version)
    svnmucc_cmd = ['-m', 'Tagging release ' + version,
                   'cp', str(args.revnum), branch_url, tag_url,
                   'put', header,
                   tag_url + '/subversion/include/svn_version.h']

    # don't redirect stdout/stderr since svnmucc might ask for a password
    try:
        run_svnmucc(svnmucc_cmd, verbose=args.verbose, username=args.username)
    except subprocess.CalledProcessError:
        if args.version.is_prerelease():
            logging.error("Do you need to pass --branch=trunk?")
        raise
1061
def bump_versions_on_branch(args):
    """Bump version numbers on branch.

    Post-release housekeeping: edits svn_version.h and STATUS directly in
    the repository (via temp files and svnmucc) so the branch advertises
    the next patch version.
    """

    logging.info('Bumping version numbers on the branch')

    branch_url = svn_repos + '/' + get_branch_path(args)

    def replace_in_place(fd, startofline, flat, spare):
        """In file object FD, replace FLAT with SPARE in the first line
        starting with regex STARTOFLINE.

        Raises RuntimeError if nothing matched, so a no-op commit can
        never happen silently."""

        pattern = r'^(%s)%s' % (startofline, re.escape(flat))
        repl = r'\g<1>%s' % (spare,)
        fd.seek(0, os.SEEK_SET)
        lines = fd.readlines()
        for i, line in enumerate(lines):
            replacement = re.sub(pattern, repl, line)
            if replacement != line:
                lines[i] = replacement
                break
        else:
            # for/else: fell through without replacing anything
            raise RuntimeError("Could not replace r'%s' with r'%s' in '%s'"
                               % (pattern, repl, fd.url))

        fd.seek(0, os.SEEK_SET)
        fd.writelines(lines)
        fd.truncate() # for current callers, new value is never shorter.

    new_version = Version('%d.%d.%d' %
                          (args.version.major, args.version.minor,
                           args.version.patch + 1))

    HEAD = subprocess.check_output(['svn', 'info', '--show-item=revision',
                                    '--', branch_url],
                                   universal_newlines=True).strip()
    HEAD = int(HEAD)
    def file_object_for(relpath):
        # Open the temp file in text mode ('w+'): the default 'w+b' makes
        # readlines() return bytes under Python 3, and replace_in_place()'s
        # re.sub() with str patterns would then raise TypeError.
        fd = tempfile.NamedTemporaryFile(mode='w+')
        url = branch_url + '/' + relpath
        fd.url = url  # stashed for error reporting in replace_in_place()
        subprocess.check_call(['svn', 'cat', '%s@%d' % (url, HEAD)],
                              stdout=fd)
        return fd

    svn_version_h = file_object_for('subversion/include/svn_version.h')
    replace_in_place(svn_version_h, '#define SVN_VER_PATCH  *',
                     str(args.version.patch), str(new_version.patch))

    STATUS = file_object_for('STATUS')
    replace_in_place(STATUS, 'Status of ',
                     str(args.version), str(new_version))

    svn_version_h.seek(0, os.SEEK_SET)
    STATUS.seek(0, os.SEEK_SET)
    # Commit against the HEAD we read from, so a racing commit fails loudly.
    run_svnmucc(['-r', str(HEAD),
                 '-m', 'Post-release housekeeping: '
                       'bump the %s branch to %s.'
                 % (branch_url.split('/')[-1], str(new_version)),
                 'put', svn_version_h.name, svn_version_h.url,
                 'put', STATUS.name, STATUS.url,
                ],
                verbose=args.verbose, username=args.username)
    # Dropping the references closes (and deletes) the temp files.
    del svn_version_h
    del STATUS
1126
def create_tag_and_bump_versions(args):
    '''Create tag in the repository and, if not a prerelease version,
       bump version numbers on the branch'''

    create_tag_only(args)

    # Prereleases (alpha/beta/rc) don't advance the branch version.
    if args.version.is_prerelease():
        return
    bump_versions_on_branch(args)
1135
1136#----------------------------------------------------------------------
1137# Clean dist
1138
def clean_dist(args):
    '''Clean the distribution directory of release artifacts of
    no-longer-supported minor lines.

    Keeps only the newest release of each supported minor line and
    removes everything else (they remain in the archive area).'''

    stdout = subprocess.check_output(['svn', 'list', dist_release_url],
                                     universal_newlines=True)

    def minor(version):
        """Return the minor release line of the parameter, which must be
        a Version object."""
        return (version.major, version.minor)

    # Materialize as a list.  Under Python 3 a filter() iterator would be
    # exhausted by the set(map(...)) call below, and the removal loop
    # further down would then iterate an empty sequence and remove nothing.
    filenames = [x for x in stdout.split('\n') if x.startswith('subversion-')]
    versions = set(map(Version, filenames))
    to_keep = set()
    # TODO: When we release 1.A.0 GA we'll have to manually remove 1.(A-2).* artifacts.
    for line_to_keep in [minor(Version(x + ".0")) for x in supported_release_lines]:
        candidates = list(
            x for x in versions
            if minor(x) == line_to_keep
        )
        if candidates:
            to_keep.add(max(candidates))
    for i in sorted(to_keep):
        logging.info("Saving release '%s'", i)

    svnmucc_cmd = ['-m', 'Remove old Subversion releases.\n' +
                   'They are still available at ' + dist_archive_url]
    for filename in filenames:
        if Version(filename) not in to_keep:
            logging.info("Removing %r", filename)
            svnmucc_cmd += ['rm', dist_release_url + '/' + filename]

    # don't redirect stdout/stderr since svnmucc might ask for a password
    if 'rm' in svnmucc_cmd:
        run_svnmucc(svnmucc_cmd, verbose=args.verbose, username=args.username)
    else:
        logging.info("Nothing to remove")
1178
1179#----------------------------------------------------------------------
1180# Move to dist
1181
def move_to_dist(args):
    'Move candidate artifacts to the distribution directory.'

    version = str(args.version)
    listing = subprocess.check_output(['svn', 'list', dist_dev_url],
                                      universal_newlines=True)

    # Everything matching this release's tarball/signature naming scheme.
    pattern = 'subversion-%s.*' % version
    filenames = [entry for entry in listing.split('\n')
                 if fnmatch.fnmatch(entry, pattern)]

    # Remove the candidate-only svn_version.h.dist file, then move each
    # artifact from the dev area to the release area in one commit.
    svnmucc_cmd = ['-m',
                   'Publish Subversion-%s.' % version,
                   'rm', dist_dev_url + '/' + 'svn_version.h.dist'
                         + '-' + version]
    for filename in filenames:
        svnmucc_cmd += ['mv', dist_dev_url + '/' + filename,
                        dist_release_url + '/' + filename]

    # don't redirect stdout/stderr since svnmucc might ask for a password
    logging.info('Moving release artifacts to %s' % dist_release_url)
    run_svnmucc(svnmucc_cmd, verbose=args.verbose, username=args.username)
1203
1204#----------------------------------------------------------------------
1205# Write announcements
1206
def write_news(args):
    'Write text for the Subversion website.'
    if args.news_release_date:
        release_date = datetime.datetime.strptime(args.news_release_date,
                                                  '%Y-%m-%d')
    else:
        release_date = datetime.date.today()

    data = {
        'date': release_date.strftime('%Y%m%d'),
        'date_pres': release_date.strftime('%Y-%m-%d'),
        'major-minor': args.version.branch,
        'version': str(args.version),
        'version_base': args.version.base,
        'anchor': get_download_anchor(args.version),
        'is_recommended': ezt_bool(is_recommended(args.version)),
        'announcement_url': args.announcement_url,
    }

    if args.version.is_prerelease():
        template_filename = 'rc-news.ezt'
    else:
        template_filename = 'stable-news.ezt'

    template = ezt.Template()
    template.parse(get_tmplfile(template_filename).read())

    # Print to stdout unless asked to splice into an existing HTML file.
    if not args.edit_html_file:
        template.generate(sys.stdout, data)
        return

    # Insert the news item just before the first existing news entry.
    tmp_name = args.edit_html_file + '.tmp'
    with open(args.edit_html_file, 'r') as src, open(tmp_name, 'w') as dst:
        inserted = False
        for line in src:
            if not inserted and line.startswith('<div class="h3" id="news-'):
                template.generate(dst, data)
                dst.write('\n')
                inserted = True
            dst.write(line)
    os.remove(args.edit_html_file)
    os.rename(tmp_name, args.edit_html_file)
1246
1247
def get_fileinfo(args):
    'Return a list of file info (filenames) for the release tarballs'

    # Simple attribute bag; templates access .filename on each entry.
    class info(object):
        pass

    pattern = os.path.join(get_target(args),
                           'subversion*-%s.*.asc' % args.version)

    fileinfo = []
    for path in sorted(glob.glob(pattern)):
        entry = info()
        entry.filename = os.path.basename(path)[:-4]  # strip ".asc"
        fileinfo.append(entry)

    return fileinfo
1267
1268
def write_announcement(args):
    'Write the release announcement.'
    siginfo = get_siginfo(args, True)
    if not siginfo:
        raise RuntimeError("No signatures found for %s at %s"
                           % (args.version, args.target))

    data = {
        'version': str(args.version),
        'siginfo': "\n".join(siginfo) + "\n",
        'major-minor': args.version.branch,
        'major-minor-patch': args.version.base,
        'anchor': get_download_anchor(args.version),
    }

    if args.version.is_prerelease():
        template_filename = 'rc-release-ann.ezt'
    else:
        data['dot-zero'] = ezt_bool(args.version.patch == 0)
        # TODO: instead of requiring the RM to remember to pass --security,
        #   read the private repository where CVE announcements are staged,
        #   parse the json file that identifies which versions are affected,
        #   and accordingly automagically set data['security'].
        data['security'] = ezt_bool(args.security)
        template_filename = 'stable-release-ann.ezt'

        # The template text assumes these two are mutually exclusive.
        # If you ever find a reason to make a x.y.0 release with a security
        # bug, just comment this out and update the template before sending.
        assert not (data['dot-zero'] and data['security'])

    template = ezt.Template(compress_whitespace=False)
    template.parse(get_tmplfile(template_filename).read())
    template.generate(sys.stdout, data)
1301
1302
def write_downloads(args):
    'Output the download section of the website.'
    data = {
        'version': str(args.version),
        'fileinfo': get_fileinfo(args),
    }

    template = ezt.Template(compress_whitespace=False)
    template.parse(get_tmplfile('download.ezt').read())
    template.generate(sys.stdout, data)
1314
1315
1316#----------------------------------------------------------------------
1317# Validate the signatures for a release
1318
# Delimiters of one ASCII-armored PGP signature block inside a .asc file;
# used below to split and syntax-check multi-signature files.
key_start = '-----BEGIN PGP SIGNATURE-----'
key_end = '-----END PGP SIGNATURE-----'

# Maps OpenPGP public-key algorithm IDs to display formatters.
PUBLIC_KEY_ALGORITHMS = {
    # These values are taken from the RFC's registry at:
    # https://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-12
    #
    # The values are callables that produce gpg1-like key length and type
    # indications, e.g., "4096R" for a 4096-bit RSA key.
    1: (lambda keylen: str(keylen) + 'R'), # RSA
}
1330
1331def _make_human_readable_fingerprint(fingerprint):
1332    return re.compile(r'(....)' * 10).sub(r'\1 \2 \3 \4 \5  \6 \7 \8 \9 \10',
1333                                          fingerprint)
1334
def get_siginfo(args, quiet=False):
    """Returns a list of signatures for the release.

    Verifies every detached .asc signature for the release artifacts
    (exiting with a message on syntax errors or bad signatures), then
    formats one "name [len+type/long-key-id]" line plus a fingerprint
    line per good signer, for use in the announcement mail.
    """

    try:
        import gnupg
    except ImportError:
        import security._gnupg as gnupg
    gpg = gnupg.GPG()

    good_sigs = {}
    output = []

    for fileinfo in get_fileinfo(args):
        filename = os.path.join(get_target(args), fileinfo.filename + '.asc')
        # Close the handle promptly rather than relying on GC.
        with open(filename) as asc_file:
            text = asc_file.read()
        keys = text.split(key_start)

        # Check the keys file syntax. We've been bitten in the past
        # with syntax errors in the key delimiters that GPG didn't
        # catch for us, but the ASF key checker tool did.
        if keys[0]:
            sys.stderr.write("SYNTAX ERROR: %s does not start with '%s'\n"
                             % (filename, key_start))
            sys.exit(1)
        keys = keys[1:]

        if not quiet:
            logging.info("Checking %d sig(s) in %s" % (len(keys), filename))

        n = 0
        for key in keys:
            n += 1
            if not key.rstrip().endswith(key_end):
                sys.stderr.write("SYNTAX ERROR: Key %d in %s"
                                 " does not end with '%s'\n"
                                 % (n, filename, key_end))
                sys.exit(1)

            # Write this one signature to a temp file so gpg can verify it
            # against the tarball (filename minus the '.asc' suffix).
            fd, fn = tempfile.mkstemp(text=True)
            with os.fdopen(fd, 'w') as key_file:
                key_file.write(key_start + key)
            with open(fn, 'rb') as sig_file:
                verified = gpg.verify_file(sig_file, filename[:-4])
            os.unlink(fn)

            if verified.valid:
                good_sigs[verified.fingerprint] = True
            else:
                sys.stderr.write("BAD SIGNATURE: Key %d in %s\n"
                                 % (n, filename))
                if verified.key_id:
                    sys.stderr.write("  key id: %s\n" % verified.key_id)
                sys.exit(1)

    # ('signer_id' rather than 'id', to avoid shadowing the builtin.)
    for signer_id in good_sigs.keys():
        # Most potential signers have public short keyid (32-bit) collisions in
        # the https://evil32.com/ set, which has been uploaded to the
        # keyservers, so generate the long keyid (see use of LONG_KEY_ID below).
        #
        # TODO: in the future it'd be nice to use the 'gnupg' module here.
        gpg_output = subprocess.check_output(
            ['gpg', '--fixed-list-mode', '--with-colons', '--fingerprint',
             signer_id],
            stderr=subprocess.STDOUT,
            universal_newlines=True,
        )
        gpg_output = gpg_output.splitlines()

        # This code was added in r934990, but there was no comment (nor log
        # message text) explaining its purpose.  I've commented it out since
        # ignoring arbitrary warnings in a verification codepath is Bad.  If
        # you run into warnings on your machine, feel free to uncomment it,
        # but when you do so please make it match specific warnings only.
        #
        #gpg_output = "\n".join([ l for l in gpg_output.splitlines()
        #                                             if l[0:7] != 'Warning' ])

        # Parse gpg's output.  This happens to work for both gpg1 and gpg2,
        # even though their outputs are slightly different.
        #
        # See http://git.gnupg.org/cgi-bin/gitweb.cgi?p=gnupg.git;a=blob_plain;f=doc/DETAILS
        for line in gpg_output:
            parts = line.split(':')
            if parts[0] == 'pub':
                keylen = int(parts[2])
                keytype = int(parts[3])
                formatter = PUBLIC_KEY_ALGORITHMS[keytype]
                long_key_id = parts[4]
                length_and_type = formatter(keylen) + '/' + long_key_id
                del keylen, keytype, formatter, long_key_id
                break
        else:
            raise RuntimeError("Failed to determine LONG_KEY_ID")
        for line in gpg_output:
            parts = line.split(':')
            if parts[0] == 'fpr':
                fingerprint = parts[9]
                break
        else:
            raise RuntimeError("Failed to determine FINGERPRINT")
        for line in gpg_output:
            parts = line.split(':')
            if parts[0] == 'uid':
                name = parts[9].split(' <')[0]
                break
        else:
            raise RuntimeError("Failed to determine NAME")

        format_expandos = dict(
            name=name,
            length_and_type=length_and_type,
            fingerprint=_make_human_readable_fingerprint(fingerprint),
        )
        del name, length_and_type, fingerprint
        line = "   {name} [{length_and_type}] with fingerprint:"
        output.append( line.format(**format_expandos) )
        line = "    {fingerprint}"
        output.append( line.format(**format_expandos) )

    return output
1454
def check_sigs(args):
    'Check the signatures for the release.'

    # get_siginfo() exits on any bad signature; here we just display them.
    for info_line in get_siginfo(args):
        print(info_line)
1461
def get_keys(args):
    'Import the LDAP-based KEYS file to gpg'
    # gpg reads its stdin through a real file descriptor, and urlopen()
    # response objects don't provide .fileno(), so spool the download
    # through a temporary file first.
    keys_data = urlopen(KEYS).read()
    with tempfile.SpooledTemporaryFile() as spool:
        spool.write(keys_data)
        spool.flush()
        spool.seek(0)
        subprocess.check_call(['gpg', '--import'], stdin=spool)
1470
def add_to_changes_dict(changes_dict, audience, section, change, revision):
    """Record CHANGE (made in REVISION) under AUDIENCE/SECTION in CHANGES_DICT.

    CHANGES_DICT maps audience -> section -> change -> set(revisions).
    AUDIENCE is upper-cased and SECTION lower-cased for normalization;
    either may be None or empty (uncategorized).  CHANGE is stripped of
    surrounding whitespace so identical entries merge their revisions.
    """
    # Normalize arguments
    if audience:
        audience = audience.upper()
    if section:
        section = section.lower()
    change = change.strip()

    # setdefault() creates each nesting level on first use, replacing the
    # previous chain of 'if not key in dict' membership checks.
    section_dict = changes_dict.setdefault(audience, dict())
    changes = section_dict.setdefault(section, dict())
    changes.setdefault(change, set()).add(revision)
1489
def print_section(changes_dict, audience, section, title, mandatory=False):
    """Print one titled changelog section for AUDIENCE, if present.

    With mandatory=True the title and a '(none)' placeholder are printed
    even when the section has no entries."""
    if audience not in changes_dict:
        return
    audience_changes = changes_dict[audience]
    has_section = section in audience_changes
    if title and (mandatory or has_section):
        print('  - %s:' % title)
    if has_section:
        print_changes(audience_changes[section])
    elif mandatory:
        print('    (none)')
1500
def print_changes(changes):
    """Print each change as a bullet with its earliest revision.

    Sorted alphabetically so entries sharing a prefix stay together;
    multi-revision entries get an ' et al' suffix."""
    for entry in sorted(changes):
        revisions = changes[entry]
        suffix = ' et al' if len(revisions) > 1 else ''
        print('    * %s (r%s%s)' % (entry, min(revisions), suffix))
1507
1508def write_changelog(args):
1509    'Write changelog, parsed from commit messages'
1510    # Changelog lines are lines with the following format:
1511    #   '['[audience[:section]]']' <message>
1512    # or:
1513    #   <message> '['[audience[:section]]']'
1514    # where audience = U (User-visible) or D (Developer-visible)
1515    #       section = general|major|minor|client|server|clientserver|other|api|bindings
1516    #                 (section is optional and is treated case-insensitively)
1517    #       message = the actual text for CHANGES
1518    #
1519    # This means the "changes label" can be used as prefix or suffix, and it
1520    # can also be left empty (which results in an uncategorized changes entry),
1521    # if the committer isn't sure where the changelog entry belongs.
1522    #
1523    # Putting [skip], [ignore], [c:skip] or [c:ignore] somewhere in the
1524    # log message means this commit must be ignored for Changelog processing
1525    # (ignored even with the --include-unlabeled-summaries option).
1526    #
1527    # If there is no changes label anywhere in the commit message, and the
1528    # --include-unlabeled-summaries option is used, we'll consider the summary
1529    # line of the commit message (= first line except if it starts with a *)
1530    # as an uncategorized changes entry, except if it contains "status",
1531    # "changes", "post-release housekeeping" or "follow-up".
1532    #
1533    # Examples:
1534    #   [U:major] Better interactive conflict resolution for tree conflicts
1535    #   ra_serf: Adjustments for serf versions with HTTP/2 support [U:minor]
1536    #   [U] Fix 'svn diff URL@REV WC' wrongly looks up URL@HEAD (issue #4597)
1537    #   Fix bug with canonicalizing Window-specific drive-relative URL []
1538    #   New svn_ra_list() API function [D:api]
1539    #   [D:bindings] JavaHL: Allow access to constructors of a couple JavaHL classes
1540
1541    branch_url = svn_repos + '/' + get_branch_path(args)
1542    previous = svn_repos + '/' + args.previous
1543    include_unlabeled = args.include_unlabeled
1544    separator_line = ('-' * 72) + '\n'
1545
1546    mergeinfo = subprocess.check_output(['svn', 'mergeinfo', '--show-revs',
1547                    'eligible', '--log', branch_url, previous],
1548                                        universal_newlines=True)
1549    log_messages_dict = {
1550        # This is a dictionary mapping revision numbers to their respective
1551        # log messages.  The expression in the "key:" part of the dict
1552        # comprehension extracts the revision number, as integer, from the
1553        # 'svn log' output.
1554        int(log_message.splitlines()[0].split()[0][1:]): log_message
1555        # The [1:-1] ignores the empty first and last element of the split().
1556        for log_message in mergeinfo.split(separator_line)[1:-1]
1557    }
1558    mergeinfo = mergeinfo.splitlines()
1559
1560    separator_pattern = re.compile('^-{72}$')
1561    revline_pattern = re.compile('^r(\d+) \| [^\|]+ \| [^\|]+ \| \d+ lines?$')
1562    changes_prefix_pattern = re.compile(r'^\[(U|D)?:?([^\]]+)?\](.+)$')
1563    changes_suffix_pattern = re.compile(r'^(.+)\[(U|D)?:?([^\]]+)?\]$')
1564    # TODO: push this into backport.status as a library function
1565    auto_merge_pattern = \
1566        re.compile(r'^Merge (r\d+,? |the r\d+ group |the \S+ branch:)')
1567
1568    changes_dict = dict()  # audience -> (section -> (change -> set(revision)))
1569    revision = -1
1570    got_firstline = False
1571    unlabeled_summary = None
1572    changes_ignore = False
1573    audience = None
1574    section = None
1575    message = None
1576
1577    for line in mergeinfo:
1578        if separator_pattern.match(line):
1579            # New revision section. Reset variables.
1580            # If there's an unlabeled summary from a previous section, and
1581            # include_unlabeled is True, put it into uncategorized_changes.
1582            if include_unlabeled and unlabeled_summary and not changes_ignore:
1583                if auto_merge_pattern.match(unlabeled_summary):
1584                    # 1. Parse revision numbers from the first line
1585                    merged_revisions = [
1586                        int(x) for x in
1587                        re.compile(r'(?<=\br)\d+\b').findall(unlabeled_summary)
1588                    ]
1589                    # TODO pass each revnum in MERGED_REVISIONS through this
1590                    #      logic, in order to extract CHANGES_PREFIX_PATTERN
1591                    #      and CHANGES_SUFFIX_PATTERN lines from the trunk log
1592                    #      message.
1593
1594                    # 2. Parse the STATUS entry
1595                    this_log_message = log_messages_dict[revision]
1596                    status_paragraph = this_log_message.split('\n\n')[2]
1597                    logsummary = \
1598                        backport.status.StatusEntry(status_paragraph).logsummary
1599                    add_to_changes_dict(changes_dict, None, None,
1600                                        ' '.join(logsummary), revision)
1601                else:
1602                    add_to_changes_dict(changes_dict, None, None,
1603                                        unlabeled_summary, revision)
1604            revision = -1
1605            got_firstline = False
1606            unlabeled_summary = None
1607            changes_ignore = False
1608            audience = None
1609            section = None
1610            message = None
1611            continue
1612
1613        revmatch = revline_pattern.match(line)
1614        if revmatch and (revision == -1):
1615            # A revision line: get the revision number
1616            revision = int(revmatch.group(1))
1617            logging.debug('Changelog processing revision r%d' % revision)
1618            continue
1619
1620        if line.strip() == '':
1621            # Skip empty / whitespace lines
1622            continue
1623
1624        if not got_firstline:
1625            got_firstline = True
1626            if (not re.search(r'status|changes|post-release housekeeping|follow-up|^\*',
1627                              line, re.IGNORECASE)
1628                    and not changes_prefix_pattern.match(line)
1629                    and not changes_suffix_pattern.match(line)):
1630                unlabeled_summary = line
1631
1632        if re.search(r'\[(c:)?(skip|ignore)\]', line, re.IGNORECASE):
1633            changes_ignore = True
1634
1635        prefix_match = changes_prefix_pattern.match(line)
1636        if prefix_match:
1637            audience = prefix_match.group(1)
1638            section = prefix_match.group(2)
1639            message = prefix_match.group(3)
1640            add_to_changes_dict(changes_dict, audience, section, message, revision)
1641
1642        suffix_match = changes_suffix_pattern.match(line)
1643        if suffix_match:
1644            message = suffix_match.group(1)
1645            audience = suffix_match.group(2)
1646            section = suffix_match.group(3)
1647            add_to_changes_dict(changes_dict, audience, section, message, revision)
1648
1649    # Output the sorted changelog entries
1650    # 1) Uncategorized changes
1651    print_section(changes_dict, None, None, None)
1652    print
1653    # 2) User-visible changes
1654    print(' User-visible changes:')
1655    print_section(changes_dict, 'U', None, None)
1656    print_section(changes_dict, 'U', 'general', 'General')
1657    print_section(changes_dict, 'U', 'major', 'Major new features')
1658    print_section(changes_dict, 'U', 'minor', 'Minor new features and improvements')
1659    print_section(changes_dict, 'U', 'client', 'Client-side bugfixes', mandatory=True)
1660    print_section(changes_dict, 'U', 'server', 'Server-side bugfixes', mandatory=True)
1661    print_section(changes_dict, 'U', 'clientserver', 'Client-side and server-side bugfixes')
1662    print_section(changes_dict, 'U', 'other', 'Other tool improvements and bugfixes')
1663    print_section(changes_dict, 'U', 'bindings', 'Bindings bugfixes', mandatory=True)
1664    print
1665    # 3) Developer-visible changes
1666    print(' Developer-visible changes:')
1667    print_section(changes_dict, 'D', None, None)
1668    print_section(changes_dict, 'D', 'general', 'General', mandatory=True)
1669    print_section(changes_dict, 'D', 'api', 'API changes', mandatory=True)
1670    print_section(changes_dict, 'D', 'bindings', 'Bindings')
1671
1672#----------------------------------------------------------------------
1673# Main entry point for argument parsing and handling
1674
def main():
    """Parse command-line arguments and drive the appropriate subcommand.

    Builds the argparse command tree for all release subcommands, handles
    the global --clean/--verbose options, fixes up PATH and TZ for
    reproducible builds, and finally dispatches to the selected
    subcommand's handler function.
    """

    # Setup our main parser
    parser = argparse.ArgumentParser(
                            description='Create an Apache Subversion release.')
    parser.add_argument('--clean', action='store_true', default=False,
                   help='''Remove any directories previously created by %(prog)s,
                           including the 'prefix' dir, the 'temp' dir, and the
                           default or specified target dir.''')
    parser.add_argument('--verbose', action='store_true', default=False,
                   help='Increase output verbosity')
    parser.add_argument('--base-dir', default=os.getcwd(),
                   help='''The directory in which to create needed files and
                           folders.  The default is the current working
                           directory.''')
    parser.add_argument('--target',
                   help='''The full path to the directory containing
                           release artifacts. Default: <BASE_DIR>/deploy''')
    parser.add_argument('--branch',
                   help='''The branch to base the release on,
                           as a path relative to ^/subversion/.
                           Default: 'branches/MAJOR.MINOR.x'.''')
    parser.add_argument('--username',
                   help='Username for committing to ' + svn_repos +
                        ' or ' + dist_repos + '.')
    subparsers = parser.add_subparsers(title='subcommands')

    # Setup the parser for the build-env subcommand
    subparser = subparsers.add_parser('build-env',
                    help='''Download release prerequisites, including autoconf,
                            libtool, and swig.''')
    subparser.set_defaults(func=build_env)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('--sf-mirror', default='softlayer',
                    help='''The mirror to use for downloading files from
                            SourceForge.  If in the EU, you may want to use
                            'kent' for this value.''')
    subparser.add_argument('--use-existing', action='store_true', default=False,
                    help='''Attempt to use existing build dependencies before
                            downloading and building a private set.''')

    # Setup the parser for the create-release-branch subcommand
    subparser = subparsers.add_parser('create-release-branch',
                    help='''Create a minor release branch: branch from trunk,
                            update version numbers on trunk, create status
                            file on branch, update backport bot,
                            update buildbot config.''')
    subparser.set_defaults(func=create_release_branch)
    subparser.add_argument('version', type=Version,
                    help='''A version number to indicate the branch, such as
                            '1.7.0' (the '.0' is required).''')
    subparser.add_argument('revnum', type=lambda arg: int(arg.lstrip('r')),
                           nargs='?', default=None,
                    help='''The trunk revision number to base the branch on.
                            Default is HEAD.''')
    subparser.add_argument('--dry-run', action='store_true', default=False,
                   help='Avoid committing any changes to repositories.')

    # Setup the parser for the write-release-notes subcommand
    subparser = subparsers.add_parser('write-release-notes',
                    help='''Write a template release-notes file.''')
    subparser.set_defaults(func=write_release_notes)
    subparser.add_argument('version', type=Version,
                    help='''A version number to indicate the branch, such as
                            '1.7.0' (the '.0' is required).''')
    subparser.add_argument('revnum', type=lambda arg: int(arg.lstrip('r')),
                           nargs='?', default=None,
                    help='''The trunk revision number to base the branch on.
                            Default is HEAD.''')
    subparser.add_argument('--edit-html-file',
                    help='''Write the template release-notes to this file,
                            and update 'index.html' in the same directory.''')
    subparser.add_argument('--dry-run', action='store_true', default=False,
                   help='Avoid committing any changes to repositories.')

    # Setup the parser for the roll subcommand
    subparser = subparsers.add_parser('roll',
                    help='''Create the release artifacts.''')
    subparser.set_defaults(func=roll_tarballs)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('revnum', type=lambda arg: int(arg.lstrip('r')),
                    help='''The revision number to base the release on.''')
    subparser.add_argument('--patches',
                    help='''The path to the directory containing patches.''')

    # Setup the parser for the sign-candidates subcommand
    subparser = subparsers.add_parser('sign-candidates',
                    help='''Sign the release artifacts.''')
    subparser.set_defaults(func=sign_candidates)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('--userid',
                    help='''The (optional) USER-ID specifying the key to be
                            used for signing, such as '110B1C95' (Key-ID). If
                            omitted, uses the default key.''')

    # Setup the parser for the post-candidates subcommand
    subparser = subparsers.add_parser('post-candidates',
                    help='''Commit candidates to the release development area
                            of the dist.apache.org repository.''')
    subparser.set_defaults(func=post_candidates)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')

    # Setup the parser for the create-tag subcommand
    subparser = subparsers.add_parser('create-tag',
                    help='''Create the release tag and, if not a prerelease
                            version, bump version numbers on the branch.''')
    subparser.set_defaults(func=create_tag_and_bump_versions)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('revnum', type=lambda arg: int(arg.lstrip('r')),
                    help='''The revision number to base the release on.''')

    # Setup the parser for the bump-versions-on-branch subcommand
    subparser = subparsers.add_parser('bump-versions-on-branch',
                    help='''Bump version numbers on branch.''')
    subparser.set_defaults(func=bump_versions_on_branch)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('revnum', type=lambda arg: int(arg.lstrip('r')),
                    help='''The revision number to base the release on.''')

    # The clean-dist subcommand
    subparser = subparsers.add_parser('clean-dist',
                    help=clean_dist.__doc__.split('\n\n')[0])
    subparser.set_defaults(func=clean_dist)
    subparser.add_argument('--dist-dir',
                    help='''The directory to clean.''')

    # The move-to-dist subcommand
    subparser = subparsers.add_parser('move-to-dist',
                    help='''Move candidates and signatures from the temporary
                            release dev location to the permanent distribution
                            directory.''')
    subparser.set_defaults(func=move_to_dist)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')

    # The write-news subcommand
    subparser = subparsers.add_parser('write-news',
                    help='''Output to stdout template text for use in the news
                            section of the Subversion website.''')
    subparser.set_defaults(func=write_news)
    subparser.add_argument('--announcement-url',
                    help='''The URL to the archived announcement email.''')
    subparser.add_argument('--news-release-date',
                    help='''The release date for the news, as YYYY-MM-DD.
                            Default: today.''')
    subparser.add_argument('--edit-html-file',
                    help='''Insert the text into this file
                            (news.html, index.html).''')
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')

    # write-announcement
    subparser = subparsers.add_parser('write-announcement',
                    help='''Output to stdout template text for the emailed
                            release announcement.''')
    subparser.set_defaults(func=write_announcement)
    subparser.add_argument('--security', action='store_true', default=False,
                    help='''The release being announced includes security
                            fixes.''')
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')

    # write-downloads
    subparser = subparsers.add_parser('write-downloads',
                    help='''Output to stdout template text for the download
                            table for subversion.apache.org''')
    subparser.set_defaults(func=write_downloads)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')

    # check-sigs
    subparser = subparsers.add_parser('check-sigs',
                    help='''Output to stdout the signatures collected for this
                            release''')
    subparser.set_defaults(func=check_sigs)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')

    # get-keys
    subparser = subparsers.add_parser('get-keys',
                    help='''Import committers' public keys to ~/.gpg/''')
    subparser.set_defaults(func=get_keys)

    # A meta-target
    subparser = subparsers.add_parser('clean',
                    help='''The same as the '--clean' switch, but as a
                            separate subcommand.''')
    subparser.set_defaults(func=cleanup)

    # write-changelog
    subparser = subparsers.add_parser('write-changelog',
                    help='''Output to stdout changelog entries parsed from
                            commit messages, optionally labeled with a category
                            like [U:client], [D:api], [U], ...''')
    subparser.set_defaults(func=write_changelog)
    subparser.add_argument('previous',
                    help='''The "previous" branch or tag, relative to
                            ^/subversion/, to compare "branch" against.''')
    subparser.add_argument('--include-unlabeled-summaries',
                    dest='include_unlabeled',
                    action='store_true', default=False,
                    help='''Include summary lines that do not have a changes
                            label, unless an explicit [c:skip] or [c:ignore]
                            is part of the commit message (except if the
                            summary line contains 'STATUS', 'CHANGES',
                            'Post-release housekeeping', 'Follow-up' or starts
                            with '*').''')

    # Parse the arguments
    args = parser.parse_args()

    # first, process any global operations
    if args.clean:
        cleanup(args)

    # Set up logging
    logger = logging.getLogger()
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    # Fix up our path so we can use our installed versions
    os.environ['PATH'] = os.path.join(get_prefix(args.base_dir), 'bin') + ':' \
                                                            + os.environ['PATH']

    # Make timestamps in tarballs independent of local timezone
    os.environ['TZ'] = 'UTC'

    # On Python 3, argparse subparsers are optional by default, so invoking
    # the script with no subcommand would otherwise crash with
    # AttributeError on args.func.  Print usage and exit non-zero instead.
    if not hasattr(args, 'func'):
        parser.print_help()
        sys.exit(1)

    # finally, run the subcommand, and give it the parsed arguments
    args.func(args)
1913
1914
# Run the CLI only when executed as a script, not when this module is
# imported (e.g. by tests or other tooling).
if __name__ == '__main__':
    main()
1917