# Copyright 2011 OpenStack Foundation
# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""
Utilities with minimum-depends for use in setup.py
"""

from __future__ import unicode_literals

from distutils.command import install as du_install
from distutils import log

# (hberaud) do not use six here to import urlparse;
# keep this module free from external dependencies
# to avoid cross-dependency errors on minimal systems.
try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse

import email
import email.errors
import os
import re
import sys
import warnings

import pkg_resources
import setuptools
from setuptools.command import develop
from setuptools.command import easy_install
from setuptools.command import egg_info
from setuptools.command import install
from setuptools.command import install_scripts
from setuptools.command import sdist

from pbr import extra_files
from pbr import git
from pbr import options
import pbr.pbr_json
from pbr import testr_command
from pbr import version

REQUIREMENTS_FILES = ('requirements.txt', 'tools/pip-requires')
PY_REQUIREMENTS_FILES = [x % sys.version_info[0] for x in (
    'requirements-py%d.txt', 'tools/pip-requires-py%d')]
TEST_REQUIREMENTS_FILES = ('test-requirements.txt', 'tools/test-requires')


def get_requirements_files():
    files = os.environ.get("PBR_REQUIREMENTS_FILES")
    if files:
        return tuple(f.strip() for f in files.split(','))
    # Returns a list composed of:
    # - REQUIREMENTS_FILES with -py2 or -py3 in the name
    #   (e.g. requirements-py3.txt)
    # - REQUIREMENTS_FILES

    return PY_REQUIREMENTS_FILES + list(REQUIREMENTS_FILES)

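# Example (illustrative): with PBR_REQUIREMENTS_FILES="reqs/base.txt, reqs/extra.txt"
# set in the environment, get_requirements_files() returns
# ('reqs/base.txt', 'reqs/extra.txt'); without the override it returns the
# defaults above, e.g. on Python 3:
# ['requirements-py3.txt', 'tools/pip-requires-py3',
#  'requirements.txt', 'tools/pip-requires']
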

def append_text_list(config, key, text_list):
    """Append a newline-separated list to a possibly existing value."""
    new_value = []
    current_value = config.get(key, "")
    if current_value:
        new_value.append(current_value)
    new_value.extend(text_list)
    config[key] = '\n'.join(new_value)

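# Example (illustrative): given config = {'long_description': 'Intro'},
# append_text_list(config, 'long_description', ['More', 'Details']) leaves
# config['long_description'] == 'Intro\nMore\nDetails'.
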

def _any_existing(file_list):
    return [f for f in file_list if os.path.exists(f)]


# Get requirements from the first file that exists
def get_reqs_from_files(requirements_files):
    existing = _any_existing(requirements_files)

    # TODO(stephenfin): Remove this in pbr 6.0+
    deprecated = [f for f in existing if f in PY_REQUIREMENTS_FILES]
    if deprecated:
        warnings.warn('Support for \'-pyN\'-suffixed requirements files is '
                      'removed in pbr 5.0 and these files are now ignored. '
                      'Use environment markers instead. Conflicting files: '
                      '%r' % deprecated,
                      DeprecationWarning)

    existing = [f for f in existing if f not in PY_REQUIREMENTS_FILES]
    for requirements_file in existing:
        with open(requirements_file, 'r') as fil:
            return fil.read().split('\n')

    return []


def egg_fragment(match):
    return re.sub(r'(?P<PackageName>[\w.-]+)-'
                  r'(?P<GlobalVersion>'
                  r'(?P<VersionTripple>'
                  r'(?P<Major>0|[1-9][0-9]*)\.'
                  r'(?P<Minor>0|[1-9][0-9]*)\.'
                  r'(?P<Patch>0|[1-9][0-9]*)){1}'
                  r'(?P<Tags>(?:\-'
                  r'(?P<Prerelease>(?:(?=[0]{1}[0-9A-Za-z-]{0})(?:[0]{1})|'
                  r'(?=[1-9]{1}[0-9]*[A-Za-z]{0})(?:[0-9]+)|'
                  r'(?=[0-9]*[A-Za-z-]+[0-9A-Za-z-]*)(?:[0-9A-Za-z-]+)){1}'
                  r'(?:\.(?=[0]{1}[0-9A-Za-z-]{0})(?:[0]{1})|'
                  r'\.(?=[1-9]{1}[0-9]*[A-Za-z]{0})(?:[0-9]+)|'
                  r'\.(?=[0-9]*[A-Za-z-]+[0-9A-Za-z-]*)'
                  r'(?:[0-9A-Za-z-]+))*){1}){0,1}(?:\+'
                  r'(?P<Meta>(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*))){0,1}))',
                  r'\g<PackageName>>=\g<GlobalVersion>',
                  match.groups()[-1])

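# Example (illustrative): when parse_requirements() below applies this
# substitution to a URL fragment such as "egg=nova-1.2.3", the fragment is
# rewritten to the plain requirement "nova>=1.2.3"; a versionless fragment
# like "egg=nova" simply yields "nova" because the version pattern does not
# match.
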

def parse_requirements(requirements_files=None, strip_markers=False):

    if requirements_files is None:
        requirements_files = get_requirements_files()

    requirements = []
    for line in get_reqs_from_files(requirements_files):
        # Ignore comments
        if (not line.strip()) or line.startswith('#'):
            continue

        # Ignore index URL lines
        if re.match(r'^\s*(-i|--index-url|--extra-index-url|--find-links).*',
                    line):
            continue

        # Handle nested requirements files such as:
        # -r other-requirements.txt
        if line.startswith('-r'):
            req_file = line.partition(' ')[2]
            requirements += parse_requirements(
                [req_file], strip_markers=strip_markers)
            continue

        try:
            project_name = pkg_resources.Requirement.parse(line).project_name
        except ValueError:
            project_name = None

        # For the requirements list, we need to inject only the portion
        # after egg= so that distutils knows the package it's looking for
        # such as:
        # -e git://github.com/openstack/nova/master#egg=nova
        # -e git://github.com/openstack/nova/master#egg=nova-1.2.3
        # -e git+https://foo.com/zipball#egg=bar&subdirectory=baz
        # http://github.com/openstack/nova/zipball/master#egg=nova
        # http://github.com/openstack/nova/zipball/master#egg=nova-1.2.3
        # git+https://foo.com/zipball#egg=bar&subdirectory=baz
        # git+[ssh]://github.com/openstack/nova/zipball/master#egg=nova-1.2.3
        # hg+[ssh]://github.com/openstack/nova/zipball/master#egg=nova-1.2.3
        # svn+[proto]://github.com/openstack/nova/zipball/master#egg=nova-1.2.3
        # -f lines are for index locations, and don't get used here
        if re.match(r'\s*-e\s+', line):
            extract = re.match(r'\s*-e\s+(.*)$', line)
            line = extract.group(1)
        egg = urlparse(line)
        if egg.scheme:
            line = re.sub(r'egg=([^&]+).*$', egg_fragment, egg.fragment)
        elif re.match(r'\s*-f\s+', line):
            line = None
            reason = 'Index Location'

        if line is not None:
            line = re.sub('#.*$', '', line)
            if strip_markers:
                semi_pos = line.find(';')
                if semi_pos < 0:
                    semi_pos = None
                line = line[:semi_pos]
            requirements.append(line)
        else:
            log.info(
                '[pbr] Excluding %s: %s' % (project_name, reason))

    return requirements

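# Example (illustrative): given a requirements file containing
#     -e git+https://opendev.org/openstack/nova#egg=nova-1.2.3
#     oslo.config>=5.2.0;python_version>='3.6'
# parse_requirements() returns
#     ['nova>=1.2.3', "oslo.config>=5.2.0;python_version>='3.6'"]
# and, with strip_markers=True, the environment marker is dropped:
#     ['nova>=1.2.3', 'oslo.config>=5.2.0']
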

def parse_dependency_links(requirements_files=None):
    if requirements_files is None:
        requirements_files = get_requirements_files()
    dependency_links = []
    # dependency_links inject alternate locations to find packages listed
    # in requirements
    for line in get_reqs_from_files(requirements_files):
        # skip comments and blank lines
        if re.match(r'(\s*#)|(\s*$)', line):
            continue
        # lines with -e or -f need the whole line, minus the flag
        if re.match(r'\s*-[ef]\s+', line):
            dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line))
        # lines that are only urls can go in unmolested
        elif re.match(r'^\s*(https?|git(\+(https|ssh))?|svn|hg)\S*:', line):
            dependency_links.append(line)
    return dependency_links

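# Example (illustrative): the lines
#     -f https://example.com/downloads
#     git+https://opendev.org/openstack/nova#egg=nova
# both end up in dependency_links, as
#     'https://example.com/downloads'
# and
#     'git+https://opendev.org/openstack/nova#egg=nova'
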

class InstallWithGit(install.install):
    """Extracts ChangeLog and AUTHORS from git then installs.

    This is useful for e.g. readthedocs where the package is
    installed and then docs built.
    """

    command_name = 'install'

    def run(self):
        _from_git(self.distribution)
        return install.install.run(self)


class LocalInstall(install.install):
    """Runs python setup.py install in a sensible manner.

    Force a non-egg installed in the manner of
    single-version-externally-managed, which allows us to install manpages
    and config files.
    """

    command_name = 'install'

    def run(self):
        _from_git(self.distribution)
        return du_install.install.run(self)


class TestrTest(testr_command.Testr):
    """Make setup.py test do the right thing."""

    command_name = 'test'
    description = 'DEPRECATED: Run unit tests using testr'

    def run(self):
        warnings.warn('testr integration is deprecated in pbr 4.2 and will '
                      'be removed in a future release. Please call your test '
                      'runner directly',
                      DeprecationWarning)

        # Can't use super - base class old-style class
        testr_command.Testr.run(self)


class LocalRPMVersion(setuptools.Command):
    __doc__ = """Output the rpm *compatible* version string of this package"""
    description = __doc__

    user_options = []
    command_name = "rpm_version"

    def run(self):
        log.info("[pbr] Extracting rpm version")
        name = self.distribution.get_name()
        print(version.VersionInfo(name).semantic_version().rpm_string())

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass


class LocalDebVersion(setuptools.Command):
    __doc__ = """Output the deb *compatible* version string of this package"""
    description = __doc__

    user_options = []
    command_name = "deb_version"

    def run(self):
        log.info("[pbr] Extracting deb version")
        name = self.distribution.get_name()
        print(version.VersionInfo(name).semantic_version().debian_string())

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass


def have_testr():
    return testr_command.have_testr


try:
    from nose import commands

    class NoseTest(commands.nosetests):
        """Fallback test runner if testr is a no-go."""

        command_name = 'test'
        description = 'DEPRECATED: Run unit tests using nose'

        def run(self):
            warnings.warn('nose integration in pbr is deprecated. Please use '
                          'the native nose setuptools configuration or call '
                          'nose directly',
                          DeprecationWarning)

            # Can't use super - base class old-style class
            commands.nosetests.run(self)

    _have_nose = True

except ImportError:
    _have_nose = False


def have_nose():
    return _have_nose


_wsgi_text = """#PBR Generated from %(group)r

import threading

from %(module_name)s import %(import_target)s

if __name__ == "__main__":
    import argparse
    import socket
    import sys
    import wsgiref.simple_server as wss

    parser = argparse.ArgumentParser(
        description=%(import_target)s.__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        usage='%%(prog)s [-h] [--port PORT] [--host IP] -- [passed options]')
    parser.add_argument('--port', '-p', type=int, default=8000,
                        help='TCP port to listen on')
    parser.add_argument('--host', '-b', default='',
                        help='IP to bind the server to')
    parser.add_argument('args',
                        nargs=argparse.REMAINDER,
                        metavar='-- [passed options]',
                        help="'--' is the separator of the arguments used "
                        "to start the WSGI server and the arguments passed "
                        "to the WSGI application.")
    args = parser.parse_args()
    if args.args:
        if args.args[0] == '--':
            args.args.pop(0)
        else:
            parser.error("unrecognized arguments: %%s" %% ' '.join(args.args))
    sys.argv[1:] = args.args
    server = wss.make_server(args.host, args.port, %(invoke_target)s())

    print("*" * 80)
    print("STARTING test server %(module_name)s.%(invoke_target)s")
    url = "http://%%s:%%d/" %% (server.server_name, server.server_port)
    print("Available at %%s" %% url)
    print("DANGER! For testing only, do not use in production")
    print("*" * 80)
    sys.stdout.flush()

    server.serve_forever()
else:
    application = None
    app_lock = threading.Lock()

    with app_lock:
        if application is None:
            application = %(invoke_target)s()

"""

_script_text = """# PBR Generated from %(group)r

import sys

from %(module_name)s import %(import_target)s


if __name__ == "__main__":
    sys.exit(%(invoke_target)s())
"""


# the following allows us to specify different templates per entry
# point group when generating pbr scripts.
ENTRY_POINTS_MAP = {
    'console_scripts': _script_text,
    'gui_scripts': _script_text,
    'wsgi_scripts': _wsgi_text
}


def generate_script(group, entry_point, header, template):
    """Generate the script based on the template.

    :param str group:
        The entry-point group name, e.g., "console_scripts".
    :param str header:
        The first line of the script, e.g., "#!/usr/bin/env python".
    :param str template:
        The script template.
    :returns:
        The templated script content
    :rtype:
        str
    """
    if not entry_point.attrs or len(entry_point.attrs) > 2:
        raise ValueError("Script targets must be of the form "
                         "'func' or 'Class.class_method'.")
    script_text = template % dict(
        group=group,
        module_name=entry_point.module_name,
        import_target=entry_point.attrs[0],
        invoke_target='.'.join(entry_point.attrs),
    )
    return header + script_text

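# Example (illustrative): for a console_scripts entry point declared as
# "myapp = myapp.cli:main", generate_script() fills _script_text with
# module_name='myapp.cli', import_target='main' and invoke_target='main',
# producing a launcher that runs "from myapp.cli import main" and calls
# sys.exit(main()). A target such as "myapp.cli:Main.run" invokes Main.run();
# anything deeper than 'Class.class_method' raises ValueError.
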

def override_get_script_args(
        dist, executable=os.path.normpath(sys.executable)):
    """Override entrypoints console_script."""
    header = easy_install.get_script_header("", executable)
    for group, template in ENTRY_POINTS_MAP.items():
        for name, ep in dist.get_entry_map(group).items():
            yield (name, generate_script(group, ep, header, template))


class LocalDevelop(develop.develop):

    command_name = 'develop'

    def install_wrapper_scripts(self, dist):
        if sys.platform == 'win32':
            return develop.develop.install_wrapper_scripts(self, dist)
        if not self.exclude_scripts:
            for args in override_get_script_args(dist):
                self.write_script(*args)


class LocalInstallScripts(install_scripts.install_scripts):
    """Intercepts console scripts entry_points."""
    command_name = 'install_scripts'

    def _make_wsgi_scripts_only(self, dist, executable):
        # get_script_header() is deprecated since Setuptools 12.0
        try:
            header = easy_install.ScriptWriter.get_header("", executable)
        except AttributeError:
            header = easy_install.get_script_header("", executable)
        wsgi_script_template = ENTRY_POINTS_MAP['wsgi_scripts']
        for name, ep in dist.get_entry_map('wsgi_scripts').items():
            content = generate_script(
                'wsgi_scripts', ep, header, wsgi_script_template)
            self.write_script(name, content)

    def run(self):
        import distutils.command.install_scripts

        self.run_command("egg_info")
        if self.distribution.scripts:
            # run first to set up self.outfiles
            distutils.command.install_scripts.install_scripts.run(self)
        else:
            self.outfiles = []

        ei_cmd = self.get_finalized_command("egg_info")
        dist = pkg_resources.Distribution(
            ei_cmd.egg_base,
            pkg_resources.PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
            ei_cmd.egg_name, ei_cmd.egg_version,
        )
        bs_cmd = self.get_finalized_command('build_scripts')
        executable = getattr(
            bs_cmd, 'executable', easy_install.sys_executable)
        if 'bdist_wheel' in self.distribution.have_run:
            # We're building a wheel which has no way of generating mod_wsgi
            # scripts for us. Let's build them.
            # NOTE(sigmavirus24): This needs to happen here because, as the
            # comment below indicates, no_ep is True when building a wheel.
            self._make_wsgi_scripts_only(dist, executable)

        if self.no_ep:
            # no_ep is True if we're installing into an .egg file or building
            # a .whl file, in those cases, we do not want to build all of the
            # entry-points listed for this package.
            return

        if os.name != 'nt':
            get_script_args = override_get_script_args
        else:
            get_script_args = easy_install.get_script_args
            executable = '"%s"' % executable

        for args in get_script_args(dist, executable):
            self.write_script(*args)


class LocalManifestMaker(egg_info.manifest_maker):
    """Add any files that are in git and some standard sensible files."""

    def _add_pbr_defaults(self):
        for template_line in [
            'include AUTHORS',
            'include ChangeLog',
            'exclude .gitignore',
            'exclude .gitreview',
            'global-exclude *.pyc'
        ]:
            self.filelist.process_template_line(template_line)

    def add_defaults(self):
        """Add all the default files to self.filelist:

        Extends the functionality provided by distutils to also include
        additional sane defaults, such as the ``AUTHORS`` and ``ChangeLog``
        files generated by *pbr*.

        Warns if (``README`` or ``README.txt``) or ``setup.py`` are missing;
        everything else is optional.
        """
        option_dict = self.distribution.get_option_dict('pbr')

        sdist.sdist.add_defaults(self)
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        self.filelist.extend(extra_files.get_extra_files())
        should_skip = options.get_boolean_option(option_dict, 'skip_git_sdist',
                                                 'SKIP_GIT_SDIST')
        if not should_skip:
            rcfiles = git._find_git_files()
            if rcfiles:
                self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            self.read_manifest()
        ei_cmd = self.get_finalized_command('egg_info')
        self._add_pbr_defaults()
        self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)


class LocalEggInfo(egg_info.egg_info):
    """Override the egg_info command to regenerate SOURCES.txt sensibly."""

    command_name = 'egg_info'

    def find_sources(self):
        """Generate SOURCES.txt only if there isn't one already.

        If we are in an sdist command, then we always want to update
        SOURCES.txt. If we are not in an sdist command, then it doesn't
        matter one flip, and is actually destructive.
        However, if we're in a git context, it's always the right thing to do
        to recreate SOURCES.txt
        """
        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
        if (not os.path.exists(manifest_filename) or
                os.path.exists('.git') or
                'sdist' in sys.argv):
            log.info("[pbr] Processing SOURCES.txt")
            mm = LocalManifestMaker(self.distribution)
            mm.manifest = manifest_filename
            mm.run()
            self.filelist = mm.filelist
        else:
            log.info("[pbr] Reusing existing SOURCES.txt")
            self.filelist = egg_info.FileList()
            for entry in open(manifest_filename, 'r').read().split('\n'):
                self.filelist.append(entry)


def _from_git(distribution):
    option_dict = distribution.get_option_dict('pbr')
    changelog = git._iter_log_oneline()
    if changelog:
        changelog = git._iter_changelog(changelog)
    git.write_git_changelog(option_dict=option_dict, changelog=changelog)
    git.generate_authors(option_dict=option_dict)


class LocalSDist(sdist.sdist):
    """Builds the ChangeLog and Authors files from VC first."""

    command_name = 'sdist'

    def checking_reno(self):
        """Ensure reno is installed and configured.

        We can't run reno-based commands if reno isn't installed/available, and
        don't want to if the user isn't using it.
        """
        if hasattr(self, '_has_reno'):
            return self._has_reno

        option_dict = self.distribution.get_option_dict('pbr')
        should_skip = options.get_boolean_option(option_dict, 'skip_reno',
                                                 'SKIP_GENERATE_RENO')
        if should_skip:
            self._has_reno = False
            return False

        try:
            # versions of reno without this module will not have the required
            # feature, hence the import
            from reno import setup_command  # noqa
        except ImportError:
            log.info('[pbr] reno was not found or is too old. Skipping '
                     'release notes')
            self._has_reno = False
            return False

        conf, output_file, cache_file = setup_command.load_config(
            self.distribution)

        if not os.path.exists(os.path.join(conf.reporoot, conf.notespath)):
            log.info('[pbr] reno does not appear to be configured. Skipping '
                     'release notes')
            self._has_reno = False
            return False

        self._files = [output_file, cache_file]

        log.info('[pbr] Generating release notes')
        self._has_reno = True

        return True

    sub_commands = [('build_reno', checking_reno)] + sdist.sdist.sub_commands

    def run(self):
        _from_git(self.distribution)
        # sdist.sdist is an old style class, can't use super()
        sdist.sdist.run(self)

    def make_distribution(self):
        # This is included in make_distribution because setuptools doesn't use
        # 'get_file_list'. As such, this is the only hook point that runs after
        # the commands in 'sub_commands'
        if self.checking_reno():
            self.filelist.extend(self._files)
            self.filelist.sort()
        sdist.sdist.make_distribution(self)


try:
    from pbr import builddoc
    _have_sphinx = True
    # Import the symbols from their new home so the package API stays
    # compatible.
    LocalBuildDoc = builddoc.LocalBuildDoc
except ImportError:
    _have_sphinx = False
    LocalBuildDoc = None


def have_sphinx():
    return _have_sphinx


def _get_increment_kwargs(git_dir, tag):
    """Calculate the sort of semver increment needed from git history.

    Every commit from HEAD to tag is considered for Sem-Ver metadata lines.
    See the pbr docs for their syntax.

    :return: a dict of kwargs for passing into SemanticVersion.increment.
    """
    result = {}
    if tag:
        version_spec = tag + "..HEAD"
    else:
        version_spec = "HEAD"
    # Get the raw body of the commit messages so that we don't have to
    # parse out any formatting whitespace and to avoid user settings on
    # git log output affecting our ability to have working Sem-Ver headers.
    changelog = git._run_git_command(['log', '--pretty=%B', version_spec],
                                     git_dir)
    header_len = len('sem-ver:')
    commands = [line[header_len:].strip() for line in changelog.split('\n')
                if line.lower().startswith('sem-ver:')]
    symbols = set()
    for command in commands:
        symbols.update([symbol.strip() for symbol in command.split(',')])

    def _handle_symbol(symbol, symbols, impact):
        if symbol in symbols:
            result[impact] = True
            symbols.discard(symbol)
    _handle_symbol('bugfix', symbols, 'patch')
    _handle_symbol('feature', symbols, 'minor')
    _handle_symbol('deprecation', symbols, 'minor')
    _handle_symbol('api-break', symbols, 'major')
    for symbol in symbols:
        log.info('[pbr] Unknown Sem-Ver symbol %r' % symbol)
    # We don't want patch in the kwargs since it is not a keyword argument -
    # it's the default minimum increment.
    result.pop('patch', None)
    return result

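# Example (illustrative): a commit message containing the footer
# "Sem-Ver: feature" yields {'minor': True}, "Sem-Ver: api-break" yields
# {'major': True}, and "Sem-Ver: bugfix" yields {} because a patch bump is
# already the default minimum increment.
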

def _get_revno_and_last_tag(git_dir):
    """Return the commit data about the most recent tag.

    We walk the git log to find this out, but if there are no
    tags then we fall back to counting commits since the beginning
    of time.
    """
    changelog = git._iter_log_oneline(git_dir=git_dir)
    row_count = 0
    for row_count, (ignored, tag_set, ignored) in enumerate(changelog):
        version_tags = set()
        semver_to_tag = dict()
        for tag in list(tag_set):
            try:
                semver = version.SemanticVersion.from_pip_string(tag)
                semver_to_tag[semver] = tag
                version_tags.add(semver)
            except Exception:
                pass
        if version_tags:
            return semver_to_tag[max(version_tags)], row_count
    return "", row_count

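# Example (illustrative): in a repository whose newest semver-parsable tag
# is "1.2.3" with four commits on top of it, this returns ('1.2.3', 4); if
# no tag parses as a version, it falls back to an empty tag string and the
# index of the last commit walked.
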

def _get_version_from_git_target(git_dir, target_version):
    """Calculate a version from a target version in git_dir.

    This is used for untagged versions only. A new version is calculated as
    necessary based on git metadata - distance to tags, current hash, contents
    of commit messages.

    :param git_dir: The git directory we're working from.
    :param target_version: If None, the last tagged version (or 0 if there are
        no tags yet) is incremented as needed to produce an appropriate target
        version following semver rules. Otherwise target_version is used as a
        constraint - if semver rules would result in a newer version then an
        exception is raised.
    :return: A semver version object.
    """
    tag, distance = _get_revno_and_last_tag(git_dir)
    last_semver = version.SemanticVersion.from_pip_string(tag or '0')
    if distance == 0:
        new_version = last_semver
    else:
        new_version = last_semver.increment(
            **_get_increment_kwargs(git_dir, tag))
    if target_version is not None and new_version > target_version:
        raise ValueError(
            "git history requires a target version of %(new)s, but target "
            "version is %(target)s" %
            dict(new=new_version, target=target_version))
    if distance == 0:
        return last_semver
    new_dev = new_version.to_dev(distance)
    if target_version is not None:
        target_dev = target_version.to_dev(distance)
        if target_dev > new_dev:
            return target_dev
    return new_dev

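# Example (illustrative): with the latest tag at 1.2.3, four commits since
# that tag, one of them carrying "Sem-Ver: feature", and no target_version
# constraint, the untagged tree is versioned as 1.3.0.dev4; with no Sem-Ver
# lines at all it would be 1.2.4.dev4 (patch is the default increment).
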

def _get_version_from_git(pre_version=None):
    """Calculate a version string from git.

    If the revision is tagged, return that. Otherwise calculate a semantic
    version description of the tree.

    The number of revisions since the last tag is included in the dev counter
    in the version for untagged versions.

    :param pre_version: If supplied use this as the target version rather than
        inferring one from the last tag + commit messages.
    """
    git_dir = git._run_git_functions()
    if git_dir:
        try:
            tagged = git._run_git_command(
                ['describe', '--exact-match'], git_dir,
                throw_on_error=True).replace('-', '.')
            target_version = version.SemanticVersion.from_pip_string(tagged)
        except Exception:
            if pre_version:
                # not released yet - use pre_version as the target
                target_version = version.SemanticVersion.from_pip_string(
                    pre_version)
            else:
                # not released yet - just calculate from git history
                target_version = None
        result = _get_version_from_git_target(git_dir, target_version)
        return result.release_string()
    # If we don't know the version, return an empty string so at least
    # the downstream users of the value always have the same type of
    # object to work with.
    try:
        return unicode()
    except NameError:
        return ''


def _get_version_from_pkg_metadata(package_name):
    """Get the version from package metadata if present.

    This looks for PKG-INFO if present (for sdists), and if not looks
    for METADATA (for wheels) and failing that will return None.
    """
    pkg_metadata_filenames = ['PKG-INFO', 'METADATA']
    pkg_metadata = {}
    for filename in pkg_metadata_filenames:
        try:
            pkg_metadata_file = open(filename, 'r')
        except (IOError, OSError):
            continue
        try:
            pkg_metadata = email.message_from_file(pkg_metadata_file)
        except email.errors.MessageError:
            continue

    # Check to make sure we're in our own dir
    if pkg_metadata.get('Name', None) != package_name:
        return None
    return pkg_metadata.get('Version', None)


def get_version(package_name, pre_version=None):
    """Get the version of the project.

    First, try getting it from PKG-INFO or METADATA, if it exists. If it does,
    that means we're in a distribution tarball or that install has happened.
    Otherwise, if there is no PKG-INFO or METADATA file, pull the version
    from git.

    We do not support setup.py version sanity in git archive tarballs, nor do
    we support packagers directly sucking our git repo into theirs. We expect
    that a source tarball be made from our git repo - or that if someone wants
    to make a source tarball from a fork of our repo with additional tags in it
    that they understand and desire the results of doing that.

    :param pre_version: The version field from setup.cfg - if set then this
        version will be the next release.
    """
    version = os.environ.get(
        "PBR_VERSION",
        os.environ.get("OSLO_PACKAGE_VERSION", None))
    if version:
        return version
    version = _get_version_from_pkg_metadata(package_name)
    if version:
        return version
    version = _get_version_from_git(pre_version)
    # Handle http://bugs.python.org/issue11638
    # version will either be an empty unicode string or a valid
    # unicode version string, but either way it's unicode and needs to
    # be encoded.
    if sys.version_info[0] == 2:
        version = version.encode('utf-8')
    if version:
        return version
    raise Exception("Versioning for this project requires either an sdist"
                    " tarball, or access to an upstream git repository."
                    " It's also possible that there is a mismatch between"
                    " the package name in setup.cfg and the argument given"
                    " to pbr.version.VersionInfo. Project name {name} was"
                    " given, but was not able to be found.".format(
                        name=package_name))

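# Example (illustrative): exporting PBR_VERSION=2.1.0 (or
# OSLO_PACKAGE_VERSION=2.1.0) short-circuits all of the above and pins the
# reported version to 2.1.0, which is useful when building from a tree that
# has neither git metadata nor PKG-INFO/METADATA available.
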

# This is added because pbr uses pbr to install itself. That means that
# any changes to the egg info writer entrypoints must be forward and
# backward compatible. This maintains the pbr.packaging.write_pbr_json
# path.
write_pbr_json = pbr.pbr_json.write_pbr_json