diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..13d3d80 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +backtrackbb/version.py export-subst diff --git a/.github/workflows/github-deploy.yml b/.github/workflows/github-deploy.yml new file mode 100644 index 0000000..9c47f83 --- /dev/null +++ b/.github/workflows/github-deploy.yml @@ -0,0 +1,73 @@ +name: Build + +on: [push, pull_request] + +jobs: + build_wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + fail-fast: false + + steps: + - uses: actions/checkout@v3 + + - name: Fetch tags + run: | + git fetch --prune --unshallow --tags -f + echo exit code $? + git tag --list + + - name: Build wheels + uses: pypa/cibuildwheel@v2.13.0 + env: + CIBW_ARCHS_MACOS: x86_64 arm64 + + - uses: actions/upload-artifact@v3 + with: + path: ./wheelhouse/*.whl + + build_sdist: + name: Build source distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Fetch tags + run: | + git fetch --prune --unshallow --tags -f + echo exit code $? + git tag --list + + - uses: actions/setup-python@v4 + name: Install Python + with: + python-version: '3.8' + + - name: Build sdist + run: pip install numpy && python setup.py sdist + + - uses: actions/upload-artifact@v3 + with: + path: dist/*.tar.gz + + upload_pypi: + needs: [build_wheels, build_sdist] + runs-on: ubuntu-latest + # upload to PyPI on every tag starting with 'v' + if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') + # alternatively, to publish when a GitHub Release is created, use the following rule: + # if: github.event_name == 'release' && github.event.action == 'published' + steps: + - uses: actions/download-artifact@v3 + with: + name: artifact + path: dist + + - uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: claudiodsf + password: ${{ secrets.pypi_password }} + # To test: repository_url: https://test.pypi.org/legacy/ diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..6cd5600 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,16 @@ +name: Release on GitHub + +on: + push: + tags: + - "v*" + +jobs: + release_version: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Generate GitHub Release + uses: lsegal/github-release-from-changelog-action@latest + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 1d839f7..a989363 100644 --- a/.gitignore +++ b/.gitignore @@ -15,3 +15,6 @@ RELEASE-VERSION # other unnecessary files .directory .DS_Store + +# IDE files +.vscode/ diff --git a/backtrackbb/Config.py b/backtrackbb/Config.py index b3d5fe3..d4c7705 100644 --- a/backtrackbb/Config.py +++ b/backtrackbb/Config.py @@ -1,8 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - - class Config(dict): #The following is to make Config keys accessible as attributes def __setitem__(self, key, value): diff --git a/backtrackbb/LocalCC.py b/backtrackbb/LocalCC.py index b643233..4e8d5d4 100644 --- a/backtrackbb/LocalCC.py +++ b/backtrackbb/LocalCC.py @@ -1,6 +1,3 @@ -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import numpy as np import scipy as sp from .rec_cc import local_CCr diff --git a/backtrackbb/__init__.py b/backtrackbb/__init__.py index 52d7214..f0c8d0e 100644 --- 
a/backtrackbb/__init__.py +++ b/backtrackbb/__init__.py @@ -1,3 +1,2 @@ -from .version import get_git_version - -__version__ = get_git_version() +from . import version +__version__ = version.get_versions()['version'] diff --git a/backtrackbb/bp_types.py b/backtrackbb/bp_types.py index 3da23ce..c4c7738 100644 --- a/backtrackbb/bp_types.py +++ b/backtrackbb/bp_types.py @@ -1,8 +1,5 @@ # bp_types.py # Data types for BackTrackBB -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import numpy as np from obspy import UTCDateTime from ctypes import c_double diff --git a/backtrackbb/configobj/LICENSE b/backtrackbb/configobj/LICENSE index a989b12..dddf422 100644 --- a/backtrackbb/configobj/LICENSE +++ b/backtrackbb/configobj/LICENSE @@ -1,9 +1,10 @@ Copyright (c): -2003-2010, Michael Foord -2014, Eli Courtwright, Rob Dennis +2003-2010, Michael Foord, Nicola Larosa +2014-2023, Eli Courtwright, Rob Dennis All rights reserved. E-mails : -fuzzyman AT voidspace DOT org DOT uk +michael AT python DOT org +nico AT tekNico DOT net eli AT courtwright DOT org rdennis AT gmail DOT com @@ -12,7 +13,7 @@ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - +2014 * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. diff --git a/backtrackbb/configobj/__init__.py b/backtrackbb/configobj/__init__.py index 96dd3a9..0d752bc 100644 --- a/backtrackbb/configobj/__init__.py +++ b/backtrackbb/configobj/__init__.py @@ -1,9 +1,5 @@ # configobj.py -# -*- coding: utf-8 -*- -# pylint: disable=bad-continuation - -"""A config file reader/writer that supports nested sections in config files.""" - +# A config file reader/writer that supports nested sections in config files. # Copyright (C) 2005-2014: # (name) : (email) # Michael Foord: fuzzyman AT voidspace DOT org DOT uk @@ -20,7 +16,6 @@ import os import re import sys -import collections from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE @@ -144,29 +139,28 @@ class UnknownType(Exception): class Builder(object): - + def build(self, o): - # XXX: There is no 'm' anywhere?! 
(and the name really sucks) - if m is None: # pylint: disable=undefined-variable + if m is None: raise UnknownType(o.__class__.__name__) - return m(o) # pylint: disable=undefined-variable - + return m(o) + def build_List(self, o): return list(map(self.build, o.getChildren())) - + def build_Const(self, o): return o.value - + def build_Dict(self, o): d = {} i = iter(map(self.build, o.getChildren())) for el in i: d[el] = next(i) return d - + def build_Tuple(self, o): return tuple(self.build_List(o)) - + def build_Name(self, o): if o.name == 'None': return None @@ -174,10 +168,10 @@ def build_Name(self, o): return True if o.name == 'False': return False - + # An undefined Name raise UnknownType('Undefined Name') - + def build_Add(self, o): real, imag = list(map(self.build_Const, o.getChildren())) try: @@ -187,14 +181,14 @@ def build_Add(self, o): if not isinstance(imag, complex) or imag.real != 0.0: raise UnknownType('Add') return real+imag - + def build_Getattr(self, o): parent = self.build(o.expr) return getattr(parent, o.attrname) - + def build_UnarySub(self, o): return -self.build_Const(o.getChildren()[0]) - + def build_UnaryAdd(self, o): return self.build_Const(o.getChildren()[0]) @@ -205,7 +199,7 @@ def build_UnaryAdd(self, o): def unrepr(s): if not s: return s - + # this is supposed to be safe import ast return ast.literal_eval(s) @@ -310,7 +304,7 @@ def interpolate(self, key, value): # short-cut if not self._cookie in value: return value - + def recursive_interpolate(key, value, section, backtrail): """The function that does the actual work. @@ -410,7 +404,7 @@ def _parse_match(self, match): (e.g., if we interpolated "$$" and returned "$"). """ raise NotImplementedError() - + class ConfigParserInterpolation(InterpolationEngine): @@ -459,27 +453,27 @@ def _parse_match(self, match): def __newobj__(cls, *args): # Hack for pickle - return cls.__new__(cls, *args) + return cls.__new__(cls, *args) class Section(dict): """ A dictionary-like object that represents a section in a config file. - + It does string interpolation if the 'interpolation' attribute of the 'main' object is set to True. - + Interpolation is tried first from this object, then from the 'DEFAULT' section of this object, next from the parent and its 'DEFAULT' section, and so on until the main object is reached. - + A Section will behave like an ordered dictionary - following the order of the ``scalars`` and ``sections`` attributes. You can use this to change the order of members. - + Iteration follows the order: scalars, then sections. 
""" - + def __setstate__(self, state): dict.update(self, state[0]) self.__dict__.update(state[1]) @@ -487,8 +481,8 @@ def __setstate__(self, state): def __reduce__(self): state = (dict(self), self.__dict__) return (__newobj__, (self.__class__,), state) - - + + def __init__(self, parent, depth, main, indict=None, name=None): """ * parent is the section above @@ -513,8 +507,8 @@ def __init__(self, parent, depth, main, indict=None, name=None): # (rather than just passing to ``dict.__init__``) for entry, value in indict.items(): self[entry] = value - - + + def _initialise(self): # the sequence of scalar values in this Section self.scalars = [] @@ -558,7 +552,7 @@ def _interpolate(self, key, value): def __getitem__(self, key): """Fetch the item and do string interpolation.""" val = dict.__getitem__(self, key) - if self.main.interpolation: + if self.main.interpolation: if isinstance(val, six.string_types): return self._interpolate(key, val) if isinstance(val, list): @@ -575,20 +569,20 @@ def _check(entry): def __setitem__(self, key, value, unrepr=False): """ Correctly set a value. - + Making dictionary values Section instances. (We have to special case 'Section' instances - which are also dicts) - + Keys must be strings. Values need only be strings (or lists of strings) if ``main.stringify`` is set. - + ``unrepr`` must be set when setting a value to a dictionary, without creating a new sub-section. """ if not isinstance(key, six.string_types): raise ValueError('The key "%s" is not a string.' % key) - + # add the comment if key not in self.comments: self.comments[key] = [] @@ -601,7 +595,7 @@ def __setitem__(self, key, value, unrepr=False): if key not in self: self.sections.append(key) dict.__setitem__(self, key, value) - elif isinstance(value, collections.Mapping) and not unrepr: + elif isinstance(value, dict) and not unrepr: # First create the new depth level, # then create the section if key not in self: @@ -689,7 +683,7 @@ def clear(self): """ A version of clear that also affects scalars/sections Also clears comments and configspec. - + Leaves other attributes alone : depth/main/parent are not affected """ @@ -763,10 +757,10 @@ def _getval(key): def dict(self): """ Return a deepcopy of self as a dictionary. - + All members that are ``Section`` instances are recursively turned to ordinary dictionaries - by calling their ``dict`` method. - + >>> n = a.dict() >>> n == a 1 @@ -791,7 +785,7 @@ def dict(self): def merge(self, indict): """ A recursive update - useful for merging config files. - + >>> a = '''[section1] ... option1 = True ... [[subsection]] @@ -808,20 +802,20 @@ def merge(self, indict): ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}}) """ for key, val in list(indict.items()): - if (key in self and isinstance(self[key], collections.Mapping) and - isinstance(val, collections.Mapping)): + if (key in self and isinstance(self[key], dict) and + isinstance(val, dict)): self[key].merge(val) - else: + else: self[key] = val def rename(self, oldkey, newkey): """ Change a keyname to another, without changing position in sequence. - + Implemented so that transformations can be made on keys, as well as on values. (used by encode and decode) - + Also renames comments. """ if oldkey in self.scalars: @@ -849,30 +843,30 @@ def walk(self, function, raise_errors=True, call_on_sections=False, **keywargs): """ Walk every member and call a function on the keyword and value. 
- + Return a dictionary of the return values - + If the function raises an exception, raise the errror unless ``raise_errors=False``, in which case set the return value to ``False``. - + Any unrecognised keyword arguments you pass to walk, will be pased on to the function you pass in. - + Note: if ``call_on_sections`` is ``True`` then - on encountering a subsection, *first* the function is called for the *whole* subsection, and then recurses into it's members. This means your function must be able to handle strings, dictionaries and lists. This allows you to change the key of subsections as well as for ordinary members. The return value when called on the whole subsection has to be discarded. - + See the encode and decode methods for examples, including functions. - + .. admonition:: caution - + You can use ``walk`` to transform the names of members of a section but you mustn't add or delete members. - + >>> config = '''[XXXXsection] ... XXXXkey = XXXXvalue'''.splitlines() >>> cfg = ConfigObj(config) @@ -935,17 +929,17 @@ def as_bool(self, key): Accepts a key as input. The corresponding value must be a string or the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to retain compatibility with Python 2.2. - - If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns + + If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns ``True``. - - If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns + + If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns ``False``. - + ``as_bool`` is not case sensitive. - + Any other input will raise a ``ValueError``. - + >>> a = ConfigObj() >>> a['a'] = 'fish' >>> a.as_bool('a') @@ -977,10 +971,10 @@ def as_bool(self, key): def as_int(self, key): """ A convenience method which coerces the specified value to an integer. - + If the value is an invalid literal for ``int``, a ``ValueError`` will be raised. - + >>> a = ConfigObj() >>> a['a'] = 'fish' >>> a.as_int('a') @@ -1000,10 +994,10 @@ def as_int(self, key): def as_float(self, key): """ A convenience method which coerces the specified value to a float. - + If the value is an invalid literal for ``float``, a ``ValueError`` will be raised. - + >>> a = ConfigObj() >>> a['a'] = 'fish' >>> a.as_float('a') #doctest: +IGNORE_EXCEPTION_DETAIL @@ -1017,13 +1011,13 @@ def as_float(self, key): 3.2... """ return float(self[key]) - - + + def as_list(self, key): """ A convenience method which fetches the specified value, guaranteeing that it is a list. - + >>> a = ConfigObj() >>> a['a'] = 1 >>> a.as_list('a') @@ -1039,15 +1033,15 @@ def as_list(self, key): if isinstance(result, (tuple, list)): return list(result) return [result] - + def restore_default(self, key): """ Restore (and return) default value for the specified key. - + This method will only work for a ConfigObj that was created with a configspec and has been validated. - + If there is no default value for this key, ``KeyError`` is raised. """ default = self.default_values[key] @@ -1056,20 +1050,20 @@ def restore_default(self, key): self.defaults.append(key) return default - + def restore_defaults(self): """ Recursively restore default values to all members that have them. - + This method will only work for a ConfigObj that was created with a configspec and has been validated. - + It doesn't delete or modify entries without default values. 
""" for key in self.default_values: self.restore_default(key) - + for section in self.sections: self[section].restore_defaults() @@ -1184,7 +1178,7 @@ def __init__(self, infile=None, options=None, configspec=None, encoding=None, write_empty_values=False, _inspec=False): """ Parse a config file or create a config file object. - + ``ConfigObj(infile=None, configspec=None, encoding=None, interpolation=True, raise_errors=False, list_values=True, create_empty=False, file_error=False, stringify=True, @@ -1194,9 +1188,9 @@ def __init__(self, infile=None, options=None, configspec=None, encoding=None, self._inspec = _inspec # init the superclass Section.__init__(self, self, 0, self) - + infile = infile or [] - + _options = {'configspec': configspec, 'encoding': encoding, 'interpolation': interpolation, 'raise_errors': raise_errors, 'list_values': list_values, @@ -1211,8 +1205,8 @@ def __init__(self, infile=None, options=None, configspec=None, encoding=None, import warnings warnings.warn('Passing in an options dictionary to ConfigObj() is ' 'deprecated. Use **options instead.', - DeprecationWarning) - + DeprecationWarning, stacklevel=2) + # TODO: check the values too. for entry in options: if entry not in OPTION_DEFAULTS: @@ -1223,18 +1217,18 @@ def __init__(self, infile=None, options=None, configspec=None, encoding=None, keyword_value = _options[entry] if value != keyword_value: options[entry] = keyword_value - + # XXXX this ignores an explicit list_values = True in combination # with _inspec. The user should *never* do that anyway, but still... if _inspec: options['list_values'] = False - + self._initialise(options) configspec = options['configspec'] self._original_configspec = configspec self._load(infile, configspec) - - + + def _load(self, infile, configspec): if isinstance(infile, six.string_types): self.filename = infile @@ -1252,10 +1246,10 @@ def _load(self, infile, configspec): with open(infile, 'w') as h: h.write('') content = [] - + elif isinstance(infile, (list, tuple)): content = list(infile) - + elif isinstance(infile, dict): # initialise self # the Section class handles creating subsections @@ -1268,18 +1262,18 @@ def set_section(in_section, this_section): this_section[section] = {} set_section(in_section[section], this_section[section]) set_section(infile, self) - + else: for entry in infile: self[entry] = infile[entry] del self._errors - + if configspec is not None: self._handle_configspec(configspec) else: self.configspec = None return - + elif getattr(infile, 'read', MISSING) is not MISSING: # This supports file like objects content = infile.read() or [] @@ -1306,7 +1300,7 @@ def set_section(in_section, this_section): assert all(isinstance(line, six.string_types) for line in content), repr(content) content = [line.rstrip('\r\n') for line in content] - + self._parse(content) # if we had any errors, now is the time to raise them if self._errors: @@ -1324,17 +1318,17 @@ def set_section(in_section, this_section): raise error # delete private attributes del self._errors - + if configspec is None: self.configspec = None else: self._handle_configspec(configspec) - - + + def _initialise(self, options=None): if options is None: options = OPTION_DEFAULTS - + # initialise a few variables self.filename = None self._errors = [] @@ -1351,48 +1345,48 @@ def _initialise(self, options=None): self.newlines = None self.write_empty_values = options['write_empty_values'] self.unrepr = options['unrepr'] - + self.initial_comment = [] self.final_comment = [] self.configspec = None - + if self._inspec: 
self.list_values = False - + # Clear section attributes as well Section._initialise(self) - - + + def __repr__(self): def _getval(key): try: return self[key] except MissingInterpolationOption: return dict.__getitem__(self, key) - return ('%s({%s})' % (self.__class__.__name__, - ', '.join([('%s: %s' % (repr(key), repr(_getval(key)))) - for key in (self.scalars + self.sections)]))) - - + return ('ConfigObj({%s})' % + ', '.join([('%s: %s' % (repr(key), repr(_getval(key)))) + for key in (self.scalars + self.sections)])) + + def _handle_bom(self, infile): """ Handle any BOM, and decode if necessary. - + If an encoding is specified, that *must* be used - but the BOM should still be removed (and the BOM attribute set). - + (If the encoding is wrongly specified, then a BOM for an alternative encoding won't be discovered or removed.) - + If an encoding is not specified, UTF8 or UTF16 BOM will be detected and removed. The BOM attribute will be set. UTF16 will be decoded to unicode. - + NOTE: This method must not be called with an empty ``infile``. - + Specifying the *wrong* encoding is likely to cause a ``UnicodeDecodeError``. - + ``infile`` must always be returned as a list of lines, but may be passed in as a single string. """ @@ -1403,7 +1397,7 @@ def _handle_bom(self, infile): # the encoding specified doesn't have one # just decode return self._decode(infile, self.encoding) - + if isinstance(infile, (list, tuple)): line = infile[0] else: @@ -1432,18 +1426,18 @@ def _handle_bom(self, infile): ##self.BOM = True # Don't need to remove BOM return self._decode(infile, encoding) - + # If we get this far, will *probably* raise a DecodeError # As it doesn't appear to start with a BOM return self._decode(infile, self.encoding) - + # Must be UTF8 BOM = BOM_SET[enc] if not line.startswith(BOM): return self._decode(infile, self.encoding) - + newline = line[len(BOM):] - + # BOM removed if isinstance(infile, (list, tuple)): infile[0] = newline @@ -1451,7 +1445,7 @@ def _handle_bom(self, infile): infile = newline self.BOM = True return self._decode(infile, self.encoding) - + # No encoding specified - so we need to check for UTF8/UTF16 for BOM, (encoding, final_encoding) in list(BOMS.items()): if not isinstance(line, six.binary_type) or not line.startswith(BOM): @@ -1478,7 +1472,7 @@ def _handle_bom(self, infile): return self._decode(infile, 'utf-8') # UTF16 - have to decode return self._decode(infile, encoding) - + if six.PY2 and isinstance(line, str): # don't actually do any decoding, since we're on python 2 and @@ -1502,7 +1496,7 @@ def _a_to_u(self, aString): def _decode(self, infile, encoding): """ Decode infile to unicode. Using the specified encoding. - + if is a string, it also needs converting to a list. 
""" if isinstance(infile, six.string_types): @@ -1551,14 +1545,14 @@ def _parse(self, infile): temp_list_values = self.list_values if self.unrepr: self.list_values = False - + comment_list = [] done_start = False this_section = self maxline = len(infile) - 1 cur_index = -1 reset_comment = False - + while cur_index < maxline: if reset_comment: comment_list = [] @@ -1570,13 +1564,13 @@ def _parse(self, infile): reset_comment = False comment_list.append(line) continue - + if not done_start: # preserve initial comment self.initial_comment = comment_list comment_list = [] done_start = True - + reset_comment = True # first we check if it's a section marker mat = self._sectionmarker.match(line) @@ -1590,7 +1584,7 @@ def _parse(self, infile): self._handle_error("Cannot compute the section depth", NestingError, infile, cur_index) continue - + if cur_depth < this_section.depth: # the new section is dropping back to a previous level try: @@ -1610,13 +1604,13 @@ def _parse(self, infile): self._handle_error("Section too nested", NestingError, infile, cur_index) continue - + sect_name = self._unquote(sect_name) if sect_name in parent: self._handle_error('Duplicate section name', DuplicateError, infile, cur_index) continue - + # create the new section this_section = Section( parent, @@ -1656,24 +1650,26 @@ def _parse(self, infile): comment = '' try: value = unrepr(value) - except Exception as cause: - if isinstance(cause, UnknownType): + except Exception as e: + if type(e) == UnknownType: msg = 'Unknown name or type in value' else: msg = 'Parse error from unrepr-ing multiline value' - self._handle_error(msg, UnreprError, infile, cur_index) + self._handle_error(msg, UnreprError, infile, + cur_index) continue else: if self.unrepr: comment = '' try: value = unrepr(value) - except Exception as cause: - if isinstance(cause, UnknownType): + except Exception as e: + if isinstance(e, UnknownType): msg = 'Unknown name or type in value' else: msg = 'Parse error from unrepr-ing value' - self._handle_error(msg, UnreprError, infile, cur_index) + self._handle_error(msg, UnreprError, infile, + cur_index) continue else: # extract comment and lists @@ -1715,7 +1711,7 @@ def _match_depth(self, sect, depth): """ Given a section and a depth level, walk back through the sections parents to see if the depth level matches a previous section. - + Return a reference to the right section, or raise a SyntaxError. """ @@ -1733,7 +1729,7 @@ def _match_depth(self, sect, depth): def _handle_error(self, text, ErrorClass, infile, cur_index): """ Handle an error according to the error settings. - + Either raise the error or store it. The error will have occured at ``cur_index`` """ @@ -1762,19 +1758,19 @@ def _unquote(self, value): def _quote(self, value, multiline=True): """ Return a safely quoted version of a value. - + Raise a ConfigObjError if the value cannot be safely quoted. If multiline is ``True`` (default) then use triple quotes if necessary. - + * Don't quote values that don't need it. * Recursively quote members of a list and return a comma joined list. * Multiline is ``False`` for lists. * Obey list syntax for empty and single member lists. - + If ``list_values=False`` then the value is only quoted if it contains a ``\\n`` (is multiline) or '#'. - + If ``write_empty_values`` is set, and the value is an empty string, it won't be quoted. 
""" @@ -1782,7 +1778,7 @@ def _quote(self, value, multiline=True): # Only if multiline is set, so that it is used for values not # keys, and not values that are part of a list return '' - + if multiline and isinstance(value, (list, tuple)): if not value: return ',' @@ -1800,12 +1796,12 @@ def _quote(self, value, multiline=True): if not value: return '""' - + no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value )) hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value) check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote - + if check_for_single: if not self.list_values: # we don't quote if ``list_values=False`` @@ -1823,13 +1819,13 @@ def _quote(self, value, multiline=True): else: # if value has '\n' or "'" *and* '"', it will need triple quotes quot = self._get_triple_quote(value) - + if quot == noquot and '#' in value and self.list_values: quot = self._get_single_quote(value) - + return quot % value - - + + def _get_single_quote(self, value): if ("'" in value) and ('"' in value): raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) @@ -1838,15 +1834,15 @@ def _get_single_quote(self, value): else: quot = dquot return quot - - + + def _get_triple_quote(self, value): if (value.find('"""') != -1) and (value.find("'''") != -1): raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) if value.find('"""') == -1: quot = tdquot else: - quot = tsquot + quot = tsquot return quot @@ -1936,7 +1932,7 @@ def _multiline(self, value, infile, cur_index, maxline): def _handle_configspec(self, configspec): """Parse the configspec.""" - # FIXME: Should we check that the configspec was created with the + # FIXME: Should we check that the configspec was created with the # correct settings ? (i.e. ``list_values=False``) if not isinstance(configspec, ConfigObj): try: @@ -1944,17 +1940,17 @@ def _handle_configspec(self, configspec): raise_errors=True, file_error=True, _inspec=True) - except ConfigObjError as cause: + except ConfigObjError as e: # FIXME: Should these errors have a reference # to the already parsed ConfigObj ? - raise ConfigspecError('Parsing configspec failed: %s' % cause) - except IOError as cause: - raise IOError('Reading configspec failed: %s' % cause) - + raise ConfigspecError('Parsing configspec failed: %s' % e) + except IOError as e: + raise IOError('Reading configspec failed: %s' % e) + self.configspec = configspec + - - + def _set_configspec(self, section, copy): """ Called by validate. 
Handles setting the configspec on subsections @@ -1966,7 +1962,7 @@ def _set_configspec(self, section, copy): for entry in section.sections: if entry not in configspec: section[entry].configspec = many - + for entry in configspec.sections: if entry == '__many__': continue @@ -1977,11 +1973,11 @@ def _set_configspec(self, section, copy): # copy comments section.comments[entry] = configspec.comments.get(entry, []) section.inline_comments[entry] = configspec.inline_comments.get(entry, '') - + # Could be a scalar when we expect a section if isinstance(section[entry], Section): section[entry].configspec = configspec[entry] - + def _write_line(self, indent_string, entry, this_entry, comment): """Write an individual line, for the write method""" @@ -2021,9 +2017,9 @@ def _handle_comment(self, comment): def write(self, outfile=None, section=None): """ Write the current ConfigObj as a file - + tekNico: FIXME: use StringIO instead of real files - + >>> filename = a.filename >>> a.filename = 'test.ini' >>> a.write() @@ -2036,7 +2032,7 @@ def write(self, outfile=None, section=None): if self.indent_type is None: # this can be true if initialised from a dictionary self.indent_type = DEFAULT_INDENT_TYPE - + out = [] cs = self._a_to_u('#') csp = self._a_to_u('# ') @@ -2050,7 +2046,7 @@ def write(self, outfile=None, section=None): if stripped_line and not stripped_line.startswith(cs): line = csp + line out.append(line) - + indent_string = self.indent_type * section.depth for entry in (section.scalars + section.sections): if entry in section.defaults: @@ -2063,7 +2059,7 @@ def write(self, outfile=None, section=None): out.append(indent_string + comment_line) this_entry = section[entry] comment = self._handle_comment(section.inline_comments[entry]) - + if isinstance(this_entry, Section): # a section out.append(self._write_marker( @@ -2078,7 +2074,7 @@ def write(self, outfile=None, section=None): entry, this_entry, comment)) - + if section is self: for line in self.final_comment: line = self._decode_element(line) @@ -2087,10 +2083,10 @@ def write(self, outfile=None, section=None): line = csp + line out.append(line) self.interpolation = int_val - + if section is not self: return out - + if (self.filename is None) and (outfile is None): # output a list of lines # might need to encode @@ -2104,7 +2100,7 @@ def write(self, outfile=None, section=None): out.append('') out[0] = BOM_UTF8 + out[0] return out - + # Turn the list to a string, joined with correct newlines newline = self.newlines or os.linesep if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w' @@ -2136,34 +2132,34 @@ def validate(self, validator, preserve_errors=False, copy=False, section=None): """ Test the ConfigObj against a configspec. - + It uses the ``validator`` object from *validate.py*. - + To run ``validate`` on the current ConfigObj, call: :: - + test = config.validate(validator) - + (Normally having previously passed in the configspec when the ConfigObj was created - you can dynamically assign a dictionary of checks to the ``configspec`` attribute of a section though). - + It returns ``True`` if everything passes, or a dictionary of pass/fails (True/False). If every member of a subsection passes, it will just have the value ``True``. (It also returns ``False`` if all members fail). - + In addition, it converts the values from strings to their native types if their checks pass (and ``stringify`` is set). 
- + If ``preserve_errors`` is ``True`` (``False`` is default) then instead of a marking a fail with a ``False``, it will preserve the actual exception object. This can contain info about the reason for failure. For example the ``VdtValueTooSmallError`` indicates that the value supplied was too small. If a value (or section) is missing it will still be marked as ``False``. - + You must have the validate module to use ``preserve_errors=True``. - + You can then use the ``flatten_errors`` function to turn your nested results dictionary into a flattened list of failures - useful for displaying meaningful error messages. @@ -2174,9 +2170,9 @@ def validate(self, validator, preserve_errors=False, copy=False, if preserve_errors: # We do this once to remove a top level dependency on the validate module # Which makes importing configobj faster - from validate import VdtMissingValue + from configobj.validate import VdtMissingValue self._vdtMissingValue = VdtMissingValue - + section = self if copy: @@ -2186,34 +2182,34 @@ def validate(self, validator, preserve_errors=False, copy=False, section.BOM = section.configspec.BOM section.newlines = section.configspec.newlines section.indent_type = section.configspec.indent_type - + # # section.default_values.clear() #?? configspec = section.configspec self._set_configspec(section, copy) - + def validate_entry(entry, spec, val, missing, ret_true, ret_false): section.default_values.pop(entry, None) - + try: section.default_values[entry] = validator.get_default_value(configspec[entry]) except (KeyError, AttributeError, validator.baseErrorClass): # No default, bad default or validator has no 'get_default_value' # (e.g. SimpleVal) pass - + try: check = validator.check(spec, val, missing=missing ) - except validator.baseErrorClass as cause: - if not preserve_errors or isinstance(cause, self._vdtMissingValue): + except validator.baseErrorClass as e: + if not preserve_errors or isinstance(e, self._vdtMissingValue): out[entry] = False else: # preserve the error - out[entry] = cause + out[entry] = e ret_false = False ret_true = False else: @@ -2236,16 +2232,16 @@ def validate_entry(entry, spec, val, missing, ret_true, ret_false): if not copy and missing and entry not in section.defaults: section.defaults.append(entry) return ret_true, ret_false - + # out = {} ret_true = True ret_false = True - + unvalidated = [k for k in section.scalars if k not in configspec] - incorrect_sections = [k for k in configspec.sections if k in section.scalars] + incorrect_sections = [k for k in configspec.sections if k in section.scalars] incorrect_scalars = [k for k in configspec.scalars if k in section.sections] - + for entry in configspec.scalars: if entry in ('__many__', '___many___'): # reserved names @@ -2265,16 +2261,16 @@ def validate_entry(entry, spec, val, missing, ret_true, ret_false): else: missing = False val = section[entry] - - ret_true, ret_false = validate_entry(entry, configspec[entry], val, + + ret_true, ret_false = validate_entry(entry, configspec[entry], val, missing, ret_true, ret_false) - + many = None if '__many__' in configspec.scalars: many = configspec['__many__'] elif '___many___' in configspec.scalars: many = configspec['___many___'] - + if many is not None: for entry in unvalidated: val = section[entry] @@ -2298,7 +2294,7 @@ def validate_entry(entry, spec, val, missing, ret_true, ret_false): ret_false = False msg = 'Section %r was provided as a single value' % entry out[entry] = validator.baseErrorClass(msg) - + # Missing sections will have been created as empty 
ones when the # configspec was read. for entry in section.sections: @@ -2319,7 +2315,7 @@ def validate_entry(entry, spec, val, missing, ret_true, ret_false): ret_false = False else: ret_true = False - + section.extra_values = unvalidated if preserve_errors and not section._created: # If the section wasn't created (i.e. it wasn't missing) @@ -2348,12 +2344,12 @@ def reset(self): self.configspec = None # Just to be sure ;-) self._original_configspec = None - - + + def reload(self): """ Reload a ConfigObj from file. - + This method raises a ``ReloadError`` if the ConfigObj doesn't have a filename attribute pointing to a file. """ @@ -2366,31 +2362,31 @@ def reload(self): if entry == 'configspec': continue current_options[entry] = getattr(self, entry) - + configspec = self._original_configspec current_options['configspec'] = configspec - + self.clear() self._initialise(current_options) self._load(filename, configspec) - + class SimpleVal(object): """ A simple validator. Can be used to check that all members expected are present. - + To use it, provide a configspec with all your members in (the value given will be ignored). Pass an instance of ``SimpleVal`` to the ``validate`` method of your ``ConfigObj``. ``validate`` will return ``True`` if all members are present, or a dictionary with True/False meaning present/missing. (Whole missing sections will be replaced with ``False``) """ - + def __init__(self): self.baseErrorClass = ConfigObjError - + def check(self, check, member, missing=False): """A dummy check method, always returns the value unchanged.""" if missing: @@ -2402,32 +2398,32 @@ def flatten_errors(cfg, res, levels=None, results=None): """ An example function that will turn a nested dictionary of results (as returned by ``ConfigObj.validate``) into a flat list. - + ``cfg`` is the ConfigObj instance being checked, ``res`` is the results dictionary returned by ``validate``. - + (This is a recursive function, so you shouldn't use the ``levels`` or ``results`` arguments - they are used by the function.) - + Returns a list of keys that failed. Each member of the list is a tuple:: - + ([list of sections...], key, result) - + If ``validate`` was called with ``preserve_errors=False`` (the default) then ``result`` will always be ``False``. *list of sections* is a flattened list of sections that the key was found in. - + If the section was missing (or a section was expected and a scalar provided - or vice-versa) then key will be ``None``. - + If the value (or section) was missing then ``result`` will be ``False``. - + If ``validate`` was called with ``preserve_errors=True`` and a value was present, but failed the check, then ``result`` will be the exception object returned. You can use this as a string that describes the failure. - + For example *The value "3" is of the wrong type*. """ if levels is None: @@ -2444,7 +2440,7 @@ def flatten_errors(cfg, res, levels=None, results=None): for (key, val) in list(res.items()): if val == True: continue - if isinstance(cfg.get(key), collections.Mapping): + if isinstance(cfg.get(key), dict): # Go down one level levels.append(key) flatten_errors(cfg[key], val, levels, results) @@ -2462,21 +2458,21 @@ def get_extra_values(conf, _prepend=()): """ Find all the values and sections not in the configspec from a validated ConfigObj. - + ``get_extra_values`` returns a list of tuples where each tuple represents either an extra section, or an extra value. 
- - The tuples contain two values, a tuple representing the section the value + + The tuples contain two values, a tuple representing the section the value is in and the name of the extra values. For extra values in the top level section the first member will be an empty tuple. For values in the 'foo' section the first member will be ``('foo',)``. For members in the 'bar' subsection of the 'foo' section the first member will be ``('foo', 'bar')``. - + NOTE: If you call ``get_extra_values`` on a ConfigObj instance that hasn't been validated it will return an empty list. """ out = [] - + out.extend([(_prepend, name) for name in conf.extra_values]) for name in conf.sections: if name not in conf.extra_values: diff --git a/backtrackbb/configobj/_version.py b/backtrackbb/configobj/_version.py index 19a7140..f9d71a5 100644 --- a/backtrackbb/configobj/_version.py +++ b/backtrackbb/configobj/_version.py @@ -1,2 +1 @@ -"""Project version""" -__version__ = '5.1.0' +__version__ = '5.0.8' \ No newline at end of file diff --git a/backtrackbb/configobj/validate.py b/backtrackbb/configobj/validate.py index ba9eca4..9267a3f 100644 --- a/backtrackbb/configobj/validate.py +++ b/backtrackbb/configobj/validate.py @@ -1,9 +1,5 @@ # validate.py -# -*- coding: utf-8 -*- -# pylint: disable= -# -# A Validator object. -# +# A Validator object # Copyright (C) 2005-2014: # (name) : (email) # Michael Foord: fuzzyman AT voidspace DOT org DOT uk @@ -19,123 +15,122 @@ # https://github.com/DiffSK/configobj """ - The Validator object is used to check that supplied values + The Validator object is used to check that supplied values conform to a specification. - + The value can be supplied as a string - e.g. from a config file. In this case the check will also *convert* the value to the required type. This allows you to add validation as a transparent layer to access data stored as strings. The validation checks that the data is correct *and* converts it to the expected type. - + Some standard checks are provided for basic data types. Additional checks are easy to write. They can be provided when the ``Validator`` is instantiated or added afterwards. - + The standard functions work with the following basic data types : - + * integers * floats * booleans * strings * ip_addr - + plus lists of these datatypes - + Adding additional checks is done through coding simple functions. - - The full set of standard checks are : - + + The full set of standard checks are : + * 'integer': matches integer values (including negative) Takes optional 'min' and 'max' arguments : :: - + integer() integer(3, 9) # any value from 3 to 9 integer(min=0) # any positive value integer(max=9) - + * 'float': matches float values Has the same parameters as the integer check. - + * 'boolean': matches boolean values - ``True`` or ``False`` Acceptable string values for True are : true, on, yes, 1 Acceptable string values for False are : false, off, no, 0 - + Any other value raises an error. - + * 'ip_addr': matches an Internet Protocol address, v.4, represented by a dotted-quad string, i.e. '1.2.3.4'. - + * 'string': matches any string. Takes optional keyword args 'min' and 'max' to specify min and max lengths of the string. - + * 'list': matches any list. Takes optional keyword args 'min', and 'max' to specify min and max sizes of the list. (Always returns a list.) - + * 'tuple': matches any tuple. Takes optional keyword args 'min', and 'max' to specify min and max sizes of the tuple. (Always returns a tuple.) - + * 'int_list': Matches a list of integers. 
Takes the same arguments as list. - + * 'float_list': Matches a list of floats. Takes the same arguments as list. - + * 'bool_list': Matches a list of boolean values. Takes the same arguments as list. - + * 'ip_addr_list': Matches a list of IP addresses. Takes the same arguments as list. - + * 'string_list': Matches a list of strings. Takes the same arguments as list. - - * 'mixed_list': Matches a list with different types in + + * 'mixed_list': Matches a list with different types in specific positions. List size must match the number of arguments. - + Each position can be one of : 'integer', 'float', 'ip_addr', 'string', 'boolean' - + So to specify a list with two strings followed by two integers, you write the check as : :: - + mixed_list('string', 'string', 'integer', 'integer') - + * 'pass': This check matches everything ! It never fails and the value is unchanged. - + It is also the default if no check is specified. - + * 'option': This check matches any from a list of options. You specify this check with : :: - + option('option 1', 'option 2', 'option 3') - + You can supply a default value (returned if no value is supplied) using the default keyword argument. - + You specify a list argument for default using a list constructor syntax in the check : :: - + checkname(arg1, arg2, default=list('val 1', 'val 2', 'val 3')) - + A badly formatted set of arguments will raise a ``VdtParamError``. """ -import re -import sys -from pprint import pprint __version__ = '1.0.1' + __all__ = ( + '__version__', 'dottedQuadToNum', 'numToDottedQuad', 'ValidateError', @@ -163,10 +158,13 @@ 'is_ip_addr_list', 'is_mixed_list', 'is_option', - # '__docformat__', -- where is this supposed to come from? [jhe 2015-04-11] ) +import re +import sys +from pprint import pprint + #TODO - #21 - six is part of the repo now, but we didn't switch over to it here # this could be replaced if six is used for compatibility, or there are no # more assertions about items being a string @@ -275,7 +273,7 @@ def bool(val): def dottedQuadToNum(ip): """ Convert decimal dotted quad string to long integer - + >>> int(dottedQuadToNum('1 ')) 1 >>> int(dottedQuadToNum(' 1.2')) @@ -290,10 +288,10 @@ def dottedQuadToNum(ip): Traceback (most recent call last): ValueError: Not a good dotted-quad IP: 255.255.255.256 """ - + # import here to avoid it when ip_addr values are not used import socket, struct - + try: return struct.unpack('!L', socket.inet_aton(ip.strip()))[0] @@ -305,7 +303,7 @@ def dottedQuadToNum(ip): def numToDottedQuad(num): """ Convert int or long int to dotted quad string - + >>> numToDottedQuad(long(-1)) Traceback (most recent call last): ValueError: Not a good numeric IP: -1 @@ -340,10 +338,10 @@ def numToDottedQuad(num): ValueError: Not a good numeric IP: 4294967296 """ - + # import here to avoid it when ip_addr values are not used import socket, struct - + # no need to intercept here, 4294967295L is fine if num > long(4294967295) or num < 0: raise ValueError('Not a good numeric IP: %s' % num) @@ -358,10 +356,10 @@ class ValidateError(Exception): """ This error indicates that the check failed. It can be the base class for more specific errors. - + Any check function that fails ought to raise this error. 
(or a subclass) - + >>> raise ValidateError Traceback (most recent call last): ValidateError @@ -387,21 +385,13 @@ def __init__(self, value): class VdtParamError(SyntaxError): """An incorrect parameter was passed""" - NOT_GIVEN = object() - - def __init__(self, name_or_msg, value=NOT_GIVEN): + def __init__(self, name, value): """ >>> raise VdtParamError('yoda', 'jedi') Traceback (most recent call last): VdtParamError: passed an incorrect value "jedi" for parameter "yoda". - >>> raise VdtParamError('preformatted message') - Traceback (most recent call last): - VdtParamError: preformatted message """ - if value is self.NOT_GIVEN: - SyntaxError.__init__(self, name_or_msg) - else: - SyntaxError.__init__(self, 'passed an incorrect value "%s" for parameter "%s".' % (value, name_or_msg)) + SyntaxError.__init__(self, 'passed an incorrect value "%s" for parameter "%s".' % (value, name)) class VdtTypeError(ValidateError): @@ -418,7 +408,7 @@ def __init__(self, value): class VdtValueError(ValidateError): """The value supplied was of the correct type, but was not an allowed value.""" - + def __init__(self, value): """ >>> raise VdtValueError('jedi') @@ -482,18 +472,18 @@ class Validator(object): """ Validator is an object that allows you to register a set of 'checks'. These checks take input and test that it conforms to the check. - + This can also involve converting the value from a string into the correct datatype. - + The ``check`` method takes an input string which configures which check is to be used and applies that check to a supplied value. - + An example input string would be: 'int_range(param1, param2)' - + You would then provide something like: - + >>> def int_range_check(value, min, max): ... # turn min and max from strings to integers ... min = int(min) @@ -516,7 +506,7 @@ class Validator(object): ... if not value <= max: ... raise VdtValueTooBigError(value) ... return value - + >>> fdict = {'int_range': int_range_check} >>> vtr1 = Validator(fdict) >>> vtr1.check('int_range(20, 40)', '30') @@ -524,25 +514,25 @@ class Validator(object): >>> vtr1.check('int_range(20, 40)', '60') Traceback (most recent call last): VdtValueTooBigError: the value "60" is too big. - + New functions can be added with : :: - - >>> vtr2 = Validator() + + >>> vtr2 = Validator() >>> vtr2.functions['int_range'] = int_range_check - - Or by passing in a dictionary of functions when Validator + + Or by passing in a dictionary of functions when Validator is instantiated. - + Your functions *can* use keyword arguments, but the first argument should always be 'value'. - + If the function doesn't take additional arguments, the parentheses are optional in the check. It can be written with either of : :: - + keyword = function_name keyword = function_name() - + The first program to utilise Validator() was Michael Foord's ConfigObj, an alternative to ConfigParser which supports lists and can validate a config file using a config schema. @@ -602,36 +592,35 @@ def __init__(self, functions=None): def check(self, check, value, missing=False): """ Usage: check(check, value) - + Arguments: check: string representing check to apply (including arguments) value: object to be checked - Returns value, converted to correct type if necessary - + If the check fails, raises a ``ValidateError`` subclass. - + >>> vtor.check('yoda', '') Traceback (most recent call last): VdtUnknownCheckError: the check "yoda" is unknown. >>> vtor.check('yoda()', '') Traceback (most recent call last): VdtUnknownCheckError: the check "yoda" is unknown. 
- + >>> vtor.check('string(default="")', '', missing=True) '' """ fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check) - + if missing: if default is None: # no information needed here - to be handled by caller raise VdtMissingValue() value = self._handle_none(default) - + if value is None: return None - + return self._check_value(value, fun_name, fun_args, fun_kwargs) @@ -656,8 +645,8 @@ def _parse_with_caching(self, check): fun_kwargs = dict([(str(key), value) for (key, value) in list(fun_kwargs.items())]) self._cache[check] = fun_name, list(fun_args), dict(fun_kwargs), default return fun_name, fun_args, fun_kwargs, default - - + + def _check_value(self, value, fun_name, fun_args, fun_kwargs): try: fun = self.functions[fun_name] @@ -695,7 +684,7 @@ def _parse_check(self, check): val = self._unquote(val) fun_kwargs[keymatch.group(1)] = val continue - + fun_args.append(self._unquote(arg)) else: # allows for function names without (args) @@ -727,20 +716,20 @@ def _list_handle(self, listmatch): def _pass(self, value): """ Dummy check that always passes - + >>> vtor.check('', 0) 0 >>> vtor.check('', '0') '0' """ return value - - + + def get_default_value(self, check): """ Given a check, return the default value for the check (converted to the right type). - + If the check doesn't specify a default value then a ``KeyError`` will be raised. """ @@ -756,11 +745,11 @@ def get_default_value(self, check): def _is_num_param(names, values, to_float=False): """ Return numbers from inputs or raise VdtParamError. - + Lets ``None`` pass through. Pass in keyword argument ``to_float=True`` to use float for the conversion rather than int. - + >>> _is_num_param(('', ''), (0, 1.0)) [0, 1] >>> _is_num_param(('', ''), (0, 1.0), to_float=True) @@ -777,7 +766,7 @@ def _is_num_param(names, values, to_float=False): elif isinstance(val, (int, long, float, string_type)): try: out_params.append(fun(val)) - except ValueError: + except ValueError as e: raise VdtParamError(name, val) else: raise VdtParamError(name, val) @@ -795,10 +784,10 @@ def is_integer(value, min=None, max=None): A check that tests that a given value is an integer (int, or long) and optionally, between bounds. A negative value is accepted, while a float will fail. - + If the value is a string, then the conversion is done - if possible. Otherwise a VdtError is raised. - + >>> vtor.check('integer', '-1') -1 >>> vtor.check('integer', '0') @@ -830,8 +819,7 @@ def is_integer(value, min=None, max=None): >>> vtor.check('integer(0, 9)', False) 0 """ - (min_val, max_val) = _is_num_param( # pylint: disable=unbalanced-tuple-unpacking - ('min', 'max'), (min, max)) + (min_val, max_val) = _is_num_param(('min', 'max'), (min, max)) if not isinstance(value, (int, long, string_type)): raise VdtTypeError(value) if isinstance(value, string_type): @@ -851,17 +839,17 @@ def is_float(value, min=None, max=None): """ A check that tests that a given value is a float (an integer will be accepted), and optionally - that it is between bounds. - + If the value is a string, then the conversion is done - if possible. Otherwise a VdtError is raised. - + This can accept negative values. - + >>> vtor.check('float', '2') 2.0 - + From now on we multiply the value to avoid comparing decimals - + >>> vtor.check('float', '-6.8') * 10 -68.0 >>> vtor.check('float', '12.2') * 10 @@ -882,7 +870,7 @@ def is_float(value, min=None, max=None): Traceback (most recent call last): VdtValueTooBigError: the value "35.0" is too big. 
""" - (min_val, max_val) = _is_num_param( # pylint: disable=unbalanced-tuple-unpacking + (min_val, max_val) = _is_num_param( ('min', 'max'), (min, max), to_float=True) if not isinstance(value, (int, long, float, string_type)): raise VdtTypeError(value) @@ -900,7 +888,7 @@ def is_float(value, min=None, max=None): bool_dict = { - True: True, 'on': True, '1': True, 'true': True, 'yes': True, + True: True, 'on': True, '1': True, 'true': True, 'yes': True, False: False, 'off': False, '0': False, 'false': False, 'no': False, } @@ -908,7 +896,7 @@ def is_float(value, min=None, max=None): def is_boolean(value): """ Check if the value represents a boolean. - + >>> vtor.check('boolean', 0) 0 >>> vtor.check('boolean', False) @@ -947,7 +935,7 @@ def is_boolean(value): >>> vtor.check('boolean', 'up') Traceback (most recent call last): VdtTypeError: the value "up" is of the wrong type. - + """ if isinstance(value, string_type): try: @@ -969,7 +957,7 @@ def is_ip_addr(value): """ Check that the supplied value is an Internet Protocol address, v.4, represented by a dotted-quad string, i.e. '1.2.3.4'. - + >>> vtor.check('ip_addr', '1 ') '1' >>> vtor.check('ip_addr', ' 1.2') @@ -1005,11 +993,11 @@ def is_ip_addr(value): def is_list(value, min=None, max=None): """ Check that the value is a list of values. - + You can optionally specify the minimum and maximum number of members. - + It does no check on list members. - + >>> vtor.check('list', ()) [] >>> vtor.check('list', []) @@ -1033,8 +1021,7 @@ def is_list(value, min=None, max=None): Traceback (most recent call last): VdtTypeError: the value "12" is of the wrong type. """ - (min_len, max_len) = _is_num_param( # pylint: disable=unbalanced-tuple-unpacking - ('min', 'max'), (min, max)) + (min_len, max_len) = _is_num_param(('min', 'max'), (min, max)) if isinstance(value, string_type): raise VdtTypeError(value) try: @@ -1051,11 +1038,11 @@ def is_list(value, min=None, max=None): def is_tuple(value, min=None, max=None): """ Check that the value is a tuple of values. - + You can optionally specify the minimum and maximum number of members. - + It does no check on members. - + >>> vtor.check('tuple', ()) () >>> vtor.check('tuple', []) @@ -1085,9 +1072,9 @@ def is_tuple(value, min=None, max=None): def is_string(value, min=None, max=None): """ Check that the supplied value is a string. - + You can optionally specify the minimum and maximum number of members. - + >>> vtor.check('string', '0') '0' >>> vtor.check('string', 0) @@ -1106,8 +1093,7 @@ def is_string(value, min=None, max=None): """ if not isinstance(value, string_type): raise VdtTypeError(value) - (min_len, max_len) = _is_num_param( # pylint: disable=unbalanced-tuple-unpacking - ('min', 'max'), (min, max)) + (min_len, max_len) = _is_num_param(('min', 'max'), (min, max)) try: num_members = len(value) except TypeError: @@ -1122,11 +1108,11 @@ def is_string(value, min=None, max=None): def is_int_list(value, min=None, max=None): """ Check that the value is a list of integers. - + You can optionally specify the minimum and maximum number of members. - + Each list member is checked that it is an integer. - + >>> vtor.check('int_list', ()) [] >>> vtor.check('int_list', []) @@ -1145,11 +1131,11 @@ def is_int_list(value, min=None, max=None): def is_bool_list(value, min=None, max=None): """ Check that the value is a list of booleans. - + You can optionally specify the minimum and maximum number of members. - + Each list member is checked that it is a boolean. 
- + >>> vtor.check('bool_list', ()) [] >>> vtor.check('bool_list', []) @@ -1170,11 +1156,11 @@ def is_bool_list(value, min=None, max=None): def is_float_list(value, min=None, max=None): """ Check that the value is a list of floats. - + You can optionally specify the minimum and maximum number of members. - + Each list member is checked that it is a float. - + >>> vtor.check('float_list', ()) [] >>> vtor.check('float_list', []) @@ -1193,11 +1179,11 @@ def is_float_list(value, min=None, max=None): def is_string_list(value, min=None, max=None): """ Check that the value is a list of strings. - + You can optionally specify the minimum and maximum number of members. - + Each list member is checked that it is a string. - + >>> vtor.check('string_list', ()) [] >>> vtor.check('string_list', []) @@ -1219,11 +1205,11 @@ def is_string_list(value, min=None, max=None): def is_ip_addr_list(value, min=None, max=None): """ Check that the value is a list of IP addresses. - + You can optionally specify the minimum and maximum number of members. - + Each list member is checked that it is an IP address. - + >>> vtor.check('ip_addr_list', ()) [] >>> vtor.check('ip_addr_list', []) @@ -1242,11 +1228,11 @@ def force_list(value, min=None, max=None): Check that a value is a list, coercing strings into a list with one member. Useful where users forget the trailing comma that turns a single value into a list. - + You can optionally specify the minimum and maximum number of members. A minumum of greater than one will fail if the user only supplies a string. - + >>> vtor.check('force_list', ()) [] >>> vtor.check('force_list', []) @@ -1257,8 +1243,8 @@ def force_list(value, min=None, max=None): if not isinstance(value, (list, tuple)): value = [value] return is_list(value, min, max) - - + + fun_dict = { 'integer': is_integer, @@ -1274,20 +1260,20 @@ def is_mixed_list(value, *args): Check that the value is a list. Allow specifying the type of each member. Work on lists of specific lengths. - + You specify each member as a positional argument specifying type - + Each type should be one of the following strings : 'integer', 'float', 'ip_addr', 'string', 'boolean' - + So you can specify a list of two strings, followed by two integers as : - + mixed_list('string', 'string', 'integer', 'integer') - + The length of the list must match the number of positional arguments you supply. - + >>> mix_str = "mixed_list('integer', 'float', 'ip_addr', 'string', 'boolean')" >>> check_res = vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', True)) >>> check_res == [1, 2.0, '1.2.3.4', 'a', True] @@ -1322,14 +1308,14 @@ def is_mixed_list(value, *args): raise VdtValueTooLongError(value) try: return [fun_dict[arg](val) for arg, val in zip(args, value)] - except KeyError as cause: - raise VdtParamError('mixed_list', cause) + except KeyError as e: + raise VdtParamError('mixed_list', e) def is_option(value, *options): """ This check matches the value to any of a set of options. - + >>> vtor.check('option("yoda", "jedi")', 'yoda') 'yoda' >>> vtor.check('option("yoda", "jedi")', 'jed') @@ -1349,7 +1335,7 @@ def is_option(value, *options): def _test(value, *args, **keywargs): """ A function that exists for test purposes. - + >>> checks = [ ... '3, 6, min=1, max=3, test=list(a, b, c)', ... 
'3', @@ -1383,7 +1369,7 @@ def _test(value, *args, **keywargs): (3, ('3',), {'max': '3', 'test': ['a', 'b', 'c']}) (3, ('3',), {'max': '3', 'test': ["'a'", 'b', 'x=(c)']}) (3, (), {'test': 'x=fish(3)'}) - + >>> v = Validator() >>> v.check('integer(default=6)', '3') 3 @@ -1401,7 +1387,7 @@ def _test(value, *args, **keywargs): KeyError: 'Check "pass" has no default value.' >>> v.get_default_value('pass(default=list(1, 2, 3, 4))') ['1', '2', '3', '4'] - + >>> v = Validator() >>> v.check("pass(default=None)", None, True) >>> v.check("pass(default='None')", None, True) @@ -1410,18 +1396,18 @@ def _test(value, *args, **keywargs): 'None' >>> v.check('pass(default=list(1, 2, 3, 4))', None, True) ['1', '2', '3', '4'] - + Bug test for unicode arguments >>> v = Validator() >>> v.check(unicode('string(min=4)'), unicode('test')) == unicode('test') True - + >>> v = Validator() >>> v.get_default_value(unicode('string(min=4, default="1234")')) == unicode('1234') True >>> v.check(unicode('string(min=4, default="1234")'), unicode('test')) == unicode('test') True - + >>> v = Validator() >>> default = v.get_default_value('string(default=None)') >>> default == None @@ -1432,7 +1418,7 @@ def _test(value, *args, **keywargs): def _test2(): """ - >>> + >>> >>> v = Validator() >>> v.get_default_value('string(default="#ff00dd")') '#ff00dd' @@ -1467,8 +1453,8 @@ def _test3(): >>> vtor.check("string_list(default=list('\n'))", '', missing=True) ['\n'] """ - - + + if __name__ == '__main__': # run the code tests in doctest format import sys diff --git a/backtrackbb/grid_projection.py b/backtrackbb/grid_projection.py index 3078fe3..5f18598 100644 --- a/backtrackbb/grid_projection.py +++ b/backtrackbb/grid_projection.py @@ -1,7 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import numpy as np import scipy as sp from .LocalCC import LocalCC diff --git a/backtrackbb/init_filter.py b/backtrackbb/init_filter.py index b1f3fb9..1f9750a 100644 --- a/backtrackbb/init_filter.py +++ b/backtrackbb/init_filter.py @@ -1,7 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - from .rec_filter import rec_filter_coeff, rec_filter_norm from .mod_filter_picker import make_LinFq, make_LogFq diff --git a/backtrackbb/lib_names.py b/backtrackbb/lib_names.py index 926b50f..dbae516 100644 --- a/backtrackbb/lib_names.py +++ b/backtrackbb/lib_names.py @@ -1,15 +1,9 @@ # -*- coding: utf8 -*- import os -import sysconfig +import importlib.machinery def get_lib_path(lib): - suffix = sysconfig.get_config_var('EXT_SUFFIX') - if not suffix: - suffix = sysconfig.get_config_var('SO') - if os.name == 'nt': - py_version_nodot = sysconfig.get_config_var('py_version_nodot') - platform = sysconfig.get_platform().replace('-', '_') - suffix = '.cp{}-{}{}'.format(py_version_nodot, platform, suffix) + suffix = importlib.machinery.EXTENSION_SUFFIXES[0] libname = lib + suffix return os.path.join(os.path.dirname(__file__), 'lib', libname) diff --git a/backtrackbb/map_project.py b/backtrackbb/map_project.py index 44c9f7b..87c2a24 100644 --- a/backtrackbb/map_project.py +++ b/backtrackbb/map_project.py @@ -1,11 +1,8 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import ctypes try: from .lib_names import get_lib_path -except (ImportError, ValueError): +except ImportError: from lib_names import get_lib_path diff --git a/backtrackbb/mod_btbb.py 
b/backtrackbb/mod_btbb.py index 40db137..8621105 100644 --- a/backtrackbb/mod_btbb.py +++ b/backtrackbb/mod_btbb.py @@ -1,7 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import sys import os import numpy as np diff --git a/backtrackbb/mod_filter_picker.py b/backtrackbb/mod_filter_picker.py index 3586708..22bcebd 100644 --- a/backtrackbb/mod_filter_picker.py +++ b/backtrackbb/mod_filter_picker.py @@ -1,8 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) -from past.builtins import xrange - import numpy as np import sys try: @@ -11,7 +7,7 @@ from .rec_hos import recursive_hos from .rec_gauss_filter import recursive_gauss_filter from .rosenberger import rosenberger -except (ImportError, ValueError): +except ImportError: from rec_filter import recursive_filter from rec_rms import recursive_rms from rec_hos import recursive_hos @@ -76,7 +72,7 @@ def MBfilter_CF(st, frequencies, YN1 = np.zeros((Nb, len(y)), float) CF1 = np.zeros((Nb, len(y)), float) - for n in xrange(Nb): + for n in range(Nb): if rec_memory is not None: rmem = rec_memory[(tr.id, wave_type)][n] else: @@ -117,7 +113,7 @@ def MBfilter_CF(st, frequencies, YN1 = np.zeros((Nb, len(y1)), float) CF1 = np.zeros((Nb, len(y1)), float) - for n in xrange(Nb): + for n in range(Nb): if rec_memory is not None: rmem1 = rec_memory[(tr1.id, wave_type)][n] rmem2 = rec_memory[(tr2.id, wave_type)][n] @@ -172,7 +168,7 @@ def MBfilter_CF(st, frequencies, if full_output: CF2 = np.zeros((Nb, len(y1)), float) - for n in xrange(Nb): + for n in range(Nb): if rec_memory is not None: rmem1 = rec_memory[(tr1.id, wave_type)][n] rmem2 = rec_memory[(tr2.id, wave_type)][n] diff --git a/backtrackbb/mod_setup.py b/backtrackbb/mod_setup.py index 5a6c0d3..1f6b84e 100644 --- a/backtrackbb/mod_setup.py +++ b/backtrackbb/mod_setup.py @@ -1,8 +1,5 @@ # -*- coding: utf8 -*- """Setup functions.""" -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import sys import os from argparse import ArgumentParser diff --git a/backtrackbb/mod_utils.py b/backtrackbb/mod_utils.py index b2f72b7..dc746ab 100644 --- a/backtrackbb/mod_utils.py +++ b/backtrackbb/mod_utils.py @@ -1,7 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - from obspy.signal.util import util_geo_km diff --git a/backtrackbb/nllgrid/NLLGrid.py b/backtrackbb/nllgrid/NLLGrid.py index 6f4b205..1a4d6c3 100644 --- a/backtrackbb/nllgrid/NLLGrid.py +++ b/backtrackbb/nllgrid/NLLGrid.py @@ -18,7 +18,10 @@ from ctypes import Union, c_float, c_ushort from copy import deepcopy from pyproj import Proj -from collections import Iterable +try: + from collections.abc import Iterable +except ImportError: + from collections import Iterable valid_grid_types = ( diff --git a/backtrackbb/plot.py b/backtrackbb/plot.py index 5c720da..608d3e9 100644 --- a/backtrackbb/plot.py +++ b/backtrackbb/plot.py @@ -1,8 +1,6 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import os +import copy import numpy as np import pylab import matplotlib @@ -39,7 +37,7 @@ def bp_plot(config, proj_grid, trig_smbl_size = 200 / ratio scmap = config.scmap - scmap2 = pylab.cm.jet + scmap2 = copy.copy(pylab.cm.jet) scmap2.set_under('w', LTrig) if trigger is not None: diff --git a/backtrackbb/readNLL_grid.py b/backtrackbb/readNLL_grid.py index 
dbb4a11..f324ee7 100644 --- a/backtrackbb/readNLL_grid.py +++ b/backtrackbb/readNLL_grid.py @@ -1,7 +1,4 @@ #!/usr/bin/python -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import numpy as np import matplotlib.pyplot as plt # diff --git a/backtrackbb/read_grids.py b/backtrackbb/read_grids.py index c0877b9..331f0b1 100644 --- a/backtrackbb/read_grids.py +++ b/backtrackbb/read_grids.py @@ -1,7 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import os from glob import glob from collections import defaultdict diff --git a/backtrackbb/read_traces.py b/backtrackbb/read_traces.py index 4716c11..22db98c 100644 --- a/backtrackbb/read_traces.py +++ b/backtrackbb/read_traces.py @@ -1,7 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import sys import os from glob import glob diff --git a/backtrackbb/rec_cc.py b/backtrackbb/rec_cc.py index 0d90ff4..706a4de 100644 --- a/backtrackbb/rec_cc.py +++ b/backtrackbb/rec_cc.py @@ -1,13 +1,10 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import numpy as np from ctypes import CDLL, c_int, c_double, c_void_p from numpy.ctypeslib import ndpointer try: from .lib_names import get_lib_path -except (ImportError, ValueError): +except ImportError: from lib_names import get_lib_path diff --git a/backtrackbb/rec_filter.py b/backtrackbb/rec_filter.py index c8581f3..9eb6d6d 100644 --- a/backtrackbb/rec_filter.py +++ b/backtrackbb/rec_filter.py @@ -1,13 +1,10 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import numpy as np from ctypes import CDLL, c_int, c_float, c_double, c_void_p, POINTER, byref from numpy.ctypeslib import ndpointer try: from .lib_names import get_lib_path -except (ImportError, ValueError): +except ImportError: from lib_names import get_lib_path diff --git a/backtrackbb/rec_gauss_filter.py b/backtrackbb/rec_gauss_filter.py index bbdd567..41b77b2 100644 --- a/backtrackbb/rec_gauss_filter.py +++ b/backtrackbb/rec_gauss_filter.py @@ -1,13 +1,10 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import numpy as np from ctypes import CDLL, c_int, c_double, c_void_p from numpy.ctypeslib import ndpointer try: from .lib_names import get_lib_path -except (ImportError, ValueError): +except ImportError: from lib_names import get_lib_path diff --git a/backtrackbb/rec_hos.py b/backtrackbb/rec_hos.py index 95d15ff..cc1d371 100644 --- a/backtrackbb/rec_hos.py +++ b/backtrackbb/rec_hos.py @@ -7,15 +7,12 @@ # (c) 2013-2014 - Natalia Poiata , # Claudio Satriano , # Pierre Romanet -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import numpy as np from ctypes import CDLL, c_int, c_float, c_double, c_void_p, POINTER, byref from numpy.ctypeslib import ndpointer try: from .lib_names import get_lib_path -except (ImportError, ValueError): +except ImportError: from lib_names import get_lib_path diff --git a/backtrackbb/rec_memory.py b/backtrackbb/rec_memory.py index 81ac692..45de95c 100644 --- a/backtrackbb/rec_memory.py +++ b/backtrackbb/rec_memory.py @@ -1,8 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) -from past.builtins import xrange - import itertools from .bp_types import 
RecursiveMemory @@ -20,5 +16,5 @@ def init_recursive_memory(config): [RecursiveMemory(trid=trid, wave=wave, band=n, nsamples=nsamples, overlap=overlap, filter_npoles=config.filter_npoles) - for n in xrange(n_bands)] + for n in range(n_bands)] return rec_memory diff --git a/backtrackbb/rec_rms.py b/backtrackbb/rec_rms.py index db2427b..d910177 100644 --- a/backtrackbb/rec_rms.py +++ b/backtrackbb/rec_rms.py @@ -1,13 +1,10 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import numpy as np from ctypes import CDLL, c_int, c_float, c_double, c_void_p, POINTER, byref from numpy.ctypeslib import ndpointer try: from .lib_names import get_lib_path -except (ImportError, ValueError): +except ImportError: from lib_names import get_lib_path diff --git a/backtrackbb/recursive_cc.py b/backtrackbb/recursive_cc.py index 5aed310..bc1308c 100644 --- a/backtrackbb/recursive_cc.py +++ b/backtrackbb/recursive_cc.py @@ -1,7 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - from scipy.signal import lfilter from math import floor, ceil import numpy as np diff --git a/backtrackbb/rosenberger.py b/backtrackbb/rosenberger.py index b908868..6d7134c 100644 --- a/backtrackbb/rosenberger.py +++ b/backtrackbb/rosenberger.py @@ -11,16 +11,12 @@ # (c) 2014 - Claudio Satriano , # Pierre Romanet # (c) 2014 - 2018 Claudio Satriano -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import numpy as np from ctypes import CDLL, c_int, c_float, c_char, c_void_p from numpy.ctypeslib import ndpointer -from future.utils import PY2 try: from .lib_names import get_lib_path -except (ImportError, ValueError): +except ImportError: from lib_names import get_lib_path @@ -66,12 +62,8 @@ def rosenberger(dataX, dataY, dataZ, pol_filter = np.zeros_like(dataX) delta = c_float(delta) - if PY2: - proj = chr(proj) - rl_filter = chr(rl_filter) - else: - proj = c_char(proj) - rl_filter = c_char(rl_filter) + proj = c_char(proj) + rl_filter = c_char(rl_filter) lib_rosenberger.rosenberger(dataX, dataY, dataZ, pol_filter, diff --git a/backtrackbb/scripts/bt2eventdata.py b/backtrackbb/scripts/bt2eventdata.py index 86da94f..9c1fdd8 100644 --- a/backtrackbb/scripts/bt2eventdata.py +++ b/backtrackbb/scripts/bt2eventdata.py @@ -1,7 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import os from obspy.core import AttribDict from ..mod_setup import configure diff --git a/backtrackbb/scripts/btbb.py b/backtrackbb/scripts/btbb.py index ff1e730..9231a3c 100644 --- a/backtrackbb/scripts/btbb.py +++ b/backtrackbb/scripts/btbb.py @@ -1,7 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import os import numpy as np from multiprocessing import Pool diff --git a/backtrackbb/scripts/group_triggers.py b/backtrackbb/scripts/group_triggers.py index bb6ec84..e6d380c 100644 --- a/backtrackbb/scripts/group_triggers.py +++ b/backtrackbb/scripts/group_triggers.py @@ -1,7 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import os from ..mod_setup import configure from ..bp_types import Trigger, Pick diff --git a/backtrackbb/scripts/mbf_plot.py b/backtrackbb/scripts/mbf_plot.py index c602b67..34cba0a 100644 --- a/backtrackbb/scripts/mbf_plot.py +++ b/backtrackbb/scripts/mbf_plot.py @@ -1,7 +1,4 
@@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import os import numpy as np from ..mod_setup import configure diff --git a/backtrackbb/summary_cf.py b/backtrackbb/summary_cf.py index c67243a..5904a31 100644 --- a/backtrackbb/summary_cf.py +++ b/backtrackbb/summary_cf.py @@ -1,7 +1,4 @@ # -*- coding: utf8 -*- -from __future__ import (absolute_import, division, print_function, - unicode_literals) - import numpy as np from obspy import Stream from .mod_filter_picker import MBfilter_CF, GaussConv diff --git a/backtrackbb/version.py b/backtrackbb/version.py index 92638b3..976bc3b 100644 --- a/backtrackbb/version.py +++ b/backtrackbb/version.py @@ -1,118 +1,658 @@ -# -*- coding: utf-8 -*- -# Author: Douglas Creager -# This file is placed into the public domain. -# -# Modified from the ObsPy project - -# Calculates the current version number. If possible, this is the -# output of “git describe”, modified to conform to the versioning -# scheme that setuptools uses. If “git describe” returns an error -# (most likely because we're in an unpacked copy of a release tarball, -# rather than in a git working copy), then we fall back on reading the -# contents of the RELEASE-VERSION file. -# -# To use this script, simply import it your setup.py file, and use the -# results of get_git_version() as your package version: -# -# from version import * -# -# setup( -# version=get_git_version(), -# . -# . -# . -# ) -# -# This will automatically update the RELEASE-VERSION file, if -# necessary. Note that the RELEASE-VERSION file should *not* be -# checked into git; please add it to your top-level .gitignore file. -# -# You'll probably want to distribute the RELEASE-VERSION file in your -# sdist tarballs; to do this, just create a MANIFEST.in file that -# contains the following line: -# -# include RELEASE-VERSION -# NO IMPORTS FROM SOURCE_SPEC OR FUTURE IN THIS FILE! (file gets used at -# installation time) -import io + +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. +# Generated by versioneer-0.28 +# https://github.com/python-versioneer/python-versioneer + +"""Git implementation of _version.py.""" + +import errno import os -import inspect -from subprocess import Popen, PIPE +import re +import subprocess +import sys +from typing import Callable, Dict +import functools -__all__ = ("get_git_version") +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
+ git_refnames = "$Format:%d$" + git_full = "$Format:%H$" + git_date = "$Format:%ci$" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords -script_dir = os.path.abspath(os.path.dirname(inspect.getfile( - inspect.currentframe()))) -SOURCE_SPEC_ROOT = os.path.abspath(os.path.join(script_dir, os.pardir)) -VERSION_FILE = os.path.join(script_dir, "RELEASE-VERSION") +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" -def call_git_describe(abbrev=4): - try: - p = Popen(['git', 'rev-parse', '--show-toplevel'], - cwd=SOURCE_SPEC_ROOT, stdout=PIPE, stderr=PIPE) - p.stderr.close() - path = p.stdout.readline().decode().strip() - p.stdout.close() - except: - return None - if os.path.normpath(path) != SOURCE_SPEC_ROOT: - return None - try: - p = Popen(['git', 'describe', '--dirty', '--abbrev=%d' % abbrev, - '--always', '--tags'], - cwd=SOURCE_SPEC_ROOT, stdout=PIPE, stderr=PIPE) - p.stderr.close() - line = p.stdout.readline().decode() - p.stdout.close() +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440" + cfg.tag_prefix = "v" + cfg.parentdir_prefix = "backtrackbb-" + cfg.versionfile_source = "backtrackbb/version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} + - if "-" not in line and "." not in line: - line = "0.0.0-g%s" % line - return line.strip() - except: - return None +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate -def read_release_version(): +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + + popen_kwargs = {} + if sys.platform == "win32": + # This hides the console window if pythonw.exe is used + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + popen_kwargs["startupinfo"] = startupinfo + + for command in commands: + try: + dispcmd = str([command] + args) + # remember shell=False, so use git.cmd on windows, not just git + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None), **popen_kwargs) + break + except OSError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, process.returncode + return stdout, process.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. 
We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} try: - with io.open(VERSION_FILE, "rt") as fh: - version = fh.readline() - return version.strip() - except: - return None + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". 
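# --- illustrative sketch, not part of the patch ----------------------------
# What an expanded "$Format:%d$" keyword typically looks like inside a
# git-archive tarball, and how the keyword handler above pulls candidate
# tags out of it. The refnames string below is a hypothetical example.
refnames = " (HEAD -> master, tag: v1.1.1, origin/master)"
refs = {r.strip() for r in refnames.strip().strip("()").split(",")}
TAG = "tag: "
tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
print(sorted(tags))  # ['v1.1.1']
# ----------------------------------------------------------------------------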
+ tags = {r for r in refs if re.search(r'\d', r)} + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + # GIT_DIR can interfere with correct operation of Versioneer. + # It may be intended to be passed to the Versioneer-versioned project, + # but that should not change where we get our version from. + env = os.environ.copy() + env.pop("GIT_DIR", None) + runner = functools.partial(runner, env=env) + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=not verbose) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner(GITS, [ + "describe", "--tags", "--dirty", "--always", "--long", + "--match", f"{tag_prefix}[[:digit:]]*" + ], cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. 
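# --- illustrative sketch, not part of the patch ----------------------------
# The shape of the "git describe" output parsed a few lines below:
# TAG-NUM-gHEX with an optional "-dirty" suffix (the versioneer docs later
# quote "0.7-1-g574ab98-dirty" as an example). The sample string here is
# hypothetical.
import re

describe_out = "v1.1.1-2-g1076c97-dirty"
dirty = describe_out.endswith("-dirty")
if dirty:
    describe_out = describe_out[:describe_out.rindex("-dirty")]
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', describe_out)
if mo:
    print(mo.group(1), int(mo.group(2)), mo.group(3))  # v1.1.1 2 1076c97
# ----------------------------------------------------------------------------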
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparsable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) + pieces["distance"] = len(out.split()) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + return pieces -def write_release_version(version): - with io.open(VERSION_FILE, "wb") as fh: - fh.write(("%s\n" % version).encode('ascii', 'strict')) +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" -def get_git_version(abbrev=4): - # Read in the version that's currently in RELEASE-VERSION. - release_version = read_release_version() - # First try to get the current version using “git describe”. - version = call_git_describe(abbrev) +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". - # If that doesn't work, fall back on the value that's in - # RELEASE-VERSION. - if version is None: - version = release_version + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - # If we still don't have anything, that's an error. - if version is None: - return '0.0.0-tar/zipball' + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered - # If the current version is different from what's in the - # RELEASE-VERSION file, update the file to be current. - if version != release_version: - write_release_version(version) - # Finally, return the current version. - return version +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). + + Exceptions: + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + if pieces["distance"]: + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] + else: + # exception #1 + rendered = "0.post0.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. 
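# --- illustrative sketch, not part of the patch ----------------------------
# How a "pieces" dict produced by git_pieces_from_vcs() is turned into a
# PEP 440 string by render(). Assumes the patched backtrackbb/version.py is
# importable; the tag, distance and date values are hypothetical, and the
# long hash is the sample SHA1 quoted in the versioneer docs further below.
from backtrackbb.version import render

pieces = {
    "closest-tag": "1.1.1",   # tag with the "v" prefix already stripped
    "distance": 2,            # commits since that tag
    "short": "1076c97",
    "long": "1076c978a8d3cfc70f408fe5974aa6c092c949ac",
    "dirty": True,
    "branch": "master",
    "error": None,
    "date": "2023-06-01T12:00:00+0000",
}
print(render(pieces, "pep440")["version"])  # 1.1.1+2.g1076c97.dirty
# ----------------------------------------------------------------------------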
+ + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. + for _ in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass -if __name__ == "__main__": - print(get_git_version()) + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..4636a28 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,9 @@ +[build-system] +requires = [ + 'wheel', + 'setuptools', + 'numpy>=1.18' +] + +[tool.cibuildwheel] +build = ['cp36-*', 'cp37-*', 'cp38-*', 'cp39-*', 'cp310-*', 'cp311-*'] \ No newline at end of file diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..41e1d39 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,7 @@ +[versioneer] +VCS = git +style = pep440 +versionfile_source = backtrackbb/version.py +versionfile_build = backtrackbb/version.py +tag_prefix = v +parentdir_prefix = backtrackbb- \ No newline at end of file diff --git a/setup.py b/setup.py index bf1572a..a57a1a6 100644 --- a/setup.py +++ b/setup.py @@ -2,16 +2,7 @@ """setup.py: setuptools control.""" from setuptools import setup from distutils.core import Extension - -import inspect -import os -import sys - -# Import the version string. -path = os.path.join(os.path.abspath(os.path.dirname(inspect.getfile( - inspect.currentframe()))), 'backtrackbb') -sys.path.insert(0, path) -from version import get_git_version +import versioneer with open('README.md', 'rb') as f: long_descr = f.read().decode('utf-8') @@ -55,7 +46,7 @@ 'group_triggers = backtrackbb.scripts.group_triggers:main', ] }, - version=get_git_version(), + version=versioneer.get_version(), ext_package='backtrackbb.lib', ext_modules=ext_modules, description='Multi-band array detection and location of seismic sources', @@ -73,10 +64,14 @@ 'Agreement, Version 2.1', 'Operating System :: OS Independent', 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Physics'], install_requires=['obspy>=1.0.0', 'scipy>=0.17', 'pyproj'] diff --git a/versioneer.py b/versioneer.py new file mode 100644 index 0000000..18e34c2 --- /dev/null +++ b/versioneer.py @@ -0,0 +1,2205 @@ + +# Version: 0.28 + +"""The Versioneer - like a rocketeer, but for versions. 
+ +The Versioneer +============== + +* like a rocketeer, but for versions! +* https://github.com/python-versioneer/python-versioneer +* Brian Warner +* License: Public Domain (Unlicense) +* Compatible with: Python 3.7, 3.8, 3.9, 3.10 and pypy3 +* [![Latest Version][pypi-image]][pypi-url] +* [![Build Status][travis-image]][travis-url] + +This is a tool for managing a recorded version number in setuptools-based +python projects. The goal is to remove the tedious and error-prone "update +the embedded version string" step from your release process. Making a new +release should be as easy as recording a new tag in your version-control +system, and maybe making new tarballs. + + +## Quick Install + +Versioneer provides two installation modes. The "classic" vendored mode installs +a copy of versioneer into your repository. The experimental build-time dependency mode +is intended to allow you to skip this step and simplify the process of upgrading. + +### Vendored mode + +* `pip install versioneer` to somewhere in your $PATH + * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is + available, so you can also use `conda install -c conda-forge versioneer` +* add a `[tool.versioneer]` section to your `pyproject.toml` or a + `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) + * Note that you will need to add `tomli; python_version < "3.11"` to your + build-time dependencies if you use `pyproject.toml` +* run `versioneer install --vendor` in your source tree, commit the results +* verify version information with `python setup.py version` + +### Build-time dependency mode + +* `pip install versioneer` to somewhere in your $PATH + * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is + available, so you can also use `conda install -c conda-forge versioneer` +* add a `[tool.versioneer]` section to your `pyproject.toml` or a + `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) +* add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`) + to the `requires` key of the `build-system` table in `pyproject.toml`: + ```toml + [build-system] + requires = ["setuptools", "versioneer[toml]"] + build-backend = "setuptools.build_meta" + ``` +* run `versioneer install --no-vendor` in your source tree, commit the results +* verify version information with `python setup.py version` + +## Version Identifiers + +Source trees come from a variety of places: + +* a version-control system checkout (mostly used by developers) +* a nightly tarball, produced by build automation +* a snapshot tarball, produced by a web-based VCS browser, like github's + "tarball from tag" feature +* a release tarball, produced by "setup.py sdist", distributed through PyPI + +Within each source tree, the version identifier (either a string or a number, +this tool is format-agnostic) can come from a variety of places: + +* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows + about recent "tags" and an absolute revision-id +* the name of the directory into which the tarball was unpacked +* an expanded VCS keyword ($Id$, etc) +* a `_version.py` created by some earlier build step + +For released software, the version identifier is closely related to a VCS +tag. Some projects use tag names that include more than just the version +string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool +needs to strip the tag prefix to extract the version identifier. 
For +unreleased software (between tags), the version identifier should provide +enough information to help developers recreate the same tree, while also +giving them an idea of roughly how old the tree is (after version 1.2, before +version 1.3). Many VCS systems can report a description that captures this, +for example `git describe --tags --dirty --always` reports things like +"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the +0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has +uncommitted changes). + +The version identifier is used for multiple purposes: + +* to allow the module to self-identify its version: `myproject.__version__` +* to choose a name and prefix for a 'setup.py sdist' tarball + +## Theory of Operation + +Versioneer works by adding a special `_version.py` file into your source +tree, where your `__init__.py` can import it. This `_version.py` knows how to +dynamically ask the VCS tool for version information at import time. + +`_version.py` also contains `$Revision$` markers, and the installation +process marks `_version.py` to have this marker rewritten with a tag name +during the `git archive` command. As a result, generated tarballs will +contain enough information to get the proper version. + +To allow `setup.py` to compute a version too, a `versioneer.py` is added to +the top level of your source tree, next to `setup.py` and the `setup.cfg` +that configures it. This overrides several distutils/setuptools commands to +compute the version when invoked, and changes `setup.py build` and `setup.py +sdist` to replace `_version.py` with a small static file that contains just +the generated version data. + +## Installation + +See [INSTALL.md](./INSTALL.md) for detailed installation instructions. + +## Version-String Flavors + +Code which uses Versioneer can learn about its version string at runtime by +importing `_version` from your main `__init__.py` file and running the +`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can +import the top-level `versioneer.py` and run `get_versions()`. + +Both functions return a dictionary with different flavors of version +information: + +* `['version']`: A condensed version string, rendered using the selected + style. This is the most commonly used value for the project's version + string. The default "pep440" style yields strings like `0.11`, + `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section + below for alternative styles. + +* `['full-revisionid']`: detailed revision identifier. For Git, this is the + full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". + +* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the + commit date in ISO 8601 format. This will be None if the date is not + available. + +* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that + this is only accurate if run in a VCS checkout, otherwise it is likely to + be False or None + +* `['error']`: if the version string could not be computed, this will be set + to a string describing the problem, otherwise it will be None. It may be + useful to throw an exception in setup.py if this is set, to avoid e.g. + creating tarballs with a version string of "unknown". + +Some variants are more useful than others. Including `full-revisionid` in a +bug report should allow developers to reconstruct the exact code being tested +(or indicate the presence of local changes that should be shared with the +developers). 
`version` is suitable for display in an "about" box or a CLI +`--version` output: it can be easily compared against release notes and lists +of bugs fixed in various releases. + +The installer adds the following text to your `__init__.py` to place a basic +version in `YOURPROJECT.__version__`: + + from ._version import get_versions + __version__ = get_versions()['version'] + del get_versions + +## Styles + +The setup.cfg `style=` configuration controls how the VCS information is +rendered into a version string. + +The default style, "pep440", produces a PEP440-compliant string, equal to the +un-prefixed tag name for actual releases, and containing an additional "local +version" section with more detail for in-between builds. For Git, this is +TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags +--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the +tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and +that this commit is two revisions ("+2") beyond the "0.11" tag. For released +software (exactly equal to a known tag), the identifier will only contain the +stripped tag, e.g. "0.11". + +Other styles are available. See [details.md](details.md) in the Versioneer +source tree for descriptions. + +## Debugging + +Versioneer tries to avoid fatal errors: if something goes wrong, it will tend +to return a version of "0+unknown". To investigate the problem, run `setup.py +version`, which will run the version-lookup code in a verbose mode, and will +display the full contents of `get_versions()` (including the `error` string, +which may help identify what went wrong). + +## Known Limitations + +Some situations are known to cause problems for Versioneer. This details the +most significant ones. More can be found on Github +[issues page](https://github.com/python-versioneer/python-versioneer/issues). + +### Subprojects + +Versioneer has limited support for source trees in which `setup.py` is not in +the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are +two common reasons why `setup.py` might not be in the root: + +* Source trees which contain multiple subprojects, such as + [Buildbot](https://github.com/buildbot/buildbot), which contains both + "master" and "slave" subprojects, each with their own `setup.py`, + `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI + distributions (and upload multiple independently-installable tarballs). +* Source trees whose main purpose is to contain a C library, but which also + provide bindings to Python (and perhaps other languages) in subdirectories. + +Versioneer will look for `.git` in parent directories, and most operations +should get the right version string. However `pip` and `setuptools` have bugs +and implementation details which frequently cause `pip install .` from a +subproject directory to fail to find a correct version string (so it usually +defaults to `0+unknown`). + +`pip install --editable .` should work correctly. `setup.py install` might +work too. + +Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in +some later version. + +[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking +this issue. The discussion in +[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the +issue from the Versioneer side in more detail. 
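A minimal runtime sketch of the consumer side described above, assuming the patched backtrackbb package is importable: it reads the flavors returned by `get_versions()` and fails loudly when `error` is set, as the Debugging notes recommend doing in setup.py.

from backtrackbb import version

info = version.get_versions()
if info["error"]:
    # e.g. avoid shipping a tarball whose version would be "unknown"
    raise RuntimeError("could not determine version: %s" % info["error"])
print(info["version"], info["full-revisionid"], info["dirty"], info["date"])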
+[pip PR#3176](https://github.com/pypa/pip/pull/3176) and +[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve +pip to let Versioneer work correctly. + +Versioneer-0.16 and earlier only looked for a `.git` directory next to the +`setup.cfg`, so subprojects were completely unsupported with those releases. + +### Editable installs with setuptools <= 18.5 + +`setup.py develop` and `pip install --editable .` allow you to install a +project into a virtualenv once, then continue editing the source code (and +test) without re-installing after every change. + +"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a +convenient way to specify executable scripts that should be installed along +with the python package. + +These both work as expected when using modern setuptools. When using +setuptools-18.5 or earlier, however, certain operations will cause +`pkg_resources.DistributionNotFound` errors when running the entrypoint +script, which must be resolved by re-installing the package. This happens +when the install happens with one version, then the egg_info data is +regenerated while a different version is checked out. Many setup.py commands +cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into +a different virtualenv), so this can be surprising. + +[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes +this one, but upgrading to a newer version of setuptools should probably +resolve it. + + +## Updating Versioneer + +To upgrade your project to a new release of Versioneer, do the following: + +* install the new Versioneer (`pip install -U versioneer` or equivalent) +* edit `setup.cfg` and `pyproject.toml`, if necessary, + to include any new configuration settings indicated by the release notes. + See [UPGRADING](./UPGRADING.md) for details. +* re-run `versioneer install --[no-]vendor` in your source tree, to replace + `SRC/_version.py` +* commit any changed files + +## Future Directions + +This tool is designed to make it easily extended to other version-control +systems: all VCS-specific components are in separate directories like +src/git/ . The top-level `versioneer.py` script is assembled from these +components by running make-versioneer.py . In the future, make-versioneer.py +will take a VCS name as an argument, and will construct a version of +`versioneer.py` that is specific to the given VCS. It might also take the +configuration arguments that are currently provided manually during +installation by editing setup.py . Alternatively, it might go the other +direction and include code from all supported VCS systems, reducing the +number of intermediate scripts. + +## Similar projects + +* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time + dependency +* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of + versioneer +* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools + plugin + +## License + +To make Versioneer easier to embed, all its code is dedicated to the public +domain. The `_version.py` that it creates is also in the public domain. +Specifically, both are released under the "Unlicense", as described in +https://unlicense.org/. 
+ +[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg +[pypi-url]: https://pypi.python.org/pypi/versioneer/ +[travis-image]: +https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg +[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer + +""" +# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring +# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements +# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error +# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with +# pylint:disable=attribute-defined-outside-init,too-many-arguments + +import configparser +import errno +import json +import os +import re +import subprocess +import sys +from pathlib import Path +from typing import Callable, Dict +import functools + +have_tomllib = True +if sys.version_info >= (3, 11): + import tomllib +else: + try: + import tomli as tomllib + except ImportError: + have_tomllib = False + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_root(): + """Get the project root directory. + + We require that all commands are run from the project root, i.e. the + directory that contains setup.py, setup.cfg, and versioneer.py . + """ + root = os.path.realpath(os.path.abspath(os.getcwd())) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + # allow 'python path/to/setup.py COMMAND' + root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + err = ("Versioneer was unable to run the project root directory. " + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND').") + raise VersioneerBadRootError(err) + try: + # Certain runtime workflows (setup.py install/develop in a setuptools + # tree) execute all dependencies in a single python process, so + # "versioneer" may be imported multiple times, and python's shared + # module-import table will cache the first one. So we can't use + # os.path.dirname(__file__), as that will find whichever + # versioneer.py was first imported, even in later projects. + my_path = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(my_path)[0]) + vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) + if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals(): + print("Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(my_path), versioneer_py)) + except NameError: + pass + return root + + +def get_config_from_root(root): + """Read the project setup.cfg file to determine Versioneer config.""" + # This might raise OSError (if setup.cfg is missing), or + # configparser.NoSectionError (if it lacks a [versioneer] section), or + # configparser.NoOptionError (if it lacks "VCS="). See the docstring at + # the top of versioneer.py for instructions on writing your setup.cfg . 
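# --- illustrative sketch, not part of the patch ----------------------------
# The kind of [versioneer] section this function reads out of setup.cfg;
# the values mirror the setup.cfg added elsewhere in this patch.
import configparser

parser = configparser.ConfigParser()
parser.read_string("""
[versioneer]
VCS = git
style = pep440
versionfile_source = backtrackbb/version.py
versionfile_build = backtrackbb/version.py
tag_prefix = v
parentdir_prefix = backtrackbb-
""")
section = parser["versioneer"]
print(section["VCS"], section.get("style", ""), section.get("tag_prefix"))
# ----------------------------------------------------------------------------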
+ root = Path(root) + pyproject_toml = root / "pyproject.toml" + setup_cfg = root / "setup.cfg" + section = None + if pyproject_toml.exists() and have_tomllib: + try: + with open(pyproject_toml, 'rb') as fobj: + pp = tomllib.load(fobj) + section = pp['tool']['versioneer'] + except (tomllib.TOMLDecodeError, KeyError): + pass + if not section: + parser = configparser.ConfigParser() + with open(setup_cfg) as cfg_file: + parser.read_file(cfg_file) + parser.get("versioneer", "VCS") # raise error if missing + + section = parser["versioneer"] + + cfg = VersioneerConfig() + cfg.VCS = section['VCS'] + cfg.style = section.get("style", "") + cfg.versionfile_source = section.get("versionfile_source") + cfg.versionfile_build = section.get("versionfile_build") + cfg.tag_prefix = section.get("tag_prefix") + if cfg.tag_prefix in ("''", '""', None): + cfg.tag_prefix = "" + cfg.parentdir_prefix = section.get("parentdir_prefix") + cfg.verbose = section.get("verbose") + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +# these dictionaries contain VCS-specific tools +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} + + +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + HANDLERS.setdefault(vcs, {})[method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + + popen_kwargs = {} + if sys.platform == "win32": + # This hides the console window if pythonw.exe is used + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + popen_kwargs["startupinfo"] = startupinfo + + for command in commands: + try: + dispcmd = str([command] + args) + # remember shell=False, so use git.cmd on windows, not just git + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None), **popen_kwargs) + break + except OSError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, process.returncode + return stdout, process.returncode + + +LONG_VERSION_PY['git'] = r''' +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. 
+# Generated by versioneer-0.28 +# https://github.com/python-versioneer/python-versioneer + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys +from typing import Callable, Dict +import functools + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). + git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" + git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" + git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "%(STYLE)s" + cfg.tag_prefix = "%(TAG_PREFIX)s" + cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" + cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} + + +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + + popen_kwargs = {} + if sys.platform == "win32": + # This hides the console window if pythonw.exe is used + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + popen_kwargs["startupinfo"] = startupinfo + + for command in commands: + try: + dispcmd = str([command] + args) + # remember shell=False, so use git.cmd on windows, not just git + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None), **popen_kwargs) + break + except OSError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %%s" %% dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %%s" %% (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %%s (error)" %% dispcmd) + print("stdout was %%s" %% stdout) + return None, process.returncode + return stdout, process.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. 
We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %%s but none started with prefix %%s" %% + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %%d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = {r for r in refs if re.search(r'\d', r)} + if verbose: + print("discarding '%%s', no digits" %% ",".join(refs - tags)) + if verbose: + print("likely tags: %%s" %% ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
"2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue + if verbose: + print("picking %%s" %% r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + # GIT_DIR can interfere with correct operation of Versioneer. + # It may be intended to be passed to the Versioneer-versioned project, + # but that should not change where we get our version from. + env = os.environ.copy() + env.pop("GIT_DIR", None) + runner = functools.partial(runner, env=env) + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=not verbose) + if rc != 0: + if verbose: + print("Directory %%s not under git control" %% root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner(GITS, [ + "describe", "--tags", "--dirty", "--always", "--long", + "--match", f"{tag_prefix}[[:digit:]]*" + ], cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. 
+ branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparsable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%%s'" + %% describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%%s' doesn't start with prefix '%%s'" + print(fmt %% (full_tag, tag_prefix)) + pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" + %% (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) + pieces["distance"] = len(out.split()) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). + + Exceptions: + 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + if pieces["distance"]: + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"]) + else: + rendered += ".post0.dev%%d" %% (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] + else: + # exception #1 + rendered = "0.post0.dev%%d" %% pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + return rendered + + +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%%s'" %% style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
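+        # Example (illustrative): with versionfile_source set to
+        # "src/myproject/_version.py", the loop below strips three path
+        # components from this file's absolute path and lands on the
+        # project root.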
+ for _ in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} +''' + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". 
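+        # Illustrative example: with old-style refs such as
+        # {"HEAD", "master", "v1.2.3"}, the digit heuristic below keeps
+        # only {"v1.2.3"}.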
+ tags = {r for r in refs if re.search(r'\d', r)} + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + # GIT_DIR can interfere with correct operation of Versioneer. + # It may be intended to be passed to the Versioneer-versioned project, + # but that should not change where we get our version from. + env = os.environ.copy() + env.pop("GIT_DIR", None) + runner = functools.partial(runner, env=env) + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=not verbose) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner(GITS, [ + "describe", "--tags", "--dirty", "--always", "--long", + "--match", f"{tag_prefix}[[:digit:]]*" + ], cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. 
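+        # Typical detached-HEAD output of `git branch --contains` looks
+        # roughly like (illustrative):
+        #   * (HEAD detached at 1a2b3c4)
+        #     master
+        # hence the "(...)" first line is dropped and the two-character
+        # "* "/"  " prefixes are stripped below.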
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparsable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) + pieces["distance"] = len(out.split()) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def do_vcs_install(versionfile_source, ipy): + """Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-subst keyword substitution. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + files = [versionfile_source] + if ipy: + files.append(ipy) + if "VERSIONEER_PEP518" not in globals(): + try: + my_path = __file__ + if my_path.endswith((".pyc", ".pyo")): + my_path = os.path.splitext(my_path)[0] + ".py" + versioneer_file = os.path.relpath(my_path) + except NameError: + versioneer_file = "versioneer.py" + files.append(versioneer_file) + present = False + try: + with open(".gitattributes", "r") as fobj: + for line in fobj: + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + break + except OSError: + pass + if not present: + with open(".gitattributes", "a+") as fobj: + fobj.write(f"{versionfile_source} export-subst\n") + files.append(".gitattributes") + run_command(GITS, ["add", "--"] + files) + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. 
+ + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +SHORT_VERSION_PY = """ +# This file was generated by 'versioneer.py' (0.28) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +import json + +version_json = ''' +%s +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) +""" + + +def versions_from_file(filename): + """Try to determine the version from _version.py if present.""" + try: + with open(filename) as f: + contents = f.read() + except OSError: + raise NotThisMethod("unable to read _version.py") + mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + raise NotThisMethod("no version_json in _version.py") + return json.loads(mo.group(1)) + + +def write_to_version_file(filename, versions): + """Write the given version number to the given _version.py file.""" + os.unlink(filename) + contents = json.dumps(versions, sort_keys=True, + indent=1, separators=(",", ": ")) + with open(filename, "w") as f: + f.write(SHORT_VERSION_PY % contents) + + print("set %s to '%s'" % (filename, versions["version"])) + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). + + Exceptions: + 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + if pieces["distance"]: + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] + else: + # exception #1 + rendered = "0.post0.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +class VersioneerBadRootError(Exception): + """The project root directory is unknown or missing key files.""" + + +def get_versions(verbose=False): + """Get the project version from whatever source is available. + + Returns dict with two keys: 'version' and 'full'. + """ + if "versioneer" in sys.modules: + # see the discussion in cmdclass.py:get_cmdclass() + del sys.modules["versioneer"] + + root = get_root() + cfg = get_config_from_root(root) + + assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" + handlers = HANDLERS.get(cfg.VCS) + assert handlers, "unrecognized VCS '%s'" % cfg.VCS + verbose = verbose or cfg.verbose + assert cfg.versionfile_source is not None, \ + "please set versioneer.versionfile_source" + assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" + + versionfile_abs = os.path.join(root, cfg.versionfile_source) + + # extract version from first of: _version.py, VCS command (e.g. 'git + # describe'), parentdir. 
This is meant to work for developers using a + # source checkout, for users of a tarball created by 'setup.py sdist', + # and for users of a tarball/zipball created by 'git archive' or github's + # download-from-tag feature or the equivalent in other VCSes. + + get_keywords_f = handlers.get("get_keywords") + from_keywords_f = handlers.get("keywords") + if get_keywords_f and from_keywords_f: + try: + keywords = get_keywords_f(versionfile_abs) + ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) + if verbose: + print("got version from expanded keyword %s" % ver) + return ver + except NotThisMethod: + pass + + try: + ver = versions_from_file(versionfile_abs) + if verbose: + print("got version from file %s %s" % (versionfile_abs, ver)) + return ver + except NotThisMethod: + pass + + from_vcs_f = handlers.get("pieces_from_vcs") + if from_vcs_f: + try: + pieces = from_vcs_f(cfg.tag_prefix, root, verbose) + ver = render(pieces, cfg.style) + if verbose: + print("got version from VCS %s" % ver) + return ver + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + if verbose: + print("got version from parentdir %s" % ver) + return ver + except NotThisMethod: + pass + + if verbose: + print("unable to compute version") + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, "error": "unable to compute version", + "date": None} + + +def get_version(): + """Get the short version string for this project.""" + return get_versions()["version"] + + +def get_cmdclass(cmdclass=None): + """Get the custom setuptools subclasses used by Versioneer. + + If the package uses a different cmdclass (e.g. one from numpy), it + should be provide as an argument. + """ + if "versioneer" in sys.modules: + del sys.modules["versioneer"] + # this fixes the "python setup.py develop" case (also 'install' and + # 'easy_install .'), in which subdependencies of the main project are + # built (using setup.py bdist_egg) in the same python process. Assume + # a main project A and a dependency B, which use different versions + # of Versioneer. A's setup.py imports A's Versioneer, leaving it in + # sys.modules by the time B's setup.py is executed, causing B to run + # with the wrong versioneer. Setuptools wraps the sub-dep builds in a + # sandbox that restores sys.modules to it's pre-build state, so the + # parent is protected against the child's "import versioneer". By + # removing ourselves from sys.modules here, before the child build + # happens, we protect the child from the parent's versioneer too. + # Also see https://github.com/python-versioneer/python-versioneer/issues/52 + + cmds = {} if cmdclass is None else cmdclass.copy() + + # we add "version" to setuptools + from setuptools import Command + + class cmd_version(Command): + description = "report generated version string" + user_options = [] + boolean_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + vers = get_versions(verbose=True) + print("Version: %s" % vers["version"]) + print(" full-revisionid: %s" % vers.get("full-revisionid")) + print(" dirty: %s" % vers.get("dirty")) + print(" date: %s" % vers.get("date")) + if vers["error"]: + print(" error: %s" % vers["error"]) + cmds["version"] = cmd_version + + # we override "build_py" in setuptools + # + # most invocation pathways end up running build_py: + # distutils/build -> build_py + # distutils/install -> distutils/build ->.. 
+ # setuptools/bdist_wheel -> distutils/install ->.. + # setuptools/bdist_egg -> distutils/install_lib -> build_py + # setuptools/install -> bdist_egg ->.. + # setuptools/develop -> ? + # pip install: + # copies source tree to a tempdir before running egg_info/etc + # if .git isn't copied too, 'git describe' will fail + # then does setup.py bdist_wheel, or sometimes setup.py install + # setup.py egg_info -> ? + + # pip install -e . and setuptool/editable_wheel will invoke build_py + # but the build_py command is not expected to copy any files. + + # we override different "build_py" commands for both environments + if 'build_py' in cmds: + _build_py = cmds['build_py'] + else: + from setuptools.command.build_py import build_py as _build_py + + class cmd_build_py(_build_py): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_py.run(self) + if getattr(self, "editable_mode", False): + # During editable installs `.py` and data files are + # not copied to build_lib + return + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if cfg.versionfile_build: + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_py"] = cmd_build_py + + if 'build_ext' in cmds: + _build_ext = cmds['build_ext'] + else: + from setuptools.command.build_ext import build_ext as _build_ext + + class cmd_build_ext(_build_ext): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_ext.run(self) + if self.inplace: + # build_ext --inplace will only build extensions in + # build/lib<..> dir with no _version.py to write to. + # As in place builds will already have a _version.py + # in the module dir, we do not need to write one. + return + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if not cfg.versionfile_build: + return + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) + if not os.path.exists(target_versionfile): + print(f"Warning: {target_versionfile} does not exist, skipping " + "version update. This can happen if you are running build_ext " + "without first running build_py.") + return + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_ext"] = cmd_build_ext + + if "cx_Freeze" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe + # nczeczulin reports that py2exe won't like the pep440-style string + # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. + # setup(console=[{ + # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION + # "product_version": versioneer.get_version(), + # ... 
+ + class cmd_build_exe(_build_exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _build_exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["build_exe"] = cmd_build_exe + del cmds["build_py"] + + if 'py2exe' in sys.modules: # py2exe enabled? + try: + from py2exe.setuptools_buildexe import py2exe as _py2exe + except ImportError: + from py2exe.distutils_buildexe import py2exe as _py2exe + + class cmd_py2exe(_py2exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _py2exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["py2exe"] = cmd_py2exe + + # sdist farms its file list building out to egg_info + if 'egg_info' in cmds: + _egg_info = cmds['egg_info'] + else: + from setuptools.command.egg_info import egg_info as _egg_info + + class cmd_egg_info(_egg_info): + def find_sources(self): + # egg_info.find_sources builds the manifest list and writes it + # in one shot + super().find_sources() + + # Modify the filelist and normalize it + root = get_root() + cfg = get_config_from_root(root) + self.filelist.append('versioneer.py') + if cfg.versionfile_source: + # There are rare cases where versionfile_source might not be + # included by default, so we must be explicit + self.filelist.append(cfg.versionfile_source) + self.filelist.sort() + self.filelist.remove_duplicates() + + # The write method is hidden in the manifest_maker instance that + # generated the filelist and was thrown away + # We will instead replicate their final normalization (to unicode, + # and POSIX-style paths) + from setuptools import unicode_utils + normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/') + for f in self.filelist.files] + + manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt') + with open(manifest_filename, 'w') as fobj: + fobj.write('\n'.join(normalized)) + + cmds['egg_info'] = cmd_egg_info + + # we override different "sdist" commands for both environments + if 'sdist' in cmds: + _sdist = cmds['sdist'] + else: + from setuptools.command.sdist import sdist as _sdist + + class cmd_sdist(_sdist): + def run(self): + versions = get_versions() + self._versioneer_generated_versions = versions + # unless we update this, the command will keep using the old + # version + self.distribution.metadata.version = versions["version"] + return _sdist.run(self) + + def make_release_tree(self, base_dir, files): + root = get_root() + cfg = get_config_from_root(root) + _sdist.make_release_tree(self, base_dir, files) + # now locate _version.py in the new base_dir directory + # (remembering that it may be a hardlink) and replace it with an + # updated value + target_versionfile = 
os.path.join(base_dir, cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, + self._versioneer_generated_versions) + cmds["sdist"] = cmd_sdist + + return cmds + + +CONFIG_ERROR = """ +setup.cfg is missing the necessary Versioneer configuration. You need +a section like: + + [versioneer] + VCS = git + style = pep440 + versionfile_source = src/myproject/_version.py + versionfile_build = myproject/_version.py + tag_prefix = + parentdir_prefix = myproject- + +You will also need to edit your setup.py to use the results: + + import versioneer + setup(version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), ...) + +Please read the docstring in ./versioneer.py for configuration instructions, +edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. +""" + +SAMPLE_CONFIG = """ +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. + +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + +""" + +OLD_SNIPPET = """ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions +""" + +INIT_PY_SNIPPET = """ +from . import {0} +__version__ = {0}.get_versions()['version'] +""" + + +def do_setup(): + """Do main VCS-independent setup function for installing Versioneer.""" + root = get_root() + try: + cfg = get_config_from_root(root) + except (OSError, configparser.NoSectionError, + configparser.NoOptionError) as e: + if isinstance(e, (OSError, configparser.NoSectionError)): + print("Adding sample versioneer config to setup.cfg", + file=sys.stderr) + with open(os.path.join(root, "setup.cfg"), "a") as f: + f.write(SAMPLE_CONFIG) + print(CONFIG_ERROR, file=sys.stderr) + return 1 + + print(" creating %s" % cfg.versionfile_source) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), + "__init__.py") + if os.path.exists(ipy): + try: + with open(ipy, "r") as f: + old = f.read() + except OSError: + old = "" + module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] + snippet = INIT_PY_SNIPPET.format(module) + if OLD_SNIPPET in old: + print(" replacing boilerplate in %s" % ipy) + with open(ipy, "w") as f: + f.write(old.replace(OLD_SNIPPET, snippet)) + elif snippet not in old: + print(" appending to %s" % ipy) + with open(ipy, "a") as f: + f.write(snippet) + else: + print(" %s unmodified" % ipy) + else: + print(" %s doesn't exist, ok" % ipy) + ipy = None + + # Make VCS-specific changes. For git, this means creating/changing + # .gitattributes to mark _version.py for export-subst keyword + # substitution. 
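+    # For git this ensures .gitattributes contains a line of the form
+    #   <versionfile_source> export-subst
+    # (see do_vcs_install above); the placeholder stands for the configured
+    # path.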
+ do_vcs_install(cfg.versionfile_source, ipy) + return 0 + + +def scan_setup_py(): + """Validate the contents of setup.py against Versioneer's expectations.""" + found = set() + setters = False + errors = 0 + with open("setup.py", "r") as f: + for line in f.readlines(): + if "import versioneer" in line: + found.add("import") + if "versioneer.get_cmdclass()" in line: + found.add("cmdclass") + if "versioneer.get_version()" in line: + found.add("get_version") + if "versioneer.VCS" in line: + setters = True + if "versioneer.versionfile_source" in line: + setters = True + if len(found) != 3: + print("") + print("Your setup.py appears to be missing some important items") + print("(but I might be wrong). Please make sure it has something") + print("roughly like the following:") + print("") + print(" import versioneer") + print(" setup( version=versioneer.get_version(),") + print(" cmdclass=versioneer.get_cmdclass(), ...)") + print("") + errors += 1 + if setters: + print("You should remove lines like 'versioneer.VCS = ' and") + print("'versioneer.versionfile_source = ' . This configuration") + print("now lives in setup.cfg, and should be removed from setup.py") + print("") + errors += 1 + return errors + + +def setup_command(): + """Set up Versioneer and exit with appropriate error code.""" + errors = do_setup() + errors += scan_setup_py() + sys.exit(1 if errors else 0) + + +if __name__ == "__main__": + cmd = sys.argv[1] + if cmd == "setup": + setup_command()
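+
+# Typical bootstrap invocation, matching the guidance printed by CONFIG_ERROR
+# and SAMPLE_CONFIG above: fill in the [versioneer] section of setup.cfg and
+# then run `python versioneer.py setup` from the project root.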