From ed32b88d0a9ad3e9c5098d7022e5c825a7cdff40 Mon Sep 17 00:00:00 2001 From: marscher Date: Tue, 19 Apr 2016 16:57:42 +0200 Subject: [PATCH 01/33] [clustering] fix memleak in minRMSD assign. Fixes #776 --- pyemma/coordinates/clustering/src/clustering.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyemma/coordinates/clustering/src/clustering.c b/pyemma/coordinates/clustering/src/clustering.c index 1e1d05c3f..38bd8ab23 100644 --- a/pyemma/coordinates/clustering/src/clustering.c +++ b/pyemma/coordinates/clustering/src/clustering.c @@ -85,7 +85,7 @@ int c_assign(float *chunk, float *centers, npy_int32 *dtraj, char* metric, float * SKP_restrict chunk_p; #endif - buffer_a = NULL; buffer_b = NULL; trace_centers_p = NULL; + buffer_a = NULL; buffer_b = NULL; trace_centers_p = NULL; centers_precentered = NULL; ret = ASSIGN_SUCCESS; debug=0; @@ -109,7 +109,6 @@ int c_assign(float *chunk, float *centers, npy_int32 *dtraj, char* metric, inplace_center_and_trace_atom_major(centers_precentered, &trace_centers, 1, dim/3); } trace_centers_p = &trace_centers; - //trace_centers_p = NULL; centers = centers_precentered; } else { ret = ASSIGN_ERR_INVALID_METRIC; @@ -168,6 +167,7 @@ int c_assign(float *chunk, float *centers, npy_int32 *dtraj, char* metric, error: free(buffer_a); free(buffer_b); + free(centers_precentered); return ret; } From 0f93ab2455074d6f2f422c0704d176451dedfe63 Mon Sep 17 00:00:00 2001 From: marscher Date: Tue, 19 Apr 2016 18:41:54 +0200 Subject: [PATCH 02/33] [travis] on success push dev-versions to anaconda cloud --- .travis.yml | 5 ++++- tools/ci/travis/after_success.sh | 13 +++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 tools/ci/travis/after_success.sh diff --git a/.travis.yml b/.travis.yml index de434d0a5..eca997601 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,6 +9,9 @@ env: global: - PATH=$HOME/miniconda/bin:$PATH - common_py_deps="jinja2 conda-build" + - PACKAGENAME=pyemma + - ORGNAME=omnia + secure: BBcvn2SDTrKtCQdJFJ6ezv5lmYWYtoukS1maptS2RuEHrBqVMN++go5U9BTNDHBLUESewolaR9GFng2xXBLpPYgCE2XOGsJPKM0L15/Ae/QWSmqUbLNJFVZz3I3EliJJV9si1EaWQnQY0axPqSXRTA8CyVOeO9zxMewWozGIHE0= matrix: # minimum possible numpy version = 1.8, because of mdtraj - python=2.7 CONDA_PY=27 CONDA_NPY=19 @@ -37,4 +40,4 @@ after_success: # coverage report: needs .coverage file generated by testsuite and git src - pip install coveralls - coveralls - +- ./tools/ci/travis/after_success.sh diff --git a/tools/ci/travis/after_success.sh b/tools/ci/travis/after_success.sh new file mode 100644 index 000000000..872883826 --- /dev/null +++ b/tools/ci/travis/after_success.sh @@ -0,0 +1,13 @@ +if [ "$TRAVIS_PULL_REQUEST" = true ]; then + echo "This is a pull request. No deployment will be done."; exit 0 +fi + + +if [ "$TRAVIS_BRANCH" != "devel" ]; then + echo "No deployment on BRANCH='$TRAVIS_BRANCH'"; exit 0 +fi + + +# Deploy to binstar +conda install --yes anaconda-client +anaconda -t $BINSTAR_TOKEN upload --force -u omnia -p ${PACKAGENAME}-dev $HOME/miniconda/conda-bld/*/${PACKAGENAME}-dev-*.tar.bz2 From 165de9cf68b50778f5e5307749dbc83e093b9754 Mon Sep 17 00:00:00 2001 From: marscher Date: Tue, 19 Apr 2016 18:47:32 +0200 Subject: [PATCH 03/33] [travis] only execute on_success script in case secure env vars are available. 
--- .travis.yml | 2 +- tools/ci/travis/after_success.sh | 12 ++++++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index eca997601..e75a5ceb3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -40,4 +40,4 @@ after_success: # coverage report: needs .coverage file generated by testsuite and git src - pip install coveralls - coveralls -- ./tools/ci/travis/after_success.sh +- if [ "$TRAVIS_SECURE_ENV_VARS" == true ]; then source tools/travis-ci/after_success.sh; fi diff --git a/tools/ci/travis/after_success.sh b/tools/ci/travis/after_success.sh index 872883826..ef228f63b 100644 --- a/tools/ci/travis/after_success.sh +++ b/tools/ci/travis/after_success.sh @@ -9,5 +9,13 @@ fi # Deploy to binstar -conda install --yes anaconda-client -anaconda -t $BINSTAR_TOKEN upload --force -u omnia -p ${PACKAGENAME}-dev $HOME/miniconda/conda-bld/*/${PACKAGENAME}-dev-*.tar.bz2 +conda install --yes anaconda-client jinja2 +pushd . +cd $HOME/miniconda/conda-bld +FILES=*/${PACKAGENAME}-dev-*.tar.bz2 +for filename in $FILES; do + anaconda -t $BINSTAR_TOKEN remove --force ${ORGNAME}/${PACKAGENAME}-dev/${filename} + anaconda -t $BINSTAR_TOKEN upload --force -u ${ORGNAME} -p ${PACKAGENAME}-dev ${filename} +done +popd + From d7acb3b5af04511fb874dec0d8c4f4add235b98a Mon Sep 17 00:00:00 2001 From: vargaslo Date: Tue, 19 Apr 2016 15:35:54 -0500 Subject: [PATCH 04/33] Update maximum_likelihood_hmsm.py format string to prevent error --- pyemma/msm/estimators/maximum_likelihood_hmsm.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyemma/msm/estimators/maximum_likelihood_hmsm.py b/pyemma/msm/estimators/maximum_likelihood_hmsm.py index e96b92038..1c280b915 100644 --- a/pyemma/msm/estimators/maximum_likelihood_hmsm.py +++ b/pyemma/msm/estimators/maximum_likelihood_hmsm.py @@ -169,8 +169,8 @@ def _estimate(self, dtrajs): if self.lag >= _np.max(trajlengths): raise ValueError('Illegal lag time ' + str(self.lag) + ' exceeds longest trajectory length') if self.lag > _np.mean(trajlengths): - self.logger.warning('Lag time ' + str(self.lag) + ' is on the order of mean trajectory length' - + _np.mean(trajlengths) + '. It is recommended to fit four lag times in each ' + self.logger.warning('Lag time ' + str(self.lag) + ' is on the order of mean trajectory length ' + + str(_np.mean(trajlengths)) + '. It is recommended to fit four lag times in each ' + 'trajectory. 
HMM might be inaccurate.') # EVALUATE STRIDE @@ -646,4 +646,4 @@ def cktest(self, mlags=10, conf=0.95, err_est=False, show_progress=True): mlags=mlags, conf=conf, err_est=err_est, show_progress=show_progress) ck.estimate(self._dtrajs_full) - return ck \ No newline at end of file + return ck From 7b282fc7b1a5b3f13248c24d9452a0db61c7957a Mon Sep 17 00:00:00 2001 From: marscher Date: Wed, 20 Apr 2016 11:37:06 +0200 Subject: [PATCH 05/33] [travis] moved secure section to proper location, added shebang for after_success.sh --- .travis.yml | 2 +- tools/ci/travis/after_success.sh | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index e75a5ceb3..670c8c7e2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,7 +11,7 @@ env: - common_py_deps="jinja2 conda-build" - PACKAGENAME=pyemma - ORGNAME=omnia - secure: BBcvn2SDTrKtCQdJFJ6ezv5lmYWYtoukS1maptS2RuEHrBqVMN++go5U9BTNDHBLUESewolaR9GFng2xXBLpPYgCE2XOGsJPKM0L15/Ae/QWSmqUbLNJFVZz3I3EliJJV9si1EaWQnQY0axPqSXRTA8CyVOeO9zxMewWozGIHE0= + - secure: BBcvn2SDTrKtCQdJFJ6ezv5lmYWYtoukS1maptS2RuEHrBqVMN++go5U9BTNDHBLUESewolaR9GFng2xXBLpPYgCE2XOGsJPKM0L15/Ae/QWSmqUbLNJFVZz3I3EliJJV9si1EaWQnQY0axPqSXRTA8CyVOeO9zxMewWozGIHE0= matrix: # minimum possible numpy version = 1.8, because of mdtraj - python=2.7 CONDA_PY=27 CONDA_NPY=19 diff --git a/tools/ci/travis/after_success.sh b/tools/ci/travis/after_success.sh index ef228f63b..98c554cdd 100644 --- a/tools/ci/travis/after_success.sh +++ b/tools/ci/travis/after_success.sh @@ -1,3 +1,4 @@ +#!/bin/bash if [ "$TRAVIS_PULL_REQUEST" = true ]; then echo "This is a pull request. No deployment will be done."; exit 0 fi From a8541f087d82358e607f81957e3f5ddf897df94b Mon Sep 17 00:00:00 2001 From: marscher Date: Wed, 20 Apr 2016 11:52:57 +0200 Subject: [PATCH 06/33] [travis] fix path --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 670c8c7e2..e1c318dba 100644 --- a/.travis.yml +++ b/.travis.yml @@ -40,4 +40,4 @@ after_success: # coverage report: needs .coverage file generated by testsuite and git src - pip install coveralls - coveralls -- if [ "$TRAVIS_SECURE_ENV_VARS" == true ]; then source tools/travis-ci/after_success.sh; fi +- if [ "$TRAVIS_SECURE_ENV_VARS" == true ]; then source tools/ci/travis-ci/after_success.sh; fi From 57ccb82c4f67a810283d62741a69617d04d96ae1 Mon Sep 17 00:00:00 2001 From: marscher Date: Wed, 20 Apr 2016 12:02:13 +0200 Subject: [PATCH 07/33] [travis] follow-up --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index e1c318dba..a07c18e1e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -40,4 +40,4 @@ after_success: # coverage report: needs .coverage file generated by testsuite and git src - pip install coveralls - coveralls -- if [ "$TRAVIS_SECURE_ENV_VARS" == true ]; then source tools/ci/travis-ci/after_success.sh; fi +- if [ "$TRAVIS_SECURE_ENV_VARS" == true ]; then source tools/ci/travis/after_success.sh; fi From 98d358b6939b68050d6488a89c23403a7e1aad8d Mon Sep 17 00:00:00 2001 From: marscher Date: Wed, 20 Apr 2016 14:08:16 +0200 Subject: [PATCH 08/33] [versioneer] upgraded to 0.16 --- pyemma/_version.py | 417 ++++++++++++++++++++++++++++++++++++++------- versioneer.py | 261 ++++++++++++++++++---------- 2 files changed, 521 insertions(+), 157 deletions(-) diff --git a/pyemma/_version.py b/pyemma/_version.py index 4a8c2627b..57b087f36 100644 --- a/pyemma/_version.py +++ b/pyemma/_version.py @@ -6,20 +6,9 @@ # that just contains the computed version 
number. # This file is released into the public domain. Generated by -# versioneer-0.13 (https://github.com/warner/python-versioneer) +# versioneer-0.16 (https://github.com/warner/python-versioneer) -# these strings will be replaced by git during git-archive - -from __future__ import absolute_import, print_function -from six.moves import range - -git_refnames = "$Format:%d$" -git_full = "$Format:%H$" - -# these strings are filled in when 'setup.py versioneer' creates _version.py -tag_prefix = "v" -parentdir_prefix = "pyemma-" -versionfile_source = "pyemma/_version.py" +"""Git implementation of _version.py.""" import errno import os @@ -28,11 +17,62 @@ import sys +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). + git_refnames = "$Format:%d$" + git_full = "$Format:%H$" + keywords = {"refnames": git_refnames, "full": git_full} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440" + cfg.tag_prefix = "v" + cfg.parentdir_prefix = "pyemma-" + cfg.versionfile_source = "pyemma/_version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): + """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: + dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr @@ -43,7 +83,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): if e.errno == errno.ENOENT: continue if verbose: - print("unable to run %s" % args[0]) + print("unable to run %s" % dispcmd) print(e) return None else: @@ -51,28 +91,35 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): print("unable to find command, tried %s" % (commands,)) return None stdout = p.communicate()[0].strip() - if sys.version >= '3': + if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: - print("unable to run %s (error)" % args[0]) + print("unable to run %s (error)" % dispcmd) return None return stdout -def versions_from_parentdir(parentdir_prefix, root, verbose=False): - # Source tarballs conventionally unpack into a directory that includes - # both the project name and a version string. +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes + both the project name and a version string. 
+ """ dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: print("guessing rootdir is '%s', but '%s' doesn't start with " "prefix '%s'" % (root, dirname, parentdir_prefix)) - return None - return {"version": dirname[len(parentdir_prefix):], "full": ""} + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None} +@register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from @@ -95,14 +142,16 @@ def git_get_keywords(versionfile_abs): return keywords -def git_versions_from_keywords(keywords, tag_prefix, verbose=False): +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" if not keywords: - return {} # keyword-finding function failed to find keywords + raise NotThisMethod("no keywords at all, weird") refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") - return {} # unexpanded, so not in an unpacked git-archive tarball + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. @@ -128,68 +177,308 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose=False): if verbose: print("picking %s" % r) return {"version": r, - "full": keywords["full"].strip()} - # no suitable tags, so we use the full revision id + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None + } + # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: - print("no suitable tags, using full revision id") - return {"version": keywords["full"].strip(), - "full": keywords["full"].strip()} + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags"} -def git_versions_from_vcs(tag_prefix, root, verbose=False): - # this runs 'git' from the root of the source tree. This only gets called - # if the git-archive 'subst' keywords were *not* expanded, and - # _version.py hasn't already been rewritten with a short version string, - # meaning we're inside a checked out source tree. +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ if not os.path.exists(os.path.join(root, ".git")): if verbose: print("no .git in %s" % root) - return {} + raise NotThisMethod("no .git directory") GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] - stdout = run_command(GITS, ["describe", "--tags", "--dirty", "--always"], - cwd=root) - if stdout is None: - return {} - if not stdout.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (stdout, tag_prefix)) - return {} - tag = stdout[len(tag_prefix):] - stdout = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if stdout is None: - return {} - full = stdout.strip() - if tag.endswith("-dirty"): - full += "-dirty" - return {"version": tag, "full": full} - - -def get_versions(default={"version": "unknown", "full": ""}, verbose=False): + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%s*" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"]} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None} + + +def get_versions(): + """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. - keywords = {"refnames": git_refnames, "full": git_full} - ver = git_versions_from_keywords(keywords, tag_prefix, verbose) - if ver: - return ver + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. - for i in range(len(versionfile_source.split('/'))): + for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: - return default + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree"} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass - return (git_versions_from_vcs(tag_prefix, root, verbose) - or versions_from_parentdir(parentdir_prefix, root, verbose) - or default) + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version"} diff --git a/versioneer.py b/versioneer.py index c010f63e3..7ed2a21d2 100644 --- a/versioneer.py +++ b/versioneer.py @@ -1,7 +1,8 @@ -# Version: 0.15 +# Version: 0.16 + +"""The Versioneer - like a rocketeer, but for versions. 
-""" The Versioneer ============== @@ -9,7 +10,7 @@ * https://github.com/warner/python-versioneer * Brian Warner * License: Public Domain -* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy +* Compatible With: python2.6, 2.7, 3.3, 3.4, 3.5, and pypy * [![Latest Version] (https://pypip.in/version/versioneer/badge.svg?style=flat) ](https://pypi.python.org/pypi/versioneer/) @@ -125,16 +126,18 @@ If this is set to None, then `setup.py build` will not attempt to rewrite any `_version.py` in the built tree. If your project does not have any libraries (e.g. if it only builds a script), then you should use - `versionfile_build = None` and override `distutils.command.build_scripts` - to explicitly insert a copy of `versioneer.get_version()` into your - generated script. + `versionfile_build = None`. To actually use the computed version string, + your `setup.py` will need to override `distutils.command.build_scripts` + with a subclass that explicitly inserts a copy of + `versioneer.get_version()` into your script file. See + `test/demoapp-script-only/setup.py` for an example. * `tag_prefix`: a string, like 'PROJECTNAME-', which appears at the start of all VCS tags. If your tags look like 'myproject-1.2.0', then you should use tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this - should be an empty string. + should be an empty string, using either `tag_prefix=` or `tag_prefix=''`. * `parentdir_prefix`: @@ -159,7 +162,7 @@ style = pep440 versionfile_source = src/myproject/_version.py versionfile_build = myproject/_version.py - tag_prefix = "" + tag_prefix = parentdir_prefix = myproject- ```` @@ -286,6 +289,10 @@ `SRC/_version.py` * commit any changed files +### Upgrading to 0.16 + +Nothing special. + ### Upgrading to 0.15 Starting with this version, Versioneer is configured with a `[versioneer]` @@ -333,9 +340,11 @@ ## License -To make Versioneer easier to embed, all its code is hereby released into the -public domain. The `_version.py` that it creates is also in the public -domain. +To make Versioneer easier to embed, all its code is dedicated to the public +domain. The `_version.py` that it creates is also in the public domain. +Specifically, both are released under the Creative Commons "Public Domain +Dedication" license (CC0-1.0), as described in +https://creativecommons.org/publicdomain/zero/1.0/ . """ @@ -353,12 +362,15 @@ class VersioneerConfig: - pass + """Container for Versioneer configuration parameters.""" def get_root(): - # we require that all commands are run from the project root, i.e. the - # directory that contains setup.py, setup.cfg, and versioneer.py . + """Get the project root directory. + + We require that all commands are run from the project root, i.e. the + directory that contains setup.py, setup.cfg, and versioneer.py . + """ root = os.path.realpath(os.path.abspath(os.getcwd())) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") @@ -391,6 +403,7 @@ def get_root(): def get_config_from_root(root): + """Read the project setup.cfg file to determine Versioneer config.""" # This might raise EnvironmentError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). 
See the docstring at @@ -411,13 +424,15 @@ def get(parser, name): cfg.versionfile_source = get(parser, "versionfile_source") cfg.versionfile_build = get(parser, "versionfile_build") cfg.tag_prefix = get(parser, "tag_prefix") + if cfg.tag_prefix in ("''", '""'): + cfg.tag_prefix = "" cfg.parentdir_prefix = get(parser, "parentdir_prefix") cfg.verbose = get(parser, "verbose") return cfg class NotThisMethod(Exception): - pass + """Exception raised if a method is not valid for the current scenario.""" # these dictionaries contain VCS-specific tools LONG_VERSION_PY = {} @@ -425,7 +440,9 @@ class NotThisMethod(Exception): def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): + """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f @@ -434,6 +451,7 @@ def decorate(f): def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): + """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: @@ -472,7 +490,9 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): # that just contains the computed version number. # This file is released into the public domain. Generated by -# versioneer-0.15 (https://github.com/warner/python-versioneer) +# versioneer-0.16 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" import errno import os @@ -482,6 +502,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): def get_keywords(): + """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call @@ -493,10 +514,11 @@ def get_keywords(): class VersioneerConfig: - pass + """Container for Versioneer configuration parameters.""" def get_config(): + """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() @@ -510,7 +532,7 @@ def get_config(): class NotThisMethod(Exception): - pass + """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} @@ -518,7 +540,9 @@ class NotThisMethod(Exception): def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): + """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f @@ -527,6 +551,7 @@ def decorate(f): def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): + """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: @@ -560,8 +585,11 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): def versions_from_parentdir(parentdir_prefix, root, verbose): - # Source tarballs conventionally unpack into a directory that includes - # both the project name and a version string. + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes + both the project name and a version string. 
+ """ dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: @@ -575,6 +603,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from @@ -599,6 +628,7 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") refnames = keywords["refnames"].strip() @@ -644,11 +674,12 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - # this runs 'git' from the root of the source tree. This only gets called - # if the git-archive 'subst' keywords were *not* expanded, and - # _version.py hasn't already been rewritten with a short version string, - # meaning we're inside a checked out source tree. + """Get version from 'git describe' in the root of the source tree. + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ if not os.path.exists(os.path.join(root, ".git")): if verbose: print("no .git in %%s" %% root) @@ -657,10 +688,11 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] - # if there is a tag, this yields TAG-NUM-gHEX[-dirty] - # if there are no tags, this yields HEX[-dirty] (no NUM) + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long"], + "--always", "--long", + "--match", "%%s*" %% tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: @@ -725,19 +757,21 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): - # now build up version string, with post-release "local version - # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + """Build up version string, with post-release "local version identifier". - # exceptions: - # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: @@ -755,11 +789,11 @@ def render_pep440(pieces): def render_pep440_pre(pieces): - # TAG[.post.devDISTANCE] . No -dirty - - # exceptions: - # 1: no tags. 0.post.devDISTANCE + """TAG[.post.devDISTANCE] -- No -dirty. 
+ Exceptions: + 1: no tags. 0.post.devDISTANCE + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: @@ -771,14 +805,15 @@ def render_pep440_pre(pieces): def render_pep440_post(pieces): - # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that - # .dev0 sorts backwards (a dirty tree will appear "older" than the - # corresponding clean one), but you shouldn't be releasing software with - # -dirty anyways. + """TAG[.postDISTANCE[.dev0]+gHEX] . - # exceptions: - # 1: no tags. 0.postDISTANCE[.dev0] + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: @@ -797,11 +832,13 @@ def render_pep440_post(pieces): def render_pep440_old(pieces): - # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. + """TAG[.postDISTANCE[.dev0]] . - # exceptions: - # 1: no tags. 0.postDISTANCE[.dev0] + The ".dev0" means dirty. + Eexceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: @@ -817,12 +854,13 @@ def render_pep440_old(pieces): def render_git_describe(pieces): - # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty - # --always' + """TAG[-DISTANCE-gHEX][-dirty]. - # exceptions: - # 1: no tags. HEX[-dirty] (note: no 'g' prefix) + Like 'git describe --tags --dirty --always'. + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: @@ -836,12 +874,14 @@ def render_git_describe(pieces): def render_git_describe_long(pieces): - # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty - # --always -long'. The distance/hash is unconditional. + """TAG-DISTANCE-gHEX[-dirty]. - # exceptions: - # 1: no tags. HEX[-dirty] (note: no 'g' prefix) + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) @@ -854,6 +894,7 @@ def render_git_describe_long(pieces): def render(pieces, style): + """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), @@ -883,6 +924,7 @@ def render(pieces, style): def get_versions(): + """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which @@ -929,6 +971,7 @@ def get_versions(): @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. 
This function is not used from @@ -953,6 +996,7 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") refnames = keywords["refnames"].strip() @@ -998,11 +1042,12 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - # this runs 'git' from the root of the source tree. This only gets called - # if the git-archive 'subst' keywords were *not* expanded, and - # _version.py hasn't already been rewritten with a short version string, - # meaning we're inside a checked out source tree. + """Get version from 'git describe' in the root of the source tree. + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ if not os.path.exists(os.path.join(root, ".git")): if verbose: print("no .git in %s" % root) @@ -1011,10 +1056,11 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] - # if there is a tag, this yields TAG-NUM-gHEX[-dirty] - # if there are no tags, this yields HEX[-dirty] (no NUM) + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long"], + "--always", "--long", + "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: @@ -1079,6 +1125,11 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): def do_vcs_install(manifest_in, versionfile_source, ipy): + """Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-time keyword substitution. + """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] @@ -1112,8 +1163,11 @@ def do_vcs_install(manifest_in, versionfile_source, ipy): def versions_from_parentdir(parentdir_prefix, root, verbose): - # Source tarballs conventionally unpack into a directory that includes - # both the project name and a version string. + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes + both the project name and a version string. + """ dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: @@ -1125,7 +1179,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): "dirty": False, "error": None} SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.15) from +# This file was generated by 'versioneer.py' (0.16) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. 
@@ -1144,6 +1198,7 @@ def get_versions(): def versions_from_file(filename): + """Try to determine the version from _version.py if present.""" try: with open(filename) as f: contents = f.read() @@ -1157,6 +1212,7 @@ def versions_from_file(filename): def write_to_version_file(filename, versions): + """Write the given version number to the given _version.py file.""" os.unlink(filename) contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) @@ -1167,19 +1223,21 @@ def write_to_version_file(filename, versions): def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): - # now build up version string, with post-release "local version - # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + """Build up version string, with post-release "local version identifier". - # exceptions: - # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: @@ -1197,11 +1255,11 @@ def render_pep440(pieces): def render_pep440_pre(pieces): - # TAG[.post.devDISTANCE] . No -dirty - - # exceptions: - # 1: no tags. 0.post.devDISTANCE + """TAG[.post.devDISTANCE] -- No -dirty. + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: @@ -1213,14 +1271,15 @@ def render_pep440_pre(pieces): def render_pep440_post(pieces): - # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that - # .dev0 sorts backwards (a dirty tree will appear "older" than the - # corresponding clean one), but you shouldn't be releasing software with - # -dirty anyways. + """TAG[.postDISTANCE[.dev0]+gHEX] . - # exceptions: - # 1: no tags. 0.postDISTANCE[.dev0] + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: @@ -1239,11 +1298,13 @@ def render_pep440_post(pieces): def render_pep440_old(pieces): - # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. + """TAG[.postDISTANCE[.dev0]] . - # exceptions: - # 1: no tags. 0.postDISTANCE[.dev0] + The ".dev0" means dirty. + Eexceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: @@ -1259,12 +1320,13 @@ def render_pep440_old(pieces): def render_git_describe(pieces): - # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty - # --always' + """TAG[-DISTANCE-gHEX][-dirty]. - # exceptions: - # 1: no tags. HEX[-dirty] (note: no 'g' prefix) + Like 'git describe --tags --dirty --always'. + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: @@ -1278,12 +1340,14 @@ def render_git_describe(pieces): def render_git_describe_long(pieces): - # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty - # --always -long'. The distance/hash is unconditional. + """TAG-DISTANCE-gHEX[-dirty]. - # exceptions: - # 1: no tags. HEX[-dirty] (note: no 'g' prefix) + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) @@ -1296,6 +1360,7 @@ def render_git_describe_long(pieces): def render(pieces, style): + """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), @@ -1325,12 +1390,14 @@ def render(pieces, style): class VersioneerBadRootError(Exception): - pass + """The project root directory is unknown or missing key files.""" def get_versions(verbose=False): - # returns dict with two keys: 'version' and 'full' + """Get the project version from whatever source is available. + Returns dict with two keys: 'version' and 'full'. + """ if "versioneer" in sys.modules: # see the discussion in cmdclass.py:get_cmdclass() del sys.modules["versioneer"] @@ -1402,10 +1469,12 @@ def get_versions(verbose=False): def get_version(): + """Get the short version string for this project.""" return get_versions()["version"] def get_cmdclass(): + """Get the custom setuptools/distutils subclasses used by Versioneer.""" if "versioneer" in sys.modules: del sys.modules["versioneer"] # this fixes the "python setup.py develop" case (also 'install' and @@ -1456,7 +1525,11 @@ def run(self): # setuptools/install -> bdist_egg ->.. # setuptools/develop -> ? 
- from distutils.command.build_py import build_py as _build_py + # we override different "build_py" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.build_py import build_py as _build_py + else: + from distutils.command.build_py import build_py as _build_py class cmd_build_py(_build_py): def run(self): @@ -1539,7 +1612,7 @@ def make_release_tree(self, base_dir, files): style = pep440 versionfile_source = src/myproject/_version.py versionfile_build = myproject/_version.py - tag_prefix = "" + tag_prefix = parentdir_prefix = myproject- You will also need to edit your setup.py to use the results: @@ -1575,6 +1648,7 @@ def make_release_tree(self, base_dir, files): def do_setup(): + """Main VCS-independent setup function for installing Versioneer.""" root = get_root() try: cfg = get_config_from_root(root) @@ -1656,6 +1730,7 @@ def do_setup(): def scan_setup_py(): + """Validate the contents of setup.py against Versioneer's expectations.""" found = set() setters = False errors = 0 From 304bb6c6850821309c69c167b5515aa6655c07b4 Mon Sep 17 00:00:00 2001 From: marscher Date: Wed, 20 Apr 2016 14:13:43 +0200 Subject: [PATCH 09/33] [conda-recipe] use git version string in meta.yaml --- tools/conda-recipe/build.sh | 4 ++-- tools/conda-recipe/dev_version.py | 5 +++++ tools/conda-recipe/meta.yaml | 4 ++-- 3 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 tools/conda-recipe/dev_version.py diff --git a/tools/conda-recipe/build.sh b/tools/conda-recipe/build.sh index f87e27d20..19342cbaa 100755 --- a/tools/conda-recipe/build.sh +++ b/tools/conda-recipe/build.sh @@ -1,3 +1,3 @@ #!/bin/bash -#$PYTHON setup.py install -pip install . -vv +$PYTHON setup.py install +$PYTHON tools/conda-recipe/dev_version.py diff --git a/tools/conda-recipe/dev_version.py b/tools/conda-recipe/dev_version.py new file mode 100644 index 000000000..e3c1704a6 --- /dev/null +++ b/tools/conda-recipe/dev_version.py @@ -0,0 +1,5 @@ +from __future__ import print_function +from pyemma import __version__ as version + +with open('__conda_version__.txt', 'w') as f: + f.write(version) diff --git a/tools/conda-recipe/meta.yaml b/tools/conda-recipe/meta.yaml index 718fbb3fa..3f72b6da9 100644 --- a/tools/conda-recipe/meta.yaml +++ b/tools/conda-recipe/meta.yaml @@ -1,6 +1,6 @@ package: - name: pyemma - version: !!str dev + name: pyemma-dev + #version: 0 source: path: ../.. 
From b40b0295a34a4d1bc92b7d32570c72b2ff48dc57 Mon Sep 17 00:00:00 2001 From: marscher Date: Wed, 20 Apr 2016 14:25:50 +0200 Subject: [PATCH 10/33] [CI] moved tools -> devtools --- .travis.yml | 6 +++--- appveyor.yml | 4 ++-- {tools => devtools}/ci/appveyor/deploy.ps1 | 0 {tools => devtools}/ci/appveyor/runTestsuite.ps1 | 2 +- {tools => devtools}/ci/appveyor/run_with_env.cmd | 0 .../ci/appveyor/transform_xunit_to_appveyor.xsl | 0 {tools => devtools}/ci/jenkins/update_versions_json.py | 0 {tools => devtools}/ci/travis/after_success.sh | 0 {tools => devtools}/ci/travis/install_miniconda.sh | 0 {tools => devtools}/ci/travis/make_docs.sh | 0 devtools/conda-recipe/bld.bat | 10 ++++++++++ devtools/conda-recipe/build.sh | 3 +++ {tools => devtools}/conda-recipe/dev_version.py | 0 {tools => devtools}/conda-recipe/meta.yaml | 0 {tools => devtools}/conda-recipe/run_test.py | 0 tools/conda-recipe/bld.bat | 8 -------- tools/conda-recipe/build.sh | 3 --- 17 files changed, 19 insertions(+), 17 deletions(-) rename {tools => devtools}/ci/appveyor/deploy.ps1 (100%) rename {tools => devtools}/ci/appveyor/runTestsuite.ps1 (92%) rename {tools => devtools}/ci/appveyor/run_with_env.cmd (100%) rename {tools => devtools}/ci/appveyor/transform_xunit_to_appveyor.xsl (100%) rename {tools => devtools}/ci/jenkins/update_versions_json.py (100%) rename {tools => devtools}/ci/travis/after_success.sh (100%) rename {tools => devtools}/ci/travis/install_miniconda.sh (100%) rename {tools => devtools}/ci/travis/make_docs.sh (100%) create mode 100644 devtools/conda-recipe/bld.bat create mode 100755 devtools/conda-recipe/build.sh rename {tools => devtools}/conda-recipe/dev_version.py (100%) rename {tools => devtools}/conda-recipe/meta.yaml (100%) rename {tools => devtools}/conda-recipe/run_test.py (100%) delete mode 100644 tools/conda-recipe/bld.bat delete mode 100755 tools/conda-recipe/build.sh diff --git a/.travis.yml b/.travis.yml index a07c18e1e..dcb30c1bd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,16 +28,16 @@ matrix: env: python=2.7 CONDA_PY=27 CONDA_NPY=19 before_install: -- tools/ci/travis/install_miniconda.sh +- devtools/ci/travis/install_miniconda.sh - conda config --set always_yes true - conda config --add channels omnia - conda install -q $common_py_deps script: -- conda build -q tools/conda-recipe +- conda build -q devtools/conda-recipe after_success: # coverage report: needs .coverage file generated by testsuite and git src - pip install coveralls - coveralls -- if [ "$TRAVIS_SECURE_ENV_VARS" == true ]; then source tools/ci/travis/after_success.sh; fi +- if [ "$TRAVIS_SECURE_ENV_VARS" == true ]; then source devtools/ci/travis/after_success.sh; fi diff --git a/appveyor.yml b/appveyor.yml index df4210607..8e46586e5 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -3,7 +3,7 @@ environment: # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the # /E:ON and /V:ON options are not enabled in the batch script intepreter # See: http://stackoverflow.com/a/13751649/163740 - CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\tools\\ci\\appveyor\\run_with_env.cmd" + CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\devtools\\ci\\appveyor\\run_with_env.cmd" CONDA_NPY: "110" matrix: @@ -37,4 +37,4 @@ build: false # Not a C# project, build stuff at the test step instead. 
test_script: # run testsuite and upload test results to AppVeyor; return exit code of testsuite - - conda build -q tools/conda-recipe + - conda build -q devtools/conda-recipe diff --git a/tools/ci/appveyor/deploy.ps1 b/devtools/ci/appveyor/deploy.ps1 similarity index 100% rename from tools/ci/appveyor/deploy.ps1 rename to devtools/ci/appveyor/deploy.ps1 diff --git a/tools/ci/appveyor/runTestsuite.ps1 b/devtools/ci/appveyor/runTestsuite.ps1 similarity index 92% rename from tools/ci/appveyor/runTestsuite.ps1 rename to devtools/ci/appveyor/runTestsuite.ps1 index c599df31a..6b0a051c4 100644 --- a/tools/ci/appveyor/runTestsuite.ps1 +++ b/devtools/ci/appveyor/runTestsuite.ps1 @@ -22,7 +22,7 @@ function upload($file) { function run { cd $env:APPVEYOR_BUILD_FOLDER - $stylesheet = "tools/ci/appveyor/transform_xunit_to_appveyor.xsl" + $stylesheet = "devtools/ci/appveyor/transform_xunit_to_appveyor.xsl" $input = "nosetests.xml" $output = "transformed.xml" diff --git a/tools/ci/appveyor/run_with_env.cmd b/devtools/ci/appveyor/run_with_env.cmd similarity index 100% rename from tools/ci/appveyor/run_with_env.cmd rename to devtools/ci/appveyor/run_with_env.cmd diff --git a/tools/ci/appveyor/transform_xunit_to_appveyor.xsl b/devtools/ci/appveyor/transform_xunit_to_appveyor.xsl similarity index 100% rename from tools/ci/appveyor/transform_xunit_to_appveyor.xsl rename to devtools/ci/appveyor/transform_xunit_to_appveyor.xsl diff --git a/tools/ci/jenkins/update_versions_json.py b/devtools/ci/jenkins/update_versions_json.py similarity index 100% rename from tools/ci/jenkins/update_versions_json.py rename to devtools/ci/jenkins/update_versions_json.py diff --git a/tools/ci/travis/after_success.sh b/devtools/ci/travis/after_success.sh similarity index 100% rename from tools/ci/travis/after_success.sh rename to devtools/ci/travis/after_success.sh diff --git a/tools/ci/travis/install_miniconda.sh b/devtools/ci/travis/install_miniconda.sh similarity index 100% rename from tools/ci/travis/install_miniconda.sh rename to devtools/ci/travis/install_miniconda.sh diff --git a/tools/ci/travis/make_docs.sh b/devtools/ci/travis/make_docs.sh similarity index 100% rename from tools/ci/travis/make_docs.sh rename to devtools/ci/travis/make_docs.sh diff --git a/devtools/conda-recipe/bld.bat b/devtools/conda-recipe/bld.bat new file mode 100644 index 000000000..515c3a7ec --- /dev/null +++ b/devtools/conda-recipe/bld.bat @@ -0,0 +1,10 @@ +if not defined APPVEYOR ( + echo not on appveyor + "%PYTHON%" setup.py install +) else ( + echo on appveyor + cmd /E:ON /V:ON /C %APPVEYOR_BUILD_FOLDER%\devtools\ci\appveyor\run_with_env.cmd "%PYTHON%" setup.py install +) +set build_status=%ERRORLEVEL% +"%PYTHON%" devtools\conda-recipe\dev_version.py +if %build_status% 1 exit 1 diff --git a/devtools/conda-recipe/build.sh b/devtools/conda-recipe/build.sh new file mode 100755 index 000000000..9c9d57833 --- /dev/null +++ b/devtools/conda-recipe/build.sh @@ -0,0 +1,3 @@ +#!/bin/bash +$PYTHON setup.py install +$PYTHON devtools/conda-recipe/dev_version.py diff --git a/tools/conda-recipe/dev_version.py b/devtools/conda-recipe/dev_version.py similarity index 100% rename from tools/conda-recipe/dev_version.py rename to devtools/conda-recipe/dev_version.py diff --git a/tools/conda-recipe/meta.yaml b/devtools/conda-recipe/meta.yaml similarity index 100% rename from tools/conda-recipe/meta.yaml rename to devtools/conda-recipe/meta.yaml diff --git a/tools/conda-recipe/run_test.py b/devtools/conda-recipe/run_test.py similarity index 100% rename from 
tools/conda-recipe/run_test.py rename to devtools/conda-recipe/run_test.py diff --git a/tools/conda-recipe/bld.bat b/tools/conda-recipe/bld.bat deleted file mode 100644 index a0eaf95ea..000000000 --- a/tools/conda-recipe/bld.bat +++ /dev/null @@ -1,8 +0,0 @@ -if not defined APPVEYOR ( - echo not on appveyor - "%PYTHON%" setup.py install -) else ( - echo on appveyor - cmd /E:ON /V:ON /C %APPVEYOR_BUILD_FOLDER%\tools\ci\appveyor\run_with_env.cmd "%PYTHON%" setup.py install -) -if errorlevel 1 exit 1 diff --git a/tools/conda-recipe/build.sh b/tools/conda-recipe/build.sh deleted file mode 100755 index 19342cbaa..000000000 --- a/tools/conda-recipe/build.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -$PYTHON setup.py install -$PYTHON tools/conda-recipe/dev_version.py From 9f5b4db4dd4b33e35bdb8fd80b36315e992d9056 Mon Sep 17 00:00:00 2001 From: marscher Date: Wed, 20 Apr 2016 14:40:47 +0200 Subject: [PATCH 11/33] [travis] updated token --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index dcb30c1bd..842c67927 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,7 +11,7 @@ env: - common_py_deps="jinja2 conda-build" - PACKAGENAME=pyemma - ORGNAME=omnia - - secure: BBcvn2SDTrKtCQdJFJ6ezv5lmYWYtoukS1maptS2RuEHrBqVMN++go5U9BTNDHBLUESewolaR9GFng2xXBLpPYgCE2XOGsJPKM0L15/Ae/QWSmqUbLNJFVZz3I3EliJJV9si1EaWQnQY0axPqSXRTA8CyVOeO9zxMewWozGIHE0= + - secure: "YI89RGfpxB29XoyPzBGXsCWSgdhP4i+DS7gFmwaDoOz3R+ZW8yG3ZaexW15WUe7h0tb1L2aYvZCcqgaBrH2SNCiR0SRbhk3EBPg1C3baMVlVhDeQru9S/FrEJ1ZUGhnUitbEEIdG2MYexETjoGef6K+7dXBJWJMy/rNRm61PSJw=" matrix: # minimum possible numpy version = 1.8, because of mdtraj - python=2.7 CONDA_PY=27 CONDA_NPY=19 From 211bbc7c7151f001903a914d063676d454a375cb Mon Sep 17 00:00:00 2001 From: marscher Date: Wed, 20 Apr 2016 14:54:49 +0200 Subject: [PATCH 12/33] [appveyor] try to deploy dev version to conda --- appveyor.yml | 5 +++++ devtools/ci/appveyor/after_success.bat | 8 ++++++++ devtools/conda-recipe/bld.bat | 4 ++-- 3 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 devtools/ci/appveyor/after_success.bat diff --git a/appveyor.yml b/appveyor.yml index 8e46586e5..b55c136c5 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,4 +1,5 @@ environment: + BINSTAR_TOKEN: o5nTvV9PwJQgt5HXoqAl1iWi1ivyOrGb9ZL20puP4xklAEfvN/0EDHbhDHpVwjC7 global: # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the # /E:ON and /V:ON options are not enabled in the batch script intepreter @@ -38,3 +39,7 @@ build: false # Not a C# project, build stuff at the test step instead. 
test_script: # run testsuite and upload test results to AppVeyor; return exit code of testsuite - conda build -q devtools/conda-recipe + +on_success: + - devtools/ci/appveyor/after_success.bat + diff --git a/devtools/ci/appveyor/after_success.bat b/devtools/ci/appveyor/after_success.bat new file mode 100644 index 000000000..4b346e6f0 --- /dev/null +++ b/devtools/ci/appveyor/after_success.bat @@ -0,0 +1,8 @@ +% Deploy to binstar +conda install --yes anaconda-client jinja2 +cd %PYTHON%\conda-bld +for %%filename in (*\%PACKAGENAME%-dev-*.tar.bz2) do ( + echo "removing file %%~filename" + anaconda -t %BINSTAR_TOKEN% remove --force %ORGNAME%\%PACKAGENAME%-dev\%%~filename + anaconda -t %BINSTAR_TOKEN% upload --force -u %ORGNAME% -p %PACKAGENAME%-dev %%~filename +) diff --git a/devtools/conda-recipe/bld.bat b/devtools/conda-recipe/bld.bat index 515c3a7ec..7cfd03316 100644 --- a/devtools/conda-recipe/bld.bat +++ b/devtools/conda-recipe/bld.bat @@ -1,10 +1,10 @@ if not defined APPVEYOR ( echo not on appveyor - "%PYTHON%" setup.py install + "%PYTHON%" setup.py install ) else ( echo on appveyor cmd /E:ON /V:ON /C %APPVEYOR_BUILD_FOLDER%\devtools\ci\appveyor\run_with_env.cmd "%PYTHON%" setup.py install ) set build_status=%ERRORLEVEL% "%PYTHON%" devtools\conda-recipe\dev_version.py -if %build_status% 1 exit 1 +if %build_status% == 1 exit 1 From 7471a3634b28b03e53eaf1c05f67f1f1eb863963 Mon Sep 17 00:00:00 2001 From: marscher Date: Wed, 20 Apr 2016 16:50:33 +0200 Subject: [PATCH 13/33] [travis|dev-builds] delete 10 oldest dev builds in after success step --- .travis.yml | 1 + devtools/ci/travis/after_success.sh | 3 +- devtools/ci/travis/dev_pkgs_del_old.py | 45 ++++++++++++++++++++++++++ 3 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 devtools/ci/travis/dev_pkgs_del_old.py diff --git a/.travis.yml b/.travis.yml index 842c67927..f4f7c67cb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,6 +11,7 @@ env: - common_py_deps="jinja2 conda-build" - PACKAGENAME=pyemma - ORGNAME=omnia + - DEV_BUILD_N_KEEP=10 - secure: "YI89RGfpxB29XoyPzBGXsCWSgdhP4i+DS7gFmwaDoOz3R+ZW8yG3ZaexW15WUe7h0tb1L2aYvZCcqgaBrH2SNCiR0SRbhk3EBPg1C3baMVlVhDeQru9S/FrEJ1ZUGhnUitbEEIdG2MYexETjoGef6K+7dXBJWJMy/rNRm61PSJw=" matrix: # minimum possible numpy version = 1.8, because of mdtraj diff --git a/devtools/ci/travis/after_success.sh b/devtools/ci/travis/after_success.sh index 98c554cdd..ddea4ca6f 100644 --- a/devtools/ci/travis/after_success.sh +++ b/devtools/ci/travis/after_success.sh @@ -15,8 +15,9 @@ pushd . cd $HOME/miniconda/conda-bld FILES=*/${PACKAGENAME}-dev-*.tar.bz2 for filename in $FILES; do - anaconda -t $BINSTAR_TOKEN remove --force ${ORGNAME}/${PACKAGENAME}-dev/${filename} anaconda -t $BINSTAR_TOKEN upload --force -u ${ORGNAME} -p ${PACKAGENAME}-dev ${filename} done popd +# call cleanup +python dev_pkgs_del_old.py diff --git a/devtools/ci/travis/dev_pkgs_del_old.py b/devtools/ci/travis/dev_pkgs_del_old.py new file mode 100644 index 000000000..39d682fd6 --- /dev/null +++ b/devtools/ci/travis/dev_pkgs_del_old.py @@ -0,0 +1,45 @@ +""" +Cleanup old development builds on Anaconda.org + +Assumes one has set two environment variables: + +1. BINSTAR_TOKEN: token to authenticate with anaconda.org +2. DEV_BUILD_N_KEEP: int, how many builds to keep, delete oldest first. +3. ORGNAME: str, anaconda.org organisation/user +4. PACKGENAME: str, name of package to clean up + +author: Martin K. 
Scherer
+date: 20.4.16
+"""
+from __future__ import print_function, absolute_import
+from binstar_client.utils import get_server_api
+from pkg_resources import parse_version
+from operator import getitem
+import os
+
+token = os.environ['BINSTAR_TOKEN']
+org = os.environ['ORGNAME']
+pkg = os.environ['PACKAGENAME']
+n_keep = int(os.getenv('DEV_BUILD_N_KEEP'))
+
+b = get_server_api(token=token)
+package = b.package(org, pkg)
+
+# sort releases by version number, newest first
+sorted_by_version = sorted(package['releases'],
+                           key=lambda rel: parse_version(rel['version']),
+                           reverse=True
+                           )
+to_delete = []
+
+while len(sorted_by_version) > n_keep:
+    to_delete.append(sorted_by_version.pop())
+
+# remove old releases from anaconda.org
+for rel in to_delete:
+    spec = rel['full_name']
+    version = rel['version']
+    for dist in rel['distributions']:
+        b.remove_dist(org, package_name=pkg, release=version, basename=dist)
+        print("removed file %s" % dist)
+

From e641c4e8136e0771731afd413354bebed8e19d11 Mon Sep 17 00:00:00 2001
From: marscher
Date: Wed, 20 Apr 2016 17:50:19 +0200
Subject: [PATCH 14/33] [appveyor] fix

---
 appveyor.yml                           | 10 +++++-----
 devtools/ci/appveyor/after_success.bat | 11 +++++------
 2 files changed, 10 insertions(+), 11 deletions(-)

diff --git a/appveyor.yml b/appveyor.yml
index b55c136c5..15b1cff2f 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -8,22 +8,22 @@ environment:
     CONDA_NPY: "110"

   matrix:
-    - PYTHON: "C:\\Miniconda"
+    - MINICONDA_MINICONDA_PYTHON: "C:\\Miniconda"
      CONDA_PY: "27"

-    - PYTHON: "C:\\Miniconda-x64"
+    - MINICONDA_PYTHON: "C:\\Miniconda-x64"
      CONDA_PY: "27"
      ARCH: "64"

-    - PYTHON: "C:\\Miniconda3"
+    - MINICONDA_PYTHON: "C:\\Miniconda3"
      CONDA_PY: "35"

-    - PYTHON: "C:\\Miniconda3-x64"
+    - MINICONDA_PYTHON: "C:\\Miniconda3-x64"
      CONDA_PY: "34"
      ARCH: "64"

 install:
-  - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%;"
+  - "SET PATH=%MINICONDA_PYTHON%;%MINICONDA_PYTHON%\\Scripts;%PATH%;"
  - conda config --set always_yes true
  - conda config --add channels omnia

diff --git a/devtools/ci/appveyor/after_success.bat b/devtools/ci/appveyor/after_success.bat
index 4b346e6f0..da6996867 100644
--- a/devtools/ci/appveyor/after_success.bat
+++ b/devtools/ci/appveyor/after_success.bat
@@ -1,8 +1,7 @@
-% Deploy to binstar
-conda install --yes anaconda-client jinja2
-cd %PYTHON%\conda-bld
-for %%filename in (*\%PACKAGENAME%-dev-*.tar.bz2) do (
-    echo "removing file %%~filename"
-    anaconda -t %BINSTAR_TOKEN% remove --force %ORGNAME%\%PACKAGENAME%-dev\%%~filename
+conda install --yes -q anaconda-client jinja2
+cd %PYTHON_MINICONDA%\conda-bld
+dir /s /b %PACKAGENAME%-dev-*.tar.bz2 > files.txt
+for /F %%filename in (files.txt) do (
+    echo "uploading file %%~filename"
     anaconda -t %BINSTAR_TOKEN% upload --force -u %ORGNAME% -p %PACKAGENAME%-dev %%~filename
 )

From 172d49dc6b8ab39de1996a6fb4e2f2c2457a117a Mon Sep 17 00:00:00 2001
From: marscher
Date: Wed, 20 Apr 2016 17:58:10 +0200
Subject: [PATCH 15/33] [travis] fix path

---
 devtools/ci/travis/after_success.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/devtools/ci/travis/after_success.sh b/devtools/ci/travis/after_success.sh
index ddea4ca6f..5016ce897 100644
--- a/devtools/ci/travis/after_success.sh
+++ b/devtools/ci/travis/after_success.sh
@@ -20,4 +20,4 @@ done
 popd

 # call cleanup
-python dev_pkgs_del_old.py
+python devtools/ci/travis/dev_pkgs_del_old.py

From a25867a56bdbfe44943009e518602c591ab27ce3 Mon Sep 17 00:00:00 2001
From: marscher
Date: Wed, 20 Apr 2016 18:00:56 +0200
Subject: [PATCH 16/33] [appveyor] fix

---
appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index 15b1cff2f..4776a22cf 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -8,7 +8,7 @@ environment: CONDA_NPY: "110" matrix: - - MINICONDA_MINICONDA_PYTHON: "C:\\Miniconda" + - MINICONDA_PYTHON: "C:\\Miniconda" CONDA_PY: "27" - MINICONDA_PYTHON: "C:\\Miniconda-x64" From 9c329549b14dfe588946ea1e0da7f98d8c1474ce Mon Sep 17 00:00:00 2001 From: marscher Date: Thu, 21 Apr 2016 17:13:59 +0200 Subject: [PATCH 17/33] [setup] fix bhmm version range --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 59ea344de..af61d280b 100755 --- a/setup.py +++ b/setup.py @@ -217,7 +217,7 @@ def run(self): 'matplotlib', 'msmtools', 'thermotools>=0.1.14', - 'bhmm<0.7', + 'bhmm>=0.6,<0.7', 'joblib>0.8.4', 'pyyaml', 'psutil>=3.1.1', From c5548c4e349c38396c9a9a3fcbf23b47acae2d89 Mon Sep 17 00:00:00 2001 From: marscher Date: Thu, 21 Apr 2016 17:14:17 +0200 Subject: [PATCH 18/33] [appveyor] disable after_success step --- appveyor.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index 4776a22cf..7a70e6c34 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -40,6 +40,6 @@ test_script: # run testsuite and upload test results to AppVeyor; return exit code of testsuite - conda build -q devtools/conda-recipe -on_success: - - devtools/ci/appveyor/after_success.bat +#on_success: +# - devtools/ci/appveyor/after_success.bat From 3d3a93dd70851ab2b4202eb7f2a1540805dd6e75 Mon Sep 17 00:00:00 2001 From: marscher Date: Fri, 22 Apr 2016 11:59:24 +0200 Subject: [PATCH 19/33] [base/estimator] in case of func evaluation store result to return value. --- pyemma/_base/estimator.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pyemma/_base/estimator.py b/pyemma/_base/estimator.py index a2aeacff0..164996f47 100644 --- a/pyemma/_base/estimator.py +++ b/pyemma/_base/estimator.py @@ -165,6 +165,7 @@ def _estimate_param_scan_worker(estimator, params, X, evaluate, evaluate_args, # if we only have one value, unpack it if len(values) == 1: values = values[0] + res.append(values) else: raise ValueError('Invalid setting for evaluate: ' + str(evaluate)) From 7df2ae6c21e797e47f8188a746c11564921c68bc Mon Sep 17 00:00:00 2001 From: marscher Date: Fri, 22 Apr 2016 11:59:31 +0200 Subject: [PATCH 20/33] [pyemma/__init__] add alias for '__version__' -> 'version' --- pyemma/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pyemma/__init__.py b/pyemma/__init__.py index 30a721dc8..d1b659b0d 100644 --- a/pyemma/__init__.py +++ b/pyemma/__init__.py @@ -26,6 +26,7 @@ # set version from versioneer. 
from ._version import get_versions __version__ = get_versions()['version'] +version = __version__ del get_versions from .util import config From f7b8845b14d7bc43851361d387be757c2eb956d7 Mon Sep 17 00:00:00 2001 From: marscher Date: Fri, 22 Apr 2016 11:38:27 +0200 Subject: [PATCH 21/33] [MLMSM] fix show progress param in cktest --- pyemma/msm/estimators/maximum_likelihood_msm.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pyemma/msm/estimators/maximum_likelihood_msm.py b/pyemma/msm/estimators/maximum_likelihood_msm.py index 5c0b7b7f1..5ffd31c27 100644 --- a/pyemma/msm/estimators/maximum_likelihood_msm.py +++ b/pyemma/msm/estimators/maximum_likelihood_msm.py @@ -771,7 +771,7 @@ def cktest(self, nsets, memberships=None, mlags=10, conf=0.95, err_est=False, If False, only the prediction will get error bars, which is often sufficient to validate a model. show_progress : bool, optional - Show progressbars for calculation? + Show progress bars for calculation? Returns ------- @@ -796,6 +796,7 @@ def cktest(self, nsets, memberships=None, mlags=10, conf=0.95, err_est=False, if memberships is None: self.pcca(nsets) memberships = self.metastable_memberships - ck = ChapmanKolmogorovValidator(self, self, memberships, mlags=mlags, conf=conf, err_est=err_est) + ck = ChapmanKolmogorovValidator(self, self, memberships, mlags=mlags, conf=conf, + err_est=err_est, show_progress=show_progress) ck.estimate(self._dtrajs_full) return ck From 95d2862bd67837b9cf308c865f47e3f0172908eb Mon Sep 17 00:00:00 2001 From: marscher Date: Fri, 22 Apr 2016 11:53:34 +0200 Subject: [PATCH 22/33] [progressreporter] call callback in case numerator==denominator too. --- pyemma/_base/progress/reporter.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pyemma/_base/progress/reporter.py b/pyemma/_base/progress/reporter.py index 69c602d70..f1e8efc16 100644 --- a/pyemma/_base/progress/reporter.py +++ b/pyemma/_base/progress/reporter.py @@ -171,6 +171,9 @@ def _progress_update(self, numerator_increment, stage=0, **kw): pg.numerator += numerator_increment # we are done if pg.numerator == pg.denominator: + if stage in self._prog_rep_callbacks: + for callback in self._prog_rep_callbacks[stage]: + callback(stage, pg, **kw) self._progress_force_finish(stage) return elif pg.numerator > pg.denominator: @@ -184,7 +187,7 @@ def _progress_update(self, numerator_increment, stage=0, **kw): _show_progressbar(pg, description=desc) - if hasattr(self, '_prog_rep_callbacks') and stage in self._prog_rep_callbacks: + if stage in self._prog_rep_callbacks: for callback in self._prog_rep_callbacks[stage]: callback(stage, pg, **kw) From f86c89dee5874fb2b23c108f4b25bee73cec96be Mon Sep 17 00:00:00 2001 From: marscher Date: Mon, 25 Apr 2016 13:15:41 +0200 Subject: [PATCH 23/33] added changelog entry --- doc/source/CHANGELOG.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/doc/source/CHANGELOG.rst b/doc/source/CHANGELOG.rst index 52cf2cd10..37158b8aa 100644 --- a/doc/source/CHANGELOG.rst +++ b/doc/source/CHANGELOG.rst @@ -1,6 +1,12 @@ Changelog ========= +2.1.2 (4-25-2016) +----------------- + +Service release to fix release fix for memory leak in minRMSD cluster assignment. + + 2.1.1 (4-18-2016) ----------------- Service release. Fixes some From 4d5890038f0eab1482ff6d62958d42262ed9cf64 Mon Sep 17 00:00:00 2001 From: gph82 Date: Mon, 25 Apr 2016 14:06:04 +0200 Subject: [PATCH 24/33] [plots/timescales] fix off-by-one error in nr. 
of timescales --- pyemma/plots/timescales.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyemma/plots/timescales.py b/pyemma/plots/timescales.py index 0a5b71ec6..90b041aec 100644 --- a/pyemma/plots/timescales.py +++ b/pyemma/plots/timescales.py @@ -92,13 +92,15 @@ def plot_implied_timescales(ITS, ax=None, outfile=None, show_mle=True, show_mean raise TypeError('optional arguments nits and process are mutually exclusive:', nits, process) if not _is_iterable_of_int(process): raise ValueError('process has to be an iterable of integers') - if _np.max(process) > ITS.number_of_timescales: + if _np.max(process)+1 > ITS.number_of_timescales: raise ValueError('requested process %u, whereas ITS only contains %u timescales'%(_np.max(process), ITS.number_of_timescales)) # Now that it's for sure that nits==-1, process is iter_of_ints, and the requested processes exist in its object: its_idx = process else: if not _is_int(nits): raise TypeError('nits is not an integer, ',nits) + if nits == -1: + nits = ITS.number_of_timescales its_idx = _np.arange(ITS.number_of_timescales)[:nits] # Check units and dt for user error. From acb01846bed46832777c8706a3de9d2954d2e1c5 Mon Sep 17 00:00:00 2001 From: marscher Date: Mon, 25 Apr 2016 14:34:44 +0200 Subject: [PATCH 25/33] [estimator] fix exception handling. * Do not try to access estimator_instance.model in case of an exception. * Log exceptions as warning, if estimator is Loggable. --- pyemma/_base/estimator.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pyemma/_base/estimator.py b/pyemma/_base/estimator.py index 164996f47..03fadcf49 100644 --- a/pyemma/_base/estimator.py +++ b/pyemma/_base/estimator.py @@ -127,10 +127,13 @@ def _estimate_param_scan_worker(estimator, params, X, evaluate, evaluate_args, """ # run estimation + model = None try: # catch any exception - estimator.estimate(X, **params) + model = estimator.estimate(X, **params) except: - e = sys.exc_info()[0] + e = sys.exc_info()[1] + if isinstance(estimator, Loggable): + estimator.logger.warning("Ignored error during estimation: %s" % e) if failfast: raise # re-raise else: @@ -141,7 +144,7 @@ def _estimate_param_scan_worker(estimator, params, X, evaluate, evaluate_args, # deal with result if evaluate is None: # we want full models - res.append(estimator.model) + res.append(model) # we want to evaluate function(s) of the model elif _types.is_iterable(evaluate): values = [] # the function values the model From 208b580c838d37aa52cece2b3abd17c83421cc66 Mon Sep 17 00:00:00 2001 From: marscher Date: Mon, 25 Apr 2016 14:45:15 +0200 Subject: [PATCH 26/33] do not return estimator, but the model --- pyemma/_base/estimator.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyemma/_base/estimator.py b/pyemma/_base/estimator.py index 03fadcf49..4d8ea4b71 100644 --- a/pyemma/_base/estimator.py +++ b/pyemma/_base/estimator.py @@ -129,7 +129,8 @@ def _estimate_param_scan_worker(estimator, params, X, evaluate, evaluate_args, # run estimation model = None try: # catch any exception - model = estimator.estimate(X, **params) + estimator.estimate(X, **params) + model = estimator.model except: e = sys.exc_info()[1] if isinstance(estimator, Loggable): From 2fb4a650a31a60aa45e8b7212de1e244d13e827d Mon Sep 17 00:00:00 2001 From: marscher Date: Mon, 25 Apr 2016 15:54:47 +0200 Subject: [PATCH 27/33] added test --- pyemma/msm/tests/test_estimator.py | 51 ++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 
pyemma/msm/tests/test_estimator.py diff --git a/pyemma/msm/tests/test_estimator.py b/pyemma/msm/tests/test_estimator.py new file mode 100644 index 000000000..f26ba79a6 --- /dev/null +++ b/pyemma/msm/tests/test_estimator.py @@ -0,0 +1,51 @@ +# This file is part of PyEMMA. +# +# Copyright (c) 2016 Computational Molecular Biology Group, Freie Universitaet Berlin (GER) +# +# PyEMMA is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program. If not, see . + +import unittest +import mock +from pyemma import msm +from functools import wraps + + +class TestCK_MSM(unittest.TestCase): + + def test_failfast_true(self): + """ test that exception is thrown for failfast=True""" + from pyemma._base.estimator import _estimate_param_scan_worker + failfast = True + @wraps(_estimate_param_scan_worker) + def worker_wrapper(*args): + args = list(args) + args[5] = failfast + return _estimate_param_scan_worker(*args) + with self.assertRaises(NotImplementedError): + with mock.patch('pyemma._base.estimator._estimate_param_scan_worker', worker_wrapper): + hmm = msm.estimate_hidden_markov_model([0, 0, 0, 1, 1, 1, 0, 0], 2, 1,) + hmm.cktest() + + def test_failfast_false(self): + """ test, that no exception is raised during estimation""" + from pyemma._base.estimator import _estimate_param_scan_worker + failfast = False + @wraps(_estimate_param_scan_worker) + def worker_wrapper(*args): + args = list(args) + args[5] = failfast + return _estimate_param_scan_worker(*args) + with mock.patch('pyemma._base.estimator._estimate_param_scan_worker', worker_wrapper): + hmm = msm.estimate_hidden_markov_model([0, 0, 0, 1, 1, 1, 0, 0], 2, 1,) + hmm.cktest() From 1a5defcd190665b375de61d0d71a846ad158092d Mon Sep 17 00:00:00 2001 From: marscher Date: Mon, 25 Apr 2016 17:26:20 +0200 Subject: [PATCH 28/33] [doc] updated Makefile to easily set parameters for nbconvert. Added config --- doc/Makefile | 17 +- doc/jupyter_nbconvert_config.py | 450 ++++++++++++++++++++++++++++++++ 2 files changed, 454 insertions(+), 13 deletions(-) create mode 100644 doc/jupyter_nbconvert_config.py diff --git a/doc/Makefile b/doc/Makefile index bcada9d17..66bca43fc 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -17,8 +17,8 @@ PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source # ipython notebook index file -F = source/generated/ipynb-list.rst -HEADER = '.. 
toctree:: \n :maxdepth: 1 \n' +NBCONVERT_CONFIG = jupyter_nbconvert_config.py +NBCONVERT_CMD = jupyter nbconvert $(nbflags) --config $(NBCONVERT_CONFIG) .PHONY: help clean html ipython-rst @@ -42,20 +42,10 @@ source/generated: mkdir -p source/generated ipython-rst: source/generated - # find all ipython notebooks (without their checkpoints) @$(eval notebooks:= $$(shell cd source/generated && \ find ../../../pyemma-ipython/ -name \*.ipynb \ -and -not -name \*-checkpoint.ipynb)) - # convert notebooks to rst - cd source/generated; jupyter nbconvert --to rst $(notebooks) - # strip directory part and extensions - #@$(eval notebooks=$$(notdir $(notebooks))) - #@$(eval notebooks=$$(basename $(notebooks))) - # write header - #@echo $(HEADER) > $(F) - # put every notebook as a link in index file - #@$(foreach n,$(notebooks), echo " $n" >> $(F);) - + cd source/generated; $(NBCONVERT_CMD) $(notebooks) @echo "Conversion finished." latex: @@ -64,3 +54,4 @@ latex: html_check: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/html $(FILES) + diff --git a/doc/jupyter_nbconvert_config.py b/doc/jupyter_nbconvert_config.py new file mode 100644 index 000000000..fa1c26fb1 --- /dev/null +++ b/doc/jupyter_nbconvert_config.py @@ -0,0 +1,450 @@ +# Configuration file for jupyter-nbconvert. + +#------------------------------------------------------------------------------ +# Configurable configuration +#------------------------------------------------------------------------------ + +#------------------------------------------------------------------------------ +# LoggingConfigurable configuration +#------------------------------------------------------------------------------ + +# A parent class for Configurables that log. +# +# Subclasses have a log trait, and the default behavior is to get the logger +# from the currently running Application. + +#------------------------------------------------------------------------------ +# SingletonConfigurable configuration +#------------------------------------------------------------------------------ + +# A configurable that only allows one instance. +# +# This class is for classes that should only have one instance of itself or +# *any* subclass. To create and retrieve such a class use the +# :meth:`SingletonConfigurable.instance` method. + +#------------------------------------------------------------------------------ +# Application configuration +#------------------------------------------------------------------------------ + +# This is an application. + +# The date format used by logging formatters for %(asctime)s +# c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S' + +# The Logging format template +# c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s' + +# Set the log level by value or name. +# c.Application.log_level = 30 + +#------------------------------------------------------------------------------ +# JupyterApp configuration +#------------------------------------------------------------------------------ + +# Base class for Jupyter applications + +# Answer yes to any prompts. +# c.JupyterApp.answer_yes = False + +# Full path of a config file. +# c.JupyterApp.config_file = u'' + +# Specify a config file to load. +# c.JupyterApp.config_file_name = u'' + +# Generate default config file. 
+# c.JupyterApp.generate_config = False + +#------------------------------------------------------------------------------ +# NbConvertApp configuration +#------------------------------------------------------------------------------ + +# This application is used to convert notebook files (*.ipynb) to various other +# formats. +# +# WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES. + +# The export format to be used, either one of the built-in formats, or a dotted +# object name that represents the import path for an `Exporter` class +c.NbConvertApp.export_format = 'rst' + +# read a single notebook from stdin. +# c.NbConvertApp.from_stdin = False + +# List of notebooks to convert. Wildcards are supported. Filenames passed +# positionally will be added to the list. +# c.NbConvertApp.notebooks = [] + +# overwrite base name use for output files. can only be used when converting one +# notebook at a time. +# c.NbConvertApp.output_base = '' + +# PostProcessor class used to write the results of the conversion +# c.NbConvertApp.postprocessor_class = u'' + +# Whether to apply a suffix prior to the extension (only relevant when +# converting to notebook format). The suffix is determined by the exporter, and +# is usually '.nbconvert'. +# c.NbConvertApp.use_output_suffix = True + +# Writer class used to write the results of the conversion +# c.NbConvertApp.writer_class = 'FilesWriter' + +#------------------------------------------------------------------------------ +# NbConvertBase configuration +#------------------------------------------------------------------------------ + +# Global configurable class for shared config +# +# Useful for display data priority that might be used by many transformers + +# DEPRECATED default highlight language, please use language_info metadata +# instead +# c.NbConvertBase.default_language = 'ipython' + +# An ordered list of preferred output type, the first encountered will usually +# be used when converting discarding the others. +# c.NbConvertBase.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/markdown', 'text/plain'] + +#------------------------------------------------------------------------------ +# Exporter configuration +#------------------------------------------------------------------------------ + +# Class containing methods that sequentially run a list of preprocessors on a +# NotebookNode object and then return the modified NotebookNode object and +# accompanying resources dict. + +# List of preprocessors available by default, by name, namespace, instance, or +# type. +# c.Exporter.default_preprocessors = ['nbconvert.preprocessors.ClearOutputPreprocessor', 'nbconvert.preprocessors.ExecutePreprocessor', 'nbconvert.preprocessors.coalesce_streams', 'nbconvert.preprocessors.SVG2PDFPreprocessor', 'nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'nbconvert.preprocessors.LatexPreprocessor', 'nbconvert.preprocessors.HighlightMagicsPreprocessor', 'nbconvert.preprocessors.ExtractOutputPreprocessor'] + +# Extension of the file that should be written to disk +# c.Exporter.file_extension = '.txt' + +# List of preprocessors, by name or namespace, to enable. +# c.Exporter.preprocessors = [] + +#------------------------------------------------------------------------------ +# TemplateExporter configuration +#------------------------------------------------------------------------------ + +# Exports notebooks into other file formats. 
Uses Jinja 2 templating engine to +# output new formats. Inherit from this class if you are creating a new +# template type along with new filters/preprocessors. If the filters/ +# preprocessors provided by default suffice, there is no need to inherit from +# this class. Instead, override the template_file and file_extension traits via +# a config file. +# +# - add_anchor - add_prompts - ansi2html - ansi2latex - ascii_only - +# citation2latex - comment_lines - escape_latex - filter_data_type - get_lines - +# get_metadata - highlight2html - highlight2latex - html2text - indent - +# ipython2python - markdown2html - markdown2latex - markdown2rst - path2url - +# posix_path - prevent_list_blocks - strip_ansi - strip_dollars - +# strip_files_prefix - wrap_text + +# Dictionary of filters, by name and namespace, to add to the Jinja environment. +# c.TemplateExporter.filters = {} + +# formats of raw cells to be included in this Exporter's output. +# c.TemplateExporter.raw_mimetypes = [] + +# +# c.TemplateExporter.template_extension = '.tpl' + +# Name of the template file to use +# c.TemplateExporter.template_file = u'' + +# +# c.TemplateExporter.template_path = ['.'] + +#------------------------------------------------------------------------------ +# HTMLExporter configuration +#------------------------------------------------------------------------------ + +# Exports a basic HTML document. This exporter assists with the export of HTML. +# Inherit from it if you are writing your own HTML template and need custom +# preprocessors/filters. If you don't need custom preprocessors/ filters, just +# change the 'template_file' config option. + +#------------------------------------------------------------------------------ +# LatexExporter configuration +#------------------------------------------------------------------------------ + +# Exports to a Latex template. Inherit from this class if your template is +# LaTeX based and you need custom tranformers/filters. Inherit from it if you +# are writing your own HTML template and need custom tranformers/filters. If +# you don't need custom tranformers/filters, just change the 'template_file' +# config option. Place your template in the special "/latex" subfolder of the +# "../templates" folder. + +# +# c.LatexExporter.template_extension = '.tplx' + +#------------------------------------------------------------------------------ +# MarkdownExporter configuration +#------------------------------------------------------------------------------ + +# Exports to a markdown document (.md) + +#------------------------------------------------------------------------------ +# NotebookExporter configuration +#------------------------------------------------------------------------------ + +# Exports to an IPython notebook. + +# The nbformat version to write. Use this to downgrade notebooks. +# c.NotebookExporter.nbformat_version = 4 + +#------------------------------------------------------------------------------ +# PDFExporter configuration +#------------------------------------------------------------------------------ + +# Writer designed to write to PDF files + +# Shell command used to run bibtex. +# c.PDFExporter.bib_command = [u'bibtex', u'{filename}'] + +# Shell command used to compile latex. +# c.PDFExporter.latex_command = [u'pdflatex', u'{filename}'] + +# How many times latex will be called. +# c.PDFExporter.latex_count = 3 + +# File extensions of temp files to remove after running. 
+# c.PDFExporter.temp_file_exts = ['.aux', '.bbl', '.blg', '.idx', '.log', '.out'] + +# Whether to display the output of latex commands. +# c.PDFExporter.verbose = False + +#------------------------------------------------------------------------------ +# PythonExporter configuration +#------------------------------------------------------------------------------ + +# Exports a Python code file. + +#------------------------------------------------------------------------------ +# RSTExporter configuration +#------------------------------------------------------------------------------ + +# Exports restructured text documents. + +#------------------------------------------------------------------------------ +# ScriptExporter configuration +#------------------------------------------------------------------------------ + +#------------------------------------------------------------------------------ +# SlidesExporter configuration +#------------------------------------------------------------------------------ + +# Exports HTML slides with reveal.js + +# The URL prefix for reveal.js. This can be a a relative URL for a local copy of +# reveal.js, or point to a CDN. +# +# For speaker notes to work, a local reveal.js prefix must be used. +# c.SlidesExporter.reveal_url_prefix = u'' + +#------------------------------------------------------------------------------ +# Preprocessor configuration +#------------------------------------------------------------------------------ + +# A configurable preprocessor +# +# Inherit from this class if you wish to have configurability for your +# preprocessor. +# +# Any configurable traitlets this class exposed will be configurable in profiles +# using c.SubClassName.attribute = value +# +# you can overwrite :meth:`preprocess_cell` to apply a transformation +# independently on each cell or :meth:`preprocess` if you prefer your own logic. +# See corresponding docstring for informations. +# +# Disabled by default and can be enabled via the config by +# 'c.YourPreprocessorName.enabled = True' + +# +# c.Preprocessor.enabled = False + +#------------------------------------------------------------------------------ +# CSSHTMLHeaderPreprocessor configuration +#------------------------------------------------------------------------------ + +# Preprocessor used to pre-process notebook for HTML output. Adds IPython +# notebook front-end CSS and Pygments CSS to HTML output. + +# CSS highlight class identifier +# c.CSSHTMLHeaderPreprocessor.highlight_class = '.highlight' + +#------------------------------------------------------------------------------ +# ClearOutputPreprocessor configuration +#------------------------------------------------------------------------------ + +# Removes the output from all code cells in a notebook. + +#------------------------------------------------------------------------------ +# ConvertFiguresPreprocessor configuration +#------------------------------------------------------------------------------ + +# Converts all of the outputs in a notebook from one format to another. 
+ +# Format the converter accepts +# c.ConvertFiguresPreprocessor.from_format = u'' + +# Format the converter writes +# c.ConvertFiguresPreprocessor.to_format = u'' + +#------------------------------------------------------------------------------ +# ExecutePreprocessor configuration +#------------------------------------------------------------------------------ + +# Executes all the cells in a notebook + +# If `False` (default), when a cell raises an error the execution is stoppped +# and a `CellExecutionError` is raised. If `True`, execution errors are ignored +# and the execution is continued until the end of the notebook. Output from +# exceptions is included in the cell output in both cases. +# c.ExecutePreprocessor.allow_errors = False + +# If execution of a cell times out, interrupt the kernel and continue executing +# other cells rather than throwing an error and stopping. +# c.ExecutePreprocessor.interrupt_on_timeout = False + +# Name of kernel to use to execute the cells. If not set, use the kernel_spec +# embedded in the notebook. +# c.ExecutePreprocessor.kernel_name = '' + +# If `False` (default), then the kernel will continue waiting for iopub messages +# until it receives a kernel idle message, or until a timeout occurs, at which +# point the currently executing cell will be skipped. If `True`, then an error +# will be raised after the first timeout. This option generally does not need to +# be used, but may be useful in contexts where there is the possibility of +# executing notebooks with memory-consuming infinite loops. +# c.ExecutePreprocessor.raise_on_iopub_timeout = False + +# The time to wait (in seconds) for output from executions. If a cell execution +# takes longer, an exception (TimeoutError on python 3+, RuntimeError on python +# 2) is raised. +# +# `None` or `-1` will disable the timeout. +c.ExecutePreprocessor.timeout = -1 + +#------------------------------------------------------------------------------ +# ExtractOutputPreprocessor configuration +#------------------------------------------------------------------------------ + +# Extracts all of the outputs from the notebook file. The extracted outputs +# are returned in the 'resources' dictionary. + +# +# c.ExtractOutputPreprocessor.extract_output_types = set(['image/png', 'application/pdf', 'image/jpeg', 'image/svg+xml']) + +# +# c.ExtractOutputPreprocessor.output_filename_template = '{unique_key}_{cell_index}_{index}{extension}' + +#------------------------------------------------------------------------------ +# HighlightMagicsPreprocessor configuration +#------------------------------------------------------------------------------ + +# Detects and tags code cells that use a different languages than Python. + +# Syntax highlighting for magic's extension languages. Each item associates a +# language magic extension such as %%R, with a pygments lexer such as r. +# c.HighlightMagicsPreprocessor.languages = {} + +#------------------------------------------------------------------------------ +# LatexPreprocessor configuration +#------------------------------------------------------------------------------ + +# Preprocessor for latex destined documents. +# +# Mainly populates the `latex` key in the resources dict, adding definitions for +# pygments highlight styles. 
+ +#------------------------------------------------------------------------------ +# SVG2PDFPreprocessor configuration +#------------------------------------------------------------------------------ + +# Converts all of the outputs in a notebook from SVG to PDF. + +# The command to use for converting SVG to PDF +# +# This string is a template, which will be formatted with the keys to_filename +# and from_filename. +# +# The conversion call must read the SVG from {from_flename}, and write a PDF to +# {to_filename}. +# c.SVG2PDFPreprocessor.command = u'' + +# The path to Inkscape, if necessary +# c.SVG2PDFPreprocessor.inkscape = u'' + +#------------------------------------------------------------------------------ +# WriterBase configuration +#------------------------------------------------------------------------------ + +# Consumes output from nbconvert export...() methods and writes to a useful +# location. + +# List of the files that the notebook references. Files will be included with +# written output. +# c.WriterBase.files = [] + +#------------------------------------------------------------------------------ +# DebugWriter configuration +#------------------------------------------------------------------------------ + +# Consumes output from nbconvert export...() methods and writes usefull +# debugging information to the stdout. The information includes a list of +# resources that were extracted from the notebook(s) during export. + +#------------------------------------------------------------------------------ +# FilesWriter configuration +#------------------------------------------------------------------------------ + +# Consumes nbconvert output and produces files. + +# Directory to write output to. Leave blank to output to the current directory +# c.FilesWriter.build_directory = '' + +# When copying files that the notebook depends on, copy them in relation to this +# path, such that the destination filename will be os.path.relpath(filename, +# relpath). If FilesWriter is operating on a notebook that already exists +# elsewhere on disk, then the default will be the directory containing that +# notebook. +# c.FilesWriter.relpath = '' + +#------------------------------------------------------------------------------ +# StdoutWriter configuration +#------------------------------------------------------------------------------ + +# Consumes output from nbconvert export...() methods and writes to the stdout +# stream. + +#------------------------------------------------------------------------------ +# PostProcessorBase configuration +#------------------------------------------------------------------------------ + +#------------------------------------------------------------------------------ +# ServePostProcessor configuration +#------------------------------------------------------------------------------ + +# Post processor designed to serve files +# +# Proxies reveal.js requests to a CDN if no local reveal.js is present + +# The IP address to listen on. +# c.ServePostProcessor.ip = '127.0.0.1' + +# Should the browser be opened automatically? +# c.ServePostProcessor.open_in_browser = True + +# port for the server to listen on. +# c.ServePostProcessor.port = 8000 + +# URL for reveal.js CDN. 
+# c.ServePostProcessor.reveal_cdn = 'https://cdnjs.cloudflare.com/ajax/libs/reveal.js/3.1.0' + +# URL prefix for reveal.js +# c.ServePostProcessor.reveal_prefix = 'reveal.js' From 7e7c643e30822f6e2045183c283e82136ea3dd39 Mon Sep 17 00:00:00 2001 From: marscher Date: Mon, 25 Apr 2016 18:24:19 +0200 Subject: [PATCH 29/33] [doc] use realpath for config, convert only notebooks contained in ipython.rst --- doc/Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/Makefile b/doc/Makefile index 66bca43fc..db5488b90 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -17,7 +17,7 @@ PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source # ipython notebook index file -NBCONVERT_CONFIG = jupyter_nbconvert_config.py +NBCONVERT_CONFIG = $(realpath jupyter_nbconvert_config.py) NBCONVERT_CMD = jupyter nbconvert $(nbflags) --config $(NBCONVERT_CONFIG) .PHONY: help clean html ipython-rst @@ -42,6 +42,7 @@ source/generated: mkdir -p source/generated ipython-rst: source/generated + @$(eval nbs_in_doc:= $$(shell grep generated source/ipython.rst | sed -re 's/\s+generated\/(.+)/\1/g' )) @$(eval notebooks:= $$(shell cd source/generated && \ find ../../../pyemma-ipython/ -name \*.ipynb \ -and -not -name \*-checkpoint.ipynb)) From 46e4d51f7796b19f152c83ebdbdb95403013ff0e Mon Sep 17 00:00:00 2001 From: marscher Date: Mon, 25 Apr 2016 19:16:31 +0200 Subject: [PATCH 30/33] [setup] pin thermotools version range --- devtools/conda-recipe/meta.yaml | 4 ++-- setup.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/devtools/conda-recipe/meta.yaml b/devtools/conda-recipe/meta.yaml index 3f72b6da9..2eb2580aa 100644 --- a/devtools/conda-recipe/meta.yaml +++ b/devtools/conda-recipe/meta.yaml @@ -20,7 +20,7 @@ requirements: - mock - funcsigs - msmtools - - thermotools >=0.1.14 + - thermotools >=0.1.14,<0.2 - nose - numpy x.x - pyyaml @@ -39,7 +39,7 @@ requirements: - mock - funcsigs - msmtools - - thermotools >=0.1.14 + - thermotools >=0.1.14,<0.2 - numpy x.x - pyyaml - scipy diff --git a/setup.py b/setup.py index af61d280b..9640440a6 100755 --- a/setup.py +++ b/setup.py @@ -216,7 +216,7 @@ def run(self): 'mdtraj>=1.5.0', 'matplotlib', 'msmtools', - 'thermotools>=0.1.14', + 'thermotools>=0.1.14,<0.2', 'bhmm>=0.6,<0.7', 'joblib>0.8.4', 'pyyaml', From da00e9ba9855a889f77eaa57abe74cdc9bcd77db Mon Sep 17 00:00:00 2001 From: marscher Date: Mon, 25 Apr 2016 19:41:08 +0200 Subject: [PATCH 31/33] [doc] convert only listed notebooks --- doc/Makefile | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/Makefile b/doc/Makefile index db5488b90..62c02c6b1 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -1,6 +1,7 @@ # Makefile for Sphinx documentation # +SHELL := /bin/bash PYVER = 2.7 PYTHON = python$(PYVER) @@ -42,11 +43,10 @@ source/generated: mkdir -p source/generated ipython-rst: source/generated - @$(eval nbs_in_doc:= $$(shell grep generated source/ipython.rst | sed -re 's/\s+generated\/(.+)/\1/g' )) - @$(eval notebooks:= $$(shell cd source/generated && \ - find ../../../pyemma-ipython/ -name \*.ipynb \ - -and -not -name \*-checkpoint.ipynb)) - cd source/generated; $(NBCONVERT_CMD) $(notebooks) + @$(eval nb_names:= $$(shell grep generated source/ipython.rst | sed -re 's/\s+generated\/(.+)/\1|/g')) + @$(eval notebooks:= $$(shell find ../pyemma-ipython -name *.ipynb | grep -E "$(nb_names)" )) + cd source/generated + for nb in $(notebooks); do $(NBCONVERT_CMD) $(nb); done @echo "Conversion finished." 
latex: From 4f7e0ed9899284193f8d3b82175f2f23701b928b Mon Sep 17 00:00:00 2001 From: marscher Date: Mon, 25 Apr 2016 20:11:37 +0200 Subject: [PATCH 32/33] fix --- doc/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/Makefile b/doc/Makefile index 62c02c6b1..a15403dd1 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -46,7 +46,7 @@ ipython-rst: source/generated @$(eval nb_names:= $$(shell grep generated source/ipython.rst | sed -re 's/\s+generated\/(.+)/\1|/g')) @$(eval notebooks:= $$(shell find ../pyemma-ipython -name *.ipynb | grep -E "$(nb_names)" )) cd source/generated - for nb in $(notebooks); do $(NBCONVERT_CMD) $(nb); done + set -x; for nb in $(notebooks); do $(NBCONVERT_CMD) $$nb; done @echo "Conversion finished." latex: From 5c1b5ecbb238cf49f35b59bb8accb5f9141d9cad Mon Sep 17 00:00:00 2001 From: marscher Date: Mon, 25 Apr 2016 20:24:58 +0200 Subject: [PATCH 33/33] [doc] restrict --- doc/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/Makefile b/doc/Makefile index a15403dd1..b82178463 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -43,7 +43,7 @@ source/generated: mkdir -p source/generated ipython-rst: source/generated - @$(eval nb_names:= $$(shell grep generated source/ipython.rst | sed -re 's/\s+generated\/(.+)/\1|/g')) + @$(eval nb_names:= $$(shell grep generated source/ipython.rst | sed -re 's/\s+generated\/(.+)/\/\1.ipynb|/g')) @$(eval notebooks:= $$(shell find ../pyemma-ipython -name *.ipynb | grep -E "$(nb_names)" )) cd source/generated set -x; for nb in $(notebooks); do $(NBCONVERT_CMD) $$nb; done
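
For reference, the notebook filtering that the last three doc/Makefile patches converge on can be read as the equivalent of the following standalone shell sketch. It only illustrates the grep/sed idea under the paths used in the patches (doc/source/ipython.rst for the list of referenced notebooks, ../pyemma-ipython for the notebook checkout, jupyter_nbconvert_config.py for the exporter settings); the nb_names/notebooks variable names come from the Makefile, while the newline stripping, the trailing-'|' trimming and the while-read loop are additions made here for clarity and are not part of the committed recipe.

    #!/bin/bash
    # Standalone sketch of the "ipython-rst" filtering step from doc/Makefile
    # (illustrative only; assumes it is run from the doc/ directory).

    # Build one grep -E alternation such as "/name1.ipynb|/name2.ipynb" from the
    # notebook names referenced in source/ipython.rst.
    nb_names=$(grep generated source/ipython.rst \
        | sed -re 's/\s+generated\/(.+)/\/\1.ipynb|/g' \
        | tr -d '\n')
    nb_names=${nb_names%|}   # drop the trailing '|' of the last entry

    # Convert only the notebooks whose paths match one of the referenced names.
    find ../pyemma-ipython -name '*.ipynb' \
        | grep -E "$nb_names" \
        | while read -r nb; do
              jupyter nbconvert --config jupyter_nbconvert_config.py "$nb"
          done

The trailing-'|' trimming is done here because an empty alternative makes grep -E match every line; whether that matters for the real recipe depends on how the generated pattern ends.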