[med-svn] [Git][med-team/conda-package-handling][master] 4 commits: New upstream version 1.8.0
Andreas Tille (@tille)
gitlab at salsa.debian.org
Fri Mar 18 12:27:20 GMT 2022
Andreas Tille pushed to branch master at Debian Med / conda-package-handling
Commits:
fc20d21f by Andreas Tille at 2022-03-18T12:54:19+01:00
New upstream version 1.8.0
- - - - -
ce50a0cb by Andreas Tille at 2022-03-18T12:54:19+01:00
routine-update: New upstream version
- - - - -
7afe5a0e by Andreas Tille at 2022-03-18T12:54:20+01:00
Update upstream source from tag 'upstream/1.8.0'
Update to upstream version '1.8.0'
with Debian dir f3152e601a4aa87aca967abbe29155fb3e5d1bba
- - - - -
ed549b20 by Andreas Tille at 2022-03-18T12:55:42+01:00
routine-update: Ready to upload to unstable
- - - - -
30 changed files:
- .authors.yml
- − .circleci/config.yml
- − .codacy.yml
- + .github/workflows/boards.yml
- + .github/workflows/issues.yml
- + .github/workflows/labels.yml
- .mailmap
- − .travis.yml
- AUTHORS.txt → AUTHORS.rst
- CHANGELOG.md → CHANGELOG.rst
- MANIFEST.in
- − appveyor.yml
- − conda.recipe/conda_build_config.yaml
- conda.recipe/meta.yaml
- debian/changelog
- rever.xsh
- setup.py
- src/conda_package_handling/__init__.py
- src/conda_package_handling/_version.py
- src/conda_package_handling/api.py
- src/conda_package_handling/cli.py
- src/conda_package_handling/conda_fmt.py
- src/conda_package_handling/interface.py
- src/conda_package_handling/tarball.py
- src/conda_package_handling/utils.py
- + tests/data/cph_test_data-0.0.1-0.tar.bz2
- + tests/recipes/cph_test_data/build.sh
- + tests/recipes/cph_test_data/meta.yaml
- tests/test_api.py
- versioneer.py
Changes:
=====================================
.authors.yml
=====================================
@@ -48,3 +48,38 @@
num_commits: 1
first_commit: 2021-03-14 08:41:18
github: seemethere
+- name: Daniel Bast
+ email: 2790401+dbast at users.noreply.github.com
+ num_commits: 2
+ first_commit: 2021-04-12 11:49:45
+- name: conda-bot
+ email: ad-team+condabot at anaconda.com
+ num_commits: 2
+ first_commit: 2022-01-25 21:32:39
+- name: Cheng H. Lee
+ email: clee at anaconda.com
+ alternate_emails:
+ - chenghlee at users.noreply.github.com
+ num_commits: 4
+ first_commit: 2022-02-09 18:08:33
+- name: Chris Burr
+ email: chrisburr at users.noreply.github.com
+ num_commits: 1
+ first_commit: 2022-02-17 10:23:53
+- name: Daniel Holth
+ email: dholth at anaconda.com
+ num_commits: 2
+ first_commit: 2021-08-20 21:11:50
+ github: dholth
+- name: vz-x
+ email: 77290357+vz-x at users.noreply.github.com
+ num_commits: 1
+ first_commit: 2022-02-17 11:31:22
+- name: Christopher Barber
+ email: christopher.barber at analog.com
+ num_commits: 2
+ first_commit: 2022-02-09 01:00:38
+- name: Jannis Leidel
+ email: jannis at leidel.info
+ num_commits: 5
+ first_commit: 2021-09-17 21:51:27
=====================================
.circleci/config.yml deleted
=====================================
@@ -1,48 +0,0 @@
-version: 2
-jobs:
- build:
- docker:
- - image: continuumio/miniconda3
-
- working_directory: ~/repo
-
- steps:
- - checkout
-
-
- # - restore_cache:
- # keys:
- # - v1-dependencies-{{ checksum "environment.yml" }}
- # - v1-dependencies-
-
-
- - run:
- name: install dependencies
- command: |
- # conda env create -q || conda env update -q
- # source activate adj
- conda install -qy conda-build anaconda-client pytest pytest-cov python-libarchive-c six futures
- conda config --set auto_update_conda no
- conda info -a
- pip install -e .
- conda build conda.recipe --no-test
- conda install --use-local conda-package-handling
-
-
- # - save_cache:
- # paths:
- # - /opt/conda
- # key: v1-dependencies-{{ checksum "environment.yml" }}
-
-
- - run:
- name: run tests
- command: |
- # source activate adj
- pytest --color=yes -v --cov=conda_package_handling tests
- conda install -c conda-forge codecov
- codecov
-
- - store_artifacts:
- path: test-reports
- destination: test-reports
=====================================
.codacy.yml deleted
=====================================
@@ -1,8 +0,0 @@
----
-exclude_paths:
- - 'conda_package_handling/_version.py'
- - 'tests/**/*'
- - 'tests/*'
- - 'benchmarks/**/*'
- - 'setup.py'
- - 'versioneer.py'
=====================================
.github/workflows/boards.yml
=====================================
@@ -0,0 +1,211 @@
+name: Automate Boards
+
+on:
+ issues:
+ types: [opened, labeled, unlabeled]
+
+env:
+ BACKLOG_LBL: backlog
+ SPRINT_LBL: sprint
+ # these variables cannot be used in if expressions
+ # see https://docs.github.com/en/actions/learn-github-actions/contexts#context-availability
+ TRIAGING_ID: 4
+ TRIAGING_URL: https://api.github.com/projects/13697310
+ BACKLOG_ID: 5
+ BACKLOG_URL: https://api.github.com/projects/13697370
+ SPRINT_ID: 8
+ SPRINT_URL: https://api.github.com/projects/13829490
+
+jobs:
+ # new -> Triaging[New]
+ to_triaging:
+ # if new issue (or old issue marked as [stale::recovered]) and not [backlog] or [sprint]
+ if: >-
+ !github.event.repository.fork
+ && github.event_name == 'issues'
+ && (
+ (
+ github.event.sender.login != 'conda-bot'
+ && github.event.action == 'opened'
+ )
+ || (
+ github.event.action == 'labeled'
+ && github.event.label.name == 'stale::recovered'
+ )
+ )
+ && !contains(github.event.issue.labels.*.name, 'backlog')
+ && !contains(github.event.issue.labels.*.name, 'sprint')
+ runs-on: ubuntu-latest
+ steps:
+ # add to Triaging board
+ - uses: alex-page/github-project-automation-plus@v0.8.1
+ with:
+ action: add # if not present
+ project: Triaging
+ column: New
+ repo-token: ${{ secrets.PROJECT_TOKEN }}
+
+ # new -> Backlog[Unplanned]
+ # Triaging[Ready] -> Backlog[Unplanned]
+ # Sprint[To Do] -> Backlog[Do Next]
+ to_backlog:
+ # if new issue with [backlog]
+ # if labeled [backlog]
+ # if added to Backlog board
+ # if unlabeled [sprint]
+ # if removed from Sprint board
+ if: >-
+ !github.event.repository.fork
+ && github.event.sender.login != 'conda-bot'
+ && github.event.issue.state == 'open'
+ && (
+ (
+ github.event_name == 'issues'
+ && github.event.action == 'opened'
+ && contains(github.event.issue.labels.*.name, 'backlog')
+ && !contains(github.event.issue.labels.*.name, 'sprint')
+ )
+ || (
+ github.event_name == 'issues'
+ && github.event.action == 'labeled'
+ && github.event.label.name == 'backlog'
+ )
+ || (
+ github.event_name == 'issues'
+ && github.event.action == 'unlabeled'
+ && github.event.label.name == 'sprint'
+ )
+ )
+ runs-on: ubuntu-latest
+ steps:
+ # (helper) access private GitHub Action
+ - uses: actions/checkout@v2
+ # (helper) detect if attached to triaging board
+ - id: on_triaging
+ uses: conda/actions/issue-in-project@v1.1
+ with:
+ org: conda
+ project: ${{ env.TRIAGING_ID }}
+ issue: ${{ github.event.issue.id }}
+ github_token: ${{ secrets.PROJECT_TOKEN }}
+ # (helper) detect if attached to sprint board
+ - id: on_sprint
+ uses: conda/actions/issue-in-project@v1.1
+ with:
+ org: conda
+ project: ${{ env.SPRINT_ID }}
+ issue: ${{ github.event.issue.id }}
+ github_token: ${{ secrets.PROJECT_TOKEN }}
+ # (fail-safe) remove from Triaging board
+ - uses: alex-page/github-project-automation-plus@v0.8.1
+ with:
+ action: delete # if present
+ project: Triaging
+ column: Ready # unused
+ repo-token: ${{ secrets.PROJECT_TOKEN }}
+ # add [backlog] label
+ - uses: actions-ecosystem/action-add-labels@v1.1.0
+ with:
+ labels: ${{ env.BACKLOG_LBL }}
+ number: ${{ github.event.issue.number }}
+ github_token: ${{ secrets.PROJECT_TOKEN }}
+ # add to Backlog board (from Triaging board)
+ - uses: alex-page/github-project-automation-plus@v0.8.1
+ if: >-
+ fromJSON(steps.on_triaging.outputs.contains)
+ && !contains(github.event.issue.labels.*.name, 'sprint')
+ && !fromJSON(steps.on_sprint.outputs.contains)
+ with:
+ action: add # if not present
+ project: Backlog
+ column: Unplanned
+ repo-token: ${{ secrets.PROJECT_TOKEN }}
+ # add to Backlog board (from Sprint board)
+ - uses: alex-page/github-project-automation-plus@v0.8.1
+ if: >-
+ !fromJSON(steps.on_triaging.outputs.contains)
+ || contains(github.event.issue.labels.*.name, 'sprint')
+ || fromJSON(steps.on_sprint.outputs.contains)
+ with:
+ action: add # if not present
+ project: Backlog
+ column: Do Next
+ repo-token: ${{ secrets.PROJECT_TOKEN }}
+ # remove [sprint] label
+ - uses: actions-ecosystem/action-remove-labels@v1.3.0
+ with:
+ labels: ${{ env.SPRINT_LBL }}
+ number: ${{ github.event.issue.number }}
+ github_token: ${{ secrets.PROJECT_TOKEN }}
+ # remove from Sprint board
+ - uses: alex-page/github-project-automation-plus@v0.8.1
+ with:
+ action: delete # if present
+ project: Sprint
+ column: To Do # unused
+ repo-token: ${{ secrets.PROJECT_TOKEN }}
+
+ # new -> Sprint[To Do]
+ # Backlog[Do Next] -> Sprint[To Do]
+ to_sprint:
+ # if new issue with [sprint]
+ # if unlabeled [backlog]
+ # if removed from Backlog board
+ # if labeled [sprint]
+ # if added to Sprint board
+ if: >-
+ !github.event.repository.fork
+ && github.event.sender.login != 'conda-bot'
+ && github.event.issue.state == 'open'
+ && (
+ (
+ github.event_name == 'issues'
+ && github.event.action == 'opened'
+ && contains(github.event.issue.labels.*.name, 'sprint')
+ )
+ || (
+ github.event_name == 'issues'
+ && github.event.action == 'unlabeled'
+ && github.event.label.name == 'backlog'
+ )
+ || (
+ github.event_name == 'issues'
+ && github.event.action == 'labeled'
+ && github.event.label.name == 'sprint'
+ )
+ )
+ runs-on: ubuntu-latest
+ steps:
+ # (fail-safe) remove from Triaging board
+ - uses: alex-page/github-project-automation-plus@v0.8.1
+ with:
+ action: delete # if present
+ project: Triaging
+ column: Ready # unused
+ repo-token: ${{ secrets.PROJECT_TOKEN }}
+ # remove [backlog] label
+ - uses: actions-ecosystem/action-remove-labels@v1.3.0
+ with:
+ labels: ${{ env.BACKLOG_LBL }}
+ number: ${{ github.event.issue.number }}
+ github_token: ${{ secrets.PROJECT_TOKEN }}
+ # remove from Backlog board
+ - uses: alex-page/github-project-automation-plus@v0.8.1
+ with:
+ action: delete # if present
+ project: Backlog
+ column: Do Next # unused
+ repo-token: ${{ secrets.PROJECT_TOKEN }}
+ # add [sprint] label
+ - uses: actions-ecosystem/action-add-labels@v1.1.0
+ with:
+ labels: ${{ env.SPRINT_LBL }}
+ number: ${{ github.event.issue.number }}
+ github_token: ${{ secrets.PROJECT_TOKEN }}
+ # add to Sprint board
+ - uses: alex-page/github-project-automation-plus@v0.8.1
+ with:
+ action: add # if not present
+ project: Sprint
+ column: To Do
+ repo-token: ${{ secrets.PROJECT_TOKEN }}
=====================================
.github/workflows/issues.yml
=====================================
@@ -0,0 +1,37 @@
+name: Automate Issues
+
+on:
+ issue_comment:
+ types: [created]
+
+env:
+ # labels
+ FEEDBACK_LBL: pending::feedback
+ SUPPORT_LBL: pending::support
+
+jobs:
+ # NOTE: doesn't catch cases where multiple users act as the author/reporter,
+ # this is just an effort to catch the majority of support cases
+ # TODO: create conda-triaging team and modify this to toggle label based on
+ # whether a non-triaging engineer commented
+ pending_support:
+ # if [pending::feedback] and the author responds
+ if: >-
+ github.event_name == 'issue_comment'
+ && github.event.action == 'created'
+ && !github.event.issue.pull_request
+ && contains(github.event.issue.labels.*.name, 'pending::feedback')
+ && github.event.issue.user.login == github.event.comment.user.login
+ runs-on: ubuntu-latest
+ steps:
+ # remove [pending::feedback]
+ - uses: actions-ecosystem/action-remove-labels@v1.3.0
+ with:
+ labels: ${{ env.FEEDBACK_LBL }}
+ github_token: ${{ secrets.PROJECT_TOKEN }}
+ # add [pending::support], if still open
+ - uses: actions-ecosystem/action-add-labels@v1.1.0
+ if: github.event.issue.state == 'open'
+ with:
+ labels: ${{ env.SUPPORT_LBL }}
+ github_token: ${{ secrets.PROJECT_TOKEN }}
=====================================
.github/workflows/labels.yml
=====================================
@@ -0,0 +1,38 @@
+name: Sync Labels
+
+on:
+ workflow_dispatch:
+ inputs:
+ dryrun:
+ description: "dryrun: Preview changes to labels without editing them (true|false)"
+ required: true
+ default: "true"
+
+jobs:
+ sync:
+ runs-on: ubuntu-latest
+ env:
+ GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml
+ LOCAL: .github/labels.yml
+ steps:
+ - uses: actions/checkout@v2
+ - id: has_local
+ uses: andstor/file-existence-action@v1.0.1
+ with:
+ files: ${{ env.LOCAL }}
+ - name: Global Only
+ uses: EndBug/label-sync@v2.1.0
+ if: steps.has_local.outputs.files_exists == 'false'
+ with:
+ config-file: ${{ env.GLOBAL }}
+ delete-other-labels: true
+ dry-run: ${{ github.event.inputs.dryrun }}
+ - name: Global & Local
+ uses: EndBug/label-sync@v2.1.0
+ if: steps.has_local.outputs.files_exists == 'true'
+ with:
+ config-file: |
+ ${{ env.GLOBAL }}
+ ${{ env.LOCAL }}
+ delete-other-labels: true
+ dry-run: ${{ github.event.inputs.dryrun }}
=====================================
.mailmap
=====================================
@@ -14,8 +14,16 @@ Michael Sarahan <msarahan at gmail.com> Mike Sarahan <msarahan at anaconda.com>
Ray Donnelly <mingw.android at gmail.com>
leej3 <johnleenimh at gmail.com>
Jonathan J. Helmus <jjhelmus at gmail.com>
+Jannis Leidel <jannis at leidel.info>
Nehal J Wani <nehaljw.kkd1 at gmail.com>
Alan Du <alanhdu at gmail.com>
+Cheng H. Lee <clee at anaconda.com> Cheng H. Lee <chenghlee at users.noreply.github.com>
Matthew R. Becker <beckermr at users.noreply.github.com>
+Daniel Bast <2790401+dbast at users.noreply.github.com>
+conda-bot <ad-team+condabot at anaconda.com>
+Daniel Holth <dholth at anaconda.com>
+Christopher Barber <christopher.barber at analog.com>
ossdev07 <ossdev at puresoftware.com>
Eli Uriegas <seemethere101 at gmail.com>
+Chris Burr <chrisburr at users.noreply.github.com>
+vz-x <77290357+vz-x at users.noreply.github.com>
=====================================
.travis.yml deleted
=====================================
@@ -1,53 +0,0 @@
-# Config file for automatic testing at travis-ci.org
-
-dist: xenial
-language: python
-matrix:
- include:
- - python: 2.7
- - arch: amd64
- python: 3.7
- - arch: arm64
- python: 3.7
-env:
- global:
- - SUDO=""
- - CONDA_ALWAYS_YES=1
-install:
- - set -e
- - if [[ "${TRAVIS_CPU_ARCH}" == "arm64" ]]; then
- set -x;
- # Archiconda install requires sudo for running commands. Pass the path
- # through to the sudo environment so that condda is accessible
- SUDO='sudo env PATH=$PATH'
- wget -q "https://github.com/Archiconda/build-tools/releases/download/0.2.3/Archiconda3-0.2.3-Linux-aarch64.sh" -O archiconda.sh;
- chmod +x archiconda.sh;
- bash archiconda.sh -b -p $HOME/miniconda;
- export PATH="$HOME/miniconda/bin:$PATH";
- $SUDO conda install python=3.7 conda conda-build;
- set +x;
- else
- wget https://repo.anaconda.com/pkgs/misc/conda-execs/conda-4.7.5-linux-64.exe -O conda.exe;
- chmod +x conda.exe;
- ./conda.exe create -p $HOME/miniconda python=$TRAVIS_PYTHON_VERSION conda conda-build;
- fi;
- - export PATH="$HOME/miniconda/bin:$PATH"
- - hash -r
- - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
- pip install futures mock pytest-cov pytest-mock;
- else
- pip3 install mock pytest-cov pytest-mock;
- fi;
- - $SUDO conda build conda.recipe --no-test
- - $SUDO conda install --use-local conda-package-handling
- - $SUDO conda info -a
-script:
- # rebuilding the recipe with our new CPH installed tests it a bit deeper than the test suite.
- - which conda
- - $SUDO conda build conda.recipe
- # the system pytest is used for coverage testing, cph from current conda envorinment is used.
- - which pytest
- - pytest -v --color=yes -rA --cov=conda_package_handling tests
-after_success:
- - $SUDO conda install codecov
- - codecov
=====================================
AUTHORS.txt → AUTHORS.rst
=====================================
@@ -5,8 +5,16 @@ Authors are sorted by number of commits.
* Ray Donnelly
* leej3
* Jonathan J. Helmus
+* Jannis Leidel
* Nehal J Wani
* Alan Du
+* Cheng H. Lee
* Matthew R. Becker
+* Daniel Bast
+* conda-bot
+* Daniel Holth
+* Christopher Barber
* ossdev07
* Eli Uriegas
+* Chris Burr
+* vz-x
=====================================
CHANGELOG.md → CHANGELOG.rst
=====================================
@@ -1,6 +1,51 @@
-# current developments
+.. current developments
+
+2022-03-12 1.8.0:
+==================
+
+Enhancements:
+-------------
+
+* Compute package hashes in threads. (#83)
+
+Bug fixes:
+----------
+
+* Fix running from a read-only working directory (#44)
+* Fix symlinks to directories being incorrectly placed in the ``info`` tarball
+ when transmuting ``.tar.bz2``- to ``.conda``-format packages (#84)
+* No longer generate empty metadata.json in v2 packages (#88)
+* Fix for TypeError in tarball.py. (#86)
+
+Deprecations:
+-------------
+
+* Remove Python 2 support.
+
+Other:
+------
+
+* Added project board, issue staleness, thread locking and label automation
+ using GitHub Actions workflows to improve maintenance of the GitHub project.
+
+ More information can be found in the infra repo: https://github.com/conda/infra
+
+* Removed unused continuous integration platform config files.
+
+Contributors:
+-------------
+
+* @dholth
+* @conda-bot
+* @chenghlee
+* @analog-cbarber
+* @chrisburr
+* @vz-x
+* @jezdez
+
+
2021-04-12 1.7.3:
-------------------
+==================
Enhancements:
-------------
@@ -28,7 +73,7 @@ Contributors:
2020-10-16 1.7.2:
-------------------
+==================
Enhancements:
-------------
@@ -49,7 +94,7 @@ Contributors:
* @nehaljwani
2019-09-20 1.6.0:
-------------------
+==================
Enhancements:
-------------
@@ -71,7 +116,7 @@ Contributors:
2019-08-31 1.5.0:
-------------------
+==================
Contributors:
-------------
@@ -81,7 +126,7 @@ Contributors:
2019-08-04 1.4.1:
-------------------
+==================
Enhancements:
-------------
@@ -95,7 +140,7 @@ Contributors:
2019-08-02 1.4.0:
-------------------
+==================
Bug fixes:
----------
@@ -110,7 +155,7 @@ Contributors:
2019-07-11 1.3.11:
-------------------
+==================
Bug fixes:
----------
@@ -124,7 +169,7 @@ Contributors:
2019-06-24 1.3.10:
-------------------
+==================
Contributors:
-------------
@@ -133,7 +178,7 @@ Contributors:
2019-06-14 1.3.9:
-------------------
+==================
Bug fixes:
----------
@@ -147,7 +192,7 @@ Contributors:
2019-06-13 1.3.8:
-------------------
+==================
Bug fixes:
----------
@@ -161,7 +206,7 @@ Contributors:
2019-06-12 1.3.7:
-------------------
+==================
Bug fixes:
----------
@@ -175,7 +220,7 @@ Contributors:
2019-06-12 1.3.6:
-------------------
+==================
Contributors:
-------------
@@ -183,7 +228,7 @@ Contributors:
2019-06-12 1.3.5:
-------------------
+==================
Bug fixes:
----------
@@ -197,7 +242,7 @@ Contributors:
2019-06-11 1.3.4:
-------------------
+==================
Bug fixes:
----------
@@ -212,7 +257,7 @@ Contributors:
2019-06-11 1.3.3:
-------------------
+==================
Bug fixes:
----------
@@ -226,7 +271,7 @@ Contributors:
2019-06-11 1.3.2:
-------------------
+==================
Bug fixes:
----------
@@ -240,7 +285,7 @@ Contributors:
2019-06-11 1.3.1:
-------------------
+==================
Bug fixes:
----------
@@ -254,7 +299,7 @@ Contributors:
2019-06-10 1.3.0:
-------------------
+==================
Enhancements:
-------------
@@ -268,7 +313,7 @@ Contributors:
2019-06-08 1.2.0:
-------------------
+==================
Enhancements:
-------------
@@ -283,7 +328,7 @@ Contributors:
2019-05-21 1.1.5:
-------------------
+==================
Bug fixes:
----------
@@ -297,7 +342,7 @@ Contributors:
2019-05-21 1.1.4:
-------------------
+==================
Enhancements:
-------------
@@ -311,7 +356,7 @@ Contributors:
2019-05-20 1.1.3:
-------------------
+==================
Bug fixes:
----------
@@ -325,7 +370,7 @@ Contributors:
2019-05-20 1.1.2:
-------------------
+==================
Bug fixes:
----------
@@ -339,7 +384,7 @@ Contributors:
2019-05-14 1.1.1:
-------------------
+==================
Bug fixes:
----------
@@ -353,7 +398,7 @@ Contributors:
2019-05-10 1.1.0:
-------------------
+==================
Bug fixes:
----------
@@ -371,7 +416,7 @@ Contributors:
2019-02-13 1.0.4:
-------------------
+==================
Enhancements:
-------------
@@ -386,7 +431,7 @@ Contributors:
2019-02-04 1.0.3:
-------------------
+==================
Bug fixes:
----------
@@ -400,7 +445,7 @@ Contributors:
2019-02-04 1.0.2:
-------------------
+==================
Contributors:
-------------
@@ -409,14 +454,7 @@ Contributors:
2019-02-04 1.0.1:
-------------------
+==================
Contributors:
-------------
-
-
-
-
-## 1.0.0
-
-Initial release
=====================================
MANIFEST.in
=====================================
@@ -1 +1,4 @@
global-exclude tests/*
+include AUTHORS.rst CHANGELOG.rst
+include versioneer.py
+include src/conda_package_handling/_version.py
=====================================
appveyor.yml deleted
=====================================
@@ -1,45 +0,0 @@
-environment:
- matrix:
- - PYTHON_VERSION: "3.9"
- - PYTHON_VERSION: "2.7"
-
-install:
- # If there is a newer build queued for the same PR, cancel this one.
- # The AppVeyor 'rollout builds' option is supposed to serve the same
- # purpose but it is problematic because it tends to cancel builds pushed
- # directly to master instead of just PR builds (or the converse).
- # credits: JuliaLang developers.
- - ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod `
- https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | `
- Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { `
- throw "There are newer queued builds for this pull request, failing early." }
-
- - appveyor DownloadFile https://repo.anaconda.com/pkgs/misc/conda-execs/conda-4.7.5-win-64.exe -FileName loner_conda.exe
- - set "CONDA_PKGS_DIRS=C:\condacache\pkgs"
- - set "CONDA_ALWAYS_YES=true"
- - set "CONDA_AUTO_UPDATE_CONDA=false"
- - loner_conda.exe create -p C:\test_conda python=%PYTHON_VERSION% conda conda-build pytest six pytest-cov pytest-mock "conda-package-handling!=1.5.0"
- # these correspond to folder naming of miniconda installs on appveyor. See
- # https://www.appveyor.com/docs/installed-software#python
- - call "C:\test_conda\Scripts\activate.bat"
-
- # Needed for building python2.7 x64 extensions with conda-build
- - conda install -c defaults -c conda-forge vs2008_express_vc_python_patch
- - call setup_x64
-
- - conda info
- # this is to ensure dependencies
- - conda build conda.recipe --no-test
- - conda install --use-local conda-package-handling
-
-# Not a .NET project, we build package in the install step instead
-build: false
-
-test_script:
- # rebuilding the recipe with our new CPH installed tests the unicode issue that occurred in 1.5.0
- - conda build conda.recipe
- - py.test --color=yes -v --cov conda_package_handling --cov-report xml tests
-
-on_success:
- - conda install codecov
- - codecov --env PYTHON_VERSION --file C:\projects\conda-package-handling\coverage.xml
=====================================
conda.recipe/conda_build_config.yaml deleted
=====================================
@@ -1,4 +0,0 @@
-# The default AppVeyor VM image includes Visual Studio 2008 and 2015 but not 2017
-# Rather than using a different image, use VS2015 for non-Python 2.7 builds
-c_compiler: # [win and not py27]
- - vs2015 # [win and not py27]
=====================================
conda.recipe/meta.yaml
=====================================
@@ -22,9 +22,7 @@ requirements:
- msinttypes # [win and vc<14]
- _libarchive_static_for_cph
run:
- - futures # [py27]
- python
- - six
- tqdm
test:
source_files:
=====================================
debian/changelog
=====================================
@@ -1,3 +1,9 @@
+conda-package-handling (1.8.0-1) unstable; urgency=medium
+
+ * New upstream version
+
+ -- Andreas Tille <tille at debian.org> Fri, 18 Mar 2022 12:54:38 +0100
+
conda-package-handling (1.7.3-2) unstable; urgency=medium
* Build-Depends: python3-six
=====================================
rever.xsh
=====================================
@@ -1,27 +1,28 @@
$ACTIVITIES = [
"authors",
"changelog",
- "tag",
- "push_tag",
- "ghrelease",
- "conda_forge"
- ]
+ # "tag",
+ # "push_tag",
+ # "ghrelease",
+ # "conda_forge"
+]
#
# Basic settings
#
$PROJECT = $GITHUB_REPO = "conda-package-handling"
$GITHUB_ORG = "conda"
-$AUTHORS_FILENAME = "AUTHORS.txt"
+$AUTHORS_FILENAME = "AUTHORS.rst"
#
# Changelog settings
#
-$CHANGELOG_FILENAME = "CHANGELOG.md"
-$CHANGELOG_PATTERN = "# current developments"
-$CHANGELOG_HEADER = """# current developments
+$CHANGELOG_FILENAME = "CHANGELOG.rst"
+$CHANGELOG_PATTERN = ".. current developments"
+$CHANGELOG_HEADER = """.. current developments
+
$RELEASE_DATE $VERSION:
-------------------
+==================
"""
$CHANGELOG_CATEGORIES = (
=====================================
setup.py
=====================================
@@ -15,12 +15,6 @@ archive_utils_cy_extension = Extension(
libraries=_libraries,
)
-requirements = [
- # package requirements go here
- "six",
-]
-if sys.version_info.major == 2:
- requirements.append('futures')
setup(
name='conda-package-handling',
@@ -38,11 +32,10 @@ setup(
'cph=conda_package_handling.cli:main'
]
},
- install_requires=requirements,
keywords='conda-package-handling',
classifiers=[
- 'Programming Language :: Python :: 2.7',
- 'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
]
)
=====================================
src/conda_package_handling/__init__.py
=====================================
@@ -1,3 +1,2 @@
-from ._version import get_versions
-__version__ = get_versions()['version']
-del get_versions
+from . import _version
+__version__ = _version.get_versions()['version']
=====================================
src/conda_package_handling/_version.py
=====================================
@@ -6,7 +6,7 @@
# that just contains the computed version number.
# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
+# versioneer-0.22 (https://github.com/python-versioneer/python-versioneer)
"""Git implementation of _version.py."""
@@ -15,6 +15,8 @@ import os
import re
import subprocess
import sys
+from typing import Callable, Dict
+import functools
def get_keywords():
@@ -23,9 +25,9 @@ def get_keywords():
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
- git_refnames = " (HEAD -> master, tag: 1.7.3)"
- git_full = "3a8c263f86b2d8b2095684e5b9b787609b0e1b34"
- git_date = "2021-04-12 11:49:45 +0200"
+ git_refnames = " (HEAD -> master, tag: 1.8.0)"
+ git_full = "7e3d7494f96adee79ca121eeeeca139381f2a74c"
+ git_date = "2022-03-15 11:11:44 +0100"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
@@ -43,7 +45,7 @@ def get_config():
cfg.style = ""
cfg.tag_prefix = ""
cfg.parentdir_prefix = "conda-package-handling-"
- cfg.versionfile_source = "conda_package_handling/_version.py"
+ cfg.versionfile_source = "src/conda_package_handling/_version.py"
cfg.verbose = False
return cfg
@@ -52,12 +54,12 @@ class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
-LONG_VERSION_PY = {}
-HANDLERS = {}
+LONG_VERSION_PY: Dict[str, str] = {}
+HANDLERS: Dict[str, Dict[str, Callable]] = {}
def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
+ """Create decorator to mark a method as the handler of a VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
@@ -71,17 +73,25 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
- p = None
- for c in commands:
+ process = None
+
+ popen_kwargs = {}
+ if sys.platform == "win32":
+ # This hides the console window if pythonw.exe is used
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ popen_kwargs["startupinfo"] = startupinfo
+
+ for command in commands:
try:
- dispcmd = str([c] + args)
+ dispcmd = str([command] + args)
# remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
+ process = subprocess.Popen([command] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None), **popen_kwargs)
break
- except EnvironmentError:
+ except OSError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
@@ -93,15 +103,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
+ stdout = process.communicate()[0].strip().decode()
+ if process.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
- return None, p.returncode
- return stdout, p.returncode
+ return None, process.returncode
+ return stdout, process.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
@@ -113,15 +121,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
"""
rootdirs = []
- for i in range(3):
+ for _ in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
@@ -138,22 +145,21 @@ def git_get_keywords(versionfile_abs):
# _version.py.
keywords = {}
try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
+ with open(versionfile_abs, "r") as fobj:
+ for line in fobj:
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ except OSError:
pass
return keywords
@@ -161,10 +167,14 @@ def git_get_keywords(versionfile_abs):
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
+ if "refnames" not in keywords:
+ raise NotThisMethod("Short version file found")
date = keywords.get("date")
if date is not None:
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
+
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
@@ -177,11 +187,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ refs = {r.strip() for r in refnames.strip("()").split(",")}
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
@@ -190,7 +200,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
+ tags = {r for r in refs if re.search(r'\d', r)}
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
@@ -199,6 +209,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
+ # Filter out refs that exactly match prefix or that don't start
+ # with a number once the prefix is stripped (mostly a concern
+ # when prefix is '')
+ if not re.match(r'\d', r):
+ continue
if verbose:
print("picking %s" % r)
return {"version": r,
@@ -214,7 +229,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
@@ -225,24 +240,32 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
+ # GIT_DIR can interfere with correct operation of Versioneer.
+ # It may be intended to be passed to the Versioneer-versioned project,
+ # but that should not change where we get our version from.
+ env = os.environ.copy()
+ env.pop("GIT_DIR", None)
+ runner = functools.partial(runner, env=env)
+
+ _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
+ MATCH_ARGS = ["--match", "%s*" % tag_prefix] if tag_prefix else []
+
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%s*" % tag_prefix],
- cwd=root)
+ describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty",
+ "--always", "--long", *MATCH_ARGS],
+ cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
@@ -252,6 +275,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
+ branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
+ cwd=root)
+ # --abbrev-ref was added in git-1.6.3
+ if rc != 0 or branch_name is None:
+ raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+ branch_name = branch_name.strip()
+
+ if branch_name == "HEAD":
+ # If we aren't exactly on a branch, pick a branch which represents
+ # the current commit. If all else fails, we are on a branchless
+ # commit.
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+ # --contains was added in git-1.5.4
+ if rc != 0 or branches is None:
+ raise NotThisMethod("'git branch --contains' returned error")
+ branches = branches.split("\n")
+
+ # Remove the first line if we're running detached
+ if "(" in branches[0]:
+ branches.pop(0)
+
+ # Strip off the leading "* " from the list of branches.
+ branches = [branch[2:] for branch in branches]
+ if "master" in branches:
+ branch_name = "master"
+ elif not branches:
+ branch_name = None
+ else:
+ # Pick the first branch that is returned. Good or bad.
+ branch_name = branches[0]
+
+ pieces["branch"] = branch_name
+
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
@@ -268,7 +324,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
- # unparseable. Maybe git-describe is misbehaving?
+ # unparsable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
@@ -293,13 +349,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
else:
# HEX: no tags
pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
+ count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
- cwd=root)[0].strip()
+ date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
@@ -337,19 +394,67 @@ def render_pep440(pieces):
return rendered
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
+def render_pep440_branch(pieces):
+ """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+ The ".dev0" means not master branch. Note that .dev0 sorts backwards
+ (a feature branch will appear "older" than the master branch).
Exceptions:
- 1: no tags. 0.post.devDISTANCE
+ 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0"
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+untagged.%d.g%s" % (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def pep440_split_post(ver):
+ """Split pep440 version string at the post-release segment.
+
+ Returns the release segments before the post-release and the
+ post-release version number (or -1 if no post-release segment is present).
+ """
+ vc = str.split(ver, ".post")
+ return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces):
+ """TAG[.postN.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post0.devDISTANCE
+ """
+ if pieces["closest-tag"]:
if pieces["distance"]:
- rendered += ".post.dev%d" % pieces["distance"]
+ # update the post release segment
+ tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+ rendered = tag_version
+ if post_version is not None:
+ rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"])
+ else:
+ rendered += ".post0.dev%d" % (pieces["distance"])
+ else:
+ # no commits, use the tag as the version
+ rendered = pieces["closest-tag"]
else:
# exception #1
- rendered = "0.post.dev%d" % pieces["distance"]
+ rendered = "0.post0.dev%d" % pieces["distance"]
return rendered
@@ -380,12 +485,41 @@ def render_pep440_post(pieces):
return rendered
+def render_pep440_post_branch(pieces):
+ """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+ The ".dev0" means not master branch.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%d" % pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%s" % pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0.post%d" % pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+g%s" % pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
- Eexceptions:
+ Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
@@ -456,10 +590,14 @@ def render(pieces, style):
if style == "pep440":
rendered = render_pep440(pieces)
+ elif style == "pep440-branch":
+ rendered = render_pep440_branch(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
+ elif style == "pep440-post-branch":
+ rendered = render_pep440_post_branch(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
@@ -495,7 +633,7 @@ def get_versions():
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
- for i in cfg.versionfile_source.split('/'):
+ for _ in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
=====================================
src/conda_package_handling/api.py
=====================================
@@ -3,7 +3,6 @@ from glob import glob as _glob
import functools as _functools
import tempfile as _tempfile
-from six import string_types as _string_types
import tqdm as _tqdm
# expose these two exceptions as part of the API. Everything else should feed into these.
@@ -68,7 +67,7 @@ def create(prefix, file_list, out_fn, out_folder=None, **kw):
if file_list is None:
file_list = _collect_paths(prefix)
- elif isinstance(file_list, _string_types):
+ elif isinstance(file_list, str):
try:
with open(file_list) as f:
data = f.readlines()
@@ -104,7 +103,7 @@ def _convert(fn, out_ext, out_folder, **kw):
out_fn = _os.path.join(out_folder, basename + out_ext)
errors = ""
if not _os.path.lexists(out_fn) or ('force' in kw and kw['force']):
- with _TemporaryDirectory(prefix=out_folder) as tmp:
+ with _TemporaryDirectory(dir=out_folder) as tmp:
try:
extract(fn, dest_dir=tmp)
file_list = _collect_paths(tmp)
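
The prefix= -> dir= switch in _convert above matters because tempfile treats the two keywords very differently: dir= places the scratch directory inside the given folder, while prefix= is only meant to seed the directory's name. A minimal sketch, with a hypothetical out_folder path:

    import os
    import tempfile

    out_folder = "/tmp/cph-demo"              # hypothetical output folder
    os.makedirs(out_folder, exist_ok=True)

    # new behaviour: the scratch directory is created inside out_folder
    with tempfile.TemporaryDirectory(dir=out_folder) as tmp:
        print(tmp)                            # e.g. /tmp/cph-demo/tmpa1b2c3d4

    # prefix= only seeds the name; the directory goes to tempfile.gettempdir()
    with tempfile.TemporaryDirectory(prefix="cph-") as tmp:
        print(tmp)                            # e.g. /tmp/cph-xxxxxxxx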
=====================================
src/conda_package_handling/cli.py
=====================================
@@ -65,7 +65,7 @@ def parse_args(parse_this=None):
convert_parser.add_argument('out_ext', help="extension of file to convert to. "
"Examples: .tar.bz2, .conda")
convert_parser.add_argument("--out-folder", help="Folder to dump final archive to")
- convert_parser.add_argument("--force", action='store_true', help="Force overwrite existing package")
+ convert_parser.add_argument("--force", action='store_true', help="Force overwrite existing package")
convert_parser.add_argument("--processes", type=int, help="Max number of processes to use. If "
"not set, defaults to your CPU count.")
return parser.parse_args(parse_this)
=====================================
src/conda_package_handling/conda_fmt.py
=====================================
@@ -4,11 +4,7 @@ https://anaconda.atlassian.net/wiki/spaces/AD/pages/90210540/Conda+package+forma
import json
import os
from tempfile import NamedTemporaryFile
-try:
- from zipfile import ZipFile, BadZipFile, ZIP_STORED
-except ImportError:
- # py27 compat
- from zipfile import ZipFile, ZIP_STORED, BadZipfile as BadZipFile
+from zipfile import ZipFile, BadZipFile, ZIP_STORED
from . import utils
from .exceptions import InvalidArchiveError
@@ -30,7 +26,7 @@ def _lookup_component_filename(zf, file_id, component_name):
def _extract_component(fn, file_id, component_name, dest_dir=os.getcwd()):
try:
with ZipFile(fn, compression=ZIP_STORED) as zf:
- with utils.TemporaryDirectory(prefix=dest_dir) as tmpdir:
+ with utils.TemporaryDirectory(dir=dest_dir) as tmpdir:
with utils.tmp_chdir(tmpdir):
component_filename = _lookup_component_filename(zf, file_id, component_name)
if not component_filename:
@@ -73,7 +69,7 @@ class CondaFormat_v2(AbstractBaseFormat):
info_files = set(file_list) - set(pkg_files)
ext, comp_filter, filter_opts = kw.get('compression_tuple') or DEFAULT_COMPRESSION_TUPLE
- with utils.TemporaryDirectory(prefix=out_folder) as tmpdir:
+ with utils.TemporaryDirectory(dir=out_folder) as tmpdir:
info_tarball = create_compressed_tarball(prefix, info_files, tmpdir, 'info-' + out_fn,
ext, comp_filter, filter_opts)
pkg_tarball = create_compressed_tarball(prefix, pkg_files, tmpdir, 'pkg-' + out_fn,
@@ -84,6 +80,7 @@ class CondaFormat_v2(AbstractBaseFormat):
with ZipFile(conda_pkg_fn, 'w', compression=ZIP_STORED) as zf:
with NamedTemporaryFile(mode='w', delete=False) as tf:
json.dump(pkg_metadata, tf)
+ tf.flush()
zf.write(tf.name, 'metadata.json')
for pkg in (info_tarball, pkg_tarball):
zf.write(pkg, os.path.basename(pkg))
@@ -94,9 +91,5 @@ class CondaFormat_v2(AbstractBaseFormat):
def get_pkg_details(in_file):
stat_result = os.stat(in_file)
size = stat_result.st_size
- # open the file twice because we need to start from the beginning each time
- with open(in_file, 'rb') as f:
- md5 = utils.md5_checksum(f)
- with open(in_file, 'rb') as f:
- sha256 = utils.sha256_checksum(f)
+ md5, sha256 = utils.checksums(in_file, ("md5", "sha256"))
return {"size": size, "md5": md5, "sha256": sha256}
=====================================
src/conda_package_handling/interface.py
=====================================
@@ -1,11 +1,8 @@
import abc
import os
-import six
-
-@six.add_metaclass(abc.ABCMeta)
-class AbstractBaseFormat():
+class AbstractBaseFormat(metaclass=abc.ABCMeta):
@staticmethod
@abc.abstractmethod
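
The two spellings are equivalent; the metaclass keyword is simply the Python-3-only form of what six.add_metaclass emulated. A minimal sketch (the method name and signature are illustrative, not taken from the file):

    import abc

    class AbstractBaseFormat(metaclass=abc.ABCMeta):    # spelling kept by the diff
        @staticmethod
        @abc.abstractmethod
        def extract(fn, dest_dir, **kw):
            ...

    # the removed, Python-2-compatible spelling:
    #   @six.add_metaclass(abc.ABCMeta)
    #   class AbstractBaseFormat(object): ...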
=====================================
src/conda_package_handling/tarball.py
=====================================
@@ -120,7 +120,7 @@ def _tar_xf_no_libarchive(tarball_full_path, destination_directory=None):
)
else:
raise InvalidArchiveError(tarball_full_path,
- "failed with error: {}" % str(e))
+ "failed with error: {}".format(str(e)))
if sys.platform.startswith('linux') and os.getuid() == 0:
# When extracting as root, tarfile will by restore ownership
@@ -166,9 +166,5 @@ class CondaTarBZ2(AbstractBaseFormat):
def get_pkg_details(in_file):
stat_result = os.stat(in_file)
size = stat_result.st_size
- # open the file twice because we need to start from the beginning each time
- with open(in_file, 'rb') as f:
- md5 = utils.md5_checksum(f)
- with open(in_file, 'rb') as f:
- sha256 = utils.sha256_checksum(f)
+ md5, sha256 = utils.checksums(in_file, ('md5', 'sha256'))
return {"size": size, "md5": md5, "sha256": sha256}
=====================================
src/conda_package_handling/utils.py
=====================================
@@ -1,5 +1,5 @@
import contextlib
-from concurrent.futures import ProcessPoolExecutor, Executor
+from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor, Executor
from errno import ENOENT, EACCES, EPERM, EROFS
import fnmatch
import hashlib
@@ -16,8 +16,6 @@ import sys
from tempfile import mkdtemp, NamedTemporaryFile
import warnings as _warnings
-from six import string_types
-
on_win = sys.platform == 'win32'
log = logging.getLogger(__file__)
CONDA_TEMP_EXTENSION = '.c~'
@@ -346,7 +344,7 @@ def tmp_chdir(dest):
def ensure_list(arg):
- if (isinstance(arg, string_types) or not hasattr(arg, '__iter__')):
+ if (isinstance(arg, str) or not hasattr(arg, '__iter__')):
if arg is not None:
arg = [arg]
else:
@@ -363,7 +361,11 @@ def filter_files(files_list, prefix, filter_patterns=(r'(.*[\\\\/])?\.git[\\\\/]
for pattern in filter_patterns:
r = re.compile(pattern)
files_list = set(files_list) - set(filter(r.match, files_list))
- return [f for f in files_list if not os.path.isdir(os.path.join(prefix, f))]
+ return [f for f in files_list if
+ # `islink` prevents symlinks to directories from being removed
+ os.path.islink(os.path.join(prefix, f)) or
+ not os.path.isdir(os.path.join(prefix, f))
+ ]
def filter_info_files(files_list, prefix):
@@ -409,3 +411,21 @@ def sha256_checksum(fd):
def md5_checksum(fd):
return _checksum(fd, 'md5')
+
+
+def checksum(fn, algorithm, buffersize=1<<20):
+ """
+ Calculate a checksum for a filename (not an open file).
+ """
+ with open(fn, 'rb') as fd:
+ return _checksum(fd, algorithm, buffersize)
+
+
+def checksums(fn, algorithms, buffersize=1<<20):
+ """
+ Calculate multiple checksums for a filename in parallel.
+ """
+ with ThreadPoolExecutor(max_workers=len(algorithms)) as e:
+ # take care not to share hash_impl between threads
+ results = [e.submit(checksum, fn, algorithm, buffersize) for algorithm in algorithms]
+ return [result.result() for result in results]
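
The checksums() helper added above is what the changelog's "Compute package hashes in threads" entry refers to; get_pkg_details in both conda_fmt.py and tarball.py now calls it instead of re-opening the file twice in sequence. A usage sketch (the package path is hypothetical):

    from conda_package_handling import utils

    fn = "mock-2.0.0-py37_1000.tar.bz2"                    # hypothetical package on disk
    md5, sha256 = utils.checksums(fn, ("md5", "sha256"))   # one pass per algorithm, run concurrently
    print({"md5": md5, "sha256": sha256})

    # single-algorithm convenience wrapper added alongside it
    print(utils.checksum(fn, "sha256"))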
=====================================
tests/data/cph_test_data-0.0.1-0.tar.bz2
=====================================
Binary files /dev/null and b/tests/data/cph_test_data-0.0.1-0.tar.bz2 differ
=====================================
tests/recipes/cph_test_data/build.sh
=====================================
@@ -0,0 +1,29 @@
+cd "${PREFIX}"
+umask 022
+
+mkdir -p bin lib libexec share/terminfo
+
+echo -e '#/bin/sh\necho hello world' > bin/hello-1.0
+chmod 755 bin/hello-1.0
+
+echo -n "" >share/terminfo/xterm.dat
+chmod 644 share/terminfo/xterm.dat
+
+
+# Tests for symlink...
+pushd bin >/dev/null
+ln -sn hello-1.0 hello # ...to file in same dir
+popd >/dev/null
+
+pushd libexec >/dev/null
+ln -sn ../bin/hello greetings # ...to file in another dir
+popd >/dev/null
+
+pushd share >/dev/null
+ln -sn terminfo termcap # ...to subdir in same dir
+popd >/dev/null
+
+pushd lib >/dev/null
+ln -sn ../share/terminfo terminfo # ...to subdir of another dir
+ln -sn libdangle.lib.1 libdangle.lib # ...dangling link
+popd >/dev/null
=====================================
tests/recipes/cph_test_data/meta.yaml
=====================================
@@ -0,0 +1,6 @@
+package:
+ name: cph_test_data
+ version: 0.0.1
+
+build:
+ noarch: generic
=====================================
tests/test_api.py
=====================================
@@ -1,8 +1,12 @@
from datetime import datetime
+import json
import os
import shutil
import sys
import tarfile
+import zipfile
+
+from tempfile import TemporaryDirectory
import pytest
@@ -12,6 +16,7 @@ import conda_package_handling.tarball
this_dir = os.path.dirname(__file__)
data_dir = os.path.join(this_dir, "data")
test_package_name = "mock-2.0.0-py37_1000"
+test_package_name_2 = "cph_test_data-0.0.1-0"
def test_api_extract_tarball_implicit_path(testing_workdir):
@@ -100,11 +105,72 @@ def test_api_extract_info_conda_v2(testing_workdir):
assert not os.path.isdir(os.path.join(testing_workdir, 'manual_path', 'lib'))
+def check_conda_v2_metadata(condafile):
+ with zipfile.ZipFile(condafile) as zf:
+ d = json.loads(zf.read('metadata.json'))
+ assert d['conda_pkg_format_version'] == 2
+
def test_api_transmute_tarball_to_conda_v2(testing_workdir):
tarfile = os.path.join(data_dir, test_package_name + '.tar.bz2')
errors = api.transmute(tarfile, '.conda', testing_workdir)
assert not errors
- assert os.path.isfile(os.path.join(testing_workdir, test_package_name + '.conda'))
+ condafile = os.path.join(testing_workdir, test_package_name + '.conda')
+ assert os.path.isfile(condafile)
+ check_conda_v2_metadata(condafile)
+
+@pytest.mark.skipif(sys.platform=="win32", reason="windows and symlinks are not great")
+def test_api_transmute_to_conda_v2_contents(testing_workdir):
+ def _walk(path):
+ for entry in os.scandir(path):
+ if entry.is_dir(follow_symlinks=False):
+ yield from _walk(entry.path)
+ continue
+ yield entry
+
+ tar_path = os.path.join(data_dir, test_package_name_2 + '.tar.bz2')
+ conda_path = os.path.join(testing_workdir, test_package_name_2 + '.conda')
+ api.transmute(tar_path, '.conda', testing_workdir)
+
+ # Verify original contents were all put in the right place
+ pkg_tarbz2 = tarfile.open(tar_path, mode="r:bz2")
+ info_items = [item for item in pkg_tarbz2.getmembers()
+ if item.path.startswith("info/")]
+ pkg_items = [item for item in pkg_tarbz2.getmembers()
+ if not item.path.startswith("info/")]
+
+ errors = []
+ for component, expected in (("info", info_items), ("pkg", pkg_items)):
+ with TemporaryDirectory() as root:
+ api.extract(conda_path, root, components=component)
+
+ contents = {
+ os.path.relpath(entry.path, root): {
+ "is_symlink": entry.is_symlink(),
+ "target": os.readlink(entry.path) if entry.is_symlink() else None
+ }
+ for entry in _walk(root)
+ }
+
+ for item in expected:
+ if item.path not in contents:
+ errors.append(f"'{item.path}' not found in {component} contents")
+ continue
+
+ ct = contents.pop(item.path)
+ if item.issym():
+ if not ct["is_symlink"] or ct["target"] != item.linkname:
+ errors.append(f"{item.name} -> {item.linkname} incorrect in {component} contents")
+ elif not item.isfile():
+ # Raise an exception rather than appending to `errors`
+ # because getting to this point is an indication that our
+ # test data (i.e., .tar.bz2 package) is corrupt, rather
+ # than the `.transmute` function having problems (which is
+ # what `errors` is meant to track). For context, conda
+ # packages should only contain regular files and symlinks.
+ raise ValueError(f"unexpected item '{item.path}' in test .tar.bz2")
+ if contents:
+ errors.append(f"extra files [{', '.join(contents)}] in {component} contents")
+ assert not errors
def test_api_transmute_conda_v2_to_tarball(testing_workdir):
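
The new contents test above exercises the same public API that downstream tools call. A condensed sketch of that flow, with hypothetical paths:

    import os
    from conda_package_handling import api

    src = "tests/data/cph_test_data-0.0.1-0.tar.bz2"   # hypothetical location of the new test package
    out_dir = "/tmp/cph-out"
    os.makedirs(out_dir, exist_ok=True)

    errors = api.transmute(src, ".conda", out_dir)     # .tar.bz2 -> .conda
    assert not errors

    # unpack only the info/ component of the resulting .conda
    api.extract(os.path.join(out_dir, "cph_test_data-0.0.1-0.conda"),
                out_dir, components="info")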
=====================================
versioneer.py
=====================================
@@ -1,5 +1,5 @@
-# Version: 0.18
+# Version: 0.22
"""The Versioneer - like a rocketeer, but for versions.
@@ -7,18 +7,14 @@ The Versioneer
==============
* like a rocketeer, but for versions!
-* https://github.com/warner/python-versioneer
+* https://github.com/python-versioneer/python-versioneer
* Brian Warner
* License: Public Domain
-* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
-* [![Latest Version]
-(https://pypip.in/version/versioneer/badge.svg?style=flat)
-](https://pypi.python.org/pypi/versioneer/)
-* [![Build Status]
-(https://travis-ci.org/warner/python-versioneer.png?branch=master)
-](https://travis-ci.org/warner/python-versioneer)
-
-This is a tool for managing a recorded version number in distutils-based
+* Compatible with: Python 3.6, 3.7, 3.8, 3.9, 3.10 and pypy3
+* [![Latest Version][pypi-image]][pypi-url]
+* [![Build Status][travis-image]][travis-url]
+
+This is a tool for managing a recorded version number in distutils/setuptools-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
@@ -27,9 +23,10 @@ system, and maybe making new tarballs.
## Quick Install
-* `pip install versioneer` to somewhere to your $PATH
-* add a `[versioneer]` section to your setup.cfg (see below)
+* `pip install versioneer` to somewhere in your $PATH
+* add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md))
* run `versioneer install` in your source tree, commit the results
+* Verify version information with `python setup.py version`
## Version Identifiers
@@ -61,7 +58,7 @@ version 1.3). Many VCS systems can report a description that captures this,
for example `git describe --tags --dirty --always` reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
-uncommitted changes.
+uncommitted changes).
The version identifier is used for multiple purposes:
@@ -166,7 +163,7 @@ which may help identify what went wrong).
Some situations are known to cause problems for Versioneer. This details the
most significant ones. More can be found on Github
-[issues page](https://github.com/warner/python-versioneer/issues).
+[issues page](https://github.com/python-versioneer/python-versioneer/issues).
### Subprojects
@@ -180,7 +177,7 @@ two common reasons why `setup.py` might not be in the root:
`setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
distributions (and upload multiple independently-installable tarballs).
* Source trees whose main purpose is to contain a C library, but which also
- provide bindings to Python (and perhaps other langauges) in subdirectories.
+ provide bindings to Python (and perhaps other languages) in subdirectories.
Versioneer will look for `.git` in parent directories, and most operations
should get the right version string. However `pip` and `setuptools` have bugs
@@ -194,9 +191,9 @@ work too.
Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
some later version.
-[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
+[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking
this issue. The discussion in
-[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
+[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the
issue from the Versioneer side in more detail.
[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
@@ -224,22 +221,10 @@ regenerated while a different version is checked out. Many setup.py commands
cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
a different virtualenv), so this can be surprising.
-[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
+[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes
this one, but upgrading to a newer version of setuptools should probably
resolve it.
-### Unicode version strings
-
-While Versioneer works (and is continually tested) with both Python 2 and
-Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
-Newer releases probably generate unicode version strings on py2. It's not
-clear that this is wrong, but it may be surprising for applications when then
-write these strings to a network connection or include them in bytes-oriented
-APIs like cryptographic checksums.
-
-[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
-this question.
-
## Updating Versioneer
@@ -265,6 +250,14 @@ installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
+## Similar projects
+
+* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time
+ dependency
+* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of
+ versioneer
+* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools
+ plugin
## License
@@ -274,19 +267,28 @@ Specifically, both are released under the Creative Commons "Public Domain
Dedication" license (CC0-1.0), as described in
https://creativecommons.org/publicdomain/zero/1.0/ .
+[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg
+[pypi-url]: https://pypi.python.org/pypi/versioneer/
+[travis-image]:
+https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg
+[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer
+
"""
+# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring
+# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements
+# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error
+# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with
+# pylint:disable=attribute-defined-outside-init,too-many-arguments
-from __future__ import print_function
-try:
- import configparser
-except ImportError:
- import ConfigParser as configparser
+import configparser
import errno
import json
import os
import re
import subprocess
import sys
+from typing import Callable, Dict
+import functools
class VersioneerConfig:
@@ -321,12 +323,12 @@ def get_root():
# module-import table will cache the first one. So we can't use
# os.path.dirname(__file__), as that will find whichever
# versioneer.py was first imported, even in later projects.
- me = os.path.realpath(os.path.abspath(__file__))
- me_dir = os.path.normcase(os.path.splitext(me)[0])
+ my_path = os.path.realpath(os.path.abspath(__file__))
+ me_dir = os.path.normcase(os.path.splitext(my_path)[0])
vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
if me_dir != vsr_dir:
print("Warning: build in %s is using versioneer.py from %s"
- % (os.path.dirname(me), versioneer_py))
+ % (os.path.dirname(my_path), versioneer_py))
except NameError:
pass
return root
@@ -334,30 +336,29 @@ def get_root():
def get_config_from_root(root):
"""Read the project setup.cfg file to determine Versioneer config."""
- # This might raise EnvironmentError (if setup.cfg is missing), or
+ # This might raise OSError (if setup.cfg is missing), or
# configparser.NoSectionError (if it lacks a [versioneer] section), or
# configparser.NoOptionError (if it lacks "VCS="). See the docstring at
# the top of versioneer.py for instructions on writing your setup.cfg .
setup_cfg = os.path.join(root, "setup.cfg")
- parser = configparser.SafeConfigParser()
- with open(setup_cfg, "r") as f:
- parser.readfp(f)
+ parser = configparser.ConfigParser()
+ with open(setup_cfg, "r") as cfg_file:
+ parser.read_file(cfg_file)
VCS = parser.get("versioneer", "VCS") # mandatory
- def get(parser, name):
- if parser.has_option("versioneer", name):
- return parser.get("versioneer", name)
- return None
+ # Dict-like interface for non-mandatory entries
+ section = parser["versioneer"]
+
cfg = VersioneerConfig()
cfg.VCS = VCS
- cfg.style = get(parser, "style") or ""
- cfg.versionfile_source = get(parser, "versionfile_source")
- cfg.versionfile_build = get(parser, "versionfile_build")
- cfg.tag_prefix = get(parser, "tag_prefix")
+ cfg.style = section.get("style", "")
+ cfg.versionfile_source = section.get("versionfile_source")
+ cfg.versionfile_build = section.get("versionfile_build")
+ cfg.tag_prefix = section.get("tag_prefix")
if cfg.tag_prefix in ("''", '""'):
cfg.tag_prefix = ""
- cfg.parentdir_prefix = get(parser, "parentdir_prefix")
- cfg.verbose = get(parser, "verbose")
+ cfg.parentdir_prefix = section.get("parentdir_prefix")
+ cfg.verbose = section.get("verbose")
return cfg
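The rewritten reader relies on ConfigParser's dict-like section access instead of the removed get() helper. A self-contained sketch with a hypothetical [versioneer] section (the project's real setup.cfg values may differ):

import configparser

sample = (
    "[versioneer]\n"
    "VCS = git\n"
    "style = pep440\n"
    "versionfile_source = src/conda_package_handling/_version.py\n"
    "versionfile_build = conda_package_handling/_version.py\n"
    "tag_prefix =\n"
    "parentdir_prefix = conda-package-handling-\n"
)

parser = configparser.ConfigParser()
parser.read_string(sample)
section = parser["versioneer"]
print(section.get("style", ""))   # -> pep440
print(section.get("tag_prefix"))  # -> "" (empty string, not None)
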
@@ -366,17 +367,15 @@ class NotThisMethod(Exception):
# these dictionaries contain VCS-specific tools
-LONG_VERSION_PY = {}
-HANDLERS = {}
+LONG_VERSION_PY: Dict[str, str] = {}
+HANDLERS: Dict[str, Dict[str, Callable]] = {}
def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
+ """Create decorator to mark a method as the handler of a VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
- if vcs not in HANDLERS:
- HANDLERS[vcs] = {}
- HANDLERS[vcs][method] = f
+ HANDLERS.setdefault(vcs, {})[method] = f
return f
return decorate
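The rewritten decorator body collapses the old two-step registration into a single setdefault() call; the observable behaviour is unchanged. A small self-contained sketch of what the registration produces:

HANDLERS = {}

def register_vcs_handler(vcs, method):
    def decorate(f):
        # equivalent to the one-line setdefault() version above
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate

@register_vcs_handler("git", "keywords")
def from_keywords(keywords, tag_prefix, verbose):
    ...

# HANDLERS == {"git": {"keywords": from_keywords}}
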
@@ -385,17 +384,25 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
- p = None
- for c in commands:
+ process = None
+
+ popen_kwargs = {}
+ if sys.platform == "win32":
+ # This hides the console window if pythonw.exe is used
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ popen_kwargs["startupinfo"] = startupinfo
+
+ for command in commands:
try:
- dispcmd = str([c] + args)
+ dispcmd = str([command] + args)
# remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
+ process = subprocess.Popen([command] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None), **popen_kwargs)
break
- except EnvironmentError:
+ except OSError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
@@ -407,18 +414,16 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
+ stdout = process.communicate()[0].strip().decode()
+ if process.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
- return None, p.returncode
- return stdout, p.returncode
+ return None, process.returncode
+ return stdout, process.returncode
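The calling convention is unchanged by the rename: the first executable in `commands` that can be spawned wins, and the caller gets back decoded stdout plus the return code, or (None, None) if no executable was found. A hypothetical call:

out, rc = run_command(["git"], ["rev-parse", "--short", "HEAD"],
                      cwd=".", verbose=True, hide_stderr=True)
if rc == 0:
    print("current commit:", out)
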
-LONG_VERSION_PY['git'] = '''
+LONG_VERSION_PY['git'] = r'''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
@@ -426,7 +431,7 @@ LONG_VERSION_PY['git'] = '''
# that just contains the computed version number.
# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
+# versioneer-0.22 (https://github.com/python-versioneer/python-versioneer)
"""Git implementation of _version.py."""
@@ -435,6 +440,8 @@ import os
import re
import subprocess
import sys
+from typing import Callable, Dict
+import functools
def get_keywords():
@@ -472,12 +479,12 @@ class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
-LONG_VERSION_PY = {}
-HANDLERS = {}
+LONG_VERSION_PY: Dict[str, str] = {}
+HANDLERS: Dict[str, Dict[str, Callable]] = {}
def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
+ """Create decorator to mark a method as the handler of a VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
@@ -491,17 +498,25 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
- p = None
- for c in commands:
+ process = None
+
+ popen_kwargs = {}
+ if sys.platform == "win32":
+ # This hides the console window if pythonw.exe is used
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ popen_kwargs["startupinfo"] = startupinfo
+
+ for command in commands:
try:
- dispcmd = str([c] + args)
+ dispcmd = str([command] + args)
# remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
+ process = subprocess.Popen([command] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None), **popen_kwargs)
break
- except EnvironmentError:
+ except OSError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
@@ -513,15 +528,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
+ stdout = process.communicate()[0].strip().decode()
+ if process.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% dispcmd)
print("stdout was %%s" %% stdout)
- return None, p.returncode
- return stdout, p.returncode
+ return None, process.returncode
+ return stdout, process.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
@@ -533,15 +546,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
"""
rootdirs = []
- for i in range(3):
+ for _ in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %%s but none started with prefix %%s" %%
@@ -558,22 +570,21 @@ def git_get_keywords(versionfile_abs):
# _version.py.
keywords = {}
try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
+ with open(versionfile_abs, "r") as fobj:
+ for line in fobj:
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ except OSError:
pass
return keywords
@@ -581,10 +592,14 @@ def git_get_keywords(versionfile_abs):
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
+ if "refnames" not in keywords:
+ raise NotThisMethod("Short version file found")
date = keywords.get("date")
if date is not None:
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
+
# git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
@@ -597,11 +612,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ refs = {r.strip() for r in refnames.strip("()").split(",")}
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
@@ -610,7 +625,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
+ tags = {r for r in refs if re.search(r'\d', r)}
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs - tags))
if verbose:
@@ -619,6 +634,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
+ # Filter out refs that exactly match prefix or that don't start
+ # with a number once the prefix is stripped (mostly a concern
+ # when prefix is '')
+ if not re.match(r'\d', r):
+ continue
if verbose:
print("picking %%s" %% r)
return {"version": r,
@@ -634,7 +654,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
@@ -645,24 +665,32 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
+ # GIT_DIR can interfere with correct operation of Versioneer.
+ # It may be intended to be passed to the Versioneer-versioned project,
+ # but that should not change where we get our version from.
+ env = os.environ.copy()
+ env.pop("GIT_DIR", None)
+ runner = functools.partial(runner, env=env)
+
+ _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %%s not under git control" %% root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
+ MATCH_ARGS = ["--match", "%%s*" %% tag_prefix] if tag_prefix else []
+
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%%s*" %% tag_prefix],
- cwd=root)
+ describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty",
+ "--always", "--long", *MATCH_ARGS],
+ cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
@@ -672,6 +700,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
+ branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
+ cwd=root)
+ # --abbrev-ref was added in git-1.6.3
+ if rc != 0 or branch_name is None:
+ raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+ branch_name = branch_name.strip()
+
+ if branch_name == "HEAD":
+ # If we aren't exactly on a branch, pick a branch which represents
+ # the current commit. If all else fails, we are on a branchless
+ # commit.
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+ # --contains was added in git-1.5.4
+ if rc != 0 or branches is None:
+ raise NotThisMethod("'git branch --contains' returned error")
+ branches = branches.split("\n")
+
+ # Remove the first line if we're running detached
+ if "(" in branches[0]:
+ branches.pop(0)
+
+ # Strip off the leading "* " from the list of branches.
+ branches = [branch[2:] for branch in branches]
+ if "master" in branches:
+ branch_name = "master"
+ elif not branches:
+ branch_name = None
+ else:
+ # Pick the first branch that is returned. Good or bad.
+ branch_name = branches[0]
+
+ pieces["branch"] = branch_name
+
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
@@ -688,7 +749,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
- # unparseable. Maybe git-describe is misbehaving?
+ # unparsable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
@@ -713,13 +774,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
else:
# HEX: no tags
pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
+ count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
- cwd=root)[0].strip()
+ date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip()
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
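A worked example of the date normalisation above: "%ci" yields an "ISO-8601-like" timestamp which is edited into compliant form, after first dropping any GPG-signature lines (sample value only):

date = "2022-03-18 12:54:19 +0100"   # sample `git show -s --format=%ci HEAD` output
date = date.splitlines()[-1]         # keep only the last line
iso = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
print(iso)                           # -> 2022-03-18T12:54:19+0100
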
@@ -757,19 +819,67 @@ def render_pep440(pieces):
return rendered
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
+def render_pep440_branch(pieces):
+ """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+ The ".dev0" means not master branch. Note that .dev0 sorts backwards
+ (a feature branch will appear "older" than the master branch).
Exceptions:
- 1: no tags. 0.post.devDISTANCE
+ 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0"
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+untagged.%%d.g%%s" %% (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def pep440_split_post(ver):
+ """Split pep440 version string at the post-release segment.
+
+ Returns the release segments before the post-release and the
+ post-release version number (or None if no post-release segment is present).
+ """
+ vc = str.split(ver, ".post")
+ return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces):
+ """TAG[.postN.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post0.devDISTANCE
+ """
+ if pieces["closest-tag"]:
if pieces["distance"]:
- rendered += ".post.dev%%d" %% pieces["distance"]
+ # update the post release segment
+ tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+ rendered = tag_version
+ if post_version is not None:
+ rendered += ".post%%d.dev%%d" %% (post_version+1, pieces["distance"])
+ else:
+ rendered += ".post0.dev%%d" %% (pieces["distance"])
+ else:
+ # no commits, use the tag as the version
+ rendered = pieces["closest-tag"]
else:
# exception #1
- rendered = "0.post.dev%%d" %% pieces["distance"]
+ rendered = "0.post0.dev%%d" %% pieces["distance"]
return rendered
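How the reworked "pep440-pre" style behaves, using the pep440_split_post() helper introduced above (version values are illustrative):

def pep440_split_post(ver):
    # same helper as in the hunk above
    vc = str.split(ver, ".post")
    return vc[0], int(vc[1] or 0) if len(vc) == 2 else None

print(pep440_split_post("1.8.0"))        # -> ('1.8.0', None)
print(pep440_split_post("1.8.0.post2"))  # -> ('1.8.0', 2)

# render_pep440_pre() then yields, for a checkout 3 commits past the tag:
#   closest-tag "1.8.0"        -> "1.8.0.post0.dev3"
#   closest-tag "1.8.0.post2"  -> "1.8.0.post3.dev3"
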
@@ -800,12 +910,41 @@ def render_pep440_post(pieces):
return rendered
+def render_pep440_post_branch(pieces):
+ """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+ The ".dev0" means not master branch.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%%d" %% pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%%s" %% pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0.post%%d" %% pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+g%%s" %% pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
- Eexceptions:
+ Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
@@ -876,10 +1015,14 @@ def render(pieces, style):
if style == "pep440":
rendered = render_pep440(pieces)
+ elif style == "pep440-branch":
+ rendered = render_pep440_branch(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
+ elif style == "pep440-post-branch":
+ rendered = render_pep440_post_branch(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
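The dispatch above (it continues with the remaining styles) makes the two new branch-aware styles selectable through the "style" option in setup.cfg. Assuming plus_or_dot() yields "+" for a tag without a local segment, a feature branch two commits past a 1.8.0 tag would render roughly as follows (illustrative values):

pieces = {"closest-tag": "1.8.0", "distance": 2, "short": "574ab98",
          "dirty": False, "branch": "feature-x", "error": None}
# style = "pep440-branch"       -> "1.8.0.dev0+2.g574ab98"
# style = "pep440-post-branch"  -> "1.8.0.post2.dev0+g574ab98"
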
@@ -915,7 +1058,7 @@ def get_versions():
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
- for i in cfg.versionfile_source.split('/'):
+ for _ in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
@@ -950,22 +1093,21 @@ def git_get_keywords(versionfile_abs):
# _version.py.
keywords = {}
try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
+ with open(versionfile_abs, "r") as fobj:
+ for line in fobj:
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ except OSError:
pass
return keywords
@@ -973,10 +1115,14 @@ def git_get_keywords(versionfile_abs):
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
+ if "refnames" not in keywords:
+ raise NotThisMethod("Short version file found")
date = keywords.get("date")
if date is not None:
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
+
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
@@ -989,11 +1135,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ refs = {r.strip() for r in refnames.strip("()").split(",")}
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
@@ -1002,7 +1148,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
+ tags = {r for r in refs if re.search(r'\d', r)}
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
@@ -1011,6 +1157,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
+ # Filter out refs that exactly match prefix or that don't start
+ # with a number once the prefix is stripped (mostly a concern
+ # when prefix is '')
+ if not re.match(r'\d', r):
+ continue
if verbose:
print("picking %s" % r)
return {"version": r,
@@ -1026,7 +1177,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
@@ -1037,24 +1188,32 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
+ # GIT_DIR can interfere with correct operation of Versioneer.
+ # It may be intended to be passed to the Versioneer-versioned project,
+ # but that should not change where we get our version from.
+ env = os.environ.copy()
+ env.pop("GIT_DIR", None)
+ runner = functools.partial(runner, env=env)
+
+ _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
+ MATCH_ARGS = ["--match", "%s*" % tag_prefix] if tag_prefix else []
+
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%s*" % tag_prefix],
- cwd=root)
+ describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty",
+ "--always", "--long", *MATCH_ARGS],
+ cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
@@ -1064,6 +1223,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
+ branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
+ cwd=root)
+ # --abbrev-ref was added in git-1.6.3
+ if rc != 0 or branch_name is None:
+ raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+ branch_name = branch_name.strip()
+
+ if branch_name == "HEAD":
+ # If we aren't exactly on a branch, pick a branch which represents
+ # the current commit. If all else fails, we are on a branchless
+ # commit.
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+ # --contains was added in git-1.5.4
+ if rc != 0 or branches is None:
+ raise NotThisMethod("'git branch --contains' returned error")
+ branches = branches.split("\n")
+
+ # Remove the first line if we're running detached
+ if "(" in branches[0]:
+ branches.pop(0)
+
+ # Strip off the leading "* " from the list of branches.
+ branches = [branch[2:] for branch in branches]
+ if "master" in branches:
+ branch_name = "master"
+ elif not branches:
+ branch_name = None
+ else:
+ # Pick the first branch that is returned. Good or bad.
+ branch_name = branches[0]
+
+ pieces["branch"] = branch_name
+
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
@@ -1080,7 +1272,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
- # unparseable. Maybe git-describe is misbehaving?
+ # unparsable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
@@ -1105,13 +1297,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
else:
# HEX: no tags
pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
+ count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
- cwd=root)[0].strip()
+ date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
@@ -1130,27 +1323,26 @@ def do_vcs_install(manifest_in, versionfile_source, ipy):
if ipy:
files.append(ipy)
try:
- me = __file__
- if me.endswith(".pyc") or me.endswith(".pyo"):
- me = os.path.splitext(me)[0] + ".py"
- versioneer_file = os.path.relpath(me)
+ my_path = __file__
+ if my_path.endswith(".pyc") or my_path.endswith(".pyo"):
+ my_path = os.path.splitext(my_path)[0] + ".py"
+ versioneer_file = os.path.relpath(my_path)
except NameError:
versioneer_file = "versioneer.py"
files.append(versioneer_file)
present = False
try:
- f = open(".gitattributes", "r")
- for line in f.readlines():
- if line.strip().startswith(versionfile_source):
- if "export-subst" in line.strip().split()[1:]:
- present = True
- f.close()
- except EnvironmentError:
+ with open(".gitattributes", "r") as fobj:
+ for line in fobj:
+ if line.strip().startswith(versionfile_source):
+ if "export-subst" in line.strip().split()[1:]:
+ present = True
+ break
+ except OSError:
pass
if not present:
- f = open(".gitattributes", "a+")
- f.write("%s export-subst\n" % versionfile_source)
- f.close()
+ with open(".gitattributes", "a+") as fobj:
+ fobj.write(f"{versionfile_source} export-subst\n")
files.append(".gitattributes")
run_command(GITS, ["add", "--"] + files)
@@ -1164,15 +1356,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
"""
rootdirs = []
- for i in range(3):
+ for _ in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
@@ -1181,7 +1372,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
SHORT_VERSION_PY = """
-# This file was generated by 'versioneer.py' (0.18) from
+# This file was generated by 'versioneer.py' (0.22) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
@@ -1203,7 +1394,7 @@ def versions_from_file(filename):
try:
with open(filename) as f:
contents = f.read()
- except EnvironmentError:
+ except OSError:
raise NotThisMethod("unable to read _version.py")
mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
contents, re.M | re.S)
@@ -1258,19 +1449,67 @@ def render_pep440(pieces):
return rendered
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
+def render_pep440_branch(pieces):
+ """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+ The ".dev0" means not master branch. Note that .dev0 sorts backwards
+ (a feature branch will appear "older" than the master branch).
Exceptions:
- 1: no tags. 0.post.devDISTANCE
+ 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0"
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+untagged.%d.g%s" % (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def pep440_split_post(ver):
+ """Split pep440 version string at the post-release segment.
+
+ Returns the release segments before the post-release and the
+ post-release version number (or None if no post-release segment is present).
+ """
+ vc = str.split(ver, ".post")
+ return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces):
+ """TAG[.postN.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post0.devDISTANCE
+ """
+ if pieces["closest-tag"]:
if pieces["distance"]:
- rendered += ".post.dev%d" % pieces["distance"]
+ # update the post release segment
+ tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+ rendered = tag_version
+ if post_version is not None:
+ rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"])
+ else:
+ rendered += ".post0.dev%d" % (pieces["distance"])
+ else:
+ # no commits, use the tag as the version
+ rendered = pieces["closest-tag"]
else:
# exception #1
- rendered = "0.post.dev%d" % pieces["distance"]
+ rendered = "0.post0.dev%d" % pieces["distance"]
return rendered
@@ -1301,12 +1540,41 @@ def render_pep440_post(pieces):
return rendered
+def render_pep440_post_branch(pieces):
+ """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+ The ".dev0" means not master branch.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%d" % pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%s" % pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0.post%d" % pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+g%s" % pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
- Eexceptions:
+ Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
@@ -1377,10 +1645,14 @@ def render(pieces, style):
if style == "pep440":
rendered = render_pep440(pieces)
+ elif style == "pep440-branch":
+ rendered = render_pep440_branch(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
+ elif style == "pep440-post-branch":
+ rendered = render_pep440_post_branch(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
@@ -1480,8 +1752,12 @@ def get_version():
return get_versions()["version"]
-def get_cmdclass():
- """Get the custom setuptools/distutils subclasses used by Versioneer."""
+def get_cmdclass(cmdclass=None):
+ """Get the custom setuptools/distutils subclasses used by Versioneer.
+
+ If the package uses a different cmdclass (e.g. one from numpy), it
+ should be provided as an argument.
+ """
if "versioneer" in sys.modules:
del sys.modules["versioneer"]
# this fixes the "python setup.py develop" case (also 'install' and
@@ -1495,12 +1771,15 @@ def get_cmdclass():
# parent is protected against the child's "import versioneer". By
# removing ourselves from sys.modules here, before the child build
# happens, we protect the child from the parent's versioneer too.
- # Also see https://github.com/warner/python-versioneer/issues/52
+ # Also see https://github.com/python-versioneer/python-versioneer/issues/52
- cmds = {}
+ cmds = {} if cmdclass is None else cmdclass.copy()
# we add "version" to both distutils and setuptools
- from distutils.core import Command
+ try:
+ from setuptools import Command
+ except ImportError:
+ from distutils.core import Command
class cmd_version(Command):
description = "report generated version string"
@@ -1539,7 +1818,9 @@ def get_cmdclass():
# setup.py egg_info -> ?
# we override different "build_py" commands for both environments
- if "setuptools" in sys.modules:
+ if 'build_py' in cmds:
+ _build_py = cmds['build_py']
+ elif "setuptools" in sys.modules:
from setuptools.command.build_py import build_py as _build_py
else:
from distutils.command.build_py import build_py as _build_py
@@ -1559,6 +1840,33 @@ def get_cmdclass():
write_to_version_file(target_versionfile, versions)
cmds["build_py"] = cmd_build_py
+ if 'build_ext' in cmds:
+ _build_ext = cmds['build_ext']
+ elif "setuptools" in sys.modules:
+ from setuptools.command.build_ext import build_ext as _build_ext
+ else:
+ from distutils.command.build_ext import build_ext as _build_ext
+
+ class cmd_build_ext(_build_ext):
+ def run(self):
+ root = get_root()
+ cfg = get_config_from_root(root)
+ versions = get_versions()
+ _build_ext.run(self)
+ if self.inplace:
+ # build_ext --inplace will only build extensions in
+ # build/lib<..> dir with no _version.py to write to.
+ # As in place builds will already have a _version.py
+ # in the module dir, we do not need to write one.
+ return
+ # now locate _version.py in the new build/ directory and replace
+ # it with an updated value
+ target_versionfile = os.path.join(self.build_lib,
+ cfg.versionfile_build)
+ print("UPDATING %s" % target_versionfile)
+ write_to_version_file(target_versionfile, versions)
+ cmds["build_ext"] = cmd_build_ext
+
if "cx_Freeze" in sys.modules: # cx_freeze enabled?
from cx_Freeze.dist import build_exe as _build_exe
# nczeczulin reports that py2exe won't like the pep440-style string
@@ -1592,10 +1900,7 @@ def get_cmdclass():
del cmds["build_py"]
if 'py2exe' in sys.modules: # py2exe enabled?
- try:
- from py2exe.distutils_buildexe import py2exe as _py2exe # py3
- except ImportError:
- from py2exe.build_exe import py2exe as _py2exe # py2
+ from py2exe.distutils_buildexe import py2exe as _py2exe
class cmd_py2exe(_py2exe):
def run(self):
@@ -1620,7 +1925,9 @@ def get_cmdclass():
cmds["py2exe"] = cmd_py2exe
# we override different "sdist" commands for both environments
- if "setuptools" in sys.modules:
+ if 'sdist' in cmds:
+ _sdist = cmds['sdist']
+ elif "setuptools" in sys.modules:
from setuptools.command.sdist import sdist as _sdist
else:
from distutils.command.sdist import sdist as _sdist
@@ -1687,21 +1994,26 @@ SAMPLE_CONFIG = """
"""
-INIT_PY_SNIPPET = """
+OLD_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
+INIT_PY_SNIPPET = """
+from . import {0}
+__version__ = {0}.get_versions()['version']
+"""
+
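For a versionfile named _version.py, which is what do_setup() below derives from versionfile_source in the usual layout, the parameterised snippet expands to (definition repeated here so the sketch stands alone):

INIT_PY_SNIPPET = """
from . import {0}
__version__ = {0}.get_versions()['version']
"""

print(INIT_PY_SNIPPET.format("_version"))
# from . import _version
# __version__ = _version.get_versions()['version']
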
def do_setup():
- """Main VCS-independent setup function for installing Versioneer."""
+ """Do main VCS-independent setup function for installing Versioneer."""
root = get_root()
try:
cfg = get_config_from_root(root)
- except (EnvironmentError, configparser.NoSectionError,
+ except (OSError, configparser.NoSectionError,
configparser.NoOptionError) as e:
- if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
+ if isinstance(e, (OSError, configparser.NoSectionError)):
print("Adding sample versioneer config to setup.cfg",
file=sys.stderr)
with open(os.path.join(root, "setup.cfg"), "a") as f:
@@ -1725,12 +2037,18 @@ def do_setup():
try:
with open(ipy, "r") as f:
old = f.read()
- except EnvironmentError:
+ except OSError:
old = ""
- if INIT_PY_SNIPPET not in old:
+ module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0]
+ snippet = INIT_PY_SNIPPET.format(module)
+ if OLD_SNIPPET in old:
+ print(" replacing boilerplate in %s" % ipy)
+ with open(ipy, "w") as f:
+ f.write(old.replace(OLD_SNIPPET, snippet))
+ elif snippet not in old:
print(" appending to %s" % ipy)
with open(ipy, "a") as f:
- f.write(INIT_PY_SNIPPET)
+ f.write(snippet)
else:
print(" %s unmodified" % ipy)
else:
@@ -1749,7 +2067,7 @@ def do_setup():
if line.startswith("include "):
for include in line.split()[1:]:
simple_includes.add(include)
- except EnvironmentError:
+ except OSError:
pass
# That doesn't cover everything MANIFEST.in can do
# (http://docs.python.org/2/distutils/sourcedist.html#commands), so
View it on GitLab: https://salsa.debian.org/med-team/conda-package-handling/-/compare/ee45cc1c6b1c9ef35d612a43055a3e24094d4056...ed549b20bdef2129aa5e8b9ead06c8e3527088d6