[med-svn] [Git][med-team/patsy][upstream] New upstream version 0.5.4
Lance Lin (@linqigang)
gitlab@salsa.debian.org
Wed Dec 20 16:24:58 GMT 2023
Lance Lin pushed to branch upstream at Debian Med / patsy
Commits:
4e5ab23b by Lance Lin at 2023-12-20T20:32:22+07:00
New upstream version 0.5.4
- - - - -
8 changed files:
- + .github/workflows/publish.yml
- + .github/workflows/tox.yml
- doc/changes.rst
- patsy/parse_formula.py
- patsy/tokens.py
- patsy/version.py
- setup.py
- tox.ini
Changes:
=====================================
.github/workflows/publish.yml
=====================================
@@ -0,0 +1,27 @@
+name: Publish tagged releases to PyPI
+
+on:
+ push:
+ tags:
+ - "v*"
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v1
+ - name: Set up Python
+ uses: actions/setup-python@v1
+ with:
+ python-version: '3.7'
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install setuptools wheel twine
+ - name: Build and publish
+ env:
+ TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
+ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
+ run: |
+ python setup.py sdist bdist_wheel
+ twine upload dist/*
=====================================
.github/workflows/tox.yml
=====================================
@@ -0,0 +1,42 @@
+name: Run Tox Tests
+
+on:
+ push:
+ branches:
+ - "*"
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ strategy:
+ max-parallel: 4
+ matrix:
+ python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
+ pandas-presence: ['with_pandas', 'without_pandas']
+ env:
+ PYTHON_VERSION: ${{ matrix.python-version }}
+ PANDAS_PRESENCE: ${{ matrix.pandas-presence }}
+ steps:
+ - uses: actions/checkout@v2
+ - uses: gabrielfalcao/pyenv-action@v17
+ with:
+ default: "${{ matrix.python-version }}"
+ - name: Install Python ${{ matrix.python-version }}
+ run: |
+ pyenv install "${{ matrix.python-version }}"
+ pyenv local "${{ matrix.python-version }}"
+ pyenv versions
+ - name: Install dependencies
+ run: |
+ pip install -U pip
+ pip install tox tox-gh-actions
+ - name: Test with tox
+ run: |
+ PYTHON_ENV="py$(echo $PYTHON_VERSION | sed 's/\.//;s/\-dev//')"
+ tox -e "${PYTHON_ENV}-${PANDAS_PRESENCE}"
+ - name: Upload coverage to Codecov
+ uses: codecov/codecov-action@v1.0.10
+ with:
+ file: ./coverage.xml
+ flags: unittests
+ env_vars: PYTHON_VERSION,PANDAS_PRESENCE
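Note on the "Test with tox" step above: the shell line rewrites the matrix's Python version into the matching tox environment name (e.g. "3.12" plus "with_pandas" becomes "py312-with_pandas"). A minimal Python sketch of that mapping, for illustration only (the helper name is made up):

    def tox_env(python_version, pandas_presence):
        # Mirrors PYTHON_ENV="py$(echo $PYTHON_VERSION | sed 's/\.//;s/\-dev//')"
        # for the version strings used in this matrix.
        env = "py" + python_version.replace(".", "").replace("-dev", "")
        return "{}-{}".format(env, pandas_presence)

    assert tox_env("3.12", "with_pandas") == "py312-with_pandas"
    assert tox_env("2.7", "without_pandas") == "py27-without_pandas"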
=====================================
doc/changes.rst
=====================================
@@ -8,6 +8,11 @@ All Patsy releases are archived at Zenodo:
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.592075.svg
:target: https://doi.org/10.5281/zenodo.592075
+v0.5.4
+------
+
+* Add support for Python 3.12, and fix broken unit tests in this version.
+
v0.5.3
------
=====================================
patsy/parse_formula.py
=====================================
@@ -84,7 +84,7 @@ def _tokenize_formula(code, operator_strings):
# "magic" token does:
end_tokens = set(magic_token_types)
end_tokens.remove("(")
-
+
it = PushbackAdapter(python_tokenize(code))
for pytype, token_string, origin in it:
if token_string in magic_token_types:
@@ -92,7 +92,7 @@ def _tokenize_formula(code, operator_strings):
else:
it.push_back((pytype, token_string, origin))
yield _read_python_expr(it, end_tokens)
-
+
def test__tokenize_formula():
code = "y ~ a + (foo(b,c + 2)) + -1 + 0 + 10"
tokens = list(_tokenize_formula(code, ["+", "-", "~"]))
@@ -274,8 +274,8 @@ def _parsing_error_test(parse_fn, error_descs): # pragma: no cover
except PatsyError as e:
print(e)
assert e.origin.code == bad_code
- assert e.origin.start == start
- assert e.origin.end == end
+ assert e.origin.start in (0, start)
+ assert e.origin.end in (end, len(bad_code))
else:
assert False, "parser failed to report an error!"
=====================================
patsy/tokens.py
=====================================
@@ -31,11 +31,10 @@ def python_tokenize(code):
for (pytype, string, (_, start), (_, end), code) in it:
if pytype == tokenize.ENDMARKER:
break
- origin = Origin(code, start, end)
- assert pytype != tokenize.NL
- if pytype == tokenize.NEWLINE:
+ if pytype in (tokenize.NL, tokenize.NEWLINE):
assert string == ""
continue
+ origin = Origin(code, start, end)
if pytype == tokenize.ERRORTOKEN:
raise PatsyError("error tokenizing input "
"(maybe an unclosed string?)",
@@ -53,8 +52,14 @@ def python_tokenize(code):
# end of the source text. We have our own error handling for
# such cases, so just treat this as an end-of-stream.
#
+ if "unterminated string literal" in e.args[0]:
+ raise PatsyError(
+ "error tokenizing input ({})".format(e.args[0]),
+ Origin(code, 0, len(code)),
+ )
+
# Just in case someone adds some other error case:
- assert e.args[0].startswith("EOF in multi-line")
+ assert "EOF in multi-line" in e.args[0]
return
def test_python_tokenize():
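The new branch above is needed because Python 3.12's tokenizer raises tokenize.TokenError with an "unterminated string literal" message for an unclosed string, whereas older interpreters yielded an ERRORTOKEN that the existing ERRORTOKEN path already handled. A rough standalone sketch of that standard-library behaviour (the sample input and the printed wording are illustrative only, and the exact message varies between Python versions):

    import io
    import tokenize

    code = 'f("a, b'  # unclosed string literal
    try:
        for tok in tokenize.generate_tokens(io.StringIO(code).readline):
            print(tok)
    except tokenize.TokenError as e:
        # On Python 3.12 this reports something like
        # "unterminated string literal (detected at line 1)";
        # earlier versions yield an ERRORTOKEN instead of raising.
        print(e.args[0])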
=====================================
patsy/version.py
=====================================
@@ -17,4 +17,4 @@
# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
# 1.0.0.)
-__version__ = "0.5.3"
+__version__ = "0.5.4"
=====================================
setup.py
=====================================
@@ -46,6 +46,7 @@ setup(
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"Topic :: Scientific/Engineering",
],
)
=====================================
tox.ini
=====================================
@@ -1,5 +1,5 @@
[tox]
-envlist = {py27,py36,py37,py38,py39,py310,py311}-{with_pandas,without_pandas}
+envlist = {py27,py36,py37,py38,py39,py310,py311,py312}-{with_pandas,without_pandas}
[gh-actions]
python =
@@ -10,6 +10,7 @@ python =
3.9: py39
3.10: py310
3.11: py311
+ 3.12: py312
[testenv]
deps=
@@ -25,6 +26,8 @@ setenv=
OMP_NUM_THREADS=1
MKL_NUM_THREADS=1
VML_NUM_THREADS=1
+allowlist_externals=
+ env
commands=
pytest -vv --cov=patsy --cov-config={toxinidir}/.coveragerc --cov-report=term-missing --cov-report=xml --cov-report=html:{toxworkdir}/coverage/{envname} {posargs:}
env PATSY_AVOID_OPTIONAL_DEPENDENCIES=1 pytest -vv --cov=patsy --cov-config={toxinidir}/.coveragerc --cov-report=term-missing --cov-report=xml --cov-report=html:{toxworkdir}/coverage/{envname} {posargs:}
View it on GitLab: https://salsa.debian.org/med-team/patsy/-/commit/4e5ab23bfb3a1c6cfa018905672d5d20737cafe8