Hello community,

here is the log from the commit of package python-pytest-subtests for openSUSE:Factory checked in at 2023-06-03 00:06:37
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-pytest-subtests (Old)
 and      /work/SRC/openSUSE:Factory/.python-pytest-subtests.new.15902 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-pytest-subtests"

Sat Jun  3 00:06:37 2023 rev:10 rq:1090087 version:0.11.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-pytest-subtests/python-pytest-subtests.changes    2023-04-25 16:53:27.874179945 +0200
+++ /work/SRC/openSUSE:Factory/.python-pytest-subtests.new.15902/python-pytest-subtests.changes    2023-06-03 00:06:44.549818453 +0200
@@ -1,0 +2,10 @@
+Wed May 31 19:44:46 UTC 2023 - Dirk Müller <dmuel...@suse.com>
+
+- update to 0.11.0:
+  * Logging is displayed for failing subtests (`#92`_)
+  * Passing subtests no longer turn the pytest output to yellow
+    (as if warnings have been issued)
+  * Now the ``msg`` contents of a subtest is displayed when
+    running pytest with ``-v``.
+
+-------------------------------------------------------------------
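As a quick illustration of the changelog items above, here is a minimal sketch (hypothetical test file, not shipped in the package) of the ``subtests`` fixture whose ``msg`` now appears under ``pytest -v`` and whose logging is captured for failing subtests:

    # demo_subtests.py (hypothetical); run: pytest -v --log-level=INFO
    import logging

    def test_parity(subtests):
        for i in range(3):
            with subtests.test(msg="parity check", i=i):
                logging.info("checking i=%d", i)  # shown only when the subtest fails
                assert i % 2 == 0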

Old:
----
  pytest-subtests-0.10.0.tar.gz

New:
----
  pytest-subtests-0.11.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-pytest-subtests.spec ++++++
--- /var/tmp/diff_new_pack.J4cQXe/_old  2023-06-03 00:06:45.737825468 +0200
+++ /var/tmp/diff_new_pack.J4cQXe/_new  2023-06-03 00:06:45.741825492 +0200
@@ -18,14 +18,15 @@
 
 %{?sle15_python_module_pythons}
 Name:           python-pytest-subtests
-Version:        0.10.0
+Version:        0.11.0
 Release:        0
 Summary:        Python unittest subTest() support and subtests fixture
 License:        MIT
 URL:            https://github.com/pytest-dev/pytest-subtests
Source:         https://files.pythonhosted.org/packages/source/p/pytest-subtests/pytest-subtests-%{version}.tar.gz
-BuildRequires:  %{python_module setuptools >= 40.0}
+BuildRequires:  %{python_module pip}
 BuildRequires:  %{python_module setuptools_scm}
+BuildRequires:  %{python_module wheel}
 BuildRequires:  fdupes
 BuildRequires:  python-rpm-macros
 Requires:       python-pytest >= 7.0
@@ -42,10 +43,10 @@
 %setup -q -n pytest-subtests-%{version}
 
 %build
-%python_build
+%pyproject_wheel
 
 %install
-%python_install
+%pyproject_install
 %python_expand %fdupes %{buildroot}%{$python_sitelib}
 
 %check

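The spec now builds with the PEP 517 macros (%pyproject_wheel and %pyproject_install) instead of the legacy setup.py-based %python_build and %python_install. As a rough sketch of what a PEP 517 wheel build amounts to, assuming the pypa ``build`` library (the RPM macros drive the backend through pip, not through this code):

    # Sketch only: build a wheel via the backend declared in pyproject.toml.
    from build import ProjectBuilder

    builder = ProjectBuilder(".")            # source tree with pyproject.toml
    wheel = builder.build("wheel", "dist/")  # returns the path of the built wheel
    print("built", wheel)
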
++++++ pytest-subtests-0.10.0.tar.gz -> pytest-subtests-0.11.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/.github/workflows/deploy.yml new/pytest-subtests-0.11.0/.github/workflows/deploy.yml
--- old/pytest-subtests-0.10.0/.github/workflows/deploy.yml     2023-02-16 02:46:47.000000000 +0100
+++ new/pytest-subtests-0.11.0/.github/workflows/deploy.yml     2023-05-15 14:16:44.000000000 +0200
@@ -1,15 +1,19 @@
 name: deploy
 
 on:
-  push:
-    tags:
-      - "[0-9]+.[0-9]+.[0-9]+"
+  workflow_dispatch:
+    inputs:
+      version:
+        description: 'Release version'
+        required: true
+        default: '1.2.3'
 
 jobs:
-  deploy:
-    if: github.repository == 'pytest-dev/pytest-subtests'
 
+  package:
     runs-on: ubuntu-latest
+    env:
+      SETUPTOOLS_SCM_PRETEND_VERSION: ${{ github.event.inputs.version }}
 
     steps:
     - uses: actions/checkout@v3
@@ -17,6 +21,17 @@
     - name: Build and Check Package
       uses: hynek/build-and-inspect-python-package@v1.5
 
+  deploy:
+    needs: package
+    runs-on: ubuntu-latest
+    environment: deploy
+    permissions:
+      id-token: write  # For PyPI trusted publishers.
+      contents: write  # For tag.
+
+    steps:
+    - uses: actions/checkout@v3
+
     - name: Download Package
       uses: actions/download-artifact@v3
       with:
@@ -24,7 +39,11 @@
         path: dist
 
     - name: Publish package to PyPI
-      uses: pypa/gh-action-pypi-publish@master
-      with:
-        user: __token__
-        password: ${{ secrets.pypi_token }}
+      uses: pypa/gh-action-pypi-publish@v1.8.5
+
+    - name: Push tag
+      run: |
+        git config user.name "pytest bot"
+        git config user.email "pytest...@gmail.com"
+        git tag --annotate --message=v${{ github.event.inputs.version }} v${{ github.event.inputs.version }} ${{ github.sha }}
+        git push origin v${{ github.event.inputs.version }}
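
The reworked workflow derives the release version from a manual input rather than a pushed tag: SETUPTOOLS_SCM_PRETEND_VERSION makes setuptools-scm report the given version regardless of the repository state. A small sketch of that mechanism:

    # Sketch: the environment variable overrides the VCS-derived version.
    import os
    os.environ["SETUPTOOLS_SCM_PRETEND_VERSION"] = "0.11.0"

    from setuptools_scm import get_version
    print(get_version())  # -> "0.11.0", no git tag needed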
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/.github/workflows/test.yml new/pytest-subtests-0.11.0/.github/workflows/test.yml
--- old/pytest-subtests-0.10.0/.github/workflows/test.yml       2023-02-16 02:46:47.000000000 +0100
+++ new/pytest-subtests-0.11.0/.github/workflows/test.yml       2023-05-15 14:16:44.000000000 +0200
@@ -3,7 +3,12 @@
 on:
   push:
     branches:
+      - main
+      - "test-me-*"
+
   pull_request:
+    branches:
+      - "*"
 
 
 # Cancel running jobs for the same workflow and branch.
@@ -12,10 +17,17 @@
   cancel-in-progress: true
 
 jobs:
-  test:
 
-    runs-on: ${{ matrix.os }}
+  package:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Build and Check Package
+        uses: hynek/build-and-inspect-python-package@v1.5
 
+  test:
+    needs: [package]
+    runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
       matrix:
@@ -79,6 +91,12 @@
     steps:
     - uses: actions/checkout@v3
 
+    - name: Download Package
+      uses: actions/download-artifact@v3
+      with:
+        name: Packages
+        path: dist
+
     - name: Set up Python
       uses: actions/setup-python@v4
       with:
@@ -90,12 +108,6 @@
         python -m pip install --upgrade tox
 
     - name: Test
+      shell: bash
       run: |
-        tox -e ${{ matrix.tox_env }}
-
-  check-package:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v3
-    - name: Build and Check Package
-      uses: hynek/build-and-inspect-python-package@v1.5
+        tox run -e ${{ matrix.tox_env }} --installpkg `find dist/*.tar.gz`
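
With this change the test job no longer tests the raw checkout: it downloads the sdist built by the ``package`` job and hands it to tox via ``--installpkg``, so the tests run against the packaged code. A sketch of the step's effect (the ``py311`` environment name is illustrative):

    # Sketch: run tox against the built sdist instead of the source tree.
    import glob
    import subprocess

    sdist = glob.glob("dist/*.tar.gz")[0]  # artifact from the package job
    subprocess.run(["tox", "run", "-e", "py311", "--installpkg", sdist], check=True)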
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/.gitignore new/pytest-subtests-0.11.0/.gitignore
--- old/pytest-subtests-0.10.0/.gitignore       2023-02-16 02:46:47.000000000 +0100
+++ new/pytest-subtests-0.11.0/.gitignore       2023-05-15 14:16:44.000000000 +0200
@@ -102,3 +102,6 @@
 
 # mypy
 .mypy_cache/
+
+# PyCharm.
+.idea/
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/CHANGELOG.rst new/pytest-subtests-0.11.0/CHANGELOG.rst
--- old/pytest-subtests-0.10.0/CHANGELOG.rst    2023-02-16 02:46:47.000000000 +0100
+++ new/pytest-subtests-0.11.0/CHANGELOG.rst    2023-05-15 14:16:44.000000000 +0200
@@ -1,6 +1,19 @@
 CHANGELOG
 =========
 
+0.11.0 (2023-05-15)
+-------------------
+
+* Logging is displayed for failing subtests (`#92`_)
+* Passing subtests no longer turn the pytest output to yellow (as if warnings have been issued) (`#86`_). Thanks to `Andrew-Brock`_ for providing the solution.
+* Now the ``msg`` contents of a subtest is displayed when running pytest with ``-v`` (`#6`_).
+
+.. _#6: https://github.com/pytest-dev/pytest-subtests/issues/6
+.. _#86: https://github.com/pytest-dev/pytest-subtests/issues/86
+.. _#92: https://github.com/pytest-dev/pytest-subtests/issues/87
+
+.. _`Andrew-Brock`: https://github.com/Andrew-Brock
+
 0.10.0 (2022-02-15)
 -------------------
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/PKG-INFO new/pytest-subtests-0.11.0/PKG-INFO
--- old/pytest-subtests-0.10.0/PKG-INFO 2023-02-16 02:47:09.727210300 +0100
+++ new/pytest-subtests-0.11.0/PKG-INFO 2023-05-15 14:17:07.564752000 +0200
@@ -1,13 +1,11 @@
 Metadata-Version: 2.1
 Name: pytest-subtests
-Version: 0.10.0
+Version: 0.11.0
 Summary: unittest subTest() support and subtests fixture
 Home-page: https://github.com/pytest-dev/pytest-subtests
 Author: Bruno Oliveira
-Author-email: nicodde...@gmail.com
-Maintainer: Bruno Oliveira
-Maintainer-email: nicodde...@gmail.com
 License: MIT
+Keywords: test,unittest,pytest
 Classifier: Development Status :: 4 - Beta
 Classifier: Framework :: Pytest
 Classifier: Intended Audience :: Developers
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/RELEASING.rst new/pytest-subtests-0.11.0/RELEASING.rst
--- old/pytest-subtests-0.10.0/RELEASING.rst    2023-02-16 02:46:47.000000000 +0100
+++ new/pytest-subtests-0.11.0/RELEASING.rst    2023-05-15 14:16:44.000000000 +0200
@@ -22,16 +22,12 @@
 
 To publish a new release ``X.Y.Z``, the steps are as follows:
 
-#. Create a new branch named ``release-X.Y.Z`` from the latest ``master``.
+#. Create a new branch named ``release-X.Y.Z`` from the latest ``main``.
 
 #. Update the ``CHANGELOG.rst`` file with the new release information.
 
-#. Commit and push the branch for review.
+#. Commit and push the branch to ``upstream`` and open a PR.
 
-#. Once PR is **green** and **approved**, create and push a tag::
+#. Once the PR is **green** and **approved**, start the ``deploy`` workflow manually from the branch ``release-VERSION``, passing ``VERSION`` as parameter.
 
-    $ export VERSION=X.Y.Z
-    $ git tag v$VERSION release-$VERSION
-    $ git push g...@github.com:pytest-dev/pytest-subtests.git v$VERSION
-
-That will build the package and publish it on ``PyPI`` automatically.
+#. Merge the release PR to ``main``.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/pyproject.toml new/pytest-subtests-0.11.0/pyproject.toml
--- old/pytest-subtests-0.10.0/pyproject.toml   1970-01-01 01:00:00.000000000 +0100
+++ new/pytest-subtests-0.11.0/pyproject.toml   2023-05-15 14:16:44.000000000 +0200
@@ -0,0 +1,8 @@
+[build-system]
+requires = [
+  "setuptools",
+  "setuptools-scm[toml]>=6.2.3",
+]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools_scm]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/pytest.ini new/pytest-subtests-0.11.0/pytest.ini
--- old/pytest-subtests-0.10.0/pytest.ini       1970-01-01 01:00:00.000000000 +0100
+++ new/pytest-subtests-0.11.0/pytest.ini       2023-05-15 14:16:44.000000000 +0200
@@ -0,0 +1,3 @@
+[pytest]
+addopts = -ra
+testpaths = tests
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/pytest_subtests.egg-info/PKG-INFO new/pytest-subtests-0.11.0/pytest_subtests.egg-info/PKG-INFO
--- old/pytest-subtests-0.10.0/pytest_subtests.egg-info/PKG-INFO        2023-02-16 02:47:09.000000000 +0100
+++ new/pytest-subtests-0.11.0/pytest_subtests.egg-info/PKG-INFO        1970-01-01 01:00:00.000000000 +0100
@@ -1,206 +0,0 @@
-Metadata-Version: 2.1
-Name: pytest-subtests
-Version: 0.10.0
-Summary: unittest subTest() support and subtests fixture
-Home-page: https://github.com/pytest-dev/pytest-subtests
-Author: Bruno Oliveira
-Author-email: nicodde...@gmail.com
-Maintainer: Bruno Oliveira
-Maintainer-email: nicodde...@gmail.com
-License: MIT
-Classifier: Development Status :: 4 - Beta
-Classifier: Framework :: Pytest
-Classifier: Intended Audience :: Developers
-Classifier: Topic :: Software Development :: Testing
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Operating System :: OS Independent
-Classifier: License :: OSI Approved :: MIT License
-Requires-Python: >=3.7
-Description-Content-Type: text/x-rst
-License-File: LICENSE
-
-===============
-pytest-subtests
-===============
-
-unittest ``subTest()`` support and ``subtests`` fixture.
-
-.. image:: https://img.shields.io/pypi/v/pytest-subtests.svg
-    :target: https://pypi.org/project/pytest-subtests
-    :alt: PyPI version
-
-.. image:: https://img.shields.io/conda/vn/conda-forge/pytest-subtests.svg
-    :target: https://anaconda.org/conda-forge/pytest-subtests
-
-.. image:: https://img.shields.io/pypi/pyversions/pytest-subtests.svg
-    :target: https://pypi.org/project/pytest-subtests
-    :alt: Python versions
-
-.. image:: https://github.com/pytest-dev/pytest-subtests/workflows/test/badge.svg
-  :target: https://github.com/pytest-dev/pytest-subtests/actions
-
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-    :target: https://github.com/ambv/black
-
-----
-
-This `pytest`_ plugin was generated with `Cookiecutter`_ along with `@hackebrot`_'s `cookiecutter-pytest-plugin`_ template.
-
-
-Features
---------
-
-* Adds support for `TestCase.subTest <https://docs.python.org/3/library/unittest.html#distinguishing-test-iterations-using-subtests>`__.
-
-* New ``subtests`` fixture, providing similar functionality for pure pytest tests.
-
-
-Installation
-------------
-
-You can install ``pytest-subtests`` via `pip`_ from `PyPI`_::
-
-    $ pip install pytest-subtests
-
-
-
-Usage
------
-
-unittest subTest() example
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-.. code-block:: python
-
-    import unittest
-
-
-    class T(unittest.TestCase):
-        def test_foo(self):
-            for i in range(5):
-                with self.subTest("custom message", i=i):
-                    self.assertEqual(i % 2, 0)
-
-
-    if __name__ == "__main__":
-        unittest.main()
-
-
-**Output**
-
-.. code-block::
-
-    λ pytest .tmp\test-unit-subtest.py
-    ======================== test session starts ========================
-    ...
-    collected 1 item
-
-    .tmp\test-unit-subtest.py FF.                                  [100%]
-
-    ============================= FAILURES ==============================
-    _________________ T.test_foo [custom message] (i=1) _________________
-
-    self = <test-unit-subtest.T testMethod=test_foo>
-
-        def test_foo(self):
-            for i in range(5):
-                with self.subTest('custom message', i=i):
-    >               self.assertEqual(i % 2, 0)
-    E               AssertionError: 1 != 0
-
-    .tmp\test-unit-subtest.py:9: AssertionError
-    _________________ T.test_foo [custom message] (i=3) _________________
-
-    self = <test-unit-subtest.T testMethod=test_foo>
-
-        def test_foo(self):
-            for i in range(5):
-                with self.subTest('custom message', i=i):
-    >               self.assertEqual(i % 2, 0)
-    E               AssertionError: 1 != 0
-
-    .tmp\test-unit-subtest.py:9: AssertionError
-    ================ 2 failed, 1 passed in 0.07 seconds =================
-
-
-``subtests`` fixture example
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-.. code-block:: python
-
-    def test(subtests):
-        for i in range(5):
-            with subtests.test(msg="custom message", i=i):
-                assert i % 2 == 0
-
-
-**Output**
-
-.. code-block::
-
-    λ pytest .tmp\test-subtest.py
-    ======================== test session starts ========================
-    ...
-    collected 1 item
-
-    .tmp\test-subtest.py .F.F..                                    [100%]
-
-    ============================= FAILURES ==============================
-    ____________________ test [custom message] (i=1) ____________________
-
-        def test(subtests):
-            for i in range(5):
-                with subtests.test(msg='custom message', i=i):
-    >               assert i % 2 == 0
-    E               assert (1 % 2) == 0
-
-    .tmp\test-subtest.py:4: AssertionError
-    ____________________ test [custom message] (i=3) ____________________
-
-        def test(subtests):
-            for i in range(5):
-                with subtests.test(msg='custom message', i=i):
-    >               assert i % 2 == 0
-    E               assert (3 % 2) == 0
-
-    .tmp\test-subtest.py:4: AssertionError
-    ================ 2 failed, 1 passed in 0.07 seconds =================
-
-Contributing
-------------
-Contributions are very welcome. Tests can be run with `tox`_:
-
-.. code-block::
-
-    tox -e py37
-
-License
--------
-
-Distributed under the terms of the `MIT`_ license, "pytest-subtests" is free and open source software
-
-
-Issues
-------
-
-If you encounter any problems, please `file an issue`_ along with a detailed description.
-
-.. _`Cookiecutter`: https://github.com/audreyr/cookiecutter
-.. _`@hackebrot`: https://github.com/hackebrot
-.. _`MIT`: http://opensource.org/licenses/MIT
-.. _`BSD-3`: http://opensource.org/licenses/BSD-3-Clause
-.. _`GNU GPL v3.0`: http://www.gnu.org/licenses/gpl-3.0.txt
-.. _`Apache Software License 2.0`: http://www.apache.org/licenses/LICENSE-2.0
-.. _`cookiecutter-pytest-plugin`: https://github.com/pytest-dev/cookiecutter-pytest-plugin
-.. _`file an issue`: https://github.com/pytest-dev/pytest-subtests/issues
-.. _`pytest`: https://github.com/pytest-dev/pytest
-.. _`tox`: https://tox.readthedocs.io/en/latest/
-.. _`pip`: https://pypi.org/project/pip/
-.. _`PyPI`: https://pypi.org/project
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/pytest_subtests.egg-info/SOURCES.txt new/pytest-subtests-0.11.0/pytest_subtests.egg-info/SOURCES.txt
--- old/pytest-subtests-0.10.0/pytest_subtests.egg-info/SOURCES.txt     2023-02-16 02:47:09.000000000 +0100
+++ new/pytest-subtests-0.11.0/pytest_subtests.egg-info/SOURCES.txt     1970-01-01 01:00:00.000000000 +0100
@@ -1,19 +0,0 @@
-.gitignore
-.pre-commit-config.yaml
-CHANGELOG.rst
-LICENSE
-README.rst
-RELEASING.rst
-pytest_subtests.py
-setup.py
-tox.ini
-.github/workflows/deploy.yml
-.github/workflows/test.yml
-pytest_subtests.egg-info/PKG-INFO
-pytest_subtests.egg-info/SOURCES.txt
-pytest_subtests.egg-info/dependency_links.txt
-pytest_subtests.egg-info/entry_points.txt
-pytest_subtests.egg-info/requires.txt
-pytest_subtests.egg-info/top_level.txt
-tests/conftest.py
-tests/test_subtests.py
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/pytest_subtests.egg-info/dependency_links.txt new/pytest-subtests-0.11.0/pytest_subtests.egg-info/dependency_links.txt
--- old/pytest-subtests-0.10.0/pytest_subtests.egg-info/dependency_links.txt    2023-02-16 02:47:09.000000000 +0100
+++ new/pytest-subtests-0.11.0/pytest_subtests.egg-info/dependency_links.txt    1970-01-01 01:00:00.000000000 +0100
@@ -1 +0,0 @@
-
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/pytest_subtests.egg-info/entry_points.txt new/pytest-subtests-0.11.0/pytest_subtests.egg-info/entry_points.txt
--- old/pytest-subtests-0.10.0/pytest_subtests.egg-info/entry_points.txt        2023-02-16 02:47:09.000000000 +0100
+++ new/pytest-subtests-0.11.0/pytest_subtests.egg-info/entry_points.txt        1970-01-01 01:00:00.000000000 +0100
@@ -1,2 +0,0 @@
-[pytest11]
-subtests = pytest_subtests
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/pytest_subtests.egg-info/requires.txt new/pytest-subtests-0.11.0/pytest_subtests.egg-info/requires.txt
--- old/pytest-subtests-0.10.0/pytest_subtests.egg-info/requires.txt    2023-02-16 02:47:09.000000000 +0100
+++ new/pytest-subtests-0.11.0/pytest_subtests.egg-info/requires.txt    1970-01-01 01:00:00.000000000 +0100
@@ -1,2 +0,0 @@
-pytest>=7.0
-attrs>=19.2.0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/pytest_subtests.egg-info/top_level.txt new/pytest-subtests-0.11.0/pytest_subtests.egg-info/top_level.txt
--- old/pytest-subtests-0.10.0/pytest_subtests.egg-info/top_level.txt   2023-02-16 02:47:09.000000000 +0100
+++ new/pytest-subtests-0.11.0/pytest_subtests.egg-info/top_level.txt   1970-01-01 01:00:00.000000000 +0100
@@ -1 +0,0 @@
-pytest_subtests
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/pytest_subtests.py new/pytest-subtests-0.11.0/pytest_subtests.py
--- old/pytest-subtests-0.10.0/pytest_subtests.py       2023-02-16 02:46:47.000000000 +0100
+++ new/pytest-subtests-0.11.0/pytest_subtests.py       1970-01-01 01:00:00.000000000 +0100
@@ -1,257 +0,0 @@
-import time
-from contextlib import contextmanager
-from contextlib import nullcontext
-
-import attr
-import pytest
-from _pytest._code import ExceptionInfo
-from _pytest.capture import CaptureFixture
-from _pytest.capture import FDCapture
-from _pytest.capture import SysCapture
-from _pytest.outcomes import OutcomeException
-from _pytest.reports import TestReport
-from _pytest.runner import CallInfo
-from _pytest.runner import check_interactive_exception
-from _pytest.unittest import TestCaseFunction
-
-
-def pytest_addoption(parser):
-    group = parser.getgroup("subtests")
-    group.addoption(
-        "--no-subtests-shortletter",
-        action="store_true",
-        dest="no_subtests_shortletter",
-        default=False,
-        help="Disables subtest output 'dots' in non-verbose mode 
(EXPERIMENTAL)",
-    )
-
-
-@attr.s
-class SubTestContext:
-    msg = attr.ib()
-    kwargs = attr.ib()
-
-
-@attr.s(init=False)
-class SubTestReport(TestReport):
-    context = attr.ib()
-
-    @property
-    def head_line(self):
-        _, _, domain = self.location
-        return f"{domain} {self.sub_test_description()}"
-
-    def sub_test_description(self):
-        parts = []
-        if isinstance(self.context.msg, str):
-            parts.append(f"[{self.context.msg}]")
-        if self.context.kwargs:
-            params_desc = ", ".join(
-                f"{k}={v!r}" for (k, v) in sorted(self.context.kwargs.items())
-            )
-            parts.append(f"({params_desc})")
-        return " ".join(parts) or "(<subtest>)"
-
-    def _to_json(self):
-        data = super()._to_json()
-        del data["context"]
-        data["_report_type"] = "SubTestReport"
-        data["_subtest.context"] = attr.asdict(self.context)
-        return data
-
-    @classmethod
-    def _from_json(cls, reportdict):
-        report = super()._from_json(reportdict)
-        context_data = reportdict["_subtest.context"]
-        report.context = SubTestContext(
-            msg=context_data["msg"], kwargs=context_data["kwargs"]
-        )
-        return report
-
-    @classmethod
-    def _from_test_report(cls, test_report):
-        return super()._from_json(test_report._to_json())
-
-
-def _addSubTest(self, test_case, test, exc_info):
-    if exc_info is not None:
-        msg = test._message if isinstance(test._message, str) else None
-        call_info = make_call_info(
-            ExceptionInfo(exc_info, _ispytest=True),
-            start=0,
-            stop=0,
-            duration=0,
-            when="call",
-        )
-        report = self.ihook.pytest_runtest_makereport(item=self, call=call_info)
-        sub_report = SubTestReport._from_test_report(report)
-        sub_report.context = SubTestContext(msg, dict(test.params))
-        self.ihook.pytest_runtest_logreport(report=sub_report)
-        if check_interactive_exception(call_info, sub_report):
-            self.ihook.pytest_exception_interact(
-                node=self, call=call_info, report=sub_report
-            )
-
-
-def pytest_configure(config):
-    TestCaseFunction.addSubTest = _addSubTest
-    TestCaseFunction.failfast = False
-
-
-def pytest_unconfigure():
-    if hasattr(TestCaseFunction, "addSubTest"):
-        del TestCaseFunction.addSubTest
-    if hasattr(TestCaseFunction, "failfast"):
-        del TestCaseFunction.failfast
-
-
-@pytest.fixture
-def subtests(request):
-    capmam = request.node.config.pluginmanager.get_plugin("capturemanager")
-    if capmam is not None:
-        suspend_capture_ctx = capmam.global_and_fixture_disabled
-    else:
-        suspend_capture_ctx = nullcontext
-    yield SubTests(request.node.ihook, suspend_capture_ctx, request)
-
-
-@attr.s
-class SubTests:
-    ihook = attr.ib()
-    suspend_capture_ctx = attr.ib()
-    request = attr.ib()
-
-    @property
-    def item(self):
-        return self.request.node
-
-    @contextmanager
-    def _capturing_output(self):
-        option = self.request.config.getoption("capture", None)
-
-        # capsys or capfd are active, subtest should not capture
-
-        capman = self.request.config.pluginmanager.getplugin("capturemanager")
-        capture_fixture_active = getattr(capman, "_capture_fixture", None)
-
-        if option == "sys" and not capture_fixture_active:
-            with ignore_pytest_private_warning():
-                fixture = CaptureFixture(SysCapture, self.request)
-        elif option == "fd" and not capture_fixture_active:
-            with ignore_pytest_private_warning():
-                fixture = CaptureFixture(FDCapture, self.request)
-        else:
-            fixture = None
-
-        if fixture is not None:
-            fixture._start()
-
-        captured = Captured()
-        try:
-            yield captured
-        finally:
-            if fixture is not None:
-                out, err = fixture.readouterr()
-                fixture.close()
-                captured.out = out
-                captured.err = err
-
-    @contextmanager
-    def test(self, msg=None, **kwargs):
-        start = time.time()
-        precise_start = time.perf_counter()
-        exc_info = None
-
-        with self._capturing_output() as captured:
-            try:
-                yield
-            except (Exception, OutcomeException):
-                exc_info = ExceptionInfo.from_current()
-
-        precise_stop = time.perf_counter()
-        duration = precise_stop - precise_start
-        stop = time.time()
-
-        call_info = make_call_info(
-            exc_info, start=start, stop=stop, duration=duration, when="call"
-        )
-        report = self.ihook.pytest_runtest_makereport(item=self.item, call=call_info)
-        sub_report = SubTestReport._from_test_report(report)
-        sub_report.context = SubTestContext(msg, kwargs.copy())
-
-        captured.update_report(sub_report)
-
-        with self.suspend_capture_ctx():
-            self.ihook.pytest_runtest_logreport(report=sub_report)
-
-        if check_interactive_exception(call_info, sub_report):
-            self.ihook.pytest_exception_interact(
-                node=self.item, call=call_info, report=sub_report
-            )
-
-
-def make_call_info(exc_info, *, start, stop, duration, when):
-    return CallInfo(
-        None,
-        exc_info,
-        start=start,
-        stop=stop,
-        duration=duration,
-        when=when,
-        _ispytest=True,
-    )
-
-
-@contextmanager
-def ignore_pytest_private_warning():
-    import warnings
-
-    with warnings.catch_warnings():
-        warnings.filterwarnings(
-            "ignore",
-            "A private pytest class or function was used.",
-            category=pytest.PytestDeprecationWarning,
-        )
-        yield
-
-
-@attr.s
-class Captured:
-    out = attr.ib(default="", type=str)
-    err = attr.ib(default="", type=str)
-
-    def update_report(self, report):
-        if self.out:
-            report.sections.append(("Captured stdout call", self.out))
-        if self.err:
-            report.sections.append(("Captured stderr call", self.err))
-
-
-def pytest_report_to_serializable(report):
-    if isinstance(report, SubTestReport):
-        return report._to_json()
-
-
-def pytest_report_from_serializable(data):
-    if data.get("_report_type") == "SubTestReport":
-        return SubTestReport._from_json(data)
-
-
-@pytest.hookimpl(tryfirst=True)
-def pytest_report_teststatus(report, config):
-    if report.when != "call" or not isinstance(report, SubTestReport):
-        return
-
-    if hasattr(report, "wasxfail"):
-        return None
-
-    outcome = report.outcome
-    if report.passed:
-        short = "" if config.option.no_subtests_shortletter else ","
-        return f"subtests {outcome}", short, "SUBPASS"
-    elif report.skipped:
-        short = "" if config.option.no_subtests_shortletter else "-"
-        return outcome, short, "SUBSKIP"
-    elif outcome == "failed":
-        short = "" if config.option.no_subtests_shortletter else "u"
-        return outcome, short, "SUBFAIL"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/setup.cfg new/pytest-subtests-0.11.0/setup.cfg
--- old/pytest-subtests-0.10.0/setup.cfg        2023-02-16 02:47:09.727210300 +0100
+++ new/pytest-subtests-0.11.0/setup.cfg        2023-05-15 14:17:07.564752000 +0200
@@ -1,3 +1,45 @@
+[metadata]
+name = pytest-subtests
+description = unittest subTest() support and subtests fixture
+long_description = file: README.rst
+long_description_content_type = text/x-rst
+url = https://github.com/pytest-dev/pytest-subtests
+author = Bruno Oliveira
+license = MIT
+license_file = LICENSE
+classifiers = 
+       Development Status :: 4 - Beta
+       Framework :: Pytest
+       Intended Audience :: Developers
+       Topic :: Software Development :: Testing
+       Programming Language :: Python
+       Programming Language :: Python :: 3
+       Programming Language :: Python :: 3.7
+       Programming Language :: Python :: 3.8
+       Programming Language :: Python :: 3.9
+       Programming Language :: Python :: 3.10
+       Programming Language :: Python :: 3.11
+       Programming Language :: Python :: Implementation :: CPython
+       Operating System :: OS Independent
+       License :: OSI Approved :: MIT License
+keywords = test, unittest, pytest
+
+[options]
+py_modules = pytest_subtests
+install_requires = 
+       pytest>=7.0
+       attrs>=19.2.0
+python_requires = >=3.7
+package_dir = 
+       =src
+setup_requires = 
+       setuptools
+       setuptools-scm>=6.0
+
+[options.entry_points]
+pytest11 = 
+       subtests = pytest_subtests
+
 [egg_info]
 tag_build = 
 tag_date = 0
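
The metadata formerly generated by setup.py now lives declaratively in setup.cfg, including the ``pytest11`` entry point through which pytest discovers the plugin. A sketch of that discovery using the stdlib (``importlib.metadata``; the ``group=`` keyword assumes Python 3.10+):

    # Sketch: how pytest locates plugins registered under the pytest11 group.
    from importlib.metadata import entry_points

    for ep in entry_points(group="pytest11"):
        if ep.name == "subtests":
            print(ep.value)  # -> "pytest_subtests"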
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/setup.py new/pytest-subtests-0.11.0/setup.py
--- old/pytest-subtests-0.10.0/setup.py 2023-02-16 02:46:47.000000000 +0100
+++ new/pytest-subtests-0.11.0/setup.py 1970-01-01 01:00:00.000000000 +0100
@@ -1,43 +0,0 @@
-from pathlib import Path
-
-from setuptools import setup
-
-
-long_description = (
-    Path(__file__).parent.joinpath("README.rst").read_text(encoding="UTF-8")
-)
-
-setup(
-    name="pytest-subtests",
-    author="Bruno Oliveira",
-    author_email="nicodde...@gmail.com",
-    maintainer="Bruno Oliveira",
-    maintainer_email="nicodde...@gmail.com",
-    license="MIT",
-    url="https://github.com/pytest-dev/pytest-subtests";,
-    description="unittest subTest() support and subtests fixture",
-    long_description=long_description,
-    long_description_content_type="text/x-rst",
-    py_modules=["pytest_subtests"],
-    use_scm_version=True,
-    setup_requires=["setuptools-scm", "setuptools>=40.0"],
-    python_requires=">=3.7",
-    install_requires=["pytest>=7.0", "attrs>=19.2.0"],
-    classifiers=[
-        "Development Status :: 4 - Beta",
-        "Framework :: Pytest",
-        "Intended Audience :: Developers",
-        "Topic :: Software Development :: Testing",
-        "Programming Language :: Python",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: Implementation :: CPython",
-        "Operating System :: OS Independent",
-        "License :: OSI Approved :: MIT License",
-    ],
-    entry_points={"pytest11": ["subtests = pytest_subtests"]},
-)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/PKG-INFO new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/PKG-INFO
--- old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/PKG-INFO    1970-01-01 01:00:00.000000000 +0100
+++ new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/PKG-INFO    2023-05-15 14:17:07.000000000 +0200
@@ -0,0 +1,204 @@
+Metadata-Version: 2.1
+Name: pytest-subtests
+Version: 0.11.0
+Summary: unittest subTest() support and subtests fixture
+Home-page: https://github.com/pytest-dev/pytest-subtests
+Author: Bruno Oliveira
+License: MIT
+Keywords: test,unittest,pytest
+Classifier: Development Status :: 4 - Beta
+Classifier: Framework :: Pytest
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Testing
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Operating System :: OS Independent
+Classifier: License :: OSI Approved :: MIT License
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+
+===============
+pytest-subtests
+===============
+
+unittest ``subTest()`` support and ``subtests`` fixture.
+
+.. image:: https://img.shields.io/pypi/v/pytest-subtests.svg
+    :target: https://pypi.org/project/pytest-subtests
+    :alt: PyPI version
+
+.. image:: https://img.shields.io/conda/vn/conda-forge/pytest-subtests.svg
+    :target: https://anaconda.org/conda-forge/pytest-subtests
+
+.. image:: https://img.shields.io/pypi/pyversions/pytest-subtests.svg
+    :target: https://pypi.org/project/pytest-subtests
+    :alt: Python versions
+
+.. image:: https://github.com/pytest-dev/pytest-subtests/workflows/test/badge.svg
+  :target: https://github.com/pytest-dev/pytest-subtests/actions
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+    :target: https://github.com/ambv/black
+
+----
+
+This `pytest`_ plugin was generated with `Cookiecutter`_ along with `@hackebrot`_'s `cookiecutter-pytest-plugin`_ template.
+
+
+Features
+--------
+
+* Adds support for `TestCase.subTest <https://docs.python.org/3/library/unittest.html#distinguishing-test-iterations-using-subtests>`__.
+
+* New ``subtests`` fixture, providing similar functionality for pure pytest tests.
+
+
+Installation
+------------
+
+You can install ``pytest-subtests`` via `pip`_ from `PyPI`_::
+
+    $ pip install pytest-subtests
+
+
+
+Usage
+-----
+
+unittest subTest() example
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: python
+
+    import unittest
+
+
+    class T(unittest.TestCase):
+        def test_foo(self):
+            for i in range(5):
+                with self.subTest("custom message", i=i):
+                    self.assertEqual(i % 2, 0)
+
+
+    if __name__ == "__main__":
+        unittest.main()
+
+
+**Output**
+
+.. code-block::
+
+    λ pytest .tmp\test-unit-subtest.py
+    ======================== test session starts ========================
+    ...
+    collected 1 item
+
+    .tmp\test-unit-subtest.py FF.                                  [100%]
+
+    ============================= FAILURES ==============================
+    _________________ T.test_foo [custom message] (i=1) _________________
+
+    self = <test-unit-subtest.T testMethod=test_foo>
+
+        def test_foo(self):
+            for i in range(5):
+                with self.subTest('custom message', i=i):
+    >               self.assertEqual(i % 2, 0)
+    E               AssertionError: 1 != 0
+
+    .tmp\test-unit-subtest.py:9: AssertionError
+    _________________ T.test_foo [custom message] (i=3) _________________
+
+    self = <test-unit-subtest.T testMethod=test_foo>
+
+        def test_foo(self):
+            for i in range(5):
+                with self.subTest('custom message', i=i):
+    >               self.assertEqual(i % 2, 0)
+    E               AssertionError: 1 != 0
+
+    .tmp\test-unit-subtest.py:9: AssertionError
+    ================ 2 failed, 1 passed in 0.07 seconds =================
+
+
+``subtests`` fixture example
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: python
+
+    def test(subtests):
+        for i in range(5):
+            with subtests.test(msg="custom message", i=i):
+                assert i % 2 == 0
+
+
+**Output**
+
+.. code-block::
+
+    λ pytest .tmp\test-subtest.py
+    ======================== test session starts ========================
+    ...
+    collected 1 item
+
+    .tmp\test-subtest.py .F.F..                                    [100%]
+
+    ============================= FAILURES ==============================
+    ____________________ test [custom message] (i=1) ____________________
+
+        def test(subtests):
+            for i in range(5):
+                with subtests.test(msg='custom message', i=i):
+    >               assert i % 2 == 0
+    E               assert (1 % 2) == 0
+
+    .tmp\test-subtest.py:4: AssertionError
+    ____________________ test [custom message] (i=3) ____________________
+
+        def test(subtests):
+            for i in range(5):
+                with subtests.test(msg='custom message', i=i):
+    >               assert i % 2 == 0
+    E               assert (3 % 2) == 0
+
+    .tmp\test-subtest.py:4: AssertionError
+    ================ 2 failed, 1 passed in 0.07 seconds =================
+
+Contributing
+------------
+Contributions are very welcome. Tests can be run with `tox`_:
+
+.. code-block::
+
+    tox -e py37
+
+License
+-------
+
+Distributed under the terms of the `MIT`_ license, "pytest-subtests" is free and open source software
+
+
+Issues
+------
+
+If you encounter any problems, please `file an issue`_ along with a detailed description.
+
+.. _`Cookiecutter`: https://github.com/audreyr/cookiecutter
+.. _`@hackebrot`: https://github.com/hackebrot
+.. _`MIT`: http://opensource.org/licenses/MIT
+.. _`BSD-3`: http://opensource.org/licenses/BSD-3-Clause
+.. _`GNU GPL v3.0`: http://www.gnu.org/licenses/gpl-3.0.txt
+.. _`Apache Software License 2.0`: http://www.apache.org/licenses/LICENSE-2.0
+.. _`cookiecutter-pytest-plugin`: https://github.com/pytest-dev/cookiecutter-pytest-plugin
+.. _`file an issue`: https://github.com/pytest-dev/pytest-subtests/issues
+.. _`pytest`: https://github.com/pytest-dev/pytest
+.. _`tox`: https://tox.readthedocs.io/en/latest/
+.. _`pip`: https://pypi.org/project/pip/
+.. _`PyPI`: https://pypi.org/project
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/SOURCES.txt new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/SOURCES.txt
--- old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/SOURCES.txt 1970-01-01 01:00:00.000000000 +0100
+++ new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/SOURCES.txt 2023-05-15 14:17:07.000000000 +0200
@@ -0,0 +1,21 @@
+.gitignore
+.pre-commit-config.yaml
+CHANGELOG.rst
+LICENSE
+README.rst
+RELEASING.rst
+pyproject.toml
+pytest.ini
+setup.cfg
+tox.ini
+.github/workflows/deploy.yml
+.github/workflows/test.yml
+src/pytest_subtests.py
+src/pytest_subtests.egg-info/PKG-INFO
+src/pytest_subtests.egg-info/SOURCES.txt
+src/pytest_subtests.egg-info/dependency_links.txt
+src/pytest_subtests.egg-info/entry_points.txt
+src/pytest_subtests.egg-info/requires.txt
+src/pytest_subtests.egg-info/top_level.txt
+tests/conftest.py
+tests/test_subtests.py
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/dependency_links.txt new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/dependency_links.txt
--- old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/dependency_links.txt        1970-01-01 01:00:00.000000000 +0100
+++ new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/dependency_links.txt        2023-05-15 14:17:07.000000000 +0200
@@ -0,0 +1 @@
+
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/entry_points.txt new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/entry_points.txt
--- old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/entry_points.txt    1970-01-01 01:00:00.000000000 +0100
+++ new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/entry_points.txt    2023-05-15 14:17:07.000000000 +0200
@@ -0,0 +1,2 @@
+[pytest11]
+subtests = pytest_subtests
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/requires.txt new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/requires.txt
--- old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/requires.txt        1970-01-01 01:00:00.000000000 +0100
+++ new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/requires.txt        2023-05-15 14:17:07.000000000 +0200
@@ -0,0 +1,2 @@
+pytest>=7.0
+attrs>=19.2.0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/top_level.txt new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/top_level.txt
--- old/pytest-subtests-0.10.0/src/pytest_subtests.egg-info/top_level.txt       1970-01-01 01:00:00.000000000 +0100
+++ new/pytest-subtests-0.11.0/src/pytest_subtests.egg-info/top_level.txt       2023-05-15 14:17:07.000000000 +0200
@@ -0,0 +1 @@
+pytest_subtests
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/src/pytest_subtests.py new/pytest-subtests-0.11.0/src/pytest_subtests.py
--- old/pytest-subtests-0.10.0/src/pytest_subtests.py   1970-01-01 01:00:00.000000000 +0100
+++ new/pytest-subtests-0.11.0/src/pytest_subtests.py   2023-05-15 14:16:44.000000000 +0200
@@ -0,0 +1,307 @@
+import time
+from contextlib import contextmanager
+from contextlib import nullcontext
+
+import attr
+import pytest
+from _pytest._code import ExceptionInfo
+from _pytest.capture import CaptureFixture
+from _pytest.capture import FDCapture
+from _pytest.capture import SysCapture
+from _pytest.logging import LogCaptureHandler, catching_logs
+from _pytest.outcomes import OutcomeException
+from _pytest.reports import TestReport
+from _pytest.runner import CallInfo
+from _pytest.runner import check_interactive_exception
+from _pytest.unittest import TestCaseFunction
+
+
+def pytest_addoption(parser):
+    group = parser.getgroup("subtests")
+    group.addoption(
+        "--no-subtests-shortletter",
+        action="store_true",
+        dest="no_subtests_shortletter",
+        default=False,
+        help="Disables subtest output 'dots' in non-verbose mode 
(EXPERIMENTAL)",
+    )
+
+
+@attr.s
+class SubTestContext:
+    msg = attr.ib()
+    kwargs = attr.ib()
+
+
+@attr.s(init=False)
+class SubTestReport(TestReport):
+    context = attr.ib()
+
+    @property
+    def head_line(self):
+        _, _, domain = self.location
+        return f"{domain} {self.sub_test_description()}"
+
+    def sub_test_description(self):
+        parts = []
+        if isinstance(self.context.msg, str):
+            parts.append(f"[{self.context.msg}]")
+        if self.context.kwargs:
+            params_desc = ", ".join(
+                f"{k}={v!r}" for (k, v) in sorted(self.context.kwargs.items())
+            )
+            parts.append(f"({params_desc})")
+        return " ".join(parts) or "(<subtest>)"
+
+    def _to_json(self):
+        data = super()._to_json()
+        del data["context"]
+        data["_report_type"] = "SubTestReport"
+        data["_subtest.context"] = attr.asdict(self.context)
+        return data
+
+    @classmethod
+    def _from_json(cls, reportdict):
+        report = super()._from_json(reportdict)
+        context_data = reportdict["_subtest.context"]
+        report.context = SubTestContext(
+            msg=context_data["msg"], kwargs=context_data["kwargs"]
+        )
+        return report
+
+    @classmethod
+    def _from_test_report(cls, test_report):
+        return super()._from_json(test_report._to_json())
+
+
+def _addSubTest(self, test_case, test, exc_info):
+    if exc_info is not None:
+        msg = test._message if isinstance(test._message, str) else None
+        call_info = make_call_info(
+            ExceptionInfo(exc_info, _ispytest=True),
+            start=0,
+            stop=0,
+            duration=0,
+            when="call",
+        )
+        report = self.ihook.pytest_runtest_makereport(item=self, call=call_info)
+        sub_report = SubTestReport._from_test_report(report)
+        sub_report.context = SubTestContext(msg, dict(test.params))
+        self.ihook.pytest_runtest_logreport(report=sub_report)
+        if check_interactive_exception(call_info, sub_report):
+            self.ihook.pytest_exception_interact(
+                node=self, call=call_info, report=sub_report
+            )
+
+
+def pytest_configure(config):
+    TestCaseFunction.addSubTest = _addSubTest
+    TestCaseFunction.failfast = False
+
+    # Hack (#86): the terminal does not know about the "subtests"
+    # status, so it will by default turn the output to yellow.
+    # This forcibly adds the new 'subtests' status.
+    import _pytest.terminal
+
+    new_types = tuple(
+        f"subtests {outcome}" for outcome in ("passed", "failed", "skipped")
+    )
+    # We need to check if we are not re-adding because we run our own tests
+    # with pytester in-process mode, so this will be called multiple times.
+    if new_types[0] not in _pytest.terminal.KNOWN_TYPES:
+        _pytest.terminal.KNOWN_TYPES = _pytest.terminal.KNOWN_TYPES + new_types
+
+    _pytest.terminal._color_for_type.update(
+        {
+            f"subtests {outcome}": _pytest.terminal._color_for_type[outcome]
+            for outcome in ("passed", "failed", "skipped")
+            if outcome in _pytest.terminal._color_for_type
+        }
+    )
+
+
+def pytest_unconfigure():
+    if hasattr(TestCaseFunction, "addSubTest"):
+        del TestCaseFunction.addSubTest
+    if hasattr(TestCaseFunction, "failfast"):
+        del TestCaseFunction.failfast
+
+
+@pytest.fixture
+def subtests(request):
+    capmam = request.node.config.pluginmanager.get_plugin("capturemanager")
+    if capmam is not None:
+        suspend_capture_ctx = capmam.global_and_fixture_disabled
+    else:
+        suspend_capture_ctx = nullcontext
+    yield SubTests(request.node.ihook, suspend_capture_ctx, request)
+
+
+@attr.s
+class SubTests:
+    ihook = attr.ib()
+    suspend_capture_ctx = attr.ib()
+    request = attr.ib()
+
+    @property
+    def item(self):
+        return self.request.node
+
+    @contextmanager
+    def _capturing_output(self):
+        option = self.request.config.getoption("capture", None)
+
+        # capsys or capfd are active, subtest should not capture
+
+        capman = self.request.config.pluginmanager.getplugin("capturemanager")
+        capture_fixture_active = getattr(capman, "_capture_fixture", None)
+
+        if option == "sys" and not capture_fixture_active:
+            with ignore_pytest_private_warning():
+                fixture = CaptureFixture(SysCapture, self.request)
+        elif option == "fd" and not capture_fixture_active:
+            with ignore_pytest_private_warning():
+                fixture = CaptureFixture(FDCapture, self.request)
+        else:
+            fixture = None
+
+        if fixture is not None:
+            fixture._start()
+
+        captured = Captured()
+        try:
+            yield captured
+        finally:
+            if fixture is not None:
+                out, err = fixture.readouterr()
+                fixture.close()
+                captured.out = out
+                captured.err = err
+    
+    @contextmanager
+    def _capturing_logs(self):
+        logging_plugin = self.request.config.pluginmanager.getplugin("logging-plugin")
+        if logging_plugin is None:
+            yield NullCapturedLogs()
+        else:
+            handler = LogCaptureHandler()
+            handler.setFormatter(logging_plugin.formatter)
+            
+            captured_logs = CapturedLogs(handler)
+            with catching_logs(handler):
+                yield captured_logs
+
+    @contextmanager
+    def test(self, msg=None, **kwargs):
+        start = time.time()
+        precise_start = time.perf_counter()
+        exc_info = None
+
+        with self._capturing_output() as captured_output, self._capturing_logs() as captured_logs:
+            try:
+                yield
+            except (Exception, OutcomeException):
+                exc_info = ExceptionInfo.from_current()
+
+        precise_stop = time.perf_counter()
+        duration = precise_stop - precise_start
+        stop = time.time()
+
+        call_info = make_call_info(
+            exc_info, start=start, stop=stop, duration=duration, when="call"
+        )
+        report = self.ihook.pytest_runtest_makereport(item=self.item, call=call_info)
+        sub_report = SubTestReport._from_test_report(report)
+        sub_report.context = SubTestContext(msg, kwargs.copy())
+
+        captured_output.update_report(sub_report)
+        captured_logs.update_report(sub_report)
+    
+        with self.suspend_capture_ctx():
+            self.ihook.pytest_runtest_logreport(report=sub_report)
+
+        if check_interactive_exception(call_info, sub_report):
+            self.ihook.pytest_exception_interact(
+                node=self.item, call=call_info, report=sub_report
+            )
+
+
+def make_call_info(exc_info, *, start, stop, duration, when):
+    return CallInfo(
+        None,
+        exc_info,
+        start=start,
+        stop=stop,
+        duration=duration,
+        when=when,
+        _ispytest=True,
+    )
+
+
+@contextmanager
+def ignore_pytest_private_warning():
+    import warnings
+
+    with warnings.catch_warnings():
+        warnings.filterwarnings(
+            "ignore",
+            "A private pytest class or function was used.",
+            category=pytest.PytestDeprecationWarning,
+        )
+        yield
+
+
+@attr.s
+class Captured:
+    out = attr.ib(default="", type=str)
+    err = attr.ib(default="", type=str)
+
+    def update_report(self, report):
+        if self.out:
+            report.sections.append(("Captured stdout call", self.out))
+        if self.err:
+            report.sections.append(("Captured stderr call", self.err))
+
+
+class CapturedLogs:
+    def __init__(self, handler):
+        self._handler = handler
+    
+    def update_report(self, report):
+        report.sections.append(("Captured log call", self._handler.stream.getvalue()))
+
+
+class NullCapturedLogs:   
+    def update_report(self, report):
+        pass
+        
+
+def pytest_report_to_serializable(report):
+    if isinstance(report, SubTestReport):
+        return report._to_json()
+
+
+def pytest_report_from_serializable(data):
+    if data.get("_report_type") == "SubTestReport":
+        return SubTestReport._from_json(data)
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_report_teststatus(report, config):
+    if report.when != "call" or not isinstance(report, SubTestReport):
+        return
+
+    if hasattr(report, "wasxfail"):
+        return None
+
+    outcome = report.outcome
+    description = report.sub_test_description()
+    if report.passed:
+        short = "" if config.option.no_subtests_shortletter else ","
+        return f"subtests {outcome}", short, f"{description} SUBPASS"
+    elif report.skipped:
+        short = "" if config.option.no_subtests_shortletter else "-"
+        return outcome, short, f"{description} SUBSKIP"
+    elif outcome == "failed":
+        short = "" if config.option.no_subtests_shortletter else "u"
+        return outcome, short, f"{description} SUBFAIL"
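
The rewritten module adds per-subtest log capture (``_capturing_logs`` built on ``LogCaptureHandler``) and puts the subtest description into the verbose status line. A hypothetical test exercising both behaviors, not part of this diff (run with ``pytest -v --log-level=INFO``):

    # demo_logging.py (hypothetical)
    import logging

    def test_demo(subtests):
        with subtests.test(msg="passing"):
            logging.info("log from a passing subtest: not reported")
        with subtests.test(msg="failing"):
            logging.info("shown under 'Captured log call' in the SUBFAIL report")
            assert False  # verbose line reads like: test_demo [failing] SUBFAIL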
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/tests/test_subtests.py new/pytest-subtests-0.11.0/tests/test_subtests.py
--- old/pytest-subtests-0.10.0/tests/test_subtests.py   2023-02-16 02:46:47.000000000 +0100
+++ new/pytest-subtests-0.11.0/tests/test_subtests.py   2023-05-15 14:16:44.000000000 +0200
@@ -29,7 +29,7 @@
         else:
             pytest.importorskip("xdist")
             result = testdir.runpytest("-n1")
-            expected_lines = ["gw0 [1]"]
+            expected_lines = ["1 worker [1 item]"]
 
         expected_lines += [
             "* test_foo [[]custom[]] (i=1) *",
@@ -43,18 +43,18 @@
             result = testdir.runpytest("-v")
             expected_lines = [
                 "*collected 1 item",
-                "test_simple_terminal_verbose.py::test_foo SUBPASS *100%*",
-                "test_simple_terminal_verbose.py::test_foo SUBFAIL *100%*",
-                "test_simple_terminal_verbose.py::test_foo SUBPASS *100%*",
-                "test_simple_terminal_verbose.py::test_foo SUBFAIL *100%*",
-                "test_simple_terminal_verbose.py::test_foo SUBPASS *100%*",
+                "test_simple_terminal_verbose.py::test_foo [[]custom[]] (i=0) 
SUBPASS *100%*",
+                "test_simple_terminal_verbose.py::test_foo [[]custom[]] (i=1) 
SUBFAIL *100%*",
+                "test_simple_terminal_verbose.py::test_foo [[]custom[]] (i=2) 
SUBPASS *100%*",
+                "test_simple_terminal_verbose.py::test_foo [[]custom[]] (i=3) 
SUBFAIL *100%*",
+                "test_simple_terminal_verbose.py::test_foo [[]custom[]] (i=4) 
SUBPASS *100%*",
                 "test_simple_terminal_verbose.py::test_foo PASSED *100%*",
             ]
         else:
             pytest.importorskip("xdist")
             result = testdir.runpytest("-n1", "-v")
             expected_lines = [
-                "gw0 [1]",
+                "1 worker [1 item]",
                 "*gw0*100%* test_simple_terminal_verbose.py::test_foo*",
                 "*gw0*100%* test_simple_terminal_verbose.py::test_foo*",
                 "*gw0*100%* test_simple_terminal_verbose.py::test_foo*",
@@ -87,7 +87,7 @@
         else:
             pytest.importorskip("xdist")
             result = testdir.runpytest("-n1")
-            expected_lines = ["gw0 [1]"]
+            expected_lines = ["1 worker [1 item]"]
         expected_lines += ["* 1 passed, 3 skipped, 2 subtests passed in *"]
         result.stdout.fnmatch_lines(expected_lines)
 
@@ -108,7 +108,7 @@
         else:
             pytest.importorskip("xdist")
             result = testdir.runpytest("-n1")
-            expected_lines = ["gw0 [1]"]
+            expected_lines = ["1 worker [1 item]"]
         expected_lines += ["* 1 passed, 3 xfailed, 2 subtests passed in *"]
         result.stdout.fnmatch_lines(expected_lines)
 
@@ -159,7 +159,7 @@
             else:
                 pytest.importorskip("xdist")
                 result = testdir.runpytest(simple_script, "-n1")
-                expected_lines = ["gw0 [1]"]
+                expected_lines = ["1 worker [1 item]"]
             result.stdout.fnmatch_lines(
                 expected_lines
                 + [
@@ -193,15 +193,15 @@
                 result = testdir.runpytest(simple_script, "-v")
                 expected_lines = [
                     "*collected 1 item",
-                    "test_simple_terminal_verbose.py::T::test_foo SUBFAIL 
*100%*",
-                    "test_simple_terminal_verbose.py::T::test_foo SUBFAIL 
*100%*",
+                    "test_simple_terminal_verbose.py::T::test_foo [[]custom[]] 
(i=1) SUBFAIL *100%*",
+                    "test_simple_terminal_verbose.py::T::test_foo [[]custom[]] 
(i=3) SUBFAIL *100%*",
                     "test_simple_terminal_verbose.py::T::test_foo PASSED 
*100%*",
                 ]
             else:
                 pytest.importorskip("xdist")
                 result = testdir.runpytest(simple_script, "-n1", "-v")
                 expected_lines = [
-                    "gw0 [1]",
+                    "1 worker [1 item]",
                     "*gw0*100%* SUBFAIL 
test_simple_terminal_verbose.py::T::test_foo*",
                     "*gw0*100%* SUBFAIL 
test_simple_terminal_verbose.py::T::test_foo*",
                     "*gw0*100%* PASSED 
test_simple_terminal_verbose.py::T::test_foo*",
@@ -365,6 +365,91 @@
         )
 
 
+class TestLogging:
+    def create_file(self, testdir):
+        testdir.makepyfile(
+            """
+            import logging
+
+            def test_foo(subtests):
+                logging.info("before")
+
+                with subtests.test("sub1"):
+                    print("sub1 stdout")
+                    logging.info("sub1 logging")
+
+                with subtests.test("sub2"):
+                    print("sub2 stdout")
+                    logging.info("sub2 logging")
+                    assert False
+            """
+        )
+
+    def test_capturing(self, testdir):
+        self.create_file(testdir)
+        result = testdir.runpytest("--log-level=INFO")
+        result.stdout.fnmatch_lines(
+            [
+                "*___ test_foo [[]sub2[]] __*",
+                "*-- Captured stdout call --*",
+                "sub2 stdout",
+                "*-- Captured log call ---*",
+                "INFO     root:test_capturing.py:12 sub2 logging",
+                "*== short test summary info ==*"
+            ]
+        )
+
+    def test_caplog(self, testdir):
+        testdir.makepyfile(
+            """
+            import logging
+
+            def test(subtests, caplog):
+                caplog.set_level(logging.INFO)
+                logging.info("start test")
+
+                with subtests.test("sub1"):
+                    logging.info("inside %s", "subtest1")
+                
+                assert len(caplog.records) == 2
+                assert caplog.records[0].getMessage() == "start test"
+                assert caplog.records[1].getMessage() == "inside subtest1"
+            """
+        )
+        result = testdir.runpytest()
+        result.stdout.fnmatch_lines(
+            [
+                "*1 passed*",
+            ]
+        )
+
+    def test_no_logging(self, testdir):
+        testdir.makepyfile(
+            """
+            import logging
+
+            def test(subtests):
+                logging.info("start log line")
+
+                with subtests.test("sub passing"):
+                    logging.info("inside %s", "passing log line")
+                
+                with subtests.test("sub failing"):
+                    logging.info("inside %s", "failing log line")
+                    assert False
+
+                logging.info("end log line")
+            """
+        )
+        result = testdir.runpytest("-p no:logging")
+        result.stdout.fnmatch_lines(
+            [
+                "*1 passed*",
+            ]
+        )
+        result.stdout.no_fnmatch_line("*root:test_no_logging.py*log line*")
+
+
 class TestDebugging:
     """Check --pdb support for subtests fixture and TestCase.subTest."""
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytest-subtests-0.10.0/tox.ini new/pytest-subtests-0.11.0/tox.ini
--- old/pytest-subtests-0.10.0/tox.ini  2023-02-16 02:46:47.000000000 +0100
+++ new/pytest-subtests-0.11.0/tox.ini  2023-05-15 14:16:44.000000000 +0200
@@ -8,7 +8,7 @@
     TRAVIS
     PYTEST_ADDOPTS
 deps =
-    pytest-xdist>=1.28
+    pytest-xdist>=3.3.0
 
 commands =
     pytest {posargs:tests}
