Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package python-filelock for openSUSE:Factory
checked in at 2024-06-17 19:27:33
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-filelock (Old)
and /work/SRC/openSUSE:Factory/.python-filelock.new.19518 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-filelock"
Mon Jun 17 19:27:33 2024 rev:20 rq:1181236 version:3.15.1
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-filelock/python-filelock.changes  2024-05-03 19:44:21.780443991 +0200
+++ /work/SRC/openSUSE:Factory/.python-filelock.new.19518/python-filelock.changes  2024-06-17 19:28:18.219640112 +0200
@@ -1,0 +2,11 @@
+Mon Jun 17 06:00:15 UTC 2024 - Dirk Müller <[email protected]>
+
+- update to 3.15.1:
+ * Hotfix: Restore __init__ method; more robust initialization
+ for singleton locks
+- update to 3.15.0:
+ * asyncio support
+ * Don't initialize BaseFileLock when just returning existing
+ instance
+
+-------------------------------------------------------------------
Old:
----
filelock-3.14.0.tar.gz
New:
----
filelock-3.15.1.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-filelock.spec ++++++
--- /var/tmp/diff_new_pack.gdXsYO/_old 2024-06-17 19:28:18.887664558 +0200
+++ /var/tmp/diff_new_pack.gdXsYO/_new 2024-06-17 19:28:18.887664558 +0200
@@ -19,15 +19,17 @@
%{?sle15_python_module_pythons}
Name: python-filelock
-Version: 3.14.0
+Version: 3.15.1
Release: 0
Summary: Platform Independent File Lock in Python
License: Unlicense
URL: https://github.com/tox-dev/py-filelock
Source: https://files.pythonhosted.org/packages/source/f/filelock/filelock-%{version}.tar.gz
+BuildRequires: %{python_module asyncio}
BuildRequires: %{python_module hatch_vcs}
BuildRequires: %{python_module hatchling}
BuildRequires: %{python_module pip}
+BuildRequires: %{python_module pytest-asyncio}
BuildRequires: %{python_module pytest-mock}
BuildRequires: %{python_module pytest}
BuildRequires: %{python_module wheel}
@@ -36,6 +38,7 @@
%if 0%{?python_version_nodots} < 311
Requires: python-typing_extensions >= 4.7.1
%endif
+Requires: python-asyncio
BuildArch: noarch
%python_subpackages
++++++ filelock-3.14.0.tar.gz -> filelock-3.15.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/filelock-3.14.0/PKG-INFO new/filelock-3.15.1/PKG-INFO
--- old/filelock-3.14.0/PKG-INFO 2020-02-02 01:00:00.000000000 +0100
+++ new/filelock-3.15.1/PKG-INFO 2020-02-02 01:00:00.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 2.3
Name: filelock
-Version: 3.14.0
+Version: 3.15.1
Summary: A platform independent file lock.
Project-URL: Documentation, https://py-filelock.readthedocs.io
Project-URL: Homepage, https://github.com/tox-dev/py-filelock
@@ -33,6 +33,7 @@
Requires-Dist: covdefaults>=2.3; extra == 'testing'
Requires-Dist: coverage>=7.3.2; extra == 'testing'
Requires-Dist: diff-cover>=8.0.1; extra == 'testing'
+Requires-Dist: pytest-asyncio>=0.21; extra == 'testing'
Requires-Dist: pytest-cov>=4.1; extra == 'testing'
Requires-Dist: pytest-mock>=3.12; extra == 'testing'
Requires-Dist: pytest-timeout>=2.2; extra == 'testing'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/filelock-3.14.0/pyproject.toml new/filelock-3.15.1/pyproject.toml
--- old/filelock-3.14.0/pyproject.toml 2020-02-02 01:00:00.000000000 +0100
+++ new/filelock-3.15.1/pyproject.toml 2020-02-02 01:00:00.000000000 +0100
@@ -17,7 +17,9 @@
"user",
]
license = "Unlicense"
-maintainers = [{ name = "Bernát Gábor", email = "[email protected]" }]
+maintainers = [
+ { name = "Bernát Gábor", email = "[email protected]" },
+]
requires-python = ">=3.8"
classifiers = [
"Development Status :: 5 - Production/Stable",
@@ -48,12 +50,13 @@
"coverage>=7.3.2",
"diff-cover>=8.0.1",
"pytest>=7.4.3",
+ "pytest-asyncio>=0.21",
"pytest-cov>=4.1",
"pytest-mock>=3.12",
"pytest-timeout>=2.2",
]
optional-dependencies.typing = [
- 'typing-extensions>=4.8; python_version < "3.11"',
+ "typing-extensions>=4.8; python_version<'3.11'",
]
urls.Documentation = "https://py-filelock.readthedocs.io"
urls.Homepage = "https://github.com/tox-dev/py-filelock"
@@ -62,39 +65,48 @@
[tool.hatch]
build.hooks.vcs.version-file = "src/filelock/version.py"
-build.targets.sdist.include = ["/src", "/tests", "/tox.ini"]
+build.targets.sdist.include = [
+ "/src",
+ "/tests",
+ "/tox.ini",
+]
version.source = "vcs"
[tool.ruff]
-line-length = 120
target-version = "py38"
-lint.isort = { known-first-party = ["filelock"], required-imports = ["from __future__ import annotations"] }
-lint.select = ["ALL"]
+line-length = 120
+format.preview = true
+format.docstring-code-line-length = 100
+format.docstring-code-format = true
+lint.select = [
+ "ALL",
+]
lint.ignore = [
"ANN101", # Missing type annotation for `self` in method
- "D301", # Use `r"""` if any backslashes in a docstring
- "D205", # 1 blank line required between summary line and description
- "D401", # First line of docstring should be in imperative mood
+ "COM812", # Conflict with formatter
+ "CPY", # No copyright statements
"D203", # `one-blank-line-before-class` (D203) and
`no-blank-line-before-class` (D211) are incompatible
+ "D205", # 1 blank line required between summary line and description
"D212", # `multi-line-summary-first-line` (D212) and
`multi-line-summary-second-line` (D213) are incompatible
- "S104", # Possible binding to all interface
- "COM812", # Conflict with formatter
+ "D301", # Use `r"""` if any backslashes in a docstring
+ "D401", # First line of docstring should be in imperative mood
"ISC001", # Conflict with formatter
- "CPY", # No copyright statements
+ "S104", # Possible binding to all interface
]
-lint.preview = true
-format.preview = true
-format.docstring-code-format = true
-format.docstring-code-line-length = 100
-[tool.ruff.lint.per-file-ignores]
-"tests/**/*.py" = [
- "S101", # asserts allowed in tests...
+lint.per-file-ignores."tests/**/*.py" = [
+ "D", # don"t care about documentation in tests
"FBT", # don"t care about booleans as positional arguments in tests
"INP001", # no implicit namespace
- "D", # don"t care about documentation in tests
- "S603", # `subprocess` call: check for execution of untrusted input
"PLR2004", # Magic value used in comparison, consider replacing with a
constant variable
+ "S101", # asserts allowed in tests...
+ "S603", # `subprocess` call: check for execution of untrusted input
]
+lint.isort = { known-first-party = [
+ "filelock",
+], required-imports = [
+ "from __future__ import annotations",
+] }
+lint.preview = true
[tool.codespell]
builtin = "clear,usage,en-GB_to_en-US"
@@ -105,14 +117,31 @@
[tool.coverage]
html.show_contexts = true
html.skip_covered = false
-paths.source = ["src", ".tox/*/lib/*/site-packages",
".tox\\*\\Lib\\site-packages", "**/src", "**\\src"]
-paths.other = [".", "*/filelock", "*\\filelock"]
+paths.source = [
+ "src",
+ ".tox/*/lib/*/site-packages",
+ ".tox\\*\\Lib\\site-packages",
+ "**/src",
+ "**\\src",
+]
+paths.other = [
+ ".",
+ "*/filelock",
+ "*\\filelock",
+]
report.fail_under = 76
run.parallel = true
-run.plugins = ["covdefaults"]
+run.plugins = [
+ "covdefaults",
+]
[tool.mypy]
python_version = "3.11"
show_error_codes = true
strict = true
-overrides = [{ module = ["appdirs.*", "jnius.*"], ignore_missing_imports = true }]
+overrides = [
+ { module = [
+ "appdirs.*",
+ "jnius.*",
+ ], ignore_missing_imports = true },
+]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/filelock-3.14.0/src/filelock/__init__.py new/filelock-3.15.1/src/filelock/__init__.py
--- old/filelock-3.14.0/src/filelock/__init__.py 2020-02-02 01:00:00.000000000 +0100
+++ new/filelock-3.15.1/src/filelock/__init__.py 2020-02-02 01:00:00.000000000 +0100
@@ -17,6 +17,13 @@
from ._soft import SoftFileLock
from ._unix import UnixFileLock, has_fcntl
from ._windows import WindowsFileLock
+from .asyncio import (
+ AsyncAcquireReturnProxy,
+ AsyncSoftFileLock,
+ AsyncUnixFileLock,
+ AsyncWindowsFileLock,
+ BaseAsyncFileLock,
+)
from .version import version
#: version of the project as a string
@@ -25,23 +32,34 @@
if sys.platform == "win32": # pragma: win32 cover
_FileLock: type[BaseFileLock] = WindowsFileLock
+ _AsyncFileLock: type[BaseAsyncFileLock] = AsyncWindowsFileLock
else: # pragma: win32 no cover # noqa: PLR5501
if has_fcntl:
_FileLock: type[BaseFileLock] = UnixFileLock
+ _AsyncFileLock: type[BaseAsyncFileLock] = AsyncUnixFileLock
else:
_FileLock = SoftFileLock
+ _AsyncFileLock = AsyncSoftFileLock
if warnings is not None:
warnings.warn("only soft file lock is available", stacklevel=2)
if TYPE_CHECKING:
FileLock = SoftFileLock
+ AsyncFileLock = AsyncSoftFileLock
else:
#: Alias for the lock, which should be used for the current platform.
FileLock = _FileLock
+ AsyncFileLock = _AsyncFileLock
__all__ = [
"AcquireReturnProxy",
+ "AsyncAcquireReturnProxy",
+ "AsyncFileLock",
+ "AsyncSoftFileLock",
+ "AsyncUnixFileLock",
+ "AsyncWindowsFileLock",
+ "BaseAsyncFileLock",
"BaseFileLock",
"FileLock",
"SoftFileLock",
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/filelock-3.14.0/src/filelock/_api.py new/filelock-3.15.1/src/filelock/_api.py
--- old/filelock-3.14.0/src/filelock/_api.py 2020-02-02 01:00:00.000000000 +0100
+++ new/filelock-3.15.1/src/filelock/_api.py 2020-02-02 01:00:00.000000000 +0100
@@ -80,18 +80,18 @@
class BaseFileLock(ABC, contextlib.ContextDecorator):
"""Abstract base class for a file lock object."""
- _instances: WeakValueDictionary[str, BaseFileLock]
+ _instances: WeakValueDictionary[str, Self]
def __new__( # noqa: PLR0913
cls,
lock_file: str | os.PathLike[str],
- timeout: float = -1,
- mode: int = 0o644,
- thread_local: bool = True, # noqa: ARG003, FBT001, FBT002
+ timeout: float = -1, # noqa: ARG003
+ mode: int = 0o644, # noqa: ARG003
+ thread_local: bool = True, # noqa: FBT001, FBT002, ARG003
*,
blocking: bool = True, # noqa: ARG003
is_singleton: bool = False,
- **kwargs: dict[str, Any], # capture remaining kwargs for subclasses # noqa: ARG003
+ **kwargs: Any, # capture remaining kwargs for subclasses # noqa: ARG003, ANN401
) -> Self:
"""Create a new lock object or if specified return the singleton
instance for the lock file."""
if not is_singleton:
@@ -99,11 +99,9 @@
instance = cls._instances.get(str(lock_file))
if not instance:
- instance = super().__new__(cls)
- cls._instances[str(lock_file)] = instance
- elif timeout != instance.timeout or mode != instance.mode:
- msg = "Singleton lock instances cannot be initialized with
differing arguments"
- raise ValueError(msg)
+ self = super().__new__(cls)
+ cls._instances[str(lock_file)] = self
+ return self
  return instance # type: ignore[return-value] # https://github.com/python/mypy/issues/15322
@@ -138,6 +136,34 @@
to pass the same object around.
"""
+ if is_singleton and hasattr(self, "_context"):
+ # test whether other parameters match existing instance.
+ if not self.is_singleton:
+ msg = "__init__ should only be called on initialized object if
it is a singleton"
+ raise RuntimeError(msg)
+
+ params_to_check = {
+ "thread_local": (thread_local, self.is_thread_local()),
+ "timeout": (timeout, self.timeout),
+ "mode": (mode, self.mode),
+ "blocking": (blocking, self.blocking),
+ }
+
+ non_matching_params = {
+ name: (passed_param, set_param)
+ for name, (passed_param, set_param) in params_to_check.items()
+ if passed_param != set_param
+ }
+ if not non_matching_params:
+ return # bypass initialization because object is already initialized
+
+ # parameters do not match; raise error
+ msg = "Singleton lock instances cannot be initialized with
differing arguments"
+ msg += "\nNon-matching arguments: "
+ for param_name, (passed_param, set_param) in non_matching_params.items():
+ msg += f"\n\t{param_name} (existing lock has {set_param} but {passed_param} was passed)"
+ raise ValueError(msg)
+
self._is_thread_local = thread_local
self._is_singleton = is_singleton
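For context, a minimal sketch (not from the package sources) of the reworked singleton behaviour shown in the _api.py hunk above: repeated construction with matching arguments returns the cached instance and skips re-initialization, while differing arguments raise ValueError naming the mismatched parameter. The lock path is illustrative.

    from filelock import FileLock

    a = FileLock("example.lock", timeout=2, is_singleton=True)
    b = FileLock("example.lock", timeout=2, is_singleton=True)
    assert a is b  # __new__ returned the cached instance, __init__ returned early

    try:
        FileLock("example.lock", timeout=10, is_singleton=True)
    except ValueError as exc:
        print(exc)  # message lists the non-matching argument, here `timeout`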
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/filelock-3.14.0/src/filelock/asyncio.py new/filelock-3.15.1/src/filelock/asyncio.py
--- old/filelock-3.14.0/src/filelock/asyncio.py 1970-01-01 01:00:00.000000000 +0100
+++ new/filelock-3.15.1/src/filelock/asyncio.py 2020-02-02 01:00:00.000000000 +0100
@@ -0,0 +1,314 @@
+"""An asyncio-based implementation of the file lock."""
+
+from __future__ import annotations
+
+import asyncio
+import contextlib
+import logging
+import os
+import time
+from dataclasses import dataclass
+from threading import local
+from typing import TYPE_CHECKING, Any, Callable, NoReturn
+
+from ._api import BaseFileLock, FileLockContext
+from ._error import Timeout
+from ._soft import SoftFileLock
+from ._unix import UnixFileLock
+from ._windows import WindowsFileLock
+
+if TYPE_CHECKING:
+ import sys
+ from concurrent import futures
+ from types import TracebackType
+
+ if sys.version_info >= (3, 11): # pragma: no cover (py311+)
+ from typing import Self
+ else: # pragma: no cover (<py311)
+ from typing_extensions import Self
+
+
+_LOGGER = logging.getLogger("filelock")
+
+
+@dataclass
+class AsyncFileLockContext(FileLockContext):
+ """A dataclass which holds the context for a ``BaseAsyncFileLock``
object."""
+
+ #: Whether run in executor
+ run_in_executor: bool = True
+
+ #: The executor
+ executor: futures.Executor | None = None
+
+ #: The loop
+ loop: asyncio.AbstractEventLoop | None = None
+
+
+class AsyncThreadLocalFileContext(AsyncFileLockContext, local):
+ """A thread local version of the ``FileLockContext`` class."""
+
+
+class AsyncAcquireReturnProxy:
+ """A context-aware object that will release the lock file when exiting."""
+
+ def __init__(self, lock: BaseAsyncFileLock) -> None: # noqa: D107
+ self.lock = lock
+
+ async def __aenter__(self) -> BaseAsyncFileLock: # noqa: D105
+ return self.lock
+
+ async def __aexit__( # noqa: D105
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
+ await self.lock.release()
+
+
+class BaseAsyncFileLock(BaseFileLock):
+ """Base class for asynchronous file locks."""
+
+ def __init__( # noqa: PLR0913
+ self,
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ mode: int = 0o644,
+ thread_local: bool = False, # noqa: FBT001, FBT002
+ *,
+ blocking: bool = True,
+ is_singleton: bool = False,
+ loop: asyncio.AbstractEventLoop | None = None,
+ run_in_executor: bool = True,
+ executor: futures.Executor | None = None,
+ ) -> None:
+ """
+ Create a new lock object.
+
+ :param lock_file: path to the file
+ :param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in \
+ the acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it \
+ to a negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock.
+ :param mode: file permissions for the lockfile
+ :param thread_local: Whether this object's internal context should be thread local or not. If this is set to \
+ ``False`` then the lock will be reentrant across threads.
+ :param blocking: whether the lock should be blocking or not
+ :param is_singleton: If this is set to ``True`` then only one instance of this class will be created \
+ per lock file. This is useful if you want to use the lock object for reentrant locking without needing \
+ to pass the same object around.
+ :param loop: The event loop to use. If not specified, the running event loop will be used.
+ :param run_in_executor: If this is set to ``True`` then the lock will be acquired in an executor.
+ :param executor: The executor to use. If not specified, the default executor will be used.
+
+ """
+ self._is_thread_local = thread_local
+ self._is_singleton = is_singleton
+ if thread_local and run_in_executor:
+ msg = "run_in_executor is not supported when thread_local is True"
+ raise ValueError(msg)
+
+ # Create the context. Note that external code should not work with the context directly and should instead use
+ # properties of this class.
+ kwargs: dict[str, Any] = {
+ "lock_file": os.fspath(lock_file),
+ "timeout": timeout,
+ "mode": mode,
+ "blocking": blocking,
+ "loop": loop,
+ "run_in_executor": run_in_executor,
+ "executor": executor,
+ }
+ self._context: AsyncFileLockContext = (AsyncThreadLocalFileContext if thread_local else AsyncFileLockContext)(
+ **kwargs
+ )
+
+ @property
+ def run_in_executor(self) -> bool:
+ """::return: whether run in executor."""
+ return self._context.run_in_executor
+
+ @property
+ def executor(self) -> futures.Executor | None:
+ """::return: the executor."""
+ return self._context.executor
+
+ @executor.setter
+ def executor(self, value: futures.Executor | None) -> None: # pragma: no cover
+ """
+ Change the executor.
+
+ :param value: the new executor or ``None``
+ :type value: futures.Executor | None
+
+ """
+ self._context.executor = value
+
+ @property
+ def loop(self) -> asyncio.AbstractEventLoop | None:
+ """::return: the event loop."""
+ return self._context.loop
+
+ async def acquire( # type: ignore[override]
+ self,
+ timeout: float | None = None,
+ poll_interval: float = 0.05,
+ *,
+ blocking: bool | None = None,
+ ) -> AsyncAcquireReturnProxy:
+ """
+ Try to acquire the file lock.
+
+ :param timeout: maximum wait time for acquiring the lock, ``None`` means use the default
+ :attr:`~BaseFileLock.timeout` is and if ``timeout < 0``, there is no timeout and
+ this method will block until the lock could be acquired
+ :param poll_interval: interval of trying to acquire the lock file
+ :param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the
+ first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired.
+ :raises Timeout: if fails to acquire lock within the timeout period
+ :return: a context object that will unlock the file when the context is exited
+
+ .. code-block:: python
+
+ # You can use this method in the context manager (recommended)
+ with lock.acquire():
+ pass
+
+ # Or use an equivalent try-finally construct:
+ lock.acquire()
+ try:
+ pass
+ finally:
+ lock.release()
+
+ """
+ # Use the default timeout, if no timeout is provided.
+ if timeout is None:
+ timeout = self._context.timeout
+
+ if blocking is None:
+ blocking = self._context.blocking
+
+ # Increment the number right at the beginning. We can still undo it, if something fails.
+ self._context.lock_counter += 1
+
+ lock_id = id(self)
+ lock_filename = self.lock_file
+ start_time = time.perf_counter()
+ try:
+ while True:
+ if not self.is_locked:
+ _LOGGER.debug("Attempting to acquire lock %s on %s",
lock_id, lock_filename)
+ await self._run_internal_method(self._acquire)
+ if self.is_locked:
+ _LOGGER.debug("Lock %s acquired on %s", lock_id,
lock_filename)
+ break
+ if blocking is False:
+ _LOGGER.debug("Failed to immediately acquire lock %s on
%s", lock_id, lock_filename)
+ raise Timeout(lock_filename) # noqa: TRY301
+ if 0 <= timeout < time.perf_counter() - start_time:
+ _LOGGER.debug("Timeout on acquiring lock %s on %s",
lock_id, lock_filename)
+ raise Timeout(lock_filename) # noqa: TRY301
+ msg = "Lock %s not acquired on %s, waiting %s seconds ..."
+ _LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
+ await asyncio.sleep(poll_interval)
+ except BaseException: # Something did go wrong, so decrement the
counter.
+ self._context.lock_counter = max(0, self._context.lock_counter - 1)
+ raise
+ return AsyncAcquireReturnProxy(lock=self)
+
+ async def release(self, force: bool = False) -> None: # type: ignore[override] # noqa: FBT001, FBT002
+ """
+ Releases the file lock. Please note, that the lock is only completely released, if the lock counter is 0.
+ Also note, that the lock file itself is not automatically deleted.
+
+ :param force: If true, the lock counter is ignored and the lock is released in every case/
+
+ """
+ if self.is_locked:
+ self._context.lock_counter -= 1
+
+ if self._context.lock_counter == 0 or force:
+ lock_id, lock_filename = id(self), self.lock_file
+
+ _LOGGER.debug("Attempting to release lock %s on %s", lock_id,
lock_filename)
+ await self._run_internal_method(self._release)
+ self._context.lock_counter = 0
+ _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)
+
+ async def _run_internal_method(self, method: Callable[[], Any]) -> None:
+ if asyncio.iscoroutinefunction(method):
+ await method()
+ elif self.run_in_executor:
+ loop = self.loop or asyncio.get_running_loop()
+ await loop.run_in_executor(self.executor, method)
+ else:
+ method()
+
+ def __enter__(self) -> NoReturn:
+ """
+ Replace old __enter__ method to avoid using it.
+
+ NOTE: DO NOT USE `with` FOR ASYNCIO LOCKS, USE `async with` INSTEAD.
+
+ :return: none
+ :rtype: NoReturn
+ """
+ msg = "Do not use `with` for asyncio locks, use `async with` instead."
+ raise NotImplementedError(msg)
+
+ async def __aenter__(self) -> Self:
+ """
+ Acquire the lock.
+
+ :return: the lock object
+
+ """
+ await self.acquire()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
+ """
+ Release the lock.
+
+ :param exc_type: the exception type if raised
+ :param exc_value: the exception value if raised
+ :param traceback: the exception traceback if raised
+
+ """
+ await self.release()
+
+ def __del__(self) -> None:
+ """Called when the lock object is deleted."""
+ with contextlib.suppress(RuntimeError):
+ loop = self.loop or asyncio.get_running_loop()
+ if not loop.is_running(): # pragma: no cover
+ loop.run_until_complete(self.release(force=True))
+ else:
+ loop.create_task(self.release(force=True))
+
+
+class AsyncSoftFileLock(SoftFileLock, BaseAsyncFileLock):
+ """Simply watches the existence of the lock file."""
+
+
+class AsyncUnixFileLock(UnixFileLock, BaseAsyncFileLock):
+ """Uses the :func:`fcntl.flock` to hard lock the lock file on unix
systems."""
+
+
+class AsyncWindowsFileLock(WindowsFileLock, BaseAsyncFileLock):
+ """Uses the :func:`msvcrt.locking` to hard lock the lock file on windows
systems."""
+
+
+__all__ = [
+ "AsyncAcquireReturnProxy",
+ "AsyncSoftFileLock",
+ "AsyncUnixFileLock",
+ "AsyncWindowsFileLock",
+ "BaseAsyncFileLock",
+]
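For context, a minimal sketch (not from the package sources) of non-blocking acquisition with the new async API; the path and timeout value are illustrative:

    import asyncio

    from filelock import AsyncFileLock, Timeout


    async def main() -> None:
        lock = AsyncFileLock("example.lock")  # illustrative path
        try:
            # acquire() returns a proxy object usable with `async with`
            async with await lock.acquire(timeout=0.1):
                ...  # critical section
        except Timeout:
            print("could not acquire the lock within 0.1 s")


    asyncio.run(main())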
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/filelock-3.14.0/src/filelock/version.py new/filelock-3.15.1/src/filelock/version.py
--- old/filelock-3.14.0/src/filelock/version.py 2020-02-02 01:00:00.000000000 +0100
+++ new/filelock-3.15.1/src/filelock/version.py 2020-02-02 01:00:00.000000000 +0100
@@ -12,5 +12,5 @@
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE
-__version__ = version = '3.14.0'
-__version_tuple__ = version_tuple = (3, 14, 0)
+__version__ = version = '3.15.1'
+__version_tuple__ = version_tuple = (3, 15, 1)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/filelock-3.14.0/tests/test_async_filelock.py new/filelock-3.15.1/tests/test_async_filelock.py
--- old/filelock-3.14.0/tests/test_async_filelock.py 1970-01-01 01:00:00.000000000 +0100
+++ new/filelock-3.15.1/tests/test_async_filelock.py 2020-02-02 01:00:00.000000000 +0100
@@ -0,0 +1,181 @@
+from __future__ import annotations
+
+import logging
+from pathlib import Path, PurePath
+
+import pytest
+
+from filelock import AsyncFileLock, AsyncSoftFileLock, BaseAsyncFileLock, Timeout
+
+
[email protected]("lock_type", [AsyncFileLock, AsyncSoftFileLock])
[email protected]("path_type", [str, PurePath, Path])
[email protected]("filename", ["a", "new/b", "new2/new3/c"])
[email protected]()
+async def test_simple(
+ lock_type: type[BaseAsyncFileLock],
+ path_type: type[str | Path],
+ filename: str,
+ tmp_path: Path,
+ caplog: pytest.LogCaptureFixture,
+) -> None:
+ caplog.set_level(logging.DEBUG)
+
+ # test lock creation by passing a `str`
+ lock_path = tmp_path / filename
+ lock = lock_type(path_type(lock_path))
+ async with lock as locked:
+ assert lock.is_locked
+ assert lock is locked
+ assert not lock.is_locked
+
+ assert caplog.messages == [
+ f"Attempting to acquire lock {id(lock)} on {lock_path}",
+ f"Lock {id(lock)} acquired on {lock_path}",
+ f"Attempting to release lock {id(lock)} on {lock_path}",
+ f"Lock {id(lock)} released on {lock_path}",
+ ]
+ assert [r.levelno for r in caplog.records] == [logging.DEBUG, logging.DEBUG, logging.DEBUG, logging.DEBUG]
+ assert [r.name for r in caplog.records] == ["filelock", "filelock", "filelock", "filelock"]
+ assert logging.getLogger("filelock").level == logging.NOTSET
+
+
[email protected]("lock_type", [AsyncFileLock, AsyncSoftFileLock])
[email protected]("path_type", [str, PurePath, Path])
[email protected]("filename", ["a", "new/b", "new2/new3/c"])
[email protected]()
+async def test_acquire(
+ lock_type: type[BaseAsyncFileLock],
+ path_type: type[str | Path],
+ filename: str,
+ tmp_path: Path,
+ caplog: pytest.LogCaptureFixture,
+) -> None:
+ caplog.set_level(logging.DEBUG)
+
+ # test lock creation by passing a `str`
+ lock_path = tmp_path / filename
+ lock = lock_type(path_type(lock_path))
+ async with await lock.acquire() as locked:
+ assert lock.is_locked
+ assert lock is locked
+ assert not lock.is_locked
+
+ assert caplog.messages == [
+ f"Attempting to acquire lock {id(lock)} on {lock_path}",
+ f"Lock {id(lock)} acquired on {lock_path}",
+ f"Attempting to release lock {id(lock)} on {lock_path}",
+ f"Lock {id(lock)} released on {lock_path}",
+ ]
+ assert [r.levelno for r in caplog.records] == [logging.DEBUG, logging.DEBUG, logging.DEBUG, logging.DEBUG]
+ assert [r.name for r in caplog.records] == ["filelock", "filelock", "filelock", "filelock"]
+ assert logging.getLogger("filelock").level == logging.NOTSET
+
+
[email protected]("lock_type", [AsyncFileLock, AsyncSoftFileLock])
[email protected]()
+async def test_non_blocking(lock_type: type[BaseAsyncFileLock], tmp_path:
Path) -> None:
+ # raises Timeout error when the lock cannot be acquired
+ lock_path = tmp_path / "a"
+ lock_1, lock_2 = lock_type(str(lock_path)), lock_type(str(lock_path))
+ lock_3 = lock_type(str(lock_path), blocking=False)
+ lock_4 = lock_type(str(lock_path), timeout=0)
+ lock_5 = lock_type(str(lock_path), blocking=False, timeout=-1)
+
+ # acquire lock 1
+ await lock_1.acquire()
+ assert lock_1.is_locked
+ assert not lock_2.is_locked
+ assert not lock_3.is_locked
+ assert not lock_4.is_locked
+ assert not lock_5.is_locked
+
+ # try to acquire lock 2
+ with pytest.raises(Timeout, match="The file lock '.*' could not be acquired."):
+ await lock_2.acquire(blocking=False)
+ assert not lock_2.is_locked
+ assert lock_1.is_locked
+
+ # try to acquire pre-parametrized `blocking=False` lock 3 with `acquire`
+ with pytest.raises(Timeout, match="The file lock '.*' could not be acquired."):
+ await lock_3.acquire()
+ assert not lock_3.is_locked
+ assert lock_1.is_locked
+
+ # try to acquire pre-parametrized `blocking=False` lock 3 with context manager
+ with pytest.raises(Timeout, match="The file lock '.*' could not be acquired."):
+ async with lock_3:
+ pass
+ assert not lock_3.is_locked
+ assert lock_1.is_locked
+
+ # try to acquire pre-parametrized `timeout=0` lock 4 with `acquire`
+ with pytest.raises(Timeout, match="The file lock '.*' could not be acquired."):
+ await lock_4.acquire()
+ assert not lock_4.is_locked
+ assert lock_1.is_locked
+
+ # try to acquire pre-parametrized `timeout=0` lock 4 with context manager
+ with pytest.raises(Timeout, match="The file lock '.*' could not be acquired."):
+ async with lock_4:
+ pass
+ assert not lock_4.is_locked
+ assert lock_1.is_locked
+
+ # blocking precedence over timeout
+ # try to acquire pre-parametrized `timeout=-1,blocking=False` lock 5 with `acquire`
+ with pytest.raises(Timeout, match="The file lock '.*' could not be acquired."):
+ await lock_5.acquire()
+ assert not lock_5.is_locked
+ assert lock_1.is_locked
+
+ # try to acquire pre-parametrized `timeout=-1,blocking=False` lock 5 with context manager
+ with pytest.raises(Timeout, match="The file lock '.*' could not be acquired."):
+ async with lock_5:
+ pass
+ assert not lock_5.is_locked
+ assert lock_1.is_locked
+
+ # release lock 1
+ await lock_1.release()
+ assert not lock_1.is_locked
+ assert not lock_2.is_locked
+ assert not lock_3.is_locked
+ assert not lock_4.is_locked
+ assert not lock_5.is_locked
+
+
[email protected]("lock_type", [AsyncFileLock, AsyncSoftFileLock])
[email protected]("thread_local", [True, False])
[email protected]()
+async def test_non_executor(lock_type: type[BaseAsyncFileLock], thread_local:
bool, tmp_path: Path) -> None:
+ lock_path = tmp_path / "a"
+ lock = lock_type(str(lock_path), thread_local=thread_local, run_in_executor=False)
+ async with lock as locked:
+ assert lock.is_locked
+ assert lock is locked
+ assert not lock.is_locked
+
+
+@pytest.mark.asyncio()
+async def test_coroutine_function(tmp_path: Path) -> None:
+ acquired = released = False
+
+ class AioFileLock(BaseAsyncFileLock):
+ async def _acquire(self) -> None: # type: ignore[override]
+ nonlocal acquired
+ acquired = True
+ self._context.lock_file_fd = 1
+
+ async def _release(self) -> None: # type: ignore[override]
+ nonlocal released
+ released = True
+ self._context.lock_file_fd = None
+
+ lock = AioFileLock(str(tmp_path / "a"))
+ await lock.acquire()
+ assert acquired
+ assert not released
+ await lock.release()
+ assert acquired
+ assert released
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/filelock-3.14.0/tests/test_filelock.py new/filelock-3.15.1/tests/test_filelock.py
--- old/filelock-3.14.0/tests/test_filelock.py 2020-02-02 01:00:00.000000000 +0100
+++ new/filelock-3.15.1/tests/test_filelock.py 2020-02-02 01:00:00.000000000 +0100
@@ -201,6 +201,23 @@
assert not lock.is_locked
[email protected]("lock_type", [FileLock, SoftFileLock])
+def test_nested_contruct(lock_type: type[BaseFileLock], tmp_path: Path) ->
None:
+ # lock is re-entrant for a given file even if it is constructed multiple
times
+ lock_path = tmp_path / "a"
+
+ with lock_type(str(lock_path), is_singleton=True, timeout=2) as lock_1:
+ assert lock_1.is_locked
+
+ with lock_type(str(lock_path), is_singleton=True, timeout=2) as lock_2:
+ assert lock_2 is lock_1
+ assert lock_2.is_locked
+
+ assert lock_1.is_locked
+
+ assert not lock_1.is_locked
+
+
_ExcInfoType = Union[Tuple[Type[BaseException], BaseException, TracebackType], Tuple[None, None, None]]
@@ -670,9 +687,10 @@
mode: int = 0o644,
thread_local: bool = True,
my_param: int = 0,
- **kwargs: dict[str, Any],
+ **kwargs: dict[str, Any], # noqa: ARG002
) -> None:
- pass
+ super().__init__(lock_file, timeout, mode, thread_local, blocking=True, is_singleton=True)
+ self.my_param = my_param
lock_path = tmp_path / "a"
MyFileLock(str(lock_path), my_param=1)
@@ -685,9 +703,10 @@
mode: int = 0o644,
thread_local: bool = True,
my_param: int = 0,
- **kwargs: dict[str, Any],
+ **kwargs: dict[str, Any], # noqa: ARG002
) -> None:
- pass
+ super().__init__(lock_file, timeout, mode, thread_local, blocking=True, is_singleton=True)
+ self.my_param = my_param
MySoftFileLock(str(lock_path), my_param=1)
@@ -725,12 +744,19 @@
@pytest.mark.parametrize("lock_type", [FileLock, SoftFileLock])
def test_singleton_locks_must_be_initialized_with_the_same_args(lock_type: type[BaseFileLock], tmp_path: Path) -> None:
lock_path = tmp_path / "a"
- lock = lock_type(str(lock_path), is_singleton=True) # noqa: F841
+ args: dict[str, Any] = {"timeout": -1, "mode": 0o644, "thread_local":
True, "blocking": True}
+ alternate_args: dict[str, Any] = {"timeout": 10, "mode": 0,
"thread_local": False, "blocking": False}
+
+ lock = lock_type(str(lock_path), is_singleton=True, **args)
- with pytest.raises(ValueError, match="Singleton lock instances cannot be
initialized with differing arguments"):
- lock_type(str(lock_path), timeout=10, is_singleton=True)
- with pytest.raises(ValueError, match="Singleton lock instances cannot be
initialized with differing arguments"):
- lock_type(str(lock_path), mode=0, is_singleton=True)
+ for arg_name in args:
+ general_msg = "Singleton lock instances cannot be initialized with
differing arguments"
+ altered_args = args.copy()
+ altered_args[arg_name] = alternate_args[arg_name]
+ with pytest.raises(ValueError, match=general_msg) as exc_info:
+ lock_type(str(lock_path), is_singleton=True, **altered_args)
+ exc_info.match(arg_name) # ensure specific non-matching argument is included in exception text
+ del lock, exc_info
@pytest.mark.skipif(hasattr(sys, "pypy_version_info"), reason="del() does not trigger GC in PyPy")