Merge branch 'hotfix/3.32.10'
Some checks failed
Python Unit Tests / windows (windows-latest, 3.10) (push) Has been cancelled
Python Unit Tests / windows (windows-latest, 3.11) (push) Has been cancelled
Python Unit Tests / windows (windows-latest, 3.12) (push) Has been cancelled
Python Unit Tests / windows (windows-latest, 3.8) (push) Has been cancelled
Python Unit Tests / windows (windows-latest, 3.9) (push) Has been cancelled
Python Unit Tests / linux (ubuntu-latest, 3.10) (push) Has been cancelled
Python Unit Tests / linux (ubuntu-latest, 3.11) (push) Has been cancelled
Python Unit Tests / linux (ubuntu-latest, 3.12) (push) Has been cancelled
Python Unit Tests / linux (ubuntu-latest, 3.8) (push) Has been cancelled
Python Unit Tests / linux (ubuntu-latest, 3.9) (push) Has been cancelled
Python Unit Tests / macos (macos-latest, 3.10) (push) Has been cancelled
Python Unit Tests / macos (macos-latest, 3.11) (push) Has been cancelled
Python Unit Tests / macos (macos-latest, 3.12) (push) Has been cancelled
Python Unit Tests / macos (macos-latest, 3.8) (push) Has been cancelled
Python Unit Tests / macos (macos-latest, 3.9) (push) Has been cancelled

This commit is contained in:
JackDandy 2024-10-07 01:10:01 +01:00
commit 6b762f2b48
21 changed files with 895 additions and 312 deletions

View file

@ -1,4 +1,11 @@
### 3.32.8 (2024-10-07 00:30:00 UTC) ### 3.33.0 (2024-0x-xx xx:xx:00 UTC)
* Update filelock 3.14.0 (8556141) to 3.15.4 (9a979df)
* Update package resource API 68.2.2 (8ad627d) to 70.1.1 (222ebf9)
* Update urllib3 2.2.1 (54d6edf) to 2.2.2 (27e2a5c)
### 3.32.8 (2024-10-07 00:30:00 UTC)
* Change min required Python version to 3.9 * Change min required Python version to 3.9
* Change add support for Python 3.9.20, 3.10.15, 3.11.10, 3.12.7 * Change add support for Python 3.9.20, 3.10.15, 3.11.10, 3.12.7

View file

@ -17,6 +17,13 @@ from ._error import Timeout
from ._soft import SoftFileLock from ._soft import SoftFileLock
from ._unix import UnixFileLock, has_fcntl from ._unix import UnixFileLock, has_fcntl
from ._windows import WindowsFileLock from ._windows import WindowsFileLock
from .asyncio import (
AsyncAcquireReturnProxy,
AsyncSoftFileLock,
AsyncUnixFileLock,
AsyncWindowsFileLock,
BaseAsyncFileLock,
)
from .version import version from .version import version
#: version of the project as a string #: version of the project as a string
@ -25,23 +32,34 @@ __version__: str = version
if sys.platform == "win32": # pragma: win32 cover if sys.platform == "win32": # pragma: win32 cover
_FileLock: type[BaseFileLock] = WindowsFileLock _FileLock: type[BaseFileLock] = WindowsFileLock
_AsyncFileLock: type[BaseAsyncFileLock] = AsyncWindowsFileLock
else: # pragma: win32 no cover # noqa: PLR5501 else: # pragma: win32 no cover # noqa: PLR5501
if has_fcntl: if has_fcntl:
_FileLock: type[BaseFileLock] = UnixFileLock _FileLock: type[BaseFileLock] = UnixFileLock
_AsyncFileLock: type[BaseAsyncFileLock] = AsyncUnixFileLock
else: else:
_FileLock = SoftFileLock _FileLock = SoftFileLock
_AsyncFileLock = AsyncSoftFileLock
if warnings is not None: if warnings is not None:
warnings.warn("only soft file lock is available", stacklevel=2) warnings.warn("only soft file lock is available", stacklevel=2)
if TYPE_CHECKING: if TYPE_CHECKING:
FileLock = SoftFileLock FileLock = SoftFileLock
AsyncFileLock = AsyncSoftFileLock
else: else:
#: Alias for the lock, which should be used for the current platform. #: Alias for the lock, which should be used for the current platform.
FileLock = _FileLock FileLock = _FileLock
AsyncFileLock = _AsyncFileLock
__all__ = [ __all__ = [
"AcquireReturnProxy", "AcquireReturnProxy",
"AsyncAcquireReturnProxy",
"AsyncFileLock",
"AsyncSoftFileLock",
"AsyncUnixFileLock",
"AsyncWindowsFileLock",
"BaseAsyncFileLock",
"BaseFileLock", "BaseFileLock",
"FileLock", "FileLock",
"SoftFileLock", "SoftFileLock",

View file

@ -1,14 +1,15 @@
from __future__ import annotations from __future__ import annotations
import contextlib import contextlib
import inspect
import logging import logging
import os import os
import time import time
import warnings import warnings
from abc import ABC, abstractmethod from abc import ABCMeta, abstractmethod
from dataclasses import dataclass from dataclasses import dataclass
from threading import local from threading import local
from typing import TYPE_CHECKING, Any from typing import TYPE_CHECKING, Any, cast
from weakref import WeakValueDictionary from weakref import WeakValueDictionary
from ._error import Timeout from ._error import Timeout
@ -77,35 +78,71 @@ class ThreadLocalFileContext(FileLockContext, local):
"""A thread local version of the ``FileLockContext`` class.""" """A thread local version of the ``FileLockContext`` class."""
class BaseFileLock(ABC, contextlib.ContextDecorator): class FileLockMeta(ABCMeta):
"""Abstract base class for a file lock object.""" def __call__( # noqa: PLR0913
_instances: WeakValueDictionary[str, BaseFileLock]
def __new__( # noqa: PLR0913
cls, cls,
lock_file: str | os.PathLike[str], lock_file: str | os.PathLike[str],
timeout: float = -1, timeout: float = -1,
mode: int = 0o644, mode: int = 0o644,
thread_local: bool = True, # noqa: ARG003, FBT001, FBT002 thread_local: bool = True, # noqa: FBT001, FBT002
*, *,
blocking: bool = True, # noqa: ARG003 blocking: bool = True,
is_singleton: bool = False, is_singleton: bool = False,
**kwargs: dict[str, Any], # capture remaining kwargs for subclasses # noqa: ARG003 **kwargs: Any, # capture remaining kwargs for subclasses # noqa: ANN401
) -> Self: ) -> BaseFileLock:
"""Create a new lock object or if specified return the singleton instance for the lock file.""" if is_singleton:
if not is_singleton: instance = cls._instances.get(str(lock_file)) # type: ignore[attr-defined]
return super().__new__(cls) if instance:
params_to_check = {
"thread_local": (thread_local, instance.is_thread_local()),
"timeout": (timeout, instance.timeout),
"mode": (mode, instance.mode),
"blocking": (blocking, instance.blocking),
}
instance = cls._instances.get(str(lock_file)) non_matching_params = {
if not instance: name: (passed_param, set_param)
instance = super().__new__(cls) for name, (passed_param, set_param) in params_to_check.items()
cls._instances[str(lock_file)] = instance if passed_param != set_param
elif timeout != instance.timeout or mode != instance.mode: }
msg = "Singleton lock instances cannot be initialized with differing arguments" if not non_matching_params:
raise ValueError(msg) return cast(BaseFileLock, instance)
return instance # type: ignore[return-value] # https://github.com/python/mypy/issues/15322 # parameters do not match; raise error
msg = "Singleton lock instances cannot be initialized with differing arguments"
msg += "\nNon-matching arguments: "
for param_name, (passed_param, set_param) in non_matching_params.items():
msg += f"\n\t{param_name} (existing lock has {set_param} but {passed_param} was passed)"
raise ValueError(msg)
# Workaround to make `__init__`'s params optional in subclasses
# E.g. virtualenv changes the signature of the `__init__` method in the `BaseFileLock` class descendant
# (https://github.com/tox-dev/filelock/pull/340)
all_params = {
"timeout": timeout,
"mode": mode,
"thread_local": thread_local,
"blocking": blocking,
"is_singleton": is_singleton,
**kwargs,
}
present_params = inspect.signature(cls.__init__).parameters # type: ignore[misc]
init_params = {key: value for key, value in all_params.items() if key in present_params}
instance = super().__call__(lock_file, **init_params)
if is_singleton:
cls._instances[str(lock_file)] = instance # type: ignore[attr-defined]
return cast(BaseFileLock, instance)
class BaseFileLock(contextlib.ContextDecorator, metaclass=FileLockMeta):
"""Abstract base class for a file lock object."""
_instances: WeakValueDictionary[str, BaseFileLock]
def __init_subclass__(cls, **kwargs: dict[str, Any]) -> None: def __init_subclass__(cls, **kwargs: dict[str, Any]) -> None:
"""Setup unique state for lock subclasses.""" """Setup unique state for lock subclasses."""

342
lib/filelock/asyncio.py Normal file
View file

@ -0,0 +1,342 @@
"""An asyncio-based implementation of the file lock."""
from __future__ import annotations
import asyncio
import contextlib
import logging
import os
import time
from dataclasses import dataclass
from threading import local
from typing import TYPE_CHECKING, Any, Callable, NoReturn, cast
from ._api import BaseFileLock, FileLockContext, FileLockMeta
from ._error import Timeout
from ._soft import SoftFileLock
from ._unix import UnixFileLock
from ._windows import WindowsFileLock
if TYPE_CHECKING:
import sys
from concurrent import futures
from types import TracebackType
if sys.version_info >= (3, 11): # pragma: no cover (py311+)
from typing import Self
else: # pragma: no cover (<py311)
from typing_extensions import Self
_LOGGER = logging.getLogger("filelock")
@dataclass
class AsyncFileLockContext(FileLockContext):
    """A dataclass which holds the context for a ``BaseAsyncFileLock`` object."""

    #: Whether blocking lock operations should run in an executor (keeps the event loop responsive)
    run_in_executor: bool = True

    #: The executor used when ``run_in_executor`` is true; ``None`` selects the loop's default executor
    executor: futures.Executor | None = None

    #: The event loop to use; ``None`` means the currently running loop is looked up on demand
    loop: asyncio.AbstractEventLoop | None = None
class AsyncThreadLocalFileContext(AsyncFileLockContext, local):
    """A thread local version of the ``AsyncFileLockContext`` class."""
class AsyncAcquireReturnProxy:
    """Async context manager wrapper handed out by :meth:`BaseAsyncFileLock.acquire`.

    Entering the context yields the underlying lock object; leaving it releases the lock.
    """

    def __init__(self, lock: BaseAsyncFileLock) -> None:
        """Remember the lock that must be released on context exit."""
        self.lock = lock

    async def __aenter__(self) -> BaseAsyncFileLock:
        """Return the wrapped lock object."""
        return self.lock

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        """Release the wrapped lock, regardless of any exception raised inside the block."""
        await self.lock.release()
class AsyncFileLockMeta(FileLockMeta):
    """Metaclass for async locks: validates the async-specific options, then delegates construction."""

    def __call__(  # type: ignore[override] # noqa: PLR0913
        cls,  # noqa: N805
        lock_file: str | os.PathLike[str],
        timeout: float = -1,
        mode: int = 0o644,
        thread_local: bool = False,  # noqa: FBT001, FBT002
        *,
        blocking: bool = True,
        is_singleton: bool = False,
        loop: asyncio.AbstractEventLoop | None = None,
        run_in_executor: bool = True,
        executor: futures.Executor | None = None,
    ) -> BaseAsyncFileLock:
        # These two options are mutually exclusive; reject the invalid combination up front.
        if run_in_executor and thread_local:
            msg = "run_in_executor is not supported when thread_local is True"
            raise ValueError(msg)
        # Hand the full parameter set to the base metaclass, which handles singleton
        # bookkeeping and forwards only the parameters the subclass __init__ accepts.
        lock = super().__call__(
            lock_file=lock_file,
            timeout=timeout,
            mode=mode,
            thread_local=thread_local,
            blocking=blocking,
            is_singleton=is_singleton,
            loop=loop,
            run_in_executor=run_in_executor,
            executor=executor,
        )
        return cast(BaseAsyncFileLock, lock)
class BaseAsyncFileLock(BaseFileLock, metaclass=AsyncFileLockMeta):
    """Base class for asynchronous file locks."""

    def __init__(  # noqa: PLR0913
        self,
        lock_file: str | os.PathLike[str],
        timeout: float = -1,
        mode: int = 0o644,
        thread_local: bool = False,  # noqa: FBT001, FBT002
        *,
        blocking: bool = True,
        is_singleton: bool = False,
        loop: asyncio.AbstractEventLoop | None = None,
        run_in_executor: bool = True,
        executor: futures.Executor | None = None,
    ) -> None:
        """
        Create a new lock object.

        :param lock_file: path to the file
        :param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in \
            the acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it \
            to a negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock.
        :param mode: file permissions for the lockfile
        :param thread_local: Whether this object's internal context should be thread local or not. If this is set to \
            ``False`` then the lock will be reentrant across threads.
        :param blocking: whether the lock should be blocking or not
        :param is_singleton: If this is set to ``True`` then only one instance of this class will be created \
            per lock file. This is useful if you want to use the lock object for reentrant locking without needing \
            to pass the same object around.
        :param loop: The event loop to use. If not specified, the running event loop will be used.
        :param run_in_executor: If this is set to ``True`` then the lock will be acquired in an executor.
        :param executor: The executor to use. If not specified, the default executor will be used.

        """
        self._is_thread_local = thread_local
        self._is_singleton = is_singleton

        # Create the context. Note that external code should not work with the context directly and should instead use
        # properties of this class.
        kwargs: dict[str, Any] = {
            "lock_file": os.fspath(lock_file),
            "timeout": timeout,
            "mode": mode,
            "blocking": blocking,
            "loop": loop,
            "run_in_executor": run_in_executor,
            "executor": executor,
        }
        self._context: AsyncFileLockContext = (AsyncThreadLocalFileContext if thread_local else AsyncFileLockContext)(
            **kwargs
        )

    @property
    def run_in_executor(self) -> bool:
        """:return: whether the blocking lock operations run in an executor."""
        return self._context.run_in_executor

    @property
    def executor(self) -> futures.Executor | None:
        """:return: the executor."""
        return self._context.executor

    @executor.setter
    def executor(self, value: futures.Executor | None) -> None:  # pragma: no cover
        """
        Change the executor.

        :param value: the new executor or ``None``
        :type value: futures.Executor | None
        """
        self._context.executor = value

    @property
    def loop(self) -> asyncio.AbstractEventLoop | None:
        """:return: the event loop."""
        return self._context.loop

    async def acquire(  # type: ignore[override]
        self,
        timeout: float | None = None,
        poll_interval: float = 0.05,
        *,
        blocking: bool | None = None,
    ) -> AsyncAcquireReturnProxy:
        """
        Try to acquire the file lock.

        :param timeout: maximum wait time for acquiring the lock, ``None`` means use the default
            :attr:`~BaseFileLock.timeout` is and if ``timeout < 0``, there is no timeout and
            this method will block until the lock could be acquired
        :param poll_interval: interval of trying to acquire the lock file
        :param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the
            first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired.
        :raises Timeout: if fails to acquire lock within the timeout period
        :return: a context object that will unlock the file when the context is exited

        .. code-block:: python

            # You can use this method in the context manager (recommended)
            with lock.acquire():
                pass

            # Or use an equivalent try-finally construct:
            lock.acquire()
            try:
                pass
            finally:
                lock.release()

        """
        # Use the default timeout, if no timeout is provided.
        if timeout is None:
            timeout = self._context.timeout

        if blocking is None:
            blocking = self._context.blocking

        # Increment the number right at the beginning. We can still undo it, if something fails.
        self._context.lock_counter += 1

        lock_id = id(self)
        lock_filename = self.lock_file
        start_time = time.perf_counter()
        try:
            while True:
                if not self.is_locked:
                    _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
                    # The platform-specific _acquire is run via the executor/loop machinery below.
                    await self._run_internal_method(self._acquire)
                if self.is_locked:
                    _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
                    break
                if blocking is False:
                    _LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename)
                    raise Timeout(lock_filename)  # noqa: TRY301
                if 0 <= timeout < time.perf_counter() - start_time:
                    _LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename)
                    raise Timeout(lock_filename)  # noqa: TRY301
                msg = "Lock %s not acquired on %s, waiting %s seconds ..."
                _LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
                # Non-blocking sleep so other tasks can run while we poll.
                await asyncio.sleep(poll_interval)
        except BaseException:  # Something did go wrong, so decrement the counter.
            self._context.lock_counter = max(0, self._context.lock_counter - 1)
            raise
        return AsyncAcquireReturnProxy(lock=self)

    async def release(self, force: bool = False) -> None:  # type: ignore[override] # noqa: FBT001, FBT002
        """
        Releases the file lock. Please note, that the lock is only completely released, if the lock counter is 0.

        Also note, that the lock file itself is not automatically deleted.

        :param force: If true, the lock counter is ignored and the lock is released in every case/
        """
        if self.is_locked:
            self._context.lock_counter -= 1

            if self._context.lock_counter == 0 or force:
                lock_id, lock_filename = id(self), self.lock_file

                _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
                await self._run_internal_method(self._release)
                self._context.lock_counter = 0
                _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)

    async def _run_internal_method(self, method: Callable[[], Any]) -> None:
        # Dispatch: native coroutine -> await it; otherwise either offload the blocking
        # call to the executor (default) or run it inline on the event loop thread.
        if asyncio.iscoroutinefunction(method):
            await method()
        elif self.run_in_executor:
            loop = self.loop or asyncio.get_running_loop()
            await loop.run_in_executor(self.executor, method)
        else:
            method()

    def __enter__(self) -> NoReturn:
        """
        Replace old __enter__ method to avoid using it.

        NOTE: DO NOT USE `with` FOR ASYNCIO LOCKS, USE `async with` INSTEAD.

        :return: none
        :rtype: NoReturn
        """
        msg = "Do not use `with` for asyncio locks, use `async with` instead."
        raise NotImplementedError(msg)

    async def __aenter__(self) -> Self:
        """
        Acquire the lock.

        :return: the lock object
        """
        await self.acquire()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        """
        Release the lock.

        :param exc_type: the exception type if raised
        :param exc_value: the exception value if raised
        :param traceback: the exception traceback if raised
        """
        await self.release()

    def __del__(self) -> None:
        """Called when the lock object is deleted."""
        # Best-effort cleanup: asyncio.get_running_loop() raises RuntimeError when no
        # loop is running, in which case the release is silently skipped.
        with contextlib.suppress(RuntimeError):
            loop = self.loop or asyncio.get_running_loop()
            if not loop.is_running():  # pragma: no cover
                loop.run_until_complete(self.release(force=True))
            else:
                loop.create_task(self.release(force=True))
class AsyncSoftFileLock(SoftFileLock, BaseAsyncFileLock):
    """Simply watches the existence of the lock file."""
class AsyncUnixFileLock(UnixFileLock, BaseAsyncFileLock):
    """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
class AsyncWindowsFileLock(WindowsFileLock, BaseAsyncFileLock):
    """Uses the :func:`msvcrt.locking` to hard lock the lock file on windows systems."""
# Public API of this module, re-exported by the package ``__init__``.
__all__ = [
    "AsyncAcquireReturnProxy",
    "AsyncSoftFileLock",
    "AsyncUnixFileLock",
    "AsyncWindowsFileLock",
    "BaseAsyncFileLock",
]

View file

@ -1,4 +1,4 @@
# file generated by setuptools_scm # file generated by setuptools_scm
# don't change, don't track in version control # don't change, don't track in version control
__version__ = version = '3.14.0' __version__ = version = '3.15.4'
__version_tuple__ = version_tuple = (3, 14, 0) __version_tuple__ = version_tuple = (3, 15, 4)

File diff suppressed because it is too large Load diff

View file

@ -1,8 +1,9 @@
from __future__ import annotations
from importlib.machinery import ModuleSpec from importlib.machinery import ModuleSpec
import importlib.util import importlib.util
import sys import sys
from types import ModuleType from types import ModuleType
from typing import Iterable, Optional, Sequence from typing import Iterable, Sequence
class VendorImporter: class VendorImporter:
@ -15,7 +16,7 @@ class VendorImporter:
self, self,
root_name: str, root_name: str,
vendored_names: Iterable[str] = (), vendored_names: Iterable[str] = (),
vendor_pkg: Optional[str] = None, vendor_pkg: str | None = None,
): ):
self.root_name = root_name self.root_name = root_name
self.vendored_names = set(vendored_names) self.vendored_names = set(vendored_names)
@ -65,8 +66,8 @@ class VendorImporter:
def find_spec( def find_spec(
self, self,
fullname: str, fullname: str,
path: Optional[Sequence[str]] = None, path: Sequence[str] | None = None,
target: Optional[ModuleType] = None, target: ModuleType | None = None,
): ):
"""Return a module spec for vendored names.""" """Return a module spec for vendored names."""
return ( return (

View file

@ -12,7 +12,7 @@ _TYPE_BODY = typing.Union[bytes, typing.IO[typing.Any], typing.Iterable[bytes],
class ProxyConfig(typing.NamedTuple): class ProxyConfig(typing.NamedTuple):
ssl_context: ssl.SSLContext | None ssl_context: ssl.SSLContext | None
use_forwarding_for_https: bool use_forwarding_for_https: bool
assert_hostname: None | str | Literal[False] assert_hostname: None | str | typing.Literal[False]
assert_fingerprint: str | None assert_fingerprint: str | None
@ -28,7 +28,7 @@ class _ResponseOptions(typing.NamedTuple):
if typing.TYPE_CHECKING: if typing.TYPE_CHECKING:
import ssl import ssl
from typing import Literal, Protocol from typing import Protocol
from .response import BaseHTTPResponse from .response import BaseHTTPResponse
@ -124,7 +124,7 @@ if typing.TYPE_CHECKING:
# Certificate verification methods # Certificate verification methods
cert_reqs: int | str | None cert_reqs: int | str | None
assert_hostname: None | str | Literal[False] assert_hostname: None | str | typing.Literal[False]
assert_fingerprint: str | None assert_fingerprint: str | None
ssl_context: ssl.SSLContext | None ssl_context: ssl.SSLContext | None
@ -155,7 +155,7 @@ if typing.TYPE_CHECKING:
proxy: Url | None = None, proxy: Url | None = None,
proxy_config: ProxyConfig | None = None, proxy_config: ProxyConfig | None = None,
cert_reqs: int | str | None = None, cert_reqs: int | str | None = None,
assert_hostname: None | str | Literal[False] = None, assert_hostname: None | str | typing.Literal[False] = None,
assert_fingerprint: str | None = None, assert_fingerprint: str | None = None,
server_hostname: str | None = None, server_hostname: str | None = None,
ssl_context: ssl.SSLContext | None = None, ssl_context: ssl.SSLContext | None = None,

View file

@ -427,7 +427,7 @@ class HTTPHeaderDict(typing.MutableMapping[str, str]):
val = other.getlist(key) val = other.getlist(key)
self._container[key.lower()] = [key, *val] self._container[key.lower()] = [key, *val]
def copy(self) -> HTTPHeaderDict: def copy(self) -> Self:
clone = type(self)() clone = type(self)()
clone._copy_from(self) clone._copy_from(self)
return clone return clone
@ -462,7 +462,7 @@ class HTTPHeaderDict(typing.MutableMapping[str, str]):
self.extend(maybe_constructable) self.extend(maybe_constructable)
return self return self
def __or__(self, other: object) -> HTTPHeaderDict: def __or__(self, other: object) -> Self:
# Supports merging header dicts using operator | # Supports merging header dicts using operator |
# combining items with add instead of __setitem__ # combining items with add instead of __setitem__
maybe_constructable = ensure_can_construct_http_header_dict(other) maybe_constructable = ensure_can_construct_http_header_dict(other)
@ -472,7 +472,7 @@ class HTTPHeaderDict(typing.MutableMapping[str, str]):
result.extend(maybe_constructable) result.extend(maybe_constructable)
return result return result
def __ror__(self, other: object) -> HTTPHeaderDict: def __ror__(self, other: object) -> Self:
# Supports merging header dicts using operator | when other is on left side # Supports merging header dicts using operator | when other is on left side
# combining items with add instead of __setitem__ # combining items with add instead of __setitem__
maybe_constructable = ensure_can_construct_http_header_dict(other) maybe_constructable = ensure_can_construct_http_header_dict(other)

View file

@ -1,4 +1,4 @@
# This file is protected via CODEOWNERS # This file is protected via CODEOWNERS
from __future__ import annotations from __future__ import annotations
__version__ = "2.2.1" __version__ = "2.2.2"

View file

@ -14,8 +14,6 @@ from http.client import ResponseNotReady
from socket import timeout as SocketTimeout from socket import timeout as SocketTimeout
if typing.TYPE_CHECKING: if typing.TYPE_CHECKING:
from typing import Literal
from .response import HTTPResponse from .response import HTTPResponse
from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT
from .util.ssltransport import SSLTransport from .util.ssltransport import SSLTransport
@ -482,6 +480,7 @@ class HTTPConnection(_HTTPConnection):
headers=headers, headers=headers,
status=httplib_response.status, status=httplib_response.status,
version=httplib_response.version, version=httplib_response.version,
version_string=getattr(self, "_http_vsn_str", "HTTP/?"),
reason=httplib_response.reason, reason=httplib_response.reason,
preload_content=resp_options.preload_content, preload_content=resp_options.preload_content,
decode_content=resp_options.decode_content, decode_content=resp_options.decode_content,
@ -523,7 +522,7 @@ class HTTPSConnection(HTTPConnection):
proxy: Url | None = None, proxy: Url | None = None,
proxy_config: ProxyConfig | None = None, proxy_config: ProxyConfig | None = None,
cert_reqs: int | str | None = None, cert_reqs: int | str | None = None,
assert_hostname: None | str | Literal[False] = None, assert_hostname: None | str | typing.Literal[False] = None,
assert_fingerprint: str | None = None, assert_fingerprint: str | None = None,
server_hostname: str | None = None, server_hostname: str | None = None,
ssl_context: ssl.SSLContext | None = None, ssl_context: ssl.SSLContext | None = None,
@ -577,7 +576,7 @@ class HTTPSConnection(HTTPConnection):
cert_reqs: int | str | None = None, cert_reqs: int | str | None = None,
key_password: str | None = None, key_password: str | None = None,
ca_certs: str | None = None, ca_certs: str | None = None,
assert_hostname: None | str | Literal[False] = None, assert_hostname: None | str | typing.Literal[False] = None,
assert_fingerprint: str | None = None, assert_fingerprint: str | None = None,
ca_cert_dir: str | None = None, ca_cert_dir: str | None = None,
ca_cert_data: None | str | bytes = None, ca_cert_data: None | str | bytes = None,
@ -742,7 +741,7 @@ def _ssl_wrap_socket_and_match_hostname(
ca_certs: str | None, ca_certs: str | None,
ca_cert_dir: str | None, ca_cert_dir: str | None,
ca_cert_data: None | str | bytes, ca_cert_data: None | str | bytes,
assert_hostname: None | str | Literal[False], assert_hostname: None | str | typing.Literal[False],
assert_fingerprint: str | None, assert_fingerprint: str | None,
server_hostname: str | None, server_hostname: str | None,
ssl_context: ssl.SSLContext | None, ssl_context: ssl.SSLContext | None,

View file

@ -53,7 +53,8 @@ from .util.util import to_str
if typing.TYPE_CHECKING: if typing.TYPE_CHECKING:
import ssl import ssl
from typing import Literal
from typing_extensions import Self
from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection
@ -61,8 +62,6 @@ log = logging.getLogger(__name__)
_TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None] _TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None]
_SelfT = typing.TypeVar("_SelfT")
# Pool objects # Pool objects
class ConnectionPool: class ConnectionPool:
@ -95,7 +94,7 @@ class ConnectionPool:
def __str__(self) -> str: def __str__(self) -> str:
return f"{type(self).__name__}(host={self.host!r}, port={self.port!r})" return f"{type(self).__name__}(host={self.host!r}, port={self.port!r})"
def __enter__(self: _SelfT) -> _SelfT: def __enter__(self) -> Self:
return self return self
def __exit__( def __exit__(
@ -103,7 +102,7 @@ class ConnectionPool:
exc_type: type[BaseException] | None, exc_type: type[BaseException] | None,
exc_val: BaseException | None, exc_val: BaseException | None,
exc_tb: TracebackType | None, exc_tb: TracebackType | None,
) -> Literal[False]: ) -> typing.Literal[False]:
self.close() self.close()
# Return False to re-raise any potential exceptions # Return False to re-raise any potential exceptions
return False return False
@ -544,17 +543,14 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
response._connection = response_conn # type: ignore[attr-defined] response._connection = response_conn # type: ignore[attr-defined]
response._pool = self # type: ignore[attr-defined] response._pool = self # type: ignore[attr-defined]
# emscripten connection doesn't have _http_vsn_str
http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
log.debug( log.debug(
'%s://%s:%s "%s %s %s" %s %s', '%s://%s:%s "%s %s HTTP/%s" %s %s',
self.scheme, self.scheme,
self.host, self.host,
self.port, self.port,
method, method,
url, url,
# HTTP version response.version,
http_version,
response.status, response.status,
response.length_remaining, response.length_remaining,
) )
@ -1002,7 +998,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
ssl_version: int | str | None = None, ssl_version: int | str | None = None,
ssl_minimum_version: ssl.TLSVersion | None = None, ssl_minimum_version: ssl.TLSVersion | None = None,
ssl_maximum_version: ssl.TLSVersion | None = None, ssl_maximum_version: ssl.TLSVersion | None = None,
assert_hostname: str | Literal[False] | None = None, assert_hostname: str | typing.Literal[False] | None = None,
assert_fingerprint: str | None = None, assert_fingerprint: str | None = None,
ca_cert_dir: str | None = None, ca_cert_dir: str | None = None,
**conn_kw: typing.Any, **conn_kw: typing.Any,

View file

@ -71,10 +71,8 @@ try:
except ImportError: except ImportError:
ssl = None # type: ignore[assignment] ssl = None # type: ignore[assignment]
from typing import TypedDict
class _TYPE_SOCKS_OPTIONS(typing.TypedDict):
class _TYPE_SOCKS_OPTIONS(TypedDict):
socks_version: int socks_version: int
proxy_host: str | None proxy_host: str | None
proxy_port: str | None proxy_port: str | None

View file

@ -195,6 +195,7 @@ class HTTP2Response(BaseHTTPResponse):
headers=headers, headers=headers,
# Following CPython, we map HTTP versions to major * 10 + minor integers # Following CPython, we map HTTP versions to major * 10 + minor integers
version=20, version=20,
version_string="HTTP/2",
# No reason phrase in HTTP/2 # No reason phrase in HTTP/2
reason=None, reason=None,
decode_content=decode_content, decode_content=decode_content,

View file

@ -26,7 +26,8 @@ from .util.url import Url, parse_url
if typing.TYPE_CHECKING: if typing.TYPE_CHECKING:
import ssl import ssl
from typing import Literal
from typing_extensions import Self
__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] __all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
@ -51,8 +52,6 @@ SSL_KEYWORDS = (
# http.client.HTTPConnection & http.client.HTTPSConnection in Python 3.7 # http.client.HTTPConnection & http.client.HTTPSConnection in Python 3.7
_DEFAULT_BLOCKSIZE = 16384 _DEFAULT_BLOCKSIZE = 16384
_SelfT = typing.TypeVar("_SelfT")
class PoolKey(typing.NamedTuple): class PoolKey(typing.NamedTuple):
""" """
@ -214,7 +213,7 @@ class PoolManager(RequestMethods):
self.pool_classes_by_scheme = pool_classes_by_scheme self.pool_classes_by_scheme = pool_classes_by_scheme
self.key_fn_by_scheme = key_fn_by_scheme.copy() self.key_fn_by_scheme = key_fn_by_scheme.copy()
def __enter__(self: _SelfT) -> _SelfT: def __enter__(self) -> Self:
return self return self
def __exit__( def __exit__(
@ -222,7 +221,7 @@ class PoolManager(RequestMethods):
exc_type: type[BaseException] | None, exc_type: type[BaseException] | None,
exc_val: BaseException | None, exc_val: BaseException | None,
exc_tb: TracebackType | None, exc_tb: TracebackType | None,
) -> Literal[False]: ) -> typing.Literal[False]:
self.clear() self.clear()
# Return False to re-raise any potential exceptions # Return False to re-raise any potential exceptions
return False return False
@ -553,7 +552,7 @@ class ProxyManager(PoolManager):
proxy_headers: typing.Mapping[str, str] | None = None, proxy_headers: typing.Mapping[str, str] | None = None,
proxy_ssl_context: ssl.SSLContext | None = None, proxy_ssl_context: ssl.SSLContext | None = None,
use_forwarding_for_https: bool = False, use_forwarding_for_https: bool = False,
proxy_assert_hostname: None | str | Literal[False] = None, proxy_assert_hostname: None | str | typing.Literal[False] = None,
proxy_assert_fingerprint: str | None = None, proxy_assert_fingerprint: str | None = None,
**connection_pool_kw: typing.Any, **connection_pool_kw: typing.Any,
) -> None: ) -> None:

View file

@ -26,20 +26,21 @@ except ImportError:
brotli = None brotli = None
try: try:
import zstandard as zstd # type: ignore[import-not-found] import zstandard as zstd
except (AttributeError, ImportError, ValueError): # Defensive:
HAS_ZSTD = False
else:
# The package 'zstandard' added the 'eof' property starting # The package 'zstandard' added the 'eof' property starting
# in v0.18.0 which we require to ensure a complete and # in v0.18.0 which we require to ensure a complete and
# valid zstd stream was fed into the ZstdDecoder. # valid zstd stream was fed into the ZstdDecoder.
# See: https://github.com/urllib3/urllib3/pull/2624 # See: https://github.com/urllib3/urllib3/pull/2624
_zstd_version = _zstd_version = tuple( _zstd_version = tuple(
map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr] map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr]
) )
if _zstd_version < (0, 18): # Defensive: if _zstd_version < (0, 18): # Defensive:
zstd = None HAS_ZSTD = False
else:
except (AttributeError, ImportError, ValueError): # Defensive: HAS_ZSTD = True
zstd = None
from . import util from . import util
from ._base_connection import _TYPE_BODY from ._base_connection import _TYPE_BODY
@ -61,8 +62,6 @@ from .util.response import is_fp_closed, is_response_to_head
from .util.retry import Retry from .util.retry import Retry
if typing.TYPE_CHECKING: if typing.TYPE_CHECKING:
from typing import Literal
from .connectionpool import HTTPConnectionPool from .connectionpool import HTTPConnectionPool
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -163,7 +162,7 @@ if brotli is not None:
return b"" return b""
if zstd is not None: if HAS_ZSTD:
class ZstdDecoder(ContentDecoder): class ZstdDecoder(ContentDecoder):
def __init__(self) -> None: def __init__(self) -> None:
@ -183,7 +182,7 @@ if zstd is not None:
ret = self._obj.flush() # note: this is a no-op ret = self._obj.flush() # note: this is a no-op
if not self._obj.eof: if not self._obj.eof:
raise DecodeError("Zstandard data is incomplete") raise DecodeError("Zstandard data is incomplete")
return ret # type: ignore[no-any-return] return ret
class MultiDecoder(ContentDecoder): class MultiDecoder(ContentDecoder):
@ -219,7 +218,7 @@ def _get_decoder(mode: str) -> ContentDecoder:
if brotli is not None and mode == "br": if brotli is not None and mode == "br":
return BrotliDecoder() return BrotliDecoder()
if zstd is not None and mode == "zstd": if HAS_ZSTD and mode == "zstd":
return ZstdDecoder() return ZstdDecoder()
return DeflateDecoder() return DeflateDecoder()
@ -302,7 +301,7 @@ class BaseHTTPResponse(io.IOBase):
CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"] CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"]
if brotli is not None: if brotli is not None:
CONTENT_DECODERS += ["br"] CONTENT_DECODERS += ["br"]
if zstd is not None: if HAS_ZSTD:
CONTENT_DECODERS += ["zstd"] CONTENT_DECODERS += ["zstd"]
REDIRECT_STATUSES = [301, 302, 303, 307, 308] REDIRECT_STATUSES = [301, 302, 303, 307, 308]
@ -310,7 +309,7 @@ class BaseHTTPResponse(io.IOBase):
if brotli is not None: if brotli is not None:
DECODER_ERROR_CLASSES += (brotli.error,) DECODER_ERROR_CLASSES += (brotli.error,)
if zstd is not None: if HAS_ZSTD:
DECODER_ERROR_CLASSES += (zstd.ZstdError,) DECODER_ERROR_CLASSES += (zstd.ZstdError,)
def __init__( def __init__(
@ -319,6 +318,7 @@ class BaseHTTPResponse(io.IOBase):
headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None, headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
status: int, status: int,
version: int, version: int,
version_string: str,
reason: str | None, reason: str | None,
decode_content: bool, decode_content: bool,
request_url: str | None, request_url: str | None,
@ -330,6 +330,7 @@ class BaseHTTPResponse(io.IOBase):
self.headers = HTTPHeaderDict(headers) # type: ignore[arg-type] self.headers = HTTPHeaderDict(headers) # type: ignore[arg-type]
self.status = status self.status = status
self.version = version self.version = version
self.version_string = version_string
self.reason = reason self.reason = reason
self.decode_content = decode_content self.decode_content = decode_content
self._has_decoded_content = False self._has_decoded_content = False
@ -346,7 +347,7 @@ class BaseHTTPResponse(io.IOBase):
self._decoder: ContentDecoder | None = None self._decoder: ContentDecoder | None = None
self.length_remaining: int | None self.length_remaining: int | None
def get_redirect_location(self) -> str | None | Literal[False]: def get_redirect_location(self) -> str | None | typing.Literal[False]:
""" """
Should we redirect and where to? Should we redirect and where to?
@ -364,13 +365,21 @@ class BaseHTTPResponse(io.IOBase):
def json(self) -> typing.Any: def json(self) -> typing.Any:
""" """
Parses the body of the HTTP response as JSON. Deserializes the body of the HTTP response as a Python object.
To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder. The body of the HTTP response must be encoded using UTF-8, as per
`RFC 8529 Section 8.1 <https://www.rfc-editor.org/rfc/rfc8259#section-8.1>`_.
This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`. To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to
your custom decoder instead.
Read more :ref:`here <json>`. If the body of the HTTP response is not decodable to UTF-8, a
`UnicodeDecodeError` will be raised. If the body of the HTTP response is not a
valid JSON document, a `json.JSONDecodeError` will be raised.
Read more :ref:`here <json_content>`.
:returns: The body of the HTTP response as a Python object.
""" """
data = self.data.decode("utf-8") data = self.data.decode("utf-8")
return _json.loads(data) return _json.loads(data)
@ -567,6 +576,7 @@ class HTTPResponse(BaseHTTPResponse):
headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None, headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
status: int = 0, status: int = 0,
version: int = 0, version: int = 0,
version_string: str = "HTTP/?",
reason: str | None = None, reason: str | None = None,
preload_content: bool = True, preload_content: bool = True,
decode_content: bool = True, decode_content: bool = True,
@ -584,6 +594,7 @@ class HTTPResponse(BaseHTTPResponse):
headers=headers, headers=headers,
status=status, status=status,
version=version, version=version,
version_string=version_string,
reason=reason, reason=reason,
decode_content=decode_content, decode_content=decode_content,
request_url=request_url, request_url=request_url,
@ -926,7 +937,10 @@ class HTTPResponse(BaseHTTPResponse):
if decode_content is None: if decode_content is None:
decode_content = self.decode_content decode_content = self.decode_content
if amt is not None: if amt and amt < 0:
# Negative numbers and `None` should be treated the same.
amt = None
elif amt is not None:
cache_content = False cache_content = False
if len(self._decoded_buffer) >= amt: if len(self._decoded_buffer) >= amt:
@ -986,6 +1000,9 @@ class HTTPResponse(BaseHTTPResponse):
""" """
if decode_content is None: if decode_content is None:
decode_content = self.decode_content decode_content = self.decode_content
if amt and amt < 0:
# Negative numbers and `None` should be treated the same.
amt = None
# try and respond without going to the network # try and respond without going to the network
if self._has_decoded_content: if self._has_decoded_content:
if not decode_content: if not decode_content:
@ -1180,6 +1197,11 @@ class HTTPResponse(BaseHTTPResponse):
if self._fp.fp is None: # type: ignore[union-attr] if self._fp.fp is None: # type: ignore[union-attr]
return None return None
if amt and amt < 0:
# Negative numbers and `None` should be treated the same,
# but httplib handles only `None` correctly.
amt = None
while True: while True:
self._update_chunk_length() self._update_chunk_length()
if self.chunk_left == 0: if self.chunk_left == 0:

View file

@ -29,7 +29,7 @@ except ImportError:
else: else:
ACCEPT_ENCODING += ",br" ACCEPT_ENCODING += ",br"
try: try:
import zstandard as _unused_module_zstd # type: ignore[import-not-found] # noqa: F401 import zstandard as _unused_module_zstd # noqa: F401
except ImportError: except ImportError:
pass pass
else: else:

View file

@ -21,6 +21,8 @@ from ..exceptions import (
from .util import reraise from .util import reraise
if typing.TYPE_CHECKING: if typing.TYPE_CHECKING:
from typing_extensions import Self
from ..connectionpool import ConnectionPool from ..connectionpool import ConnectionPool
from ..response import BaseHTTPResponse from ..response import BaseHTTPResponse
@ -187,7 +189,9 @@ class Retry:
RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
#: Default headers to be used for ``remove_headers_on_redirect`` #: Default headers to be used for ``remove_headers_on_redirect``
DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"]) DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(
["Cookie", "Authorization", "Proxy-Authorization"]
)
#: Default maximum backoff time. #: Default maximum backoff time.
DEFAULT_BACKOFF_MAX = 120 DEFAULT_BACKOFF_MAX = 120
@ -240,7 +244,7 @@ class Retry:
) )
self.backoff_jitter = backoff_jitter self.backoff_jitter = backoff_jitter
def new(self, **kw: typing.Any) -> Retry: def new(self, **kw: typing.Any) -> Self:
params = dict( params = dict(
total=self.total, total=self.total,
connect=self.connect, connect=self.connect,
@ -429,7 +433,7 @@ class Retry:
error: Exception | None = None, error: Exception | None = None,
_pool: ConnectionPool | None = None, _pool: ConnectionPool | None = None,
_stacktrace: TracebackType | None = None, _stacktrace: TracebackType | None = None,
) -> Retry: ) -> Self:
"""Return a new Retry object with incremented retry counters. """Return a new Retry object with incremented retry counters.
:param response: A response object, or None, if the server did not :param response: A response object, or None, if the server did not

View file

@ -78,7 +78,7 @@ def _is_has_never_check_common_name_reliable(
if typing.TYPE_CHECKING: if typing.TYPE_CHECKING:
from ssl import VerifyMode from ssl import VerifyMode
from typing import Literal, TypedDict from typing import TypedDict
from .ssltransport import SSLTransport as SSLTransportType from .ssltransport import SSLTransport as SSLTransportType
@ -365,7 +365,7 @@ def ssl_wrap_socket(
ca_cert_dir: str | None = ..., ca_cert_dir: str | None = ...,
key_password: str | None = ..., key_password: str | None = ...,
ca_cert_data: None | str | bytes = ..., ca_cert_data: None | str | bytes = ...,
tls_in_tls: Literal[False] = ..., tls_in_tls: typing.Literal[False] = ...,
) -> ssl.SSLSocket: ) -> ssl.SSLSocket:
... ...

View file

@ -8,12 +8,11 @@ import typing
from ..exceptions import ProxySchemeUnsupported from ..exceptions import ProxySchemeUnsupported
if typing.TYPE_CHECKING: if typing.TYPE_CHECKING:
from typing import Literal from typing_extensions import Self
from .ssl_ import _TYPE_PEER_CERT_RET, _TYPE_PEER_CERT_RET_DICT from .ssl_ import _TYPE_PEER_CERT_RET, _TYPE_PEER_CERT_RET_DICT
_SelfT = typing.TypeVar("_SelfT", bound="SSLTransport")
_WriteBuffer = typing.Union[bytearray, memoryview] _WriteBuffer = typing.Union[bytearray, memoryview]
_ReturnValue = typing.TypeVar("_ReturnValue") _ReturnValue = typing.TypeVar("_ReturnValue")
@ -70,7 +69,7 @@ class SSLTransport:
# Perform initial handshake. # Perform initial handshake.
self._ssl_io_loop(self.sslobj.do_handshake) self._ssl_io_loop(self.sslobj.do_handshake)
def __enter__(self: _SelfT) -> _SelfT: def __enter__(self) -> Self:
return self return self
def __exit__(self, *_: typing.Any) -> None: def __exit__(self, *_: typing.Any) -> None:
@ -174,12 +173,12 @@ class SSLTransport:
@typing.overload @typing.overload
def getpeercert( def getpeercert(
self, binary_form: Literal[False] = ... self, binary_form: typing.Literal[False] = ...
) -> _TYPE_PEER_CERT_RET_DICT | None: ) -> _TYPE_PEER_CERT_RET_DICT | None:
... ...
@typing.overload @typing.overload
def getpeercert(self, binary_form: Literal[True]) -> bytes | None: def getpeercert(self, binary_form: typing.Literal[True]) -> bytes | None:
... ...
def getpeercert(self, binary_form: bool = False) -> _TYPE_PEER_CERT_RET: def getpeercert(self, binary_form: bool = False) -> _TYPE_PEER_CERT_RET:

View file

@ -2477,14 +2477,14 @@ def _save_config(force=False, **kwargs):
try: try:
if check_valid_config(CONFIG_FILE): if check_valid_config(CONFIG_FILE):
for _t in range(0, 3): for _t in range(0, 3):
copy_file(CONFIG_FILE, backup_config) copy_file(CONFIG_FILE, backup_config)
if not check_valid_config(backup_config): if not check_valid_config(backup_config):
if 2 > _t: if 2 > _t:
logger.debug('backup config file seems to be invalid, retrying...') logger.debug('backup config file seems to be invalid, retrying...')
else: else:
logger.warning('backup config file seems to be invalid, not backing up.') logger.warning('backup config file seems to be invalid, not backing up.')
backup_config = None backup_config = None
remove_file_perm(backup_config) remove_file_perm(backup_config)
2 > _t and time.sleep(3) 2 > _t and time.sleep(3)
else: else:
break break