Merge branch 'feature/UpdateRequests' into dev

This commit is contained in:
JackDandy 2024-06-05 08:48:34 +01:00
commit a515073374
10 changed files with 273 additions and 52 deletions

View file

@ -1,4 +1,9 @@
### 3.32.0 (2024-xx-xx xx:xx:00 UTC)
* Update Requests library 2.31.0 (8812812) to 2.32.3 (0e322af)
### 3.31.0 (2024-06-05 08:00:00 UTC)
* Update Apprise 1.3.0 (6458ab0) to 1.6.0 (0c0d5da)
* Update attr 22.2.0 (683d056) to 23.1.0 (67e4ff2)

View file

@ -83,7 +83,11 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver
# charset_normalizer >= 2.0.0 < 4.0.0 # charset_normalizer >= 2.0.0 < 4.0.0
assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0) assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0)
else: else:
raise Exception("You need either charset_normalizer or chardet installed") warnings.warn(
"Unable to find acceptable character detection dependency "
"(chardet or charset_normalizer).",
RequestsDependencyWarning,
)
def _check_cryptography(cryptography_version): def _check_cryptography(cryptography_version):

View file

@ -5,8 +5,8 @@
__title__ = "requests" __title__ = "requests"
__description__ = "Python HTTP for Humans." __description__ = "Python HTTP for Humans."
__url__ = "https://requests.readthedocs.io" __url__ = "https://requests.readthedocs.io"
__version__ = "2.31.0" __version__ = "2.32.3"
__build__ = 0x023100 __build__ = 0x023203
__author__ = "Kenneth Reitz" __author__ = "Kenneth Reitz"
__author_email__ = "me@kennethreitz.org" __author_email__ = "me@kennethreitz.org"
__license__ = "Apache-2.0" __license__ = "Apache-2.0"

View file

@ -8,6 +8,8 @@ and maintain connections.
import os.path import os.path
import socket # noqa: F401 import socket # noqa: F401
import typing
import warnings
from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
from urllib3.exceptions import HTTPError as _HTTPError from urllib3.exceptions import HTTPError as _HTTPError
@ -25,6 +27,7 @@ from urllib3.poolmanager import PoolManager, proxy_from_url
from urllib3.util import Timeout as TimeoutSauce from urllib3.util import Timeout as TimeoutSauce
from urllib3.util import parse_url from urllib3.util import parse_url
from urllib3.util.retry import Retry from urllib3.util.retry import Retry
from urllib3.util.ssl_ import create_urllib3_context
from .auth import _basic_auth_str from .auth import _basic_auth_str
from .compat import basestring, urlparse from .compat import basestring, urlparse
@ -61,12 +64,76 @@ except ImportError:
raise InvalidSchema("Missing dependencies for SOCKS support.") raise InvalidSchema("Missing dependencies for SOCKS support.")
if typing.TYPE_CHECKING:
from .models import PreparedRequest
DEFAULT_POOLBLOCK = False DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10 DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0 DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None DEFAULT_POOL_TIMEOUT = None
# Pre-build a default SSLContext once at import time so that standard
# HTTPS requests can reuse it instead of paying the cost of the slow
# load_verify_locations() call on every new connection.
try:
    import ssl  # noqa: F401

    _preloaded_ssl_context = create_urllib3_context()
    _preloaded_ssl_context.load_verify_locations(
        extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
    )
except ImportError:
    # Bypass default SSLContext creation when Python
    # interpreter isn't built with the ssl module.
    _preloaded_ssl_context = None
def _urllib3_request_context(
    request: "PreparedRequest",
    verify: "bool | str | None",
    client_cert: "typing.Tuple[str, str] | str | None",
    poolmanager: "PoolManager",
) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])":
    """Translate a request's TLS settings into urllib3 pool-key parameters.

    Returns a ``(host_params, pool_kwargs)`` tuple: ``host_params`` holds
    the scheme/host/port used to select a connection pool, and
    ``pool_kwargs`` holds the SSL-related keyword arguments for that pool.
    """
    host_params = {}
    pool_kwargs = {}
    parsed_request_url = urlparse(request.url)
    scheme = parsed_request_url.scheme.lower()
    port = parsed_request_url.port

    # Determine if we have and should use our default SSLContext
    # to optimize performance on standard requests.
    poolmanager_kwargs = getattr(poolmanager, "connection_pool_kw", {})
    has_poolmanager_ssl_context = poolmanager_kwargs.get("ssl_context")
    should_use_default_ssl_context = (
        _preloaded_ssl_context is not None and not has_poolmanager_ssl_context
    )

    cert_reqs = "CERT_REQUIRED"
    if verify is False:
        cert_reqs = "CERT_NONE"
    elif verify is True and should_use_default_ssl_context:
        pool_kwargs["ssl_context"] = _preloaded_ssl_context
    elif isinstance(verify, str):
        # A string is a path to either a CA bundle file or a directory of
        # CA certificates.
        if not os.path.isdir(verify):
            pool_kwargs["ca_certs"] = verify
        else:
            pool_kwargs["ca_cert_dir"] = verify
    pool_kwargs["cert_reqs"] = cert_reqs

    if client_cert is not None:
        if isinstance(client_cert, tuple) and len(client_cert) == 2:
            pool_kwargs["cert_file"] = client_cert[0]
            pool_kwargs["key_file"] = client_cert[1]
        else:
            # According to our docs, we allow users to specify just the client
            # cert path
            pool_kwargs["cert_file"] = client_cert

    host_params = {
        "scheme": scheme,
        "host": parsed_request_url.hostname,
        "port": port,
    }
    return host_params, pool_kwargs
class BaseAdapter: class BaseAdapter:
"""The Base Transport Adapter""" """The Base Transport Adapter"""
@ -247,23 +314,22 @@ class HTTPAdapter(BaseAdapter):
:param cert: The SSL certificate to verify. :param cert: The SSL certificate to verify.
""" """
if url.lower().startswith("https") and verify: if url.lower().startswith("https") and verify:
cert_loc = None conn.cert_reqs = "CERT_REQUIRED"
# Allow self-specified cert location. # Only load the CA certificates if 'verify' is a string indicating the CA bundle to use.
# Otherwise, if verify is a boolean, we don't load anything since
# the connection will be using a context with the default certificates already loaded,
# and this avoids a call to the slow load_verify_locations()
if verify is not True: if verify is not True:
# `verify` must be a str with a path then
cert_loc = verify cert_loc = verify
if not cert_loc: if not os.path.exists(cert_loc):
cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
if not cert_loc or not os.path.exists(cert_loc):
raise OSError( raise OSError(
f"Could not find a suitable TLS CA certificate bundle, " f"Could not find a suitable TLS CA certificate bundle, "
f"invalid path: {cert_loc}" f"invalid path: {cert_loc}"
) )
conn.cert_reqs = "CERT_REQUIRED"
if not os.path.isdir(cert_loc): if not os.path.isdir(cert_loc):
conn.ca_certs = cert_loc conn.ca_certs = cert_loc
else: else:
@ -327,8 +393,110 @@ class HTTPAdapter(BaseAdapter):
return response return response
def build_connection_pool_key_attributes(self, request, verify, cert=None):
    """Build the PoolKey attributes used by urllib3 to return a connection.

    This looks at the PreparedRequest, the user-specified verify value,
    and the value of the cert parameter to determine what PoolKey values
    to use to select a connection from a given urllib3 Connection Pool.

    The SSL related pool key arguments are not consistently set. As of
    this writing, use the following to determine what keys may be in that
    dictionary:

    * If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the
      default Requests SSL Context
    * If ``verify`` is ``False``, ``"ssl_context"`` will not be set but
      ``"cert_reqs"`` will be set
    * If ``verify`` is a string, (i.e., it is a user-specified trust bundle)
      ``"ca_certs"`` will be set if the string is not a directory recognized
      by :py:func:`os.path.isdir`, otherwise ``"ca_cert_dir"`` will be
      set.
    * If ``"cert"`` is specified, ``"cert_file"`` will always be set. If
      ``"cert"`` is a tuple with a second item, ``"key_file"`` will also
      be present

    To override these settings, one may subclass this class, call this
    method and use the above logic to change parameters as desired. For
    example, if one wishes to use a custom :py:class:`ssl.SSLContext` one
    must both set ``"ssl_context"`` and based on what else they require,
    alter the other keys to ensure the desired behaviour.

    :param request:
        The PreparedRequest being sent over the connection.
    :type request:
        :class:`~requests.models.PreparedRequest`
    :param verify:
        Either a boolean, in which case it controls whether
        we verify the server's TLS certificate, or a string, in which case it
        must be a path to a CA bundle to use.
    :param cert:
        (optional) Any user-provided SSL certificate for client
        authentication (a.k.a., mTLS). This may be a string (i.e., just
        the path to a file which holds both certificate and key) or a
        tuple of length 2 with the certificate file path and key file
        path.
    :returns:
        A tuple of two dictionaries. The first is the "host parameters"
        portion of the Pool Key including scheme, hostname, and port. The
        second is a dictionary of SSLContext related parameters.
    """
    return _urllib3_request_context(request, verify, cert, self.poolmanager)
def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
    """Returns a urllib3 connection for the given request and TLS settings.
    This should not be called from user code, and is only exposed for use
    when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param request:
        The :class:`PreparedRequest <PreparedRequest>` object to be sent
        over the connection.
    :param verify:
        Either a boolean, in which case it controls whether we verify the
        server's TLS certificate, or a string, in which case it must be a
        path to a CA bundle to use.
    :param proxies:
        (optional) The proxies dictionary to apply to the request.
    :param cert:
        (optional) Any user-provided SSL certificate to be used for client
        authentication (a.k.a., mTLS).
    :rtype:
        urllib3.ConnectionPool
    """
    proxy = select_proxy(request.url, proxies)
    try:
        # Derive the pool key (host parameters + SSL kwargs) for this request.
        host_params, pool_kwargs = self.build_connection_pool_key_attributes(
            request,
            verify,
            cert,
        )
    except ValueError as e:
        raise InvalidURL(e, request=request)
    if proxy:
        proxy = prepend_scheme_if_needed(proxy, "http")
        proxy_url = parse_url(proxy)
        if not proxy_url.host:
            raise InvalidProxyURL(
                "Please check proxy URL. It is malformed "
                "and could be missing the host."
            )
        proxy_manager = self.proxy_manager_for(proxy)
        conn = proxy_manager.connection_from_host(
            **host_params, pool_kwargs=pool_kwargs
        )
    else:
        # Only scheme should be lower case
        conn = self.poolmanager.connection_from_host(
            **host_params, pool_kwargs=pool_kwargs
        )

    return conn
def get_connection(self, url, proxies=None): def get_connection(self, url, proxies=None):
"""Returns a urllib3 connection for the given URL. This should not be """DEPRECATED: Users should move to `get_connection_with_tls_context`
for all subclasses of HTTPAdapter using Requests>=2.32.2.
Returns a urllib3 connection for the given URL. This should not be
called from user code, and is only exposed for use when subclassing the called from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
@ -336,6 +504,15 @@ class HTTPAdapter(BaseAdapter):
:param proxies: (optional) A Requests-style dictionary of proxies used on this request. :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
:rtype: urllib3.ConnectionPool :rtype: urllib3.ConnectionPool
""" """
warnings.warn(
(
"`get_connection` has been deprecated in favor of "
"`get_connection_with_tls_context`. Custom HTTPAdapter subclasses "
"will need to migrate for Requests>=2.32.2. Please see "
"https://github.com/psf/requests/pull/6710 for more details."
),
DeprecationWarning,
)
proxy = select_proxy(url, proxies) proxy = select_proxy(url, proxies)
if proxy: if proxy:
@ -390,6 +567,9 @@ class HTTPAdapter(BaseAdapter):
using_socks_proxy = proxy_scheme.startswith("socks") using_socks_proxy = proxy_scheme.startswith("socks")
url = request.path_url url = request.path_url
if url.startswith("//"): # Don't confuse urllib3
url = f"/{url.lstrip('/')}"
if is_proxied_http_request and not using_socks_proxy: if is_proxied_http_request and not using_socks_proxy:
url = urldefragauth(request.url) url = urldefragauth(request.url)
@ -450,7 +630,9 @@ class HTTPAdapter(BaseAdapter):
""" """
try: try:
conn = self.get_connection(request.url, proxies) conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e: except LocationValueError as e:
raise InvalidURL(e, request=request) raise InvalidURL(e, request=request)

View file

@ -7,13 +7,28 @@ between Python 2 and Python 3. It remains for backwards
compatibility until the next major version. compatibility until the next major version.
""" """
try: import importlib
import chardet
except ImportError:
import charset_normalizer as chardet
import sys import sys
# -------------------
# Character Detection
# -------------------
def _resolve_char_detection():
"""Find supported character detection libraries."""
chardet = None
for lib in ("chardet", "charset_normalizer"):
if chardet is None:
try:
chardet = importlib.import_module(lib)
except ImportError:
pass
return chardet
chardet = _resolve_char_detection()
# ------- # -------
# Pythons # Pythons
# ------- # -------

View file

@ -41,6 +41,16 @@ class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
CompatJSONDecodeError.__init__(self, *args) CompatJSONDecodeError.__init__(self, *args)
InvalidJSONError.__init__(self, *self.args, **kwargs) InvalidJSONError.__init__(self, *self.args, **kwargs)
def __reduce__(self):
    """
    The __reduce__ method called when pickling the object must
    be the one from the JSONDecodeError (be it json/simplejson)
    as it expects all the arguments for instantiation, not just
    one like the IOError, and the MRO would by default call the
    __reduce__ method from the IOError due to the inheritance order.
    """
    # Delegate explicitly so pickling round-trips with the full argument set.
    return CompatJSONDecodeError.__reduce__(self)
class HTTPError(RequestException): class HTTPError(RequestException):
"""An HTTP error occurred.""" """An HTTP error occurred."""

View file

@ -789,7 +789,12 @@ class Response:
@property @property
def apparent_encoding(self): def apparent_encoding(self):
"""The apparent encoding, provided by the charset_normalizer or chardet libraries.""" """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
if chardet is not None:
return chardet.detect(self.content)["encoding"] return chardet.detect(self.content)["encoding"]
else:
# If no character detection library is available, we'll fall back
# to a standard Python utf-8 str.
return "utf-8"
def iter_content(self, chunk_size=1, decode_unicode=False): def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the """Iterates over the response data. When stream=True is set on the

View file

@ -1,13 +1,6 @@
import sys import sys
try: from .compat import chardet
import chardet
except ImportError:
import warnings
import charset_normalizer as chardet
warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer")
# This code exists for backwards compatibility reasons. # This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :) # I don't like it either. Just look the other way. :)
@ -20,9 +13,11 @@ for package in ("urllib3", "idna"):
if mod == package or mod.startswith(f"{package}."): if mod == package or mod.startswith(f"{package}."):
sys.modules[f"requests.packages.{mod}"] = sys.modules[mod] sys.modules[f"requests.packages.{mod}"] = sys.modules[mod]
target = chardet.__name__ if chardet is not None:
for mod in list(sys.modules): target = chardet.__name__
for mod in list(sys.modules):
if mod == target or mod.startswith(f"{target}."): if mod == target or mod.startswith(f"{target}."):
target = target.replace(target, "chardet") imported_mod = sys.modules[mod]
sys.modules[f"requests.packages.{target}"] = sys.modules[mod] sys.modules[f"requests.packages.{mod}"] = imported_mod
# Kinda cool, though, right? mod = mod.replace(target, "chardet")
sys.modules[f"requests.packages.{mod}"] = imported_mod

View file

@ -24,7 +24,7 @@ _codes = {
# Informational. # Informational.
100: ("continue",), 100: ("continue",),
101: ("switching_protocols",), 101: ("switching_protocols",),
102: ("processing",), 102: ("processing", "early-hints"),
103: ("checkpoint",), 103: ("checkpoint",),
122: ("uri_too_long", "request_uri_too_long"), 122: ("uri_too_long", "request_uri_too_long"),
200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", ""), 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", ""),
@ -65,8 +65,8 @@ _codes = {
410: ("gone",), 410: ("gone",),
411: ("length_required",), 411: ("length_required",),
412: ("precondition_failed", "precondition"), 412: ("precondition_failed", "precondition"),
413: ("request_entity_too_large",), 413: ("request_entity_too_large", "content_too_large"),
414: ("request_uri_too_large",), 414: ("request_uri_too_large", "uri_too_long"),
415: ("unsupported_media_type", "unsupported_media", "media_type"), 415: ("unsupported_media_type", "unsupported_media", "media_type"),
416: ( 416: (
"requested_range_not_satisfiable", "requested_range_not_satisfiable",
@ -76,10 +76,10 @@ _codes = {
417: ("expectation_failed",), 417: ("expectation_failed",),
418: ("im_a_teapot", "teapot", "i_am_a_teapot"), 418: ("im_a_teapot", "teapot", "i_am_a_teapot"),
421: ("misdirected_request",), 421: ("misdirected_request",),
422: ("unprocessable_entity", "unprocessable"), 422: ("unprocessable_entity", "unprocessable", "unprocessable_content"),
423: ("locked",), 423: ("locked",),
424: ("failed_dependency", "dependency"), 424: ("failed_dependency", "dependency"),
425: ("unordered_collection", "unordered"), 425: ("unordered_collection", "unordered", "too_early"),
426: ("upgrade_required", "upgrade"), 426: ("upgrade_required", "upgrade"),
428: ("precondition_required", "precondition"), 428: ("precondition_required", "precondition"),
429: ("too_many_requests", "too_many"), 429: ("too_many_requests", "too_many"),

View file

@ -97,6 +97,8 @@ if sys.platform == "win32":
# '<local>' string by the localhost entry and the corresponding # '<local>' string by the localhost entry and the corresponding
# canonical entry. # canonical entry.
proxyOverride = proxyOverride.split(";") proxyOverride = proxyOverride.split(";")
# filter out empty strings to avoid re.match return true in the following code.
proxyOverride = filter(None, proxyOverride)
# now check if we match one of the registry values. # now check if we match one of the registry values.
for test in proxyOverride: for test in proxyOverride:
if test == "<local>": if test == "<local>":
@ -134,6 +136,9 @@ def super_len(o):
total_length = None total_length = None
current_position = 0 current_position = 0
if isinstance(o, str):
o = o.encode("utf-8")
if hasattr(o, "__len__"): if hasattr(o, "__len__"):
total_length = len(o) total_length = len(o)
@ -859,7 +864,7 @@ def select_proxy(url, proxies):
def resolve_proxies(request, proxies, trust_env=True): def resolve_proxies(request, proxies, trust_env=True):
"""This method takes proxy information from a request and configuration """This method takes proxy information from a request and configuration
input to resolve a mapping of target proxies. This will consider settings input to resolve a mapping of target proxies. This will consider settings
such a NO_PROXY to strip proxy configurations. such as NO_PROXY to strip proxy configurations.
:param request: Request or PreparedRequest :param request: Request or PreparedRequest
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
@ -1051,7 +1056,7 @@ def _validate_header_part(header, header_part, header_validator_index):
if not validator.match(header_part): if not validator.match(header_part):
header_kind = "name" if header_validator_index == 0 else "value" header_kind = "name" if header_validator_index == 0 else "value"
raise InvalidHeader( raise InvalidHeader(
f"Invalid leading whitespace, reserved character(s), or return" f"Invalid leading whitespace, reserved character(s), or return "
f"character(s) in header {header_kind}: {header_part!r}" f"character(s) in header {header_kind}: {header_part!r}"
) )