mirror of
https://github.com/SickGear/SickGear.git
synced 2024-12-03 18:03:37 +00:00
Merge pull request #622 from JackDandy/feature/UpdateRequests
Update Requests library 2.7.0 (5d6d1bc) to 2.9.1 (a1c9b84).
This commit is contained in:
commit
eb15b8d925
40 changed files with 1099 additions and 597 deletions
|
@ -16,6 +16,7 @@
|
||||||
* Update html5lib 0.999 to 0.99999999/1.0b9 (46dae3d)
|
* Update html5lib 0.999 to 0.99999999/1.0b9 (46dae3d)
|
||||||
* Update PNotify library 2.0.1 to 2.1.0
|
* Update PNotify library 2.0.1 to 2.1.0
|
||||||
* Update profilehooks 1.4 to 1.8.2.dev0 (ee3f1a8)
|
* Update profilehooks 1.4 to 1.8.2.dev0 (ee3f1a8)
|
||||||
|
* Update Requests library 2.7.0 (5d6d1bc) to 2.9.1 (a1c9b84)
|
||||||
|
|
||||||
|
|
||||||
### 0.11.0 (2016-01-10 22:30:00 UTC)
|
### 0.11.0 (2016-01-10 22:30:00 UTC)
|
||||||
|
|
|
@ -1,13 +0,0 @@
|
||||||
Copyright 2015 Kenneth Reitz
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
|
@ -1,54 +0,0 @@
|
||||||
Requests includes some vendorized python libraries to ease installation.
|
|
||||||
|
|
||||||
Urllib3 License
|
|
||||||
===============
|
|
||||||
|
|
||||||
This is the MIT license: http://www.opensource.org/licenses/mit-license.php
|
|
||||||
|
|
||||||
Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt),
|
|
||||||
Modifications copyright 2012 Kenneth Reitz.
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining
|
|
||||||
a copy of this software and associated documentation files (the
|
|
||||||
"Software"), to deal in the Software without restriction, including
|
|
||||||
without limitation the rights to use, copy, modify, merge, publish,
|
|
||||||
distribute, sublicense, and/or sell copies of the Software, and to
|
|
||||||
permit persons to whom the Software is furnished to do so, subject to
|
|
||||||
the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be
|
|
||||||
included in all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
||||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
|
||||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
|
||||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
|
||||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
|
|
||||||
Chardet License
|
|
||||||
===============
|
|
||||||
|
|
||||||
This library is free software; you can redistribute it and/or
|
|
||||||
modify it under the terms of the GNU Lesser General Public
|
|
||||||
License as published by the Free Software Foundation; either
|
|
||||||
version 2.1 of the License, or (at your option) any later version.
|
|
||||||
|
|
||||||
This library is distributed in the hope that it will be useful,
|
|
||||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
||||||
Lesser General Public License for more details.
|
|
||||||
|
|
||||||
You should have received a copy of the GNU Lesser General Public
|
|
||||||
License along with this library; if not, write to the Free Software
|
|
||||||
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
|
||||||
02110-1301 USA
|
|
||||||
|
|
||||||
|
|
||||||
CA Bundle License
|
|
||||||
=================
|
|
||||||
|
|
||||||
This Source Code Form is subject to the terms of the Mozilla Public
|
|
||||||
License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
||||||
file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
|
|
@ -42,8 +42,8 @@ is at <http://python-requests.org>.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__title__ = 'requests'
|
__title__ = 'requests'
|
||||||
__version__ = '2.7.0'
|
__version__ = '2.9.1'
|
||||||
__build__ = 0x020700
|
__build__ = 0x020901
|
||||||
__author__ = 'Kenneth Reitz'
|
__author__ = 'Kenneth Reitz'
|
||||||
__license__ = 'Apache 2.0'
|
__license__ = 'Apache 2.0'
|
||||||
__copyright__ = 'Copyright 2015 Kenneth Reitz'
|
__copyright__ = 'Copyright 2015 Kenneth Reitz'
|
||||||
|
@ -62,7 +62,8 @@ from .sessions import session, Session
|
||||||
from .status_codes import codes
|
from .status_codes import codes
|
||||||
from .exceptions import (
|
from .exceptions import (
|
||||||
RequestException, Timeout, URLRequired,
|
RequestException, Timeout, URLRequired,
|
||||||
TooManyRedirects, HTTPError, ConnectionError
|
TooManyRedirects, HTTPError, ConnectionError,
|
||||||
|
FileModeWarning,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Set default logging handler to avoid "No handler found" warnings.
|
# Set default logging handler to avoid "No handler found" warnings.
|
||||||
|
@ -75,3 +76,8 @@ except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
logging.getLogger(__name__).addHandler(NullHandler())
|
logging.getLogger(__name__).addHandler(NullHandler())
|
||||||
|
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
# FileModeWarnings go off per the default.
|
||||||
|
warnings.simplefilter('default', FileModeWarning, append=True)
|
||||||
|
|
|
@ -8,6 +8,7 @@ This module contains the transport adapters that Requests uses to define
|
||||||
and maintain connections.
|
and maintain connections.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import os.path
|
||||||
import socket
|
import socket
|
||||||
|
|
||||||
from .models import Response
|
from .models import Response
|
||||||
|
@ -17,11 +18,14 @@ from .packages.urllib3.util import Timeout as TimeoutSauce
|
||||||
from .packages.urllib3.util.retry import Retry
|
from .packages.urllib3.util.retry import Retry
|
||||||
from .compat import urlparse, basestring
|
from .compat import urlparse, basestring
|
||||||
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
|
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
|
||||||
prepend_scheme_if_needed, get_auth_from_url, urldefragauth)
|
prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
|
||||||
|
select_proxy)
|
||||||
from .structures import CaseInsensitiveDict
|
from .structures import CaseInsensitiveDict
|
||||||
|
from .packages.urllib3.exceptions import ClosedPoolError
|
||||||
from .packages.urllib3.exceptions import ConnectTimeoutError
|
from .packages.urllib3.exceptions import ConnectTimeoutError
|
||||||
from .packages.urllib3.exceptions import HTTPError as _HTTPError
|
from .packages.urllib3.exceptions import HTTPError as _HTTPError
|
||||||
from .packages.urllib3.exceptions import MaxRetryError
|
from .packages.urllib3.exceptions import MaxRetryError
|
||||||
|
from .packages.urllib3.exceptions import NewConnectionError
|
||||||
from .packages.urllib3.exceptions import ProxyError as _ProxyError
|
from .packages.urllib3.exceptions import ProxyError as _ProxyError
|
||||||
from .packages.urllib3.exceptions import ProtocolError
|
from .packages.urllib3.exceptions import ProtocolError
|
||||||
from .packages.urllib3.exceptions import ReadTimeoutError
|
from .packages.urllib3.exceptions import ReadTimeoutError
|
||||||
|
@ -104,7 +108,7 @@ class HTTPAdapter(BaseAdapter):
|
||||||
|
|
||||||
def __setstate__(self, state):
|
def __setstate__(self, state):
|
||||||
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
|
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
|
||||||
# because self.poolmanager uses a lambda function, which isn't pickleable.
|
# self.poolmanager uses a lambda function, which isn't pickleable.
|
||||||
self.proxy_manager = {}
|
self.proxy_manager = {}
|
||||||
self.config = {}
|
self.config = {}
|
||||||
|
|
||||||
|
@ -182,10 +186,15 @@ class HTTPAdapter(BaseAdapter):
|
||||||
raise Exception("Could not find a suitable SSL CA certificate bundle.")
|
raise Exception("Could not find a suitable SSL CA certificate bundle.")
|
||||||
|
|
||||||
conn.cert_reqs = 'CERT_REQUIRED'
|
conn.cert_reqs = 'CERT_REQUIRED'
|
||||||
conn.ca_certs = cert_loc
|
|
||||||
|
if not os.path.isdir(cert_loc):
|
||||||
|
conn.ca_certs = cert_loc
|
||||||
|
else:
|
||||||
|
conn.ca_cert_dir = cert_loc
|
||||||
else:
|
else:
|
||||||
conn.cert_reqs = 'CERT_NONE'
|
conn.cert_reqs = 'CERT_NONE'
|
||||||
conn.ca_certs = None
|
conn.ca_certs = None
|
||||||
|
conn.ca_cert_dir = None
|
||||||
|
|
||||||
if cert:
|
if cert:
|
||||||
if not isinstance(cert, basestring):
|
if not isinstance(cert, basestring):
|
||||||
|
@ -238,8 +247,7 @@ class HTTPAdapter(BaseAdapter):
|
||||||
:param url: The URL to connect to.
|
:param url: The URL to connect to.
|
||||||
:param proxies: (optional) A Requests-style dictionary of proxies used on this request.
|
:param proxies: (optional) A Requests-style dictionary of proxies used on this request.
|
||||||
"""
|
"""
|
||||||
proxies = proxies or {}
|
proxy = select_proxy(url, proxies)
|
||||||
proxy = proxies.get(urlparse(url.lower()).scheme)
|
|
||||||
|
|
||||||
if proxy:
|
if proxy:
|
||||||
proxy = prepend_scheme_if_needed(proxy, 'http')
|
proxy = prepend_scheme_if_needed(proxy, 'http')
|
||||||
|
@ -272,12 +280,10 @@ class HTTPAdapter(BaseAdapter):
|
||||||
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
||||||
|
|
||||||
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
||||||
:param proxies: A dictionary of schemes to proxy URLs.
|
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
|
||||||
"""
|
"""
|
||||||
proxies = proxies or {}
|
proxy = select_proxy(request.url, proxies)
|
||||||
scheme = urlparse(request.url).scheme
|
scheme = urlparse(request.url).scheme
|
||||||
proxy = proxies.get(scheme)
|
|
||||||
|
|
||||||
if proxy and scheme != 'https':
|
if proxy and scheme != 'https':
|
||||||
url = urldefragauth(request.url)
|
url = urldefragauth(request.url)
|
||||||
else:
|
else:
|
||||||
|
@ -310,7 +316,6 @@ class HTTPAdapter(BaseAdapter):
|
||||||
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
||||||
|
|
||||||
:param proxies: The url of the proxy being used for this request.
|
:param proxies: The url of the proxy being used for this request.
|
||||||
:param kwargs: Optional additional keyword arguments.
|
|
||||||
"""
|
"""
|
||||||
headers = {}
|
headers = {}
|
||||||
username, password = get_auth_from_url(proxy)
|
username, password = get_auth_from_url(proxy)
|
||||||
|
@ -395,7 +400,15 @@ class HTTPAdapter(BaseAdapter):
|
||||||
low_conn.send(b'\r\n')
|
low_conn.send(b'\r\n')
|
||||||
low_conn.send(b'0\r\n\r\n')
|
low_conn.send(b'0\r\n\r\n')
|
||||||
|
|
||||||
r = low_conn.getresponse()
|
# Receive the response from the server
|
||||||
|
try:
|
||||||
|
# For Python 2.7+ versions, use buffering of HTTP
|
||||||
|
# responses
|
||||||
|
r = low_conn.getresponse(buffering=True)
|
||||||
|
except TypeError:
|
||||||
|
# For compatibility with Python 2.6 versions and back
|
||||||
|
r = low_conn.getresponse()
|
||||||
|
|
||||||
resp = HTTPResponse.from_httplib(
|
resp = HTTPResponse.from_httplib(
|
||||||
r,
|
r,
|
||||||
pool=conn,
|
pool=conn,
|
||||||
|
@ -414,13 +427,18 @@ class HTTPAdapter(BaseAdapter):
|
||||||
|
|
||||||
except MaxRetryError as e:
|
except MaxRetryError as e:
|
||||||
if isinstance(e.reason, ConnectTimeoutError):
|
if isinstance(e.reason, ConnectTimeoutError):
|
||||||
raise ConnectTimeout(e, request=request)
|
# TODO: Remove this in 3.0.0: see #2811
|
||||||
|
if not isinstance(e.reason, NewConnectionError):
|
||||||
|
raise ConnectTimeout(e, request=request)
|
||||||
|
|
||||||
if isinstance(e.reason, ResponseError):
|
if isinstance(e.reason, ResponseError):
|
||||||
raise RetryError(e, request=request)
|
raise RetryError(e, request=request)
|
||||||
|
|
||||||
raise ConnectionError(e, request=request)
|
raise ConnectionError(e, request=request)
|
||||||
|
|
||||||
|
except ClosedPoolError as e:
|
||||||
|
raise ConnectionError(e, request=request)
|
||||||
|
|
||||||
except _ProxyError as e:
|
except _ProxyError as e:
|
||||||
raise ProxyError(e)
|
raise ProxyError(e)
|
||||||
|
|
||||||
|
|
|
@ -33,7 +33,7 @@ def request(method, url, **kwargs):
|
||||||
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
|
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
|
||||||
:type allow_redirects: bool
|
:type allow_redirects: bool
|
||||||
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
|
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
|
||||||
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
|
:param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to ``True``.
|
||||||
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
|
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
|
||||||
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
|
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
|
||||||
:return: :class:`Response <Response>` object
|
:return: :class:`Response <Response>` object
|
||||||
|
@ -46,13 +46,11 @@ def request(method, url, **kwargs):
|
||||||
<Response [200]>
|
<Response [200]>
|
||||||
"""
|
"""
|
||||||
|
|
||||||
session = sessions.Session()
|
# By using the 'with' statement we are sure the session is closed, thus we
|
||||||
response = session.request(method=method, url=url, **kwargs)
|
# avoid leaving sockets open which can trigger a ResourceWarning in some
|
||||||
# By explicitly closing the session, we avoid leaving sockets open which
|
# cases, and look like a memory leak in others.
|
||||||
# can trigger a ResourceWarning in some cases, and look like a memory leak
|
with sessions.Session() as session:
|
||||||
# in others.
|
return session.request(method=method, url=url, **kwargs)
|
||||||
session.close()
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
|
||||||
def get(url, params=None, **kwargs):
|
def get(url, params=None, **kwargs):
|
||||||
|
|
|
@ -11,6 +11,7 @@ import os
|
||||||
import re
|
import re
|
||||||
import time
|
import time
|
||||||
import hashlib
|
import hashlib
|
||||||
|
import threading
|
||||||
|
|
||||||
from base64 import b64encode
|
from base64 import b64encode
|
||||||
|
|
||||||
|
@ -63,19 +64,26 @@ class HTTPDigestAuth(AuthBase):
|
||||||
def __init__(self, username, password):
|
def __init__(self, username, password):
|
||||||
self.username = username
|
self.username = username
|
||||||
self.password = password
|
self.password = password
|
||||||
self.last_nonce = ''
|
# Keep state in per-thread local storage
|
||||||
self.nonce_count = 0
|
self._thread_local = threading.local()
|
||||||
self.chal = {}
|
|
||||||
self.pos = None
|
def init_per_thread_state(self):
|
||||||
self.num_401_calls = 1
|
# Ensure state is initialized just once per-thread
|
||||||
|
if not hasattr(self._thread_local, 'init'):
|
||||||
|
self._thread_local.init = True
|
||||||
|
self._thread_local.last_nonce = ''
|
||||||
|
self._thread_local.nonce_count = 0
|
||||||
|
self._thread_local.chal = {}
|
||||||
|
self._thread_local.pos = None
|
||||||
|
self._thread_local.num_401_calls = None
|
||||||
|
|
||||||
def build_digest_header(self, method, url):
|
def build_digest_header(self, method, url):
|
||||||
|
|
||||||
realm = self.chal['realm']
|
realm = self._thread_local.chal['realm']
|
||||||
nonce = self.chal['nonce']
|
nonce = self._thread_local.chal['nonce']
|
||||||
qop = self.chal.get('qop')
|
qop = self._thread_local.chal.get('qop')
|
||||||
algorithm = self.chal.get('algorithm')
|
algorithm = self._thread_local.chal.get('algorithm')
|
||||||
opaque = self.chal.get('opaque')
|
opaque = self._thread_local.chal.get('opaque')
|
||||||
|
|
||||||
if algorithm is None:
|
if algorithm is None:
|
||||||
_algorithm = 'MD5'
|
_algorithm = 'MD5'
|
||||||
|
@ -114,12 +122,12 @@ class HTTPDigestAuth(AuthBase):
|
||||||
HA1 = hash_utf8(A1)
|
HA1 = hash_utf8(A1)
|
||||||
HA2 = hash_utf8(A2)
|
HA2 = hash_utf8(A2)
|
||||||
|
|
||||||
if nonce == self.last_nonce:
|
if nonce == self._thread_local.last_nonce:
|
||||||
self.nonce_count += 1
|
self._thread_local.nonce_count += 1
|
||||||
else:
|
else:
|
||||||
self.nonce_count = 1
|
self._thread_local.nonce_count = 1
|
||||||
ncvalue = '%08x' % self.nonce_count
|
ncvalue = '%08x' % self._thread_local.nonce_count
|
||||||
s = str(self.nonce_count).encode('utf-8')
|
s = str(self._thread_local.nonce_count).encode('utf-8')
|
||||||
s += nonce.encode('utf-8')
|
s += nonce.encode('utf-8')
|
||||||
s += time.ctime().encode('utf-8')
|
s += time.ctime().encode('utf-8')
|
||||||
s += os.urandom(8)
|
s += os.urandom(8)
|
||||||
|
@ -128,7 +136,7 @@ class HTTPDigestAuth(AuthBase):
|
||||||
if _algorithm == 'MD5-SESS':
|
if _algorithm == 'MD5-SESS':
|
||||||
HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
|
HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
|
||||||
|
|
||||||
if qop is None:
|
if not qop:
|
||||||
respdig = KD(HA1, "%s:%s" % (nonce, HA2))
|
respdig = KD(HA1, "%s:%s" % (nonce, HA2))
|
||||||
elif qop == 'auth' or 'auth' in qop.split(','):
|
elif qop == 'auth' or 'auth' in qop.split(','):
|
||||||
noncebit = "%s:%s:%s:%s:%s" % (
|
noncebit = "%s:%s:%s:%s:%s" % (
|
||||||
|
@ -139,7 +147,7 @@ class HTTPDigestAuth(AuthBase):
|
||||||
# XXX handle auth-int.
|
# XXX handle auth-int.
|
||||||
return None
|
return None
|
||||||
|
|
||||||
self.last_nonce = nonce
|
self._thread_local.last_nonce = nonce
|
||||||
|
|
||||||
# XXX should the partial digests be encoded too?
|
# XXX should the partial digests be encoded too?
|
||||||
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
|
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
|
||||||
|
@ -158,23 +166,22 @@ class HTTPDigestAuth(AuthBase):
|
||||||
def handle_redirect(self, r, **kwargs):
|
def handle_redirect(self, r, **kwargs):
|
||||||
"""Reset num_401_calls counter on redirects."""
|
"""Reset num_401_calls counter on redirects."""
|
||||||
if r.is_redirect:
|
if r.is_redirect:
|
||||||
self.num_401_calls = 1
|
self._thread_local.num_401_calls = 1
|
||||||
|
|
||||||
def handle_401(self, r, **kwargs):
|
def handle_401(self, r, **kwargs):
|
||||||
"""Takes the given response and tries digest-auth, if needed."""
|
"""Takes the given response and tries digest-auth, if needed."""
|
||||||
|
|
||||||
if self.pos is not None:
|
if self._thread_local.pos is not None:
|
||||||
# Rewind the file position indicator of the body to where
|
# Rewind the file position indicator of the body to where
|
||||||
# it was to resend the request.
|
# it was to resend the request.
|
||||||
r.request.body.seek(self.pos)
|
r.request.body.seek(self._thread_local.pos)
|
||||||
num_401_calls = getattr(self, 'num_401_calls', 1)
|
|
||||||
s_auth = r.headers.get('www-authenticate', '')
|
s_auth = r.headers.get('www-authenticate', '')
|
||||||
|
|
||||||
if 'digest' in s_auth.lower() and num_401_calls < 2:
|
if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
|
||||||
|
|
||||||
self.num_401_calls += 1
|
self._thread_local.num_401_calls += 1
|
||||||
pat = re.compile(r'digest ', flags=re.IGNORECASE)
|
pat = re.compile(r'digest ', flags=re.IGNORECASE)
|
||||||
self.chal = parse_dict_header(pat.sub('', s_auth, count=1))
|
self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
|
||||||
|
|
||||||
# Consume content and release the original connection
|
# Consume content and release the original connection
|
||||||
# to allow our new request to reuse the same one.
|
# to allow our new request to reuse the same one.
|
||||||
|
@ -192,21 +199,25 @@ class HTTPDigestAuth(AuthBase):
|
||||||
|
|
||||||
return _r
|
return _r
|
||||||
|
|
||||||
self.num_401_calls = 1
|
self._thread_local.num_401_calls = 1
|
||||||
return r
|
return r
|
||||||
|
|
||||||
def __call__(self, r):
|
def __call__(self, r):
|
||||||
|
# Initialize per-thread state, if needed
|
||||||
|
self.init_per_thread_state()
|
||||||
# If we have a saved nonce, skip the 401
|
# If we have a saved nonce, skip the 401
|
||||||
if self.last_nonce:
|
if self._thread_local.last_nonce:
|
||||||
r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
|
r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
|
||||||
try:
|
try:
|
||||||
self.pos = r.body.tell()
|
self._thread_local.pos = r.body.tell()
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
# In the case of HTTPDigestAuth being reused and the body of
|
# In the case of HTTPDigestAuth being reused and the body of
|
||||||
# the previous request was a file-like object, pos has the
|
# the previous request was a file-like object, pos has the
|
||||||
# file position of the previous body. Ensure it's set to
|
# file position of the previous body. Ensure it's set to
|
||||||
# None.
|
# None.
|
||||||
self.pos = None
|
self._thread_local.pos = None
|
||||||
r.register_hook('response', self.handle_401)
|
r.register_hook('response', self.handle_401)
|
||||||
r.register_hook('response', self.handle_redirect)
|
r.register_hook('response', self.handle_redirect)
|
||||||
|
self._thread_local.num_401_calls = 1
|
||||||
|
|
||||||
return r
|
return r
|
||||||
|
|
|
@ -1507,38 +1507,6 @@ rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2
|
||||||
9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis=
|
9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis=
|
||||||
-----END CERTIFICATE-----
|
-----END CERTIFICATE-----
|
||||||
|
|
||||||
# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=(c) 2005 TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
|
|
||||||
# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=(c) 2005 TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
|
|
||||||
# Label: "TURKTRUST Certificate Services Provider Root 1"
|
|
||||||
# Serial: 1
|
|
||||||
# MD5 Fingerprint: f1:6a:22:18:c9:cd:df:ce:82:1d:1d:b7:78:5c:a9:a5
|
|
||||||
# SHA1 Fingerprint: 79:98:a3:08:e1:4d:65:85:e6:c2:1e:15:3a:71:9f:ba:5a:d3:4a:d9
|
|
||||||
# SHA256 Fingerprint: 44:04:e3:3b:5e:14:0d:cf:99:80:51:fd:fc:80:28:c7:c8:16:15:c5:ee:73:7b:11:1b:58:82:33:a9:b5:35:a0
|
|
||||||
-----BEGIN CERTIFICATE-----
|
|
||||||
MIID+zCCAuOgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBtzE/MD0GA1UEAww2VMOc
|
|
||||||
UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
|
|
||||||
c8SxMQswCQYDVQQGDAJUUjEPMA0GA1UEBwwGQU5LQVJBMVYwVAYDVQQKDE0oYykg
|
|
||||||
MjAwNSBUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8
|
|
||||||
dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjAeFw0wNTA1MTMxMDI3MTdaFw0xNTAz
|
|
||||||
MjIxMDI3MTdaMIG3MT8wPQYDVQQDDDZUw5xSS1RSVVNUIEVsZWt0cm9uaWsgU2Vy
|
|
||||||
dGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLExCzAJBgNVBAYMAlRSMQ8wDQYD
|
|
||||||
VQQHDAZBTktBUkExVjBUBgNVBAoMTShjKSAyMDA1IFTDnFJLVFJVU1QgQmlsZ2kg
|
|
||||||
xLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2ZW5sacSfaSBIaXptZXRsZXJpIEEu
|
|
||||||
xZ4uMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAylIF1mMD2Bxf3dJ7
|
|
||||||
XfIMYGFbazt0K3gNfUW9InTojAPBxhEqPZW8qZSwu5GXyGl8hMW0kWxsE2qkVa2k
|
|
||||||
heiVfrMArwDCBRj1cJ02i67L5BuBf5OI+2pVu32Fks66WJ/bMsW9Xe8iSi9BB35J
|
|
||||||
YbOG7E6mQW6EvAPs9TscyB/C7qju6hJKjRTP8wrgUDn5CDX4EVmt5yLqS8oUBt5C
|
|
||||||
urKZ8y1UiBAG6uEaPj1nH/vO+3yC6BFdSsG5FOpU2WabfIl9BJpiyelSPJ6c79L1
|
|
||||||
JuTm5Rh8i27fbMx4W09ysstcP4wFjdFMjK2Sx+F4f2VsSQZQLJ4ywtdKxnWKWU51
|
|
||||||
b0dewQIDAQABoxAwDjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4IBAQAV
|
|
||||||
9VX/N5aAWSGk/KEVTCD21F/aAyT8z5Aa9CEKmu46sWrv7/hg0Uw2ZkUd82YCdAR7
|
|
||||||
kjCo3gp2D++Vbr3JN+YaDayJSFvMgzbC9UZcWYJWtNX+I7TYVBxEq8Sn5RTOPEFh
|
|
||||||
fEPmzcSBCYsk+1Ql1haolgxnB2+zUEfjHCQo3SqYpGH+2+oSN7wBGjSFvW5P55Fy
|
|
||||||
B0SFHljKVETd96y5y4khctuPwGkplyqjrhgjlxxBKot8KsF8kOipKMDTkcatKIdA
|
|
||||||
aLX/7KfS0zgYnNN9aV3wxqUeJBujR/xpB2jn5Jq07Q+hh4cCzofSSE7hvP/L8XKS
|
|
||||||
RGQDJereW26fyfJOrN3H
|
|
||||||
-----END CERTIFICATE-----
|
|
||||||
|
|
||||||
# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
|
# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
|
||||||
# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
|
# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
|
||||||
# Label: "TURKTRUST Certificate Services Provider Root 2"
|
# Label: "TURKTRUST Certificate Services Provider Root 2"
|
||||||
|
@ -2110,72 +2078,6 @@ t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
|
||||||
WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
|
WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
|
||||||
-----END CERTIFICATE-----
|
-----END CERTIFICATE-----
|
||||||
|
|
||||||
# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
|
|
||||||
# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
|
|
||||||
# Label: "TC TrustCenter Class 2 CA II"
|
|
||||||
# Serial: 941389028203453866782103406992443
|
|
||||||
# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23
|
|
||||||
# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e
|
|
||||||
# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4
|
|
||||||
-----BEGIN CERTIFICATE-----
|
|
||||||
MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL
|
|
||||||
MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
|
|
||||||
BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
|
|
||||||
Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1
|
|
||||||
OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
|
|
||||||
SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc
|
|
||||||
VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
|
|
||||||
ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf
|
|
||||||
tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg
|
|
||||||
uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J
|
|
||||||
XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK
|
|
||||||
8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99
|
|
||||||
5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud
|
|
||||||
EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3
|
|
||||||
kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
|
|
||||||
dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6
|
|
||||||
Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
|
|
||||||
JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
|
|
||||||
Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
|
|
||||||
TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS
|
|
||||||
GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt
|
|
||||||
ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8
|
|
||||||
au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV
|
|
||||||
hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI
|
|
||||||
dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ==
|
|
||||||
-----END CERTIFICATE-----
|
|
||||||
|
|
||||||
# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
|
|
||||||
# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
|
|
||||||
# Label: "TC TrustCenter Universal CA I"
|
|
||||||
# Serial: 601024842042189035295619584734726
|
|
||||||
# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c
|
|
||||||
# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3
|
|
||||||
# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7
|
|
||||||
-----BEGIN CERTIFICATE-----
|
|
||||||
MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL
|
|
||||||
MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
|
|
||||||
BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1
|
|
||||||
c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx
|
|
||||||
MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg
|
|
||||||
R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD
|
|
||||||
VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN
|
|
||||||
AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR
|
|
||||||
JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T
|
|
||||||
fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu
|
|
||||||
jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z
|
|
||||||
wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ
|
|
||||||
fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD
|
|
||||||
VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO
|
|
||||||
BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G
|
|
||||||
CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1
|
|
||||||
7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn
|
|
||||||
8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs
|
|
||||||
ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT
|
|
||||||
ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/
|
|
||||||
2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY
|
|
||||||
-----END CERTIFICATE-----
|
|
||||||
|
|
||||||
# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
|
# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
|
||||||
# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
|
# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
|
||||||
# Label: "Deutsche Telekom Root CA 2"
|
# Label: "Deutsche Telekom Root CA 2"
|
||||||
|
@ -2206,36 +2108,6 @@ xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
|
||||||
Cm26OWMohpLzGITY+9HPBVZkVw==
|
Cm26OWMohpLzGITY+9HPBVZkVw==
|
||||||
-----END CERTIFICATE-----
|
-----END CERTIFICATE-----
|
||||||
|
|
||||||
# Issuer: CN=ComSign Secured CA O=ComSign
|
|
||||||
# Subject: CN=ComSign Secured CA O=ComSign
|
|
||||||
# Label: "ComSign Secured CA"
|
|
||||||
# Serial: 264725503855295744117309814499492384489
|
|
||||||
# MD5 Fingerprint: 40:01:25:06:8d:21:43:6a:0e:43:00:9c:e7:43:f3:d5
|
|
||||||
# SHA1 Fingerprint: f9:cd:0e:2c:da:76:24:c1:8f:bd:f0:f0:ab:b6:45:b8:f7:fe:d5:7a
|
|
||||||
# SHA256 Fingerprint: 50:79:41:c7:44:60:a0:b4:70:86:22:0d:4e:99:32:57:2a:b5:d1:b5:bb:cb:89:80:ab:1c:b1:76:51:a8:44:d2
|
|
||||||
-----BEGIN CERTIFICATE-----
|
|
||||||
MIIDqzCCApOgAwIBAgIRAMcoRwmzuGxFjB36JPU2TukwDQYJKoZIhvcNAQEFBQAw
|
|
||||||
PDEbMBkGA1UEAxMSQ29tU2lnbiBTZWN1cmVkIENBMRAwDgYDVQQKEwdDb21TaWdu
|
|
||||||
MQswCQYDVQQGEwJJTDAeFw0wNDAzMjQxMTM3MjBaFw0yOTAzMTYxNTA0NTZaMDwx
|
|
||||||
GzAZBgNVBAMTEkNvbVNpZ24gU2VjdXJlZCBDQTEQMA4GA1UEChMHQ29tU2lnbjEL
|
|
||||||
MAkGA1UEBhMCSUwwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGtWhf
|
|
||||||
HZQVw6QIVS3joFd67+l0Kru5fFdJGhFeTymHDEjWaueP1H5XJLkGieQcPOqs49oh
|
|
||||||
gHMhCu95mGwfCP+hUH3ymBvJVG8+pSjsIQQPRbsHPaHA+iqYHU4Gk/v1iDurX8sW
|
|
||||||
v+bznkqH7Rnqwp9D5PGBpX8QTz7RSmKtUxvLg/8HZaWSLWapW7ha9B20IZFKF3ue
|
|
||||||
Mv5WJDmyVIRD9YTC2LxBkMyd1mja6YJQqTtoz7VdApRgFrFD2UNd3V2Hbuq7s8lr
|
|
||||||
9gOUCXDeFhF6K+h2j0kQmHe5Y1yLM5d19guMsqtb3nQgJT/j8xH5h2iGNXHDHYwt
|
|
||||||
6+UarA9z1YJZQIDTAgMBAAGjgacwgaQwDAYDVR0TBAUwAwEB/zBEBgNVHR8EPTA7
|
|
||||||
MDmgN6A1hjNodHRwOi8vZmVkaXIuY29tc2lnbi5jby5pbC9jcmwvQ29tU2lnblNl
|
|
||||||
Y3VyZWRDQS5jcmwwDgYDVR0PAQH/BAQDAgGGMB8GA1UdIwQYMBaAFMFL7XC29z58
|
|
||||||
ADsAj8c+DkWfHl3sMB0GA1UdDgQWBBTBS+1wtvc+fAA7AI/HPg5Fnx5d7DANBgkq
|
|
||||||
hkiG9w0BAQUFAAOCAQEAFs/ukhNQq3sUnjO2QiBq1BW9Cav8cujvR3qQrFHBZE7p
|
|
||||||
iL1DRYHjZiM/EoZNGeQFsOY3wo3aBijJD4mkU6l1P7CW+6tMM1X5eCZGbxs2mPtC
|
|
||||||
dsGCuY7e+0X5YxtiOzkGynd6qDwJz2w2PQ8KRUtpFhpFfTMDZflScZAmlaxMDPWL
|
|
||||||
kz/MdXSFmLr/YnpNH4n+rr2UAJm/EaXc4HnFFgt9AmEd6oX5AhVP51qJThRv4zdL
|
|
||||||
hfXBPGHg/QVBspJ/wx2g0K5SZGBrGMYmnNj1ZOQ2GmKfig8+/21OGVZOIJFsnzQz
|
|
||||||
OjRXUDpvgV4GxvU+fE6OK85lBi5d0ipTdF7Tbieejw==
|
|
||||||
-----END CERTIFICATE-----
|
|
||||||
|
|
||||||
# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
|
# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
|
||||||
# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
|
# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
|
||||||
# Label: "Cybertrust Global Root"
|
# Label: "Cybertrust Global Root"
|
||||||
|
@ -2373,34 +2245,6 @@ h7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOk
|
||||||
LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho
|
LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho
|
||||||
-----END CERTIFICATE-----
|
-----END CERTIFICATE-----
|
||||||
|
|
||||||
# Issuer: CN=Buypass Class 3 CA 1 O=Buypass AS-983163327
|
|
||||||
# Subject: CN=Buypass Class 3 CA 1 O=Buypass AS-983163327
|
|
||||||
# Label: "Buypass Class 3 CA 1"
|
|
||||||
# Serial: 2
|
|
||||||
# MD5 Fingerprint: df:3c:73:59:81:e7:39:50:81:04:4c:34:a2:cb:b3:7b
|
|
||||||
# SHA1 Fingerprint: 61:57:3a:11:df:0e:d8:7e:d5:92:65:22:ea:d0:56:d7:44:b3:23:71
|
|
||||||
# SHA256 Fingerprint: b7:b1:2b:17:1f:82:1d:aa:99:0c:d0:fe:50:87:b1:28:44:8b:a8:e5:18:4f:84:c5:1e:02:b5:c8:fb:96:2b:24
|
|
||||||
-----BEGIN CERTIFICATE-----
|
|
||||||
MIIDUzCCAjugAwIBAgIBAjANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd
|
|
||||||
MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg
|
|
||||||
Q2xhc3MgMyBDQSAxMB4XDTA1MDUwOTE0MTMwM1oXDTE1MDUwOTE0MTMwM1owSzEL
|
|
||||||
MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD
|
|
||||||
VQQDDBRCdXlwYXNzIENsYXNzIDMgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
|
|
||||||
ADCCAQoCggEBAKSO13TZKWTeXx+HgJHqTjnmGcZEC4DVC69TB4sSveZn8AKxifZg
|
|
||||||
isRbsELRwCGoy+Gb72RRtqfPFfV0gGgEkKBYouZ0plNTVUhjP5JW3SROjvi6K//z
|
|
||||||
NIqeKNc0n6wv1g/xpC+9UrJJhW05NfBEMJNGJPO251P7vGGvqaMU+8IXF4Rs4HyI
|
|
||||||
+MkcVyzwPX6UvCWThOiaAJpFBUJXgPROztmuOfbIUxAMZTpHe2DC1vqRycZxbL2R
|
|
||||||
hzyRhkmr8w+gbCZ2Xhysm3HljbybIR6c1jh+JIAVMYKWsUnTYjdbiAwKYjT+p0h+
|
|
||||||
mbEwi5A3lRyoH6UsjfRVyNvdWQrCrXig9IsCAwEAAaNCMEAwDwYDVR0TAQH/BAUw
|
|
||||||
AwEB/zAdBgNVHQ4EFgQUOBTmyPCppAP0Tj4io1vy1uCtQHQwDgYDVR0PAQH/BAQD
|
|
||||||
AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQABZ6OMySU9E2NdFm/soT4JXJEVKirZgCFP
|
|
||||||
Bdy7pYmrEzMqnji3jG8CcmPHc3ceCQa6Oyh7pEfJYWsICCD8igWKH7y6xsL+z27s
|
|
||||||
EzNxZy5p+qksP2bAEllNC1QCkoS72xLvg3BweMhT+t/Gxv/ciC8HwEmdMldg0/L2
|
|
||||||
mSlf56oBzKwzqBwKu5HEA6BvtjT5htOzdlSY9EqBs1OdTUDs5XcTRa9bqh/YL0yC
|
|
||||||
e/4qxFi7T/ye/QNlGioOw6UgFpRreaaiErS7GqQjel/wroQk5PMr+4okoyeYZdow
|
|
||||||
dXb8GZHo2+ubPzK/QJcHJrrM85SFSnonk8+QQtS4Wxam58tAA915
|
|
||||||
-----END CERTIFICATE-----
|
|
||||||
|
|
||||||
# Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
|
# Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
|
||||||
# Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
|
# Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
|
||||||
# Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1"
|
# Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1"
|
||||||
|
@ -5277,6 +5121,112 @@ Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
|
||||||
AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
|
AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
|
||||||
5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
|
5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
|
||||||
-----END CERTIFICATE-----
|
-----END CERTIFICATE-----
|
||||||
|
|
||||||
|
# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
|
||||||
|
# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
|
||||||
|
# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5"
|
||||||
|
# Serial: 156233699172481
|
||||||
|
# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e
|
||||||
|
# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb
|
||||||
|
# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78
|
||||||
|
-----BEGIN CERTIFICATE-----
|
||||||
|
MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE
|
||||||
|
BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn
|
||||||
|
aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg
|
||||||
|
QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg
|
||||||
|
SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0
|
||||||
|
MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD
|
||||||
|
VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8
|
||||||
|
dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF
|
||||||
|
bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB
|
||||||
|
IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom
|
||||||
|
/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR
|
||||||
|
Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3
|
||||||
|
4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z
|
||||||
|
5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0
|
||||||
|
hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID
|
||||||
|
AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/
|
||||||
|
BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX
|
||||||
|
SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l
|
||||||
|
VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq
|
||||||
|
URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf
|
||||||
|
peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF
|
||||||
|
Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW
|
||||||
|
+qtB4Uu2NQvAmxU=
|
||||||
|
-----END CERTIFICATE-----
|
||||||
|
|
||||||
|
# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
|
||||||
|
# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
|
||||||
|
# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6"
|
||||||
|
# Serial: 138134509972618
|
||||||
|
# MD5 Fingerprint: f8:c5:ee:2a:6b:be:95:8d:08:f7:25:4a:ea:71:3e:46
|
||||||
|
# SHA1 Fingerprint: 8a:5c:8c:ee:a5:03:e6:05:56:ba:d8:1b:d4:f6:c9:b0:ed:e5:2f:e0
|
||||||
|
# SHA256 Fingerprint: 8d:e7:86:55:e1:be:7f:78:47:80:0b:93:f6:94:d2:1d:36:8c:c0:6e:03:3e:7f:ab:04:bb:5e:b9:9d:a6:b7:00
|
||||||
|
-----BEGIN CERTIFICATE-----
|
||||||
|
MIIEJjCCAw6gAwIBAgIGfaHyZeyKMA0GCSqGSIb3DQEBCwUAMIGxMQswCQYDVQQG
|
||||||
|
EwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYDVQQKDERUw5xSS1RSVVNUIEJpbGdp
|
||||||
|
IMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBB
|
||||||
|
LsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBI
|
||||||
|
aXptZXQgU2HEn2xhecSxY8Sxc8SxIEg2MB4XDTEzMTIxODA5MDQxMFoXDTIzMTIx
|
||||||
|
NjA5MDQxMFowgbExCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExTTBLBgNV
|
||||||
|
BAoMRFTDnFJLVFJVU1QgQmlsZ2kgxLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2
|
||||||
|
ZW5sacSfaSBIaXptZXRsZXJpIEEuxZ4uMUIwQAYDVQQDDDlUw5xSS1RSVVNUIEVs
|
||||||
|
ZWt0cm9uaWsgU2VydGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLEgSDYwggEi
|
||||||
|
MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCdsGjW6L0UlqMACprx9MfMkU1x
|
||||||
|
eHe59yEmFXNRFpQJRwXiM/VomjX/3EsvMsew7eKC5W/a2uqsxgbPJQ1BgfbBOCK9
|
||||||
|
+bGlprMBvD9QFyv26WZV1DOzXPhDIHiTVRZwGTLmiddk671IUP320EEDwnS3/faA
|
||||||
|
z1vFq6TWlRKb55cTMgPp1KtDWxbtMyJkKbbSk60vbNg9tvYdDjTu0n2pVQ8g9P0p
|
||||||
|
u5FbHH3GQjhtQiht1AH7zYiXSX6484P4tZgvsycLSF5W506jM7NE1qXyGJTtHB6p
|
||||||
|
lVxiSvgNZ1GpryHV+DKdeboaX+UEVU0TRv/yz3THGmNtwx8XEsMeED5gCLMxAgMB
|
||||||
|
AAGjQjBAMB0GA1UdDgQWBBTdVRcT9qzoSCHK77Wv0QAy7Z6MtTAOBgNVHQ8BAf8E
|
||||||
|
BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAb1gNl0Oq
|
||||||
|
FlQ+v6nfkkU/hQu7VtMMUszIv3ZnXuaqs6fvuay0EBQNdH49ba3RfdCaqaXKGDsC
|
||||||
|
QC4qnFAUi/5XfldcEQlLNkVS9z2sFP1E34uXI9TDwe7UU5X+LEr+DXCqu4svLcsy
|
||||||
|
o4LyVN/Y8t3XSHLuSqMplsNEzm61kod2pLv0kmzOLBQJZo6NrRa1xxsJYTvjIKID
|
||||||
|
gI6tflEATseWhvtDmHd9KMeP2Cpu54Rvl0EpABZeTeIT6lnAY2c6RPuY/ATTMHKm
|
||||||
|
9ocJV612ph1jmv3XZch4gyt1O6VbuA1df74jrlZVlFjvH4GMKrLN5ptjnhi85WsG
|
||||||
|
tAuYSyher4hYyw==
|
||||||
|
-----END CERTIFICATE-----
|
||||||
|
|
||||||
|
# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
|
||||||
|
# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
|
||||||
|
# Label: "Certinomis - Root CA"
|
||||||
|
# Serial: 1
|
||||||
|
# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f
|
||||||
|
# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8
|
||||||
|
# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58
|
||||||
|
-----BEGIN CERTIFICATE-----
|
||||||
|
MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET
|
||||||
|
MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb
|
||||||
|
BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz
|
||||||
|
MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx
|
||||||
|
FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g
|
||||||
|
Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2
|
||||||
|
fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl
|
||||||
|
LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV
|
||||||
|
WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF
|
||||||
|
TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb
|
||||||
|
5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc
|
||||||
|
CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri
|
||||||
|
wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ
|
||||||
|
wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG
|
||||||
|
m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4
|
||||||
|
F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng
|
||||||
|
WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
|
||||||
|
BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0
|
||||||
|
2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF
|
||||||
|
AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/
|
||||||
|
0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw
|
||||||
|
F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS
|
||||||
|
g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj
|
||||||
|
qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN
|
||||||
|
h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/
|
||||||
|
ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V
|
||||||
|
btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj
|
||||||
|
Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ
|
||||||
|
8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW
|
||||||
|
gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE=
|
||||||
|
-----END CERTIFICATE-----
|
||||||
# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
|
# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
|
||||||
# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
|
# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
|
||||||
# Label: "Entrust.net Secure Server CA"
|
# Label: "Entrust.net Secure Server CA"
|
||||||
|
|
|
@ -8,6 +8,7 @@ requests.utils imports from here, so be careful with imports.
|
||||||
|
|
||||||
import copy
|
import copy
|
||||||
import time
|
import time
|
||||||
|
import calendar
|
||||||
import collections
|
import collections
|
||||||
from .compat import cookielib, urlparse, urlunparse, Morsel
|
from .compat import cookielib, urlparse, urlunparse, Morsel
|
||||||
|
|
||||||
|
@ -143,10 +144,13 @@ def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
|
||||||
"""
|
"""
|
||||||
clearables = []
|
clearables = []
|
||||||
for cookie in cookiejar:
|
for cookie in cookiejar:
|
||||||
if cookie.name == name:
|
if cookie.name != name:
|
||||||
if domain is None or domain == cookie.domain:
|
continue
|
||||||
if path is None or path == cookie.path:
|
if domain is not None and domain != cookie.domain:
|
||||||
clearables.append((cookie.domain, cookie.path, cookie.name))
|
continue
|
||||||
|
if path is not None and path != cookie.path:
|
||||||
|
continue
|
||||||
|
clearables.append((cookie.domain, cookie.path, cookie.name))
|
||||||
|
|
||||||
for domain, path, name in clearables:
|
for domain, path, name in clearables:
|
||||||
cookiejar.clear(domain, path, name)
|
cookiejar.clear(domain, path, name)
|
||||||
|
@ -365,7 +369,7 @@ def _copy_cookie_jar(jar):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
if hasattr(jar, 'copy'):
|
if hasattr(jar, 'copy'):
|
||||||
# We're dealing with an instane of RequestsCookieJar
|
# We're dealing with an instance of RequestsCookieJar
|
||||||
return jar.copy()
|
return jar.copy()
|
||||||
# We're dealing with a generic CookieJar instance
|
# We're dealing with a generic CookieJar instance
|
||||||
new_jar = copy.copy(jar)
|
new_jar = copy.copy(jar)
|
||||||
|
@ -421,8 +425,9 @@ def morsel_to_cookie(morsel):
|
||||||
raise TypeError('max-age: %s must be integer' % morsel['max-age'])
|
raise TypeError('max-age: %s must be integer' % morsel['max-age'])
|
||||||
elif morsel['expires']:
|
elif morsel['expires']:
|
||||||
time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
|
time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
|
||||||
expires = int(time.mktime(
|
expires = calendar.timegm(
|
||||||
time.strptime(morsel['expires'], time_template)) - time.timezone)
|
time.strptime(morsel['expires'], time_template)
|
||||||
|
)
|
||||||
return create_cookie(
|
return create_cookie(
|
||||||
comment=morsel['comment'],
|
comment=morsel['comment'],
|
||||||
comment_url=bool(morsel['comment']),
|
comment_url=bool(morsel['comment']),
|
||||||
|
|
|
@ -97,3 +97,18 @@ class StreamConsumedError(RequestException, TypeError):
|
||||||
|
|
||||||
class RetryError(RequestException):
|
class RetryError(RequestException):
|
||||||
"""Custom retries logic failed"""
|
"""Custom retries logic failed"""
|
||||||
|
|
||||||
|
|
||||||
|
# Warnings
|
||||||
|
|
||||||
|
|
||||||
|
class RequestsWarning(Warning):
|
||||||
|
"""Base warning for Requests."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class FileModeWarning(RequestsWarning, DeprecationWarning):
|
||||||
|
"""
|
||||||
|
A file was opened in text mode, but Requests determined its binary length.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
|
@ -12,34 +12,23 @@ Available hooks:
|
||||||
The response generated from a Request.
|
The response generated from a Request.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
HOOKS = ['response']
|
HOOKS = ['response']
|
||||||
|
|
||||||
|
|
||||||
def default_hooks():
|
def default_hooks():
|
||||||
hooks = {}
|
return dict((event, []) for event in HOOKS)
|
||||||
for event in HOOKS:
|
|
||||||
hooks[event] = []
|
|
||||||
return hooks
|
|
||||||
|
|
||||||
# TODO: response is the only one
|
# TODO: response is the only one
|
||||||
|
|
||||||
|
|
||||||
def dispatch_hook(key, hooks, hook_data, **kwargs):
|
def dispatch_hook(key, hooks, hook_data, **kwargs):
|
||||||
"""Dispatches a hook dictionary on a given piece of data."""
|
"""Dispatches a hook dictionary on a given piece of data."""
|
||||||
|
|
||||||
hooks = hooks or dict()
|
hooks = hooks or dict()
|
||||||
|
hooks = hooks.get(key)
|
||||||
if key in hooks:
|
if hooks:
|
||||||
hooks = hooks.get(key)
|
|
||||||
|
|
||||||
if hasattr(hooks, '__call__'):
|
if hasattr(hooks, '__call__'):
|
||||||
hooks = [hooks]
|
hooks = [hooks]
|
||||||
|
|
||||||
for hook in hooks:
|
for hook in hooks:
|
||||||
_hook_data = hook(hook_data, **kwargs)
|
_hook_data = hook(hook_data, **kwargs)
|
||||||
if _hook_data is not None:
|
if _hook_data is not None:
|
||||||
hook_data = _hook_data
|
hook_data = _hook_data
|
||||||
|
|
||||||
return hook_data
|
return hook_data
|
||||||
|
|
|
@ -192,7 +192,7 @@ class Request(RequestHooksMixin):
|
||||||
:param headers: dictionary of headers to send.
|
:param headers: dictionary of headers to send.
|
||||||
:param files: dictionary of {filename: fileobject} files to multipart upload.
|
:param files: dictionary of {filename: fileobject} files to multipart upload.
|
||||||
:param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
|
:param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
|
||||||
:param json: json for the body to attach to the request (if data is not specified).
|
:param json: json for the body to attach to the request (if files or data is not specified).
|
||||||
:param params: dictionary of URL parameters to append to the URL.
|
:param params: dictionary of URL parameters to append to the URL.
|
||||||
:param auth: Auth handler or (user, pass) tuple.
|
:param auth: Auth handler or (user, pass) tuple.
|
||||||
:param cookies: dictionary or CookieJar of cookies to attach to this request.
|
:param cookies: dictionary or CookieJar of cookies to attach to this request.
|
||||||
|
@ -319,12 +319,12 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
|
||||||
"""Prepares the given HTTP method."""
|
"""Prepares the given HTTP method."""
|
||||||
self.method = method
|
self.method = method
|
||||||
if self.method is not None:
|
if self.method is not None:
|
||||||
self.method = self.method.upper()
|
self.method = to_native_string(self.method.upper())
|
||||||
|
|
||||||
def prepare_url(self, url, params):
|
def prepare_url(self, url, params):
|
||||||
"""Prepares the given HTTP URL."""
|
"""Prepares the given HTTP URL."""
|
||||||
#: Accept objects that have string representations.
|
#: Accept objects that have string representations.
|
||||||
#: We're unable to blindy call unicode/str functions
|
#: We're unable to blindly call unicode/str functions
|
||||||
#: as this will include the bytestring indicator (b'')
|
#: as this will include the bytestring indicator (b'')
|
||||||
#: on python 3.x.
|
#: on python 3.x.
|
||||||
#: https://github.com/kennethreitz/requests/pull/2238
|
#: https://github.com/kennethreitz/requests/pull/2238
|
||||||
|
@ -385,6 +385,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
|
||||||
if isinstance(fragment, str):
|
if isinstance(fragment, str):
|
||||||
fragment = fragment.encode('utf-8')
|
fragment = fragment.encode('utf-8')
|
||||||
|
|
||||||
|
if isinstance(params, (str, bytes)):
|
||||||
|
params = to_native_string(params)
|
||||||
|
|
||||||
enc_params = self._encode_params(params)
|
enc_params = self._encode_params(params)
|
||||||
if enc_params:
|
if enc_params:
|
||||||
if query:
|
if query:
|
||||||
|
@ -414,7 +417,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
|
||||||
content_type = None
|
content_type = None
|
||||||
length = None
|
length = None
|
||||||
|
|
||||||
if json is not None:
|
if not data and json is not None:
|
||||||
content_type = 'application/json'
|
content_type = 'application/json'
|
||||||
body = complexjson.dumps(json)
|
body = complexjson.dumps(json)
|
||||||
|
|
||||||
|
@ -434,7 +437,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
|
||||||
if files:
|
if files:
|
||||||
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
|
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
|
||||||
|
|
||||||
if length is not None:
|
if length:
|
||||||
self.headers['Content-Length'] = builtin_str(length)
|
self.headers['Content-Length'] = builtin_str(length)
|
||||||
else:
|
else:
|
||||||
self.headers['Transfer-Encoding'] = 'chunked'
|
self.headers['Transfer-Encoding'] = 'chunked'
|
||||||
|
@ -443,7 +446,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
|
||||||
if files:
|
if files:
|
||||||
(body, content_type) = self._encode_files(files, data)
|
(body, content_type) = self._encode_files(files, data)
|
||||||
else:
|
else:
|
||||||
if data and json is None:
|
if data:
|
||||||
body = self._encode_params(data)
|
body = self._encode_params(data)
|
||||||
if isinstance(data, basestring) or hasattr(data, 'read'):
|
if isinstance(data, basestring) or hasattr(data, 'read'):
|
||||||
content_type = None
|
content_type = None
|
||||||
|
@ -631,7 +634,7 @@ class Response(object):
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_permanent_redirect(self):
|
def is_permanent_redirect(self):
|
||||||
"""True if this Response one of the permanant versions of redirect"""
|
"""True if this Response one of the permanent versions of redirect"""
|
||||||
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
|
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
|
|
@ -1,8 +1,11 @@
|
||||||
If you are planning to submit a pull request to requests with any changes in
|
If you are planning to submit a pull request to requests with any changes in
|
||||||
this library do not go any further. These are independent libraries which we
|
this library do not go any further. These are independent libraries which we
|
||||||
vendor into requests. Any changes necessary to these libraries must be made in
|
vendor into requests. Any changes necessary to these libraries must be made in
|
||||||
them and submitted as separate pull requests to those libraries.
|
them and submitted as separate pull requests to those libraries.
|
||||||
|
|
||||||
urllib3 pull requests go here: https://github.com/shazow/urllib3
|
urllib3 pull requests go here: https://github.com/shazow/urllib3
|
||||||
|
|
||||||
chardet pull requests go here: https://github.com/chardet/chardet
|
chardet pull requests go here: https://github.com/chardet/chardet
|
||||||
|
|
||||||
|
See https://github.com/kennethreitz/requests/pull/1812#issuecomment-30854316
|
||||||
|
for the reasoning behind this.
|
||||||
|
|
|
@ -1,3 +1,36 @@
|
||||||
from __future__ import absolute_import
|
'''
|
||||||
|
Debian and other distributions "unbundle" requests' vendored dependencies, and
|
||||||
|
rewrite all imports to use the global versions of ``urllib3`` and ``chardet``.
|
||||||
|
The problem with this is that not only requests itself imports those
|
||||||
|
dependencies, but third-party code outside of the distros' control too.
|
||||||
|
|
||||||
from . import urllib3
|
In reaction to these problems, the distro maintainers replaced
|
||||||
|
``requests.packages`` with a magical "stub module" that imports the correct
|
||||||
|
modules. The implementations were varying in quality and all had severe
|
||||||
|
problems. For example, a symlink (or hardlink) that links the correct modules
|
||||||
|
into place introduces problems regarding object identity, since you now have
|
||||||
|
two modules in `sys.modules` with the same API, but different identities::
|
||||||
|
|
||||||
|
requests.packages.urllib3 is not urllib3
|
||||||
|
|
||||||
|
With version ``2.5.2``, requests started to maintain its own stub, so that
|
||||||
|
distro-specific breakage would be reduced to a minimum, even though the whole
|
||||||
|
issue is not requests' fault in the first place. See
|
||||||
|
https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull
|
||||||
|
request.
|
||||||
|
'''
|
||||||
|
|
||||||
|
from __future__ import absolute_import
|
||||||
|
import sys
|
||||||
|
|
||||||
|
try:
|
||||||
|
from . import urllib3
|
||||||
|
except ImportError:
|
||||||
|
import urllib3
|
||||||
|
sys.modules['%s.urllib3' % __name__] = urllib3
|
||||||
|
|
||||||
|
try:
|
||||||
|
from . import chardet
|
||||||
|
except ImportError:
|
||||||
|
import chardet
|
||||||
|
sys.modules['%s.chardet' % __name__] = chardet
|
||||||
|
|
|
@ -2,10 +2,8 @@
|
||||||
urllib3 - Thread-safe connection pooling and re-using.
|
urllib3 - Thread-safe connection pooling and re-using.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
|
from __future__ import absolute_import
|
||||||
__license__ = 'MIT'
|
import warnings
|
||||||
__version__ = '1.10.4'
|
|
||||||
|
|
||||||
|
|
||||||
from .connectionpool import (
|
from .connectionpool import (
|
||||||
HTTPConnectionPool,
|
HTTPConnectionPool,
|
||||||
|
@ -32,8 +30,30 @@ except ImportError:
|
||||||
def emit(self, record):
|
def emit(self, record):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
|
||||||
|
__license__ = 'MIT'
|
||||||
|
__version__ = '1.13.1'
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
'HTTPConnectionPool',
|
||||||
|
'HTTPSConnectionPool',
|
||||||
|
'PoolManager',
|
||||||
|
'ProxyManager',
|
||||||
|
'HTTPResponse',
|
||||||
|
'Retry',
|
||||||
|
'Timeout',
|
||||||
|
'add_stderr_logger',
|
||||||
|
'connection_from_url',
|
||||||
|
'disable_warnings',
|
||||||
|
'encode_multipart_formdata',
|
||||||
|
'get_host',
|
||||||
|
'make_headers',
|
||||||
|
'proxy_from_url',
|
||||||
|
)
|
||||||
|
|
||||||
logging.getLogger(__name__).addHandler(NullHandler())
|
logging.getLogger(__name__).addHandler(NullHandler())
|
||||||
|
|
||||||
|
|
||||||
def add_stderr_logger(level=logging.DEBUG):
|
def add_stderr_logger(level=logging.DEBUG):
|
||||||
"""
|
"""
|
||||||
Helper for quickly adding a StreamHandler to the logger. Useful for
|
Helper for quickly adding a StreamHandler to the logger. Useful for
|
||||||
|
@ -55,12 +75,16 @@ def add_stderr_logger(level=logging.DEBUG):
|
||||||
del NullHandler
|
del NullHandler
|
||||||
|
|
||||||
|
|
||||||
import warnings
|
|
||||||
# SecurityWarning's always go off by default.
|
# SecurityWarning's always go off by default.
|
||||||
warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
|
warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
|
||||||
|
# SubjectAltNameWarning's should go off once per host
|
||||||
|
warnings.simplefilter('default', exceptions.SubjectAltNameWarning)
|
||||||
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
|
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
|
||||||
warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
|
warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
|
||||||
append=True)
|
append=True)
|
||||||
|
# SNIMissingWarnings should go off only once.
|
||||||
|
warnings.simplefilter('default', exceptions.SNIMissingWarning)
|
||||||
|
|
||||||
|
|
||||||
def disable_warnings(category=exceptions.HTTPWarning):
|
def disable_warnings(category=exceptions.HTTPWarning):
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
from collections import Mapping, MutableMapping
|
from collections import Mapping, MutableMapping
|
||||||
try:
|
try:
|
||||||
from threading import RLock
|
from threading import RLock
|
||||||
|
@ -97,14 +98,7 @@ class RecentlyUsedContainer(MutableMapping):
|
||||||
return list(iterkeys(self._container))
|
return list(iterkeys(self._container))
|
||||||
|
|
||||||
|
|
||||||
_dict_setitem = dict.__setitem__
|
class HTTPHeaderDict(MutableMapping):
|
||||||
_dict_getitem = dict.__getitem__
|
|
||||||
_dict_delitem = dict.__delitem__
|
|
||||||
_dict_contains = dict.__contains__
|
|
||||||
_dict_setdefault = dict.setdefault
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPHeaderDict(dict):
|
|
||||||
"""
|
"""
|
||||||
:param headers:
|
:param headers:
|
||||||
An iterable of field-value pairs. Must not contain multiple field names
|
An iterable of field-value pairs. Must not contain multiple field names
|
||||||
|
@ -139,7 +133,8 @@ class HTTPHeaderDict(dict):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, headers=None, **kwargs):
|
def __init__(self, headers=None, **kwargs):
|
||||||
dict.__init__(self)
|
super(HTTPHeaderDict, self).__init__()
|
||||||
|
self._container = {}
|
||||||
if headers is not None:
|
if headers is not None:
|
||||||
if isinstance(headers, HTTPHeaderDict):
|
if isinstance(headers, HTTPHeaderDict):
|
||||||
self._copy_from(headers)
|
self._copy_from(headers)
|
||||||
|
@ -149,38 +144,44 @@ class HTTPHeaderDict(dict):
|
||||||
self.extend(kwargs)
|
self.extend(kwargs)
|
||||||
|
|
||||||
def __setitem__(self, key, val):
|
def __setitem__(self, key, val):
|
||||||
return _dict_setitem(self, key.lower(), (key, val))
|
self._container[key.lower()] = (key, val)
|
||||||
|
return self._container[key.lower()]
|
||||||
|
|
||||||
def __getitem__(self, key):
|
def __getitem__(self, key):
|
||||||
val = _dict_getitem(self, key.lower())
|
val = self._container[key.lower()]
|
||||||
return ', '.join(val[1:])
|
return ', '.join(val[1:])
|
||||||
|
|
||||||
def __delitem__(self, key):
|
def __delitem__(self, key):
|
||||||
return _dict_delitem(self, key.lower())
|
del self._container[key.lower()]
|
||||||
|
|
||||||
def __contains__(self, key):
|
def __contains__(self, key):
|
||||||
return _dict_contains(self, key.lower())
|
return key.lower() in self._container
|
||||||
|
|
||||||
def __eq__(self, other):
|
def __eq__(self, other):
|
||||||
if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
|
if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
|
||||||
return False
|
return False
|
||||||
if not isinstance(other, type(self)):
|
if not isinstance(other, type(self)):
|
||||||
other = type(self)(other)
|
other = type(self)(other)
|
||||||
return dict((k1, self[k1]) for k1 in self) == dict((k2, other[k2]) for k2 in other)
|
return (dict((k.lower(), v) for k, v in self.itermerged()) ==
|
||||||
|
dict((k.lower(), v) for k, v in other.itermerged()))
|
||||||
|
|
||||||
def __ne__(self, other):
|
def __ne__(self, other):
|
||||||
return not self.__eq__(other)
|
return not self.__eq__(other)
|
||||||
|
|
||||||
values = MutableMapping.values
|
if not PY3: # Python 2
|
||||||
get = MutableMapping.get
|
|
||||||
update = MutableMapping.update
|
|
||||||
|
|
||||||
if not PY3: # Python 2
|
|
||||||
iterkeys = MutableMapping.iterkeys
|
iterkeys = MutableMapping.iterkeys
|
||||||
itervalues = MutableMapping.itervalues
|
itervalues = MutableMapping.itervalues
|
||||||
|
|
||||||
__marker = object()
|
__marker = object()
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(self._container)
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
# Only provide the originally cased names
|
||||||
|
for vals in self._container.values():
|
||||||
|
yield vals[0]
|
||||||
|
|
||||||
def pop(self, key, default=__marker):
|
def pop(self, key, default=__marker):
|
||||||
'''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
|
'''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
|
||||||
If key is not found, d is returned if given, otherwise KeyError is raised.
|
If key is not found, d is returned if given, otherwise KeyError is raised.
|
||||||
|
@ -216,7 +217,7 @@ class HTTPHeaderDict(dict):
|
||||||
key_lower = key.lower()
|
key_lower = key.lower()
|
||||||
new_vals = key, val
|
new_vals = key, val
|
||||||
# Keep the common case aka no item present as fast as possible
|
# Keep the common case aka no item present as fast as possible
|
||||||
vals = _dict_setdefault(self, key_lower, new_vals)
|
vals = self._container.setdefault(key_lower, new_vals)
|
||||||
if new_vals is not vals:
|
if new_vals is not vals:
|
||||||
# new_vals was not inserted, as there was a previous one
|
# new_vals was not inserted, as there was a previous one
|
||||||
if isinstance(vals, list):
|
if isinstance(vals, list):
|
||||||
|
@ -225,7 +226,7 @@ class HTTPHeaderDict(dict):
|
||||||
else:
|
else:
|
||||||
# vals should be a tuple then, i.e. only one item so far
|
# vals should be a tuple then, i.e. only one item so far
|
||||||
# Need to convert the tuple to list for further extension
|
# Need to convert the tuple to list for further extension
|
||||||
_dict_setitem(self, key_lower, [vals[0], vals[1], val])
|
self._container[key_lower] = [vals[0], vals[1], val]
|
||||||
|
|
||||||
def extend(self, *args, **kwargs):
|
def extend(self, *args, **kwargs):
|
||||||
"""Generic import function for any type of header-like object.
|
"""Generic import function for any type of header-like object.
|
||||||
|
@ -234,9 +235,9 @@ class HTTPHeaderDict(dict):
|
||||||
"""
|
"""
|
||||||
if len(args) > 1:
|
if len(args) > 1:
|
||||||
raise TypeError("extend() takes at most 1 positional "
|
raise TypeError("extend() takes at most 1 positional "
|
||||||
"arguments ({} given)".format(len(args)))
|
"arguments ({0} given)".format(len(args)))
|
||||||
other = args[0] if len(args) >= 1 else ()
|
other = args[0] if len(args) >= 1 else ()
|
||||||
|
|
||||||
if isinstance(other, HTTPHeaderDict):
|
if isinstance(other, HTTPHeaderDict):
|
||||||
for key, val in other.iteritems():
|
for key, val in other.iteritems():
|
||||||
self.add(key, val)
|
self.add(key, val)
|
||||||
|
@ -257,7 +258,7 @@ class HTTPHeaderDict(dict):
|
||||||
"""Returns a list of all the values for the named field. Returns an
|
"""Returns a list of all the values for the named field. Returns an
|
||||||
empty list if the key doesn't exist."""
|
empty list if the key doesn't exist."""
|
||||||
try:
|
try:
|
||||||
vals = _dict_getitem(self, key.lower())
|
vals = self._container[key.lower()]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
return []
|
return []
|
||||||
else:
|
else:
|
||||||
|
@ -276,11 +277,11 @@ class HTTPHeaderDict(dict):
|
||||||
|
|
||||||
def _copy_from(self, other):
|
def _copy_from(self, other):
|
||||||
for key in other:
|
for key in other:
|
||||||
val = _dict_getitem(other, key)
|
val = other.getlist(key)
|
||||||
if isinstance(val, list):
|
if isinstance(val, list):
|
||||||
# Don't need to convert tuples
|
# Don't need to convert tuples
|
||||||
val = list(val)
|
val = list(val)
|
||||||
_dict_setitem(self, key, val)
|
self._container[key.lower()] = [key] + val
|
||||||
|
|
||||||
def copy(self):
|
def copy(self):
|
||||||
clone = type(self)()
|
clone = type(self)()
|
||||||
|
@ -290,33 +291,33 @@ class HTTPHeaderDict(dict):
|
||||||
def iteritems(self):
|
def iteritems(self):
|
||||||
"""Iterate over all header lines, including duplicate ones."""
|
"""Iterate over all header lines, including duplicate ones."""
|
||||||
for key in self:
|
for key in self:
|
||||||
vals = _dict_getitem(self, key)
|
vals = self._container[key.lower()]
|
||||||
for val in vals[1:]:
|
for val in vals[1:]:
|
||||||
yield vals[0], val
|
yield vals[0], val
|
||||||
|
|
||||||
def itermerged(self):
|
def itermerged(self):
|
||||||
"""Iterate over all headers, merging duplicate ones together."""
|
"""Iterate over all headers, merging duplicate ones together."""
|
||||||
for key in self:
|
for key in self:
|
||||||
val = _dict_getitem(self, key)
|
val = self._container[key.lower()]
|
||||||
yield val[0], ', '.join(val[1:])
|
yield val[0], ', '.join(val[1:])
|
||||||
|
|
||||||
def items(self):
|
def items(self):
|
||||||
return list(self.iteritems())
|
return list(self.iteritems())
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_httplib(cls, message): # Python 2
|
def from_httplib(cls, message): # Python 2
|
||||||
"""Read headers from a Python 2 httplib message object."""
|
"""Read headers from a Python 2 httplib message object."""
|
||||||
# python2.7 does not expose a proper API for exporting multiheaders
|
# python2.7 does not expose a proper API for exporting multiheaders
|
||||||
# efficiently. This function re-reads raw lines from the message
|
# efficiently. This function re-reads raw lines from the message
|
||||||
# object and extracts the multiheaders properly.
|
# object and extracts the multiheaders properly.
|
||||||
headers = []
|
headers = []
|
||||||
|
|
||||||
for line in message.headers:
|
for line in message.headers:
|
||||||
if line.startswith((' ', '\t')):
|
if line.startswith((' ', '\t')):
|
||||||
key, value = headers[-1]
|
key, value = headers[-1]
|
||||||
headers[-1] = (key, value + '\r\n' + line.rstrip())
|
headers[-1] = (key, value + '\r\n' + line.rstrip())
|
||||||
continue
|
continue
|
||||||
|
|
||||||
key, value = line.split(':', 1)
|
key, value = line.split(':', 1)
|
||||||
headers.append((key, value.strip()))
|
headers.append((key, value.strip()))
|
||||||
|
|
||||||
|
|
|
@ -1,23 +1,20 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
import datetime
|
import datetime
|
||||||
|
import os
|
||||||
import sys
|
import sys
|
||||||
import socket
|
import socket
|
||||||
from socket import timeout as SocketTimeout
|
from socket import error as SocketError, timeout as SocketTimeout
|
||||||
import warnings
|
import warnings
|
||||||
from .packages import six
|
from .packages import six
|
||||||
|
|
||||||
try: # Python 3
|
try: # Python 3
|
||||||
from http.client import HTTPConnection as _HTTPConnection, HTTPException
|
from http.client import HTTPConnection as _HTTPConnection
|
||||||
|
from http.client import HTTPException # noqa: unused in this module
|
||||||
except ImportError:
|
except ImportError:
|
||||||
from httplib import HTTPConnection as _HTTPConnection, HTTPException
|
from httplib import HTTPConnection as _HTTPConnection
|
||||||
|
from httplib import HTTPException # noqa: unused in this module
|
||||||
|
|
||||||
class DummyConnection(object):
|
|
||||||
"Used to detect a failed ConnectionCls import."
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
try: # Compiled with SSL?
|
try: # Compiled with SSL?
|
||||||
HTTPSConnection = DummyConnection
|
|
||||||
import ssl
|
import ssl
|
||||||
BaseSSLError = ssl.SSLError
|
BaseSSLError = ssl.SSLError
|
||||||
except (ImportError, AttributeError): # Platform-specific: No SSL.
|
except (ImportError, AttributeError): # Platform-specific: No SSL.
|
||||||
|
@ -36,9 +33,10 @@ except NameError: # Python 2:
|
||||||
|
|
||||||
|
|
||||||
from .exceptions import (
|
from .exceptions import (
|
||||||
|
NewConnectionError,
|
||||||
ConnectTimeoutError,
|
ConnectTimeoutError,
|
||||||
|
SubjectAltNameWarning,
|
||||||
SystemTimeWarning,
|
SystemTimeWarning,
|
||||||
SecurityWarning,
|
|
||||||
)
|
)
|
||||||
from .packages.ssl_match_hostname import match_hostname
|
from .packages.ssl_match_hostname import match_hostname
|
||||||
|
|
||||||
|
@ -60,6 +58,11 @@ port_by_scheme = {
|
||||||
RECENT_DATE = datetime.date(2014, 1, 1)
|
RECENT_DATE = datetime.date(2014, 1, 1)
|
||||||
|
|
||||||
|
|
||||||
|
class DummyConnection(object):
|
||||||
|
"""Used to detect a failed ConnectionCls import."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class HTTPConnection(_HTTPConnection, object):
|
class HTTPConnection(_HTTPConnection, object):
|
||||||
"""
|
"""
|
||||||
Based on httplib.HTTPConnection but provides an extra constructor
|
Based on httplib.HTTPConnection but provides an extra constructor
|
||||||
|
@ -133,11 +136,15 @@ class HTTPConnection(_HTTPConnection, object):
|
||||||
conn = connection.create_connection(
|
conn = connection.create_connection(
|
||||||
(self.host, self.port), self.timeout, **extra_kw)
|
(self.host, self.port), self.timeout, **extra_kw)
|
||||||
|
|
||||||
except SocketTimeout:
|
except SocketTimeout as e:
|
||||||
raise ConnectTimeoutError(
|
raise ConnectTimeoutError(
|
||||||
self, "Connection to %s timed out. (connect timeout=%s)" %
|
self, "Connection to %s timed out. (connect timeout=%s)" %
|
||||||
(self.host, self.timeout))
|
(self.host, self.timeout))
|
||||||
|
|
||||||
|
except SocketError as e:
|
||||||
|
raise NewConnectionError(
|
||||||
|
self, "Failed to establish a new connection: %s" % e)
|
||||||
|
|
||||||
return conn
|
return conn
|
||||||
|
|
||||||
def _prepare_conn(self, conn):
|
def _prepare_conn(self, conn):
|
||||||
|
@ -185,19 +192,25 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
||||||
"""
|
"""
|
||||||
cert_reqs = None
|
cert_reqs = None
|
||||||
ca_certs = None
|
ca_certs = None
|
||||||
|
ca_cert_dir = None
|
||||||
ssl_version = None
|
ssl_version = None
|
||||||
assert_fingerprint = None
|
assert_fingerprint = None
|
||||||
|
|
||||||
def set_cert(self, key_file=None, cert_file=None,
|
def set_cert(self, key_file=None, cert_file=None,
|
||||||
cert_reqs=None, ca_certs=None,
|
cert_reqs=None, ca_certs=None,
|
||||||
assert_hostname=None, assert_fingerprint=None):
|
assert_hostname=None, assert_fingerprint=None,
|
||||||
|
ca_cert_dir=None):
|
||||||
|
|
||||||
|
if (ca_certs or ca_cert_dir) and cert_reqs is None:
|
||||||
|
cert_reqs = 'CERT_REQUIRED'
|
||||||
|
|
||||||
self.key_file = key_file
|
self.key_file = key_file
|
||||||
self.cert_file = cert_file
|
self.cert_file = cert_file
|
||||||
self.cert_reqs = cert_reqs
|
self.cert_reqs = cert_reqs
|
||||||
self.ca_certs = ca_certs
|
|
||||||
self.assert_hostname = assert_hostname
|
self.assert_hostname = assert_hostname
|
||||||
self.assert_fingerprint = assert_fingerprint
|
self.assert_fingerprint = assert_fingerprint
|
||||||
|
self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
|
||||||
|
self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
|
||||||
|
|
||||||
def connect(self):
|
def connect(self):
|
||||||
# Add certificate verification
|
# Add certificate verification
|
||||||
|
@ -234,6 +247,7 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
||||||
self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
|
self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
|
||||||
cert_reqs=resolved_cert_reqs,
|
cert_reqs=resolved_cert_reqs,
|
||||||
ca_certs=self.ca_certs,
|
ca_certs=self.ca_certs,
|
||||||
|
ca_cert_dir=self.ca_cert_dir,
|
||||||
server_hostname=hostname,
|
server_hostname=hostname,
|
||||||
ssl_version=resolved_ssl_version)
|
ssl_version=resolved_ssl_version)
|
||||||
|
|
||||||
|
@ -245,15 +259,25 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
||||||
cert = self.sock.getpeercert()
|
cert = self.sock.getpeercert()
|
||||||
if not cert.get('subjectAltName', ()):
|
if not cert.get('subjectAltName', ()):
|
||||||
warnings.warn((
|
warnings.warn((
|
||||||
'Certificate has no `subjectAltName`, falling back to check for a `commonName` for now. '
|
'Certificate for {0} has no `subjectAltName`, falling back to check for a '
|
||||||
'This feature is being removed by major browsers and deprecated by RFC 2818. '
|
'`commonName` for now. This feature is being removed by major browsers and '
|
||||||
'(See https://github.com/shazow/urllib3/issues/497 for details.)'),
|
'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
|
||||||
SecurityWarning
|
'for details.)'.format(hostname)),
|
||||||
|
SubjectAltNameWarning
|
||||||
)
|
)
|
||||||
match_hostname(cert, self.assert_hostname or hostname)
|
|
||||||
|
|
||||||
self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED
|
# In case the hostname is an IPv6 address, strip the square
|
||||||
or self.assert_fingerprint is not None)
|
# brackets from it before using it to validate. This is because
|
||||||
|
# a certificate with an IPv6 address in it won't have square
|
||||||
|
# brackets around that address. Sadly, match_hostname won't do this
|
||||||
|
# for us: it expects the plain host part without any extra work
|
||||||
|
# that might have been done to make it palatable to httplib.
|
||||||
|
asserted_hostname = self.assert_hostname or hostname
|
||||||
|
asserted_hostname = asserted_hostname.strip('[]')
|
||||||
|
match_hostname(cert, asserted_hostname)
|
||||||
|
|
||||||
|
self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
|
||||||
|
self.assert_fingerprint is not None)
|
||||||
|
|
||||||
|
|
||||||
if ssl:
|
if ssl:
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
import errno
|
import errno
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
|
@ -10,13 +11,15 @@ try: # Python 3
|
||||||
from queue import LifoQueue, Empty, Full
|
from queue import LifoQueue, Empty, Full
|
||||||
except ImportError:
|
except ImportError:
|
||||||
from Queue import LifoQueue, Empty, Full
|
from Queue import LifoQueue, Empty, Full
|
||||||
import Queue as _ # Platform-specific: Windows
|
# Queue is imported for side effects on MS Windows
|
||||||
|
import Queue as _unused_module_Queue # noqa: unused
|
||||||
|
|
||||||
|
|
||||||
from .exceptions import (
|
from .exceptions import (
|
||||||
ClosedPoolError,
|
ClosedPoolError,
|
||||||
ProtocolError,
|
ProtocolError,
|
||||||
EmptyPoolError,
|
EmptyPoolError,
|
||||||
|
HeaderParsingError,
|
||||||
HostChangedError,
|
HostChangedError,
|
||||||
LocationValueError,
|
LocationValueError,
|
||||||
MaxRetryError,
|
MaxRetryError,
|
||||||
|
@ -25,6 +28,7 @@ from .exceptions import (
|
||||||
SSLError,
|
SSLError,
|
||||||
TimeoutError,
|
TimeoutError,
|
||||||
InsecureRequestWarning,
|
InsecureRequestWarning,
|
||||||
|
NewConnectionError,
|
||||||
)
|
)
|
||||||
from .packages.ssl_match_hostname import CertificateError
|
from .packages.ssl_match_hostname import CertificateError
|
||||||
from .packages import six
|
from .packages import six
|
||||||
|
@ -32,15 +36,16 @@ from .connection import (
|
||||||
port_by_scheme,
|
port_by_scheme,
|
||||||
DummyConnection,
|
DummyConnection,
|
||||||
HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
|
HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
|
||||||
HTTPException, BaseSSLError, ConnectionError
|
HTTPException, BaseSSLError,
|
||||||
)
|
)
|
||||||
from .request import RequestMethods
|
from .request import RequestMethods
|
||||||
from .response import HTTPResponse
|
from .response import HTTPResponse
|
||||||
|
|
||||||
from .util.connection import is_connection_dropped
|
from .util.connection import is_connection_dropped
|
||||||
|
from .util.response import assert_header_parsing
|
||||||
from .util.retry import Retry
|
from .util.retry import Retry
|
||||||
from .util.timeout import Timeout
|
from .util.timeout import Timeout
|
||||||
from .util.url import get_host
|
from .util.url import get_host, Url
|
||||||
|
|
||||||
|
|
||||||
xrange = six.moves.xrange
|
xrange = six.moves.xrange
|
||||||
|
@ -50,7 +55,7 @@ log = logging.getLogger(__name__)
|
||||||
_Default = object()
|
_Default = object()
|
||||||
|
|
||||||
|
|
||||||
## Pool objects
|
# Pool objects
|
||||||
class ConnectionPool(object):
|
class ConnectionPool(object):
|
||||||
"""
|
"""
|
||||||
Base class for all connection pools, such as
|
Base class for all connection pools, such as
|
||||||
|
@ -64,8 +69,7 @@ class ConnectionPool(object):
|
||||||
if not host:
|
if not host:
|
||||||
raise LocationValueError("No host specified.")
|
raise LocationValueError("No host specified.")
|
||||||
|
|
||||||
# httplib doesn't like it when we include brackets in ipv6 addresses
|
self.host = host
|
||||||
self.host = host.strip('[]')
|
|
||||||
self.port = port
|
self.port = port
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
|
@ -120,7 +124,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
|
||||||
|
|
||||||
:param maxsize:
|
:param maxsize:
|
||||||
Number of connections to save that can be reused. More than 1 is useful
|
Number of connections to save that can be reused. More than 1 is useful
|
||||||
in multithreaded situations. If ``block`` is set to false, more
|
in multithreaded situations. If ``block`` is set to False, more
|
||||||
connections will be created but they will not be saved once they've
|
connections will be created but they will not be saved once they've
|
||||||
been used.
|
been used.
|
||||||
|
|
||||||
|
@ -381,8 +385,19 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
|
||||||
log.debug("\"%s %s %s\" %s %s" % (method, url, http_version,
|
log.debug("\"%s %s %s\" %s %s" % (method, url, http_version,
|
||||||
httplib_response.status,
|
httplib_response.status,
|
||||||
httplib_response.length))
|
httplib_response.length))
|
||||||
|
|
||||||
|
try:
|
||||||
|
assert_header_parsing(httplib_response.msg)
|
||||||
|
except HeaderParsingError as hpe: # Platform-specific: Python 3
|
||||||
|
log.warning(
|
||||||
|
'Failed to parse headers (url=%s): %s',
|
||||||
|
self._absolute_url(url), hpe, exc_info=True)
|
||||||
|
|
||||||
return httplib_response
|
return httplib_response
|
||||||
|
|
||||||
|
def _absolute_url(self, path):
|
||||||
|
return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
"""
|
"""
|
||||||
Close all pooled connections and disable the pool.
|
Close all pooled connections and disable the pool.
|
||||||
|
@ -568,27 +583,24 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
|
||||||
# Close the connection. If a connection is reused on which there
|
# Close the connection. If a connection is reused on which there
|
||||||
# was a Certificate error, the next request will certainly raise
|
# was a Certificate error, the next request will certainly raise
|
||||||
# another Certificate error.
|
# another Certificate error.
|
||||||
if conn:
|
conn = conn and conn.close()
|
||||||
conn.close()
|
release_conn = True
|
||||||
conn = None
|
|
||||||
raise SSLError(e)
|
raise SSLError(e)
|
||||||
|
|
||||||
except SSLError:
|
except SSLError:
|
||||||
# Treat SSLError separately from BaseSSLError to preserve
|
# Treat SSLError separately from BaseSSLError to preserve
|
||||||
# traceback.
|
# traceback.
|
||||||
if conn:
|
conn = conn and conn.close()
|
||||||
conn.close()
|
release_conn = True
|
||||||
conn = None
|
|
||||||
raise
|
raise
|
||||||
|
|
||||||
except (TimeoutError, HTTPException, SocketError, ConnectionError) as e:
|
except (TimeoutError, HTTPException, SocketError, ProtocolError) as e:
|
||||||
if conn:
|
# Discard the connection for these exceptions. It will be
|
||||||
# Discard the connection for these exceptions. It will be
|
# be replaced during the next _get_conn() call.
|
||||||
# be replaced during the next _get_conn() call.
|
conn = conn and conn.close()
|
||||||
conn.close()
|
release_conn = True
|
||||||
conn = None
|
|
||||||
|
|
||||||
if isinstance(e, SocketError) and self.proxy:
|
if isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
|
||||||
e = ProxyError('Cannot connect to proxy.', e)
|
e = ProxyError('Cannot connect to proxy.', e)
|
||||||
elif isinstance(e, (SocketError, HTTPException)):
|
elif isinstance(e, (SocketError, HTTPException)):
|
||||||
e = ProtocolError('Connection aborted.', e)
|
e = ProtocolError('Connection aborted.', e)
|
||||||
|
@ -626,26 +638,31 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
|
||||||
retries = retries.increment(method, url, response=response, _pool=self)
|
retries = retries.increment(method, url, response=response, _pool=self)
|
||||||
except MaxRetryError:
|
except MaxRetryError:
|
||||||
if retries.raise_on_redirect:
|
if retries.raise_on_redirect:
|
||||||
|
# Release the connection for this response, since we're not
|
||||||
|
# returning it to be released manually.
|
||||||
|
response.release_conn()
|
||||||
raise
|
raise
|
||||||
return response
|
return response
|
||||||
|
|
||||||
log.info("Redirecting %s -> %s" % (url, redirect_location))
|
log.info("Redirecting %s -> %s" % (url, redirect_location))
|
||||||
return self.urlopen(method, redirect_location, body, headers,
|
return self.urlopen(
|
||||||
retries=retries, redirect=redirect,
|
method, redirect_location, body, headers,
|
||||||
assert_same_host=assert_same_host,
|
retries=retries, redirect=redirect,
|
||||||
timeout=timeout, pool_timeout=pool_timeout,
|
assert_same_host=assert_same_host,
|
||||||
release_conn=release_conn, **response_kw)
|
timeout=timeout, pool_timeout=pool_timeout,
|
||||||
|
release_conn=release_conn, **response_kw)
|
||||||
|
|
||||||
# Check if we should retry the HTTP response.
|
# Check if we should retry the HTTP response.
|
||||||
if retries.is_forced_retry(method, status_code=response.status):
|
if retries.is_forced_retry(method, status_code=response.status):
|
||||||
retries = retries.increment(method, url, response=response, _pool=self)
|
retries = retries.increment(method, url, response=response, _pool=self)
|
||||||
retries.sleep()
|
retries.sleep()
|
||||||
log.info("Forced retry: %s" % url)
|
log.info("Forced retry: %s" % url)
|
||||||
return self.urlopen(method, url, body, headers,
|
return self.urlopen(
|
||||||
retries=retries, redirect=redirect,
|
method, url, body, headers,
|
||||||
assert_same_host=assert_same_host,
|
retries=retries, redirect=redirect,
|
||||||
timeout=timeout, pool_timeout=pool_timeout,
|
assert_same_host=assert_same_host,
|
||||||
release_conn=release_conn, **response_kw)
|
timeout=timeout, pool_timeout=pool_timeout,
|
||||||
|
release_conn=release_conn, **response_kw)
|
||||||
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
@ -662,10 +679,10 @@ class HTTPSConnectionPool(HTTPConnectionPool):
|
||||||
``assert_hostname`` and ``host`` in this order to verify connections.
|
``assert_hostname`` and ``host`` in this order to verify connections.
|
||||||
If ``assert_hostname`` is False, no verification is done.
|
If ``assert_hostname`` is False, no verification is done.
|
||||||
|
|
||||||
The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs`` and
|
The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
|
||||||
``ssl_version`` are only used if :mod:`ssl` is available and are fed into
|
``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
|
||||||
:meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket
|
available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
|
||||||
into an SSL socket.
|
the connection socket into an SSL socket.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
scheme = 'https'
|
scheme = 'https'
|
||||||
|
@ -678,15 +695,20 @@ class HTTPSConnectionPool(HTTPConnectionPool):
|
||||||
key_file=None, cert_file=None, cert_reqs=None,
|
key_file=None, cert_file=None, cert_reqs=None,
|
||||||
ca_certs=None, ssl_version=None,
|
ca_certs=None, ssl_version=None,
|
||||||
assert_hostname=None, assert_fingerprint=None,
|
assert_hostname=None, assert_fingerprint=None,
|
||||||
**conn_kw):
|
ca_cert_dir=None, **conn_kw):
|
||||||
|
|
||||||
HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
|
HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
|
||||||
block, headers, retries, _proxy, _proxy_headers,
|
block, headers, retries, _proxy, _proxy_headers,
|
||||||
**conn_kw)
|
**conn_kw)
|
||||||
|
|
||||||
|
if ca_certs and cert_reqs is None:
|
||||||
|
cert_reqs = 'CERT_REQUIRED'
|
||||||
|
|
||||||
self.key_file = key_file
|
self.key_file = key_file
|
||||||
self.cert_file = cert_file
|
self.cert_file = cert_file
|
||||||
self.cert_reqs = cert_reqs
|
self.cert_reqs = cert_reqs
|
||||||
self.ca_certs = ca_certs
|
self.ca_certs = ca_certs
|
||||||
|
self.ca_cert_dir = ca_cert_dir
|
||||||
self.ssl_version = ssl_version
|
self.ssl_version = ssl_version
|
||||||
self.assert_hostname = assert_hostname
|
self.assert_hostname = assert_hostname
|
||||||
self.assert_fingerprint = assert_fingerprint
|
self.assert_fingerprint = assert_fingerprint
|
||||||
|
@ -702,6 +724,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
|
||||||
cert_file=self.cert_file,
|
cert_file=self.cert_file,
|
||||||
cert_reqs=self.cert_reqs,
|
cert_reqs=self.cert_reqs,
|
||||||
ca_certs=self.ca_certs,
|
ca_certs=self.ca_certs,
|
||||||
|
ca_cert_dir=self.ca_cert_dir,
|
||||||
assert_hostname=self.assert_hostname,
|
assert_hostname=self.assert_hostname,
|
||||||
assert_fingerprint=self.assert_fingerprint)
|
assert_fingerprint=self.assert_fingerprint)
|
||||||
conn.ssl_version = self.ssl_version
|
conn.ssl_version = self.ssl_version
|
||||||
|
@ -760,8 +783,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
|
||||||
if not getattr(conn, 'sock', None): # AppEngine might not have `.sock`
|
if not getattr(conn, 'sock', None): # AppEngine might not have `.sock`
|
||||||
conn.connect()
|
conn.connect()
|
||||||
|
|
||||||
"""
|
""" if not conn.is_verified:
|
||||||
if not conn.is_verified:
|
|
||||||
warnings.warn((
|
warnings.warn((
|
||||||
'Unverified HTTPS request is being made. '
|
'Unverified HTTPS request is being made. '
|
||||||
'Adding certificate verification is strongly advised. See: '
|
'Adding certificate verification is strongly advised. See: '
|
||||||
|
@ -769,7 +791,6 @@ class HTTPSConnectionPool(HTTPConnectionPool):
|
||||||
InsecureRequestWarning)
|
InsecureRequestWarning)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
def connection_from_url(url, **kw):
|
def connection_from_url(url, **kw):
|
||||||
"""
|
"""
|
||||||
Given a url, return an :class:`.ConnectionPool` instance of its host.
|
Given a url, return an :class:`.ConnectionPool` instance of its host.
|
||||||
|
|
223
lib/requests/packages/urllib3/contrib/appengine.py
Normal file
223
lib/requests/packages/urllib3/contrib/appengine.py
Normal file
|
@ -0,0 +1,223 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
from ..exceptions import (
|
||||||
|
HTTPError,
|
||||||
|
HTTPWarning,
|
||||||
|
MaxRetryError,
|
||||||
|
ProtocolError,
|
||||||
|
TimeoutError,
|
||||||
|
SSLError
|
||||||
|
)
|
||||||
|
|
||||||
|
from ..packages.six import BytesIO
|
||||||
|
from ..request import RequestMethods
|
||||||
|
from ..response import HTTPResponse
|
||||||
|
from ..util.timeout import Timeout
|
||||||
|
from ..util.retry import Retry
|
||||||
|
|
||||||
|
try:
|
||||||
|
from google.appengine.api import urlfetch
|
||||||
|
except ImportError:
|
||||||
|
urlfetch = None
|
||||||
|
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class AppEnginePlatformWarning(HTTPWarning):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class AppEnginePlatformError(HTTPError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class AppEngineManager(RequestMethods):
|
||||||
|
"""
|
||||||
|
Connection manager for Google App Engine sandbox applications.
|
||||||
|
|
||||||
|
This manager uses the URLFetch service directly instead of using the
|
||||||
|
emulated httplib, and is subject to URLFetch limitations as described in
|
||||||
|
the App Engine documentation here:
|
||||||
|
|
||||||
|
https://cloud.google.com/appengine/docs/python/urlfetch
|
||||||
|
|
||||||
|
Notably it will raise an AppEnginePlatformError if:
|
||||||
|
* URLFetch is not available.
|
||||||
|
* If you attempt to use this on GAEv2 (Managed VMs), as full socket
|
||||||
|
support is available.
|
||||||
|
* If a request size is more than 10 megabytes.
|
||||||
|
* If a response size is more than 32 megabtyes.
|
||||||
|
* If you use an unsupported request method such as OPTIONS.
|
||||||
|
|
||||||
|
Beyond those cases, it will raise normal urllib3 errors.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, headers=None, retries=None, validate_certificate=True):
|
||||||
|
if not urlfetch:
|
||||||
|
raise AppEnginePlatformError(
|
||||||
|
"URLFetch is not available in this environment.")
|
||||||
|
|
||||||
|
if is_prod_appengine_mvms():
|
||||||
|
raise AppEnginePlatformError(
|
||||||
|
"Use normal urllib3.PoolManager instead of AppEngineManager"
|
||||||
|
"on Managed VMs, as using URLFetch is not necessary in "
|
||||||
|
"this environment.")
|
||||||
|
|
||||||
|
warnings.warn(
|
||||||
|
"urllib3 is using URLFetch on Google App Engine sandbox instead "
|
||||||
|
"of sockets. To use sockets directly instead of URLFetch see "
|
||||||
|
"https://urllib3.readthedocs.org/en/latest/contrib.html.",
|
||||||
|
AppEnginePlatformWarning)
|
||||||
|
|
||||||
|
RequestMethods.__init__(self, headers)
|
||||||
|
self.validate_certificate = validate_certificate
|
||||||
|
|
||||||
|
self.retries = retries or Retry.DEFAULT
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||||
|
# Return False to re-raise any potential exceptions
|
||||||
|
return False
|
||||||
|
|
||||||
|
def urlopen(self, method, url, body=None, headers=None,
            retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
            **response_kw):
    """Issue an HTTP request for *url* through the URLFetch service.

    URLFetch-specific exceptions are translated into their urllib3
    equivalents so that callers can handle errors uniformly.

    :param method: HTTP verb (URLFetch rejects unsupported ones, e.g.
        OPTIONS, via ``InvalidMethodError``).
    :param body: optional request payload.
    :param headers: optional per-request headers.
    :param retries: retry policy; normalised via :meth:`_get_retries`.
    :param redirect: when True, URLFetch follows redirects itself.
    :param timeout: total deadline; URLFetch has no granular timeouts.
    :returns: an :class:`HTTPResponse` built from the URLFetch response.
    :raises TimeoutError: when the URLFetch deadline is exceeded.
    :raises AppEnginePlatformError: for URLFetch platform limits
        (request/response size, unsupported method).
    :raises MaxRetryError: on redirect-loop or exhausted redirects.
    """
    retries = self._get_retries(retries, redirect)

    try:
        response = urlfetch.fetch(
            url,
            payload=body,
            method=method,
            headers=headers or {},
            allow_truncated=False,
            # Only let URLFetch follow redirects when the caller wants
            # them and the retry policy has redirect budget left.
            follow_redirects=(
                redirect and
                retries.redirect != 0 and
                retries.total),
            deadline=self._get_absolute_timeout(timeout),
            validate_certificate=self.validate_certificate,
        )
    except urlfetch.DeadlineExceededError as e:
        raise TimeoutError(self, e)

    except urlfetch.InvalidURLError as e:
        if 'too large' in str(e):
            raise AppEnginePlatformError(
                "URLFetch request too large, URLFetch only "
                "supports requests up to 10mb in size.", e)
        raise ProtocolError(e)

    except urlfetch.DownloadError as e:
        if 'Too many redirects' in str(e):
            raise MaxRetryError(self, url, reason=e)
        raise ProtocolError(e)

    except urlfetch.ResponseTooLargeError as e:
        # FIX: the original implicit string concatenation produced
        # "...supportsresponses..."; a separating space is added.
        raise AppEnginePlatformError(
            "URLFetch response too large, URLFetch only supports "
            "responses up to 32mb in size.", e)

    except urlfetch.SSLCertificateError as e:
        raise SSLError(e)

    except urlfetch.InvalidMethodError as e:
        raise AppEnginePlatformError(
            "URLFetch does not support method: %s" % method, e)

    http_response = self._urlfetch_response_to_http_response(
        response, **response_kw)

    # Check for redirect response: URLFetch itself stops following once
    # its internal limit is hit, so a redirect here means "too many".
    if (http_response.get_redirect_location() and
            retries.raise_on_redirect and redirect):
        raise MaxRetryError(self, url, "too many redirects")

    # Check if we should retry the HTTP response (e.g. status in the
    # forced-retry list); recurse with the decremented policy.
    if retries.is_forced_retry(method, status_code=http_response.status):
        retries = retries.increment(
            method, url, response=http_response, _pool=self)
        log.info("Forced retry: %s", url)  # lazy logging args
        retries.sleep()
        return self.urlopen(
            method, url,
            body=body, headers=headers,
            retries=retries, redirect=redirect,
            timeout=timeout, **response_kw)

    return http_response
|
||||||
|
|
||||||
|
def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
    """Convert a URLFetch response object into a urllib3 HTTPResponse."""
    resp_headers = urlfetch_resp.headers

    # Production GAE transparently inflates deflate-encoded bodies but
    # leaves the stale encoding header behind; drop it so urllib3's
    # decoder does not try to inflate the content a second time.
    if is_prod_appengine() and resp_headers.get('content-encoding') == 'deflate':
        del resp_headers['content-encoding']

    # HTTPResponse's decoding machinery expects a file-like body, so the
    # in-memory content is wrapped in a BytesIO.
    return HTTPResponse(
        body=BytesIO(urlfetch_resp.content),
        headers=resp_headers,
        status=urlfetch_resp.status_code,
        **response_kw
    )
|
||||||
|
|
||||||
|
def _get_absolute_timeout(self, timeout):
    """Translate a urllib3 timeout value into a URLFetch deadline (seconds)."""
    # The sentinel means "no explicit timeout"; URLFetch's own default is 5s.
    if timeout is Timeout.DEFAULT_TIMEOUT:
        return 5
    if not isinstance(timeout, Timeout):
        # Already a plain number (or None); pass it straight through.
        return timeout
    # URLFetch has a single deadline, so distinct connect/read values
    # collapse to the total — warn so the caller knows granularity is lost.
    if timeout.read is not timeout.connect:
        warnings.warn(
            "URLFetch does not support granular timeout settings, "
            "reverting to total timeout.", AppEnginePlatformWarning)
    return timeout.total
|
||||||
|
|
||||||
|
def _get_retries(self, retries, redirect):
    """Normalise *retries* into a Retry object, warning on unsupported knobs."""
    if not isinstance(retries, Retry):
        # Ints/None are coerced, falling back to this manager's default.
        retries = Retry.from_int(
            retries, redirect=redirect, default=self.retries)

    # URLFetch only honours a total retry budget; the granular counters
    # are ignored, so surface that to the caller once up front.
    if any((retries.connect, retries.read, retries.redirect)):
        warnings.warn(
            "URLFetch only supports total retries and does not "
            "recognize connect, read, or redirect retry parameters.",
            AppEnginePlatformWarning)

    return retries
|
||||||
|
|
||||||
|
|
||||||
|
def is_appengine():
    """Return True when running on any flavour of Google App Engine."""
    environment_checks = (
        is_local_appengine,
        is_prod_appengine,
        is_prod_appengine_mvms,
    )
    # any() short-circuits in the same order as the original or-chain.
    return any(check() for check in environment_checks)
|
||||||
|
|
||||||
|
|
||||||
|
def is_appengine_sandbox():
    """Return True on the App Engine sandbox (GAE, but not Managed VMs)."""
    on_gae = is_appengine()
    # Managed VMs have full socket support, so they are excluded.
    return on_gae and not is_prod_appengine_mvms()
|
||||||
|
|
||||||
|
|
||||||
|
def is_local_appengine():
    """Return True when running under the local App Engine dev server."""
    # FIX: use .get() with a default so a missing SERVER_SOFTWARE variable
    # cannot raise KeyError when APPENGINE_RUNTIME happens to be set.
    return ('APPENGINE_RUNTIME' in os.environ and
            'Development/' in os.environ.get('SERVER_SOFTWARE', ''))
|
||||||
|
|
||||||
|
|
||||||
|
def is_prod_appengine():
    """Return True on production App Engine standard (not Managed VMs)."""
    # FIX: use .get() with a default so a missing SERVER_SOFTWARE variable
    # cannot raise KeyError when APPENGINE_RUNTIME happens to be set.
    return ('APPENGINE_RUNTIME' in os.environ and
            'Google App Engine/' in os.environ.get('SERVER_SOFTWARE', '') and
            not is_prod_appengine_mvms())
|
||||||
|
|
||||||
|
|
||||||
|
def is_prod_appengine_mvms():
    """Return True on App Engine Managed VMs (which have full socket support)."""
    # GAE_VM is set to the string 'true' on Managed VMs; the False default
    # simply makes the comparison fail when the variable is absent.
    gae_vm = os.environ.get('GAE_VM', False)
    return gae_vm == 'true'
|
|
@ -3,6 +3,7 @@ NTLM authenticating pool, contributed by erikcederstran
|
||||||
|
|
||||||
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
|
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
|
||||||
"""
|
"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from http.client import HTTPSConnection
|
from http.client import HTTPSConnection
|
||||||
|
|
|
@ -43,6 +43,7 @@ Module Variables
|
||||||
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
|
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
|
from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
|
||||||
|
@ -53,7 +54,7 @@ except SyntaxError as e:
|
||||||
import OpenSSL.SSL
|
import OpenSSL.SSL
|
||||||
from pyasn1.codec.der import decoder as der_decoder
|
from pyasn1.codec.der import decoder as der_decoder
|
||||||
from pyasn1.type import univ, constraint
|
from pyasn1.type import univ, constraint
|
||||||
from socket import _fileobject, timeout
|
from socket import _fileobject, timeout, error as SocketError
|
||||||
import ssl
|
import ssl
|
||||||
import select
|
import select
|
||||||
|
|
||||||
|
@ -71,6 +72,12 @@ _openssl_versions = {
|
||||||
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
|
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
|
||||||
|
_openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
|
||||||
|
|
||||||
|
if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
|
||||||
|
_openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
|
||||||
|
|
||||||
try:
|
try:
|
||||||
_openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
|
_openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
|
@ -79,12 +86,14 @@ except AttributeError:
|
||||||
_openssl_verify = {
|
_openssl_verify = {
|
||||||
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
|
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
|
||||||
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
|
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
|
||||||
ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
|
ssl.CERT_REQUIRED:
|
||||||
+ OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
|
OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
|
||||||
}
|
}
|
||||||
|
|
||||||
DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
|
DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
|
||||||
|
|
||||||
|
# OpenSSL will only write 16K at a time
|
||||||
|
SSL_WRITE_BLOCKSIZE = 16384
|
||||||
|
|
||||||
orig_util_HAS_SNI = util.HAS_SNI
|
orig_util_HAS_SNI = util.HAS_SNI
|
||||||
orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
|
orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
|
||||||
|
@ -104,7 +113,7 @@ def extract_from_urllib3():
|
||||||
util.HAS_SNI = orig_util_HAS_SNI
|
util.HAS_SNI = orig_util_HAS_SNI
|
||||||
|
|
||||||
|
|
||||||
### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
|
# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
|
||||||
class SubjectAltName(BaseSubjectAltName):
|
class SubjectAltName(BaseSubjectAltName):
|
||||||
'''ASN.1 implementation for subjectAltNames support'''
|
'''ASN.1 implementation for subjectAltNames support'''
|
||||||
|
|
||||||
|
@ -115,7 +124,7 @@ class SubjectAltName(BaseSubjectAltName):
|
||||||
constraint.ValueSizeConstraint(1, 1024)
|
constraint.ValueSizeConstraint(1, 1024)
|
||||||
|
|
||||||
|
|
||||||
### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
|
# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
|
||||||
def get_subj_alt_name(peer_cert):
|
def get_subj_alt_name(peer_cert):
|
||||||
# Search through extensions
|
# Search through extensions
|
||||||
dns_name = []
|
dns_name = []
|
||||||
|
@ -173,7 +182,7 @@ class WrappedSocket(object):
|
||||||
if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
|
if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
|
||||||
return b''
|
return b''
|
||||||
else:
|
else:
|
||||||
raise
|
raise SocketError(e)
|
||||||
except OpenSSL.SSL.ZeroReturnError as e:
|
except OpenSSL.SSL.ZeroReturnError as e:
|
||||||
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
|
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
|
||||||
return b''
|
return b''
|
||||||
|
@ -204,13 +213,21 @@ class WrappedSocket(object):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
def sendall(self, data):
|
def sendall(self, data):
|
||||||
while len(data):
|
total_sent = 0
|
||||||
sent = self._send_until_done(data)
|
while total_sent < len(data):
|
||||||
data = data[sent:]
|
sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
|
||||||
|
total_sent += sent
|
||||||
|
|
||||||
|
def shutdown(self):
|
||||||
|
# FIXME rethrow compatible exceptions should we ever use this
|
||||||
|
self.connection.shutdown()
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
if self._makefile_refs < 1:
|
if self._makefile_refs < 1:
|
||||||
return self.connection.shutdown()
|
try:
|
||||||
|
return self.connection.close()
|
||||||
|
except OpenSSL.SSL.Error:
|
||||||
|
return
|
||||||
else:
|
else:
|
||||||
self._makefile_refs -= 1
|
self._makefile_refs -= 1
|
||||||
|
|
||||||
|
@ -251,7 +268,7 @@ def _verify_callback(cnx, x509, err_no, err_depth, return_code):
|
||||||
|
|
||||||
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
||||||
ca_certs=None, server_hostname=None,
|
ca_certs=None, server_hostname=None,
|
||||||
ssl_version=None):
|
ssl_version=None, ca_cert_dir=None):
|
||||||
ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
|
ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
|
||||||
if certfile:
|
if certfile:
|
||||||
keyfile = keyfile or certfile # Match behaviour of the normal python ssl library
|
keyfile = keyfile or certfile # Match behaviour of the normal python ssl library
|
||||||
|
@ -260,9 +277,9 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
||||||
ctx.use_privatekey_file(keyfile)
|
ctx.use_privatekey_file(keyfile)
|
||||||
if cert_reqs != ssl.CERT_NONE:
|
if cert_reqs != ssl.CERT_NONE:
|
||||||
ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
|
ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
|
||||||
if ca_certs:
|
if ca_certs or ca_cert_dir:
|
||||||
try:
|
try:
|
||||||
ctx.load_verify_locations(ca_certs, None)
|
ctx.load_verify_locations(ca_certs, ca_cert_dir)
|
||||||
except OpenSSL.SSL.Error as e:
|
except OpenSSL.SSL.Error as e:
|
||||||
raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
|
raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
|
||||||
else:
|
else:
|
||||||
|
@ -287,7 +304,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
||||||
raise timeout('select timed out')
|
raise timeout('select timed out')
|
||||||
continue
|
continue
|
||||||
except OpenSSL.SSL.Error as e:
|
except OpenSSL.SSL.Error as e:
|
||||||
raise ssl.SSLError('bad handshake', e)
|
raise ssl.SSLError('bad handshake: %r' % e)
|
||||||
break
|
break
|
||||||
|
|
||||||
return WrappedSocket(cnx, sock)
|
return WrappedSocket(cnx, sock)
|
||||||
|
|
|
@ -1,16 +1,17 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
# Base Exceptions
|
||||||
|
|
||||||
## Base Exceptions
|
|
||||||
|
|
||||||
class HTTPError(Exception):
|
class HTTPError(Exception):
|
||||||
"Base exception used by this module."
|
"Base exception used by this module."
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class HTTPWarning(Warning):
|
class HTTPWarning(Warning):
|
||||||
"Base warning used by this module."
|
"Base warning used by this module."
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class PoolError(HTTPError):
|
class PoolError(HTTPError):
|
||||||
"Base exception for errors caused within a pool."
|
"Base exception for errors caused within a pool."
|
||||||
def __init__(self, pool, message):
|
def __init__(self, pool, message):
|
||||||
|
@ -57,7 +58,7 @@ class ProtocolError(HTTPError):
|
||||||
ConnectionError = ProtocolError
|
ConnectionError = ProtocolError
|
||||||
|
|
||||||
|
|
||||||
## Leaf Exceptions
|
# Leaf Exceptions
|
||||||
|
|
||||||
class MaxRetryError(RequestError):
|
class MaxRetryError(RequestError):
|
||||||
"""Raised when the maximum number of retries is exceeded.
|
"""Raised when the maximum number of retries is exceeded.
|
||||||
|
@ -113,6 +114,11 @@ class ConnectTimeoutError(TimeoutError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class NewConnectionError(ConnectTimeoutError, PoolError):
|
||||||
|
"Raised when we fail to establish a new connection. Usually ECONNREFUSED."
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class EmptyPoolError(PoolError):
|
class EmptyPoolError(PoolError):
|
||||||
"Raised when a pool runs out of connections and no more are allowed."
|
"Raised when a pool runs out of connections and no more are allowed."
|
||||||
pass
|
pass
|
||||||
|
@ -149,6 +155,11 @@ class SecurityWarning(HTTPWarning):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class SubjectAltNameWarning(SecurityWarning):
|
||||||
|
"Warned when connecting to a host with a certificate missing a SAN."
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class InsecureRequestWarning(SecurityWarning):
|
class InsecureRequestWarning(SecurityWarning):
|
||||||
"Warned when making an unverified HTTPS request."
|
"Warned when making an unverified HTTPS request."
|
||||||
pass
|
pass
|
||||||
|
@ -164,6 +175,27 @@ class InsecurePlatformWarning(SecurityWarning):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class SNIMissingWarning(HTTPWarning):
|
||||||
|
"Warned when making a HTTPS request without SNI available."
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class ResponseNotChunked(ProtocolError, ValueError):
|
class ResponseNotChunked(ProtocolError, ValueError):
|
||||||
"Response needs to be chunked in order to read it as chunks."
|
"Response needs to be chunked in order to read it as chunks."
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class ProxySchemeUnknown(AssertionError, ValueError):
|
||||||
|
"ProxyManager does not support the supplied scheme"
|
||||||
|
# TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
|
||||||
|
|
||||||
|
def __init__(self, scheme):
|
||||||
|
message = "Not supported proxy scheme %s" % scheme
|
||||||
|
super(ProxySchemeUnknown, self).__init__(message)
|
||||||
|
|
||||||
|
|
||||||
|
class HeaderParsingError(HTTPError):
|
||||||
|
"Raised by assert_header_parsing, but we convert it to a log.warning statement."
|
||||||
|
def __init__(self, defects, unparsed_data):
|
||||||
|
message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
|
||||||
|
super(HeaderParsingError, self).__init__(message)
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
import email.utils
|
import email.utils
|
||||||
import mimetypes
|
import mimetypes
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
import codecs
|
import codecs
|
||||||
|
|
||||||
from uuid import uuid4
|
from uuid import uuid4
|
||||||
|
|
|
@ -2,3 +2,4 @@ from __future__ import absolute_import
|
||||||
|
|
||||||
from . import ssl_match_hostname
|
from . import ssl_match_hostname
|
||||||
|
|
||||||
|
__all__ = ('ssl_match_hostname', )
|
||||||
|
|
1
lib/requests/packages/urllib3/packages/ssl_match_hostname/.gitignore
vendored
Normal file
1
lib/requests/packages/urllib3/packages/ssl_match_hostname/.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
||||||
|
env
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
try: # Python 3
|
try: # Python 3
|
||||||
|
@ -8,7 +9,7 @@ except ImportError:
|
||||||
from ._collections import RecentlyUsedContainer
|
from ._collections import RecentlyUsedContainer
|
||||||
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
|
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
|
||||||
from .connectionpool import port_by_scheme
|
from .connectionpool import port_by_scheme
|
||||||
from .exceptions import LocationValueError, MaxRetryError
|
from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
|
||||||
from .request import RequestMethods
|
from .request import RequestMethods
|
||||||
from .util.url import parse_url
|
from .util.url import parse_url
|
||||||
from .util.retry import Retry
|
from .util.retry import Retry
|
||||||
|
@ -25,7 +26,7 @@ pool_classes_by_scheme = {
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
|
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
|
||||||
'ssl_version')
|
'ssl_version', 'ca_cert_dir')
|
||||||
|
|
||||||
|
|
||||||
class PoolManager(RequestMethods):
|
class PoolManager(RequestMethods):
|
||||||
|
@ -227,8 +228,8 @@ class ProxyManager(PoolManager):
|
||||||
port = port_by_scheme.get(proxy.scheme, 80)
|
port = port_by_scheme.get(proxy.scheme, 80)
|
||||||
proxy = proxy._replace(port=port)
|
proxy = proxy._replace(port=port)
|
||||||
|
|
||||||
assert proxy.scheme in ("http", "https"), \
|
if proxy.scheme not in ("http", "https"):
|
||||||
'Not supported proxy scheme %s' % proxy.scheme
|
raise ProxySchemeUnknown(proxy.scheme)
|
||||||
|
|
||||||
self.proxy = proxy
|
self.proxy = proxy
|
||||||
self.proxy_headers = proxy_headers or {}
|
self.proxy_headers = proxy_headers or {}
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
try:
|
try:
|
||||||
from urllib.parse import urlencode
|
from urllib.parse import urlencode
|
||||||
except ImportError:
|
except ImportError:
|
||||||
|
@ -71,14 +72,22 @@ class RequestMethods(object):
|
||||||
headers=headers,
|
headers=headers,
|
||||||
**urlopen_kw)
|
**urlopen_kw)
|
||||||
|
|
||||||
def request_encode_url(self, method, url, fields=None, **urlopen_kw):
|
def request_encode_url(self, method, url, fields=None, headers=None,
|
||||||
|
**urlopen_kw):
|
||||||
"""
|
"""
|
||||||
Make a request using :meth:`urlopen` with the ``fields`` encoded in
|
Make a request using :meth:`urlopen` with the ``fields`` encoded in
|
||||||
the url. This is useful for request methods like GET, HEAD, DELETE, etc.
|
the url. This is useful for request methods like GET, HEAD, DELETE, etc.
|
||||||
"""
|
"""
|
||||||
|
if headers is None:
|
||||||
|
headers = self.headers
|
||||||
|
|
||||||
|
extra_kw = {'headers': headers}
|
||||||
|
extra_kw.update(urlopen_kw)
|
||||||
|
|
||||||
if fields:
|
if fields:
|
||||||
url += '?' + urlencode(fields)
|
url += '?' + urlencode(fields)
|
||||||
return self.urlopen(method, url, **urlopen_kw)
|
|
||||||
|
return self.urlopen(method, url, **extra_kw)
|
||||||
|
|
||||||
def request_encode_body(self, method, url, fields=None, headers=None,
|
def request_encode_body(self, method, url, fields=None, headers=None,
|
||||||
encode_multipart=True, multipart_boundary=None,
|
encode_multipart=True, multipart_boundary=None,
|
||||||
|
@ -125,7 +134,8 @@ class RequestMethods(object):
|
||||||
|
|
||||||
if fields:
|
if fields:
|
||||||
if 'body' in urlopen_kw:
|
if 'body' in urlopen_kw:
|
||||||
raise TypeError('request got values for both \'fields\' and \'body\', can only specify one.')
|
raise TypeError(
|
||||||
|
"request got values for both 'fields' and 'body', can only specify one.")
|
||||||
|
|
||||||
if encode_multipart:
|
if encode_multipart:
|
||||||
body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
|
body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
|
||||||
|
|
|
@ -1,18 +1,18 @@
|
||||||
try:
|
from __future__ import absolute_import
|
||||||
import http.client as httplib
|
from contextlib import contextmanager
|
||||||
except ImportError:
|
|
||||||
import httplib
|
|
||||||
import zlib
|
import zlib
|
||||||
import io
|
import io
|
||||||
from socket import timeout as SocketTimeout
|
from socket import timeout as SocketTimeout
|
||||||
|
from socket import error as SocketError
|
||||||
|
|
||||||
from ._collections import HTTPHeaderDict
|
from ._collections import HTTPHeaderDict
|
||||||
from .exceptions import (
|
from .exceptions import (
|
||||||
ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
|
ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
|
||||||
)
|
)
|
||||||
from .packages.six import string_types as basestring, binary_type, PY3
|
from .packages.six import string_types as basestring, binary_type, PY3
|
||||||
|
from .packages.six.moves import http_client as httplib
|
||||||
from .connection import HTTPException, BaseSSLError
|
from .connection import HTTPException, BaseSSLError
|
||||||
from .util.response import is_fp_closed
|
from .util.response import is_fp_closed, is_response_to_head
|
||||||
|
|
||||||
|
|
||||||
class DeflateDecoder(object):
|
class DeflateDecoder(object):
|
||||||
|
@ -132,8 +132,8 @@ class HTTPResponse(io.IOBase):
|
||||||
if "chunked" in encodings:
|
if "chunked" in encodings:
|
||||||
self.chunked = True
|
self.chunked = True
|
||||||
|
|
||||||
# We certainly don't want to preload content when the response is chunked.
|
# If requested, preload the body.
|
||||||
if not self.chunked and preload_content and not self._body:
|
if preload_content and not self._body:
|
||||||
self._body = self.read(decode_content=decode_content)
|
self._body = self.read(decode_content=decode_content)
|
||||||
|
|
||||||
def get_redirect_location(self):
|
def get_redirect_location(self):
|
||||||
|
@ -196,12 +196,70 @@ class HTTPResponse(io.IOBase):
|
||||||
"Received response with content-encoding: %s, but "
|
"Received response with content-encoding: %s, but "
|
||||||
"failed to decode it." % content_encoding, e)
|
"failed to decode it." % content_encoding, e)
|
||||||
|
|
||||||
if flush_decoder and decode_content and self._decoder:
|
if flush_decoder and decode_content:
|
||||||
buf = self._decoder.decompress(binary_type())
|
data += self._flush_decoder()
|
||||||
data += buf + self._decoder.flush()
|
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
def _flush_decoder(self):
|
||||||
|
"""
|
||||||
|
Flushes the decoder. Should only be called if the decoder is actually
|
||||||
|
being used.
|
||||||
|
"""
|
||||||
|
if self._decoder:
|
||||||
|
buf = self._decoder.decompress(b'')
|
||||||
|
return buf + self._decoder.flush()
|
||||||
|
|
||||||
|
return b''
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def _error_catcher(self):
|
||||||
|
"""
|
||||||
|
Catch low-level python exceptions, instead re-raising urllib3
|
||||||
|
variants, so that low-level exceptions are not leaked in the
|
||||||
|
high-level api.
|
||||||
|
|
||||||
|
On exit, release the connection back to the pool.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
|
||||||
|
except SocketTimeout:
|
||||||
|
# FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
|
||||||
|
# there is yet no clean way to get at it from this context.
|
||||||
|
raise ReadTimeoutError(self._pool, None, 'Read timed out.')
|
||||||
|
|
||||||
|
except BaseSSLError as e:
|
||||||
|
# FIXME: Is there a better way to differentiate between SSLErrors?
|
||||||
|
if 'read operation timed out' not in str(e): # Defensive:
|
||||||
|
# This shouldn't happen but just in case we're missing an edge
|
||||||
|
# case, let's avoid swallowing SSL errors.
|
||||||
|
raise
|
||||||
|
|
||||||
|
raise ReadTimeoutError(self._pool, None, 'Read timed out.')
|
||||||
|
|
||||||
|
except (HTTPException, SocketError) as e:
|
||||||
|
# This includes IncompleteRead.
|
||||||
|
raise ProtocolError('Connection broken: %r' % e, e)
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
# The response may not be closed but we're not going to use it anymore
|
||||||
|
# so close it now to ensure that the connection is released back to the pool.
|
||||||
|
if self._original_response and not self._original_response.isclosed():
|
||||||
|
self._original_response.close()
|
||||||
|
|
||||||
|
# Closing the response may not actually be sufficient to close
|
||||||
|
# everything, so if we have a hold of the connection close that
|
||||||
|
# too.
|
||||||
|
if self._connection is not None:
|
||||||
|
self._connection.close()
|
||||||
|
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
if self._original_response and self._original_response.isclosed():
|
||||||
|
self.release_conn()
|
||||||
|
|
||||||
def read(self, amt=None, decode_content=None, cache_content=False):
|
def read(self, amt=None, decode_content=None, cache_content=False):
|
||||||
"""
|
"""
|
||||||
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
|
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
|
||||||
|
@ -231,45 +289,28 @@ class HTTPResponse(io.IOBase):
|
||||||
return
|
return
|
||||||
|
|
||||||
flush_decoder = False
|
flush_decoder = False
|
||||||
|
data = None
|
||||||
|
|
||||||
try:
|
with self._error_catcher():
|
||||||
try:
|
if amt is None:
|
||||||
if amt is None:
|
# cStringIO doesn't like amt=None
|
||||||
# cStringIO doesn't like amt=None
|
data = self._fp.read()
|
||||||
data = self._fp.read()
|
flush_decoder = True
|
||||||
|
else:
|
||||||
|
cache_content = False
|
||||||
|
data = self._fp.read(amt)
|
||||||
|
if amt != 0 and not data: # Platform-specific: Buggy versions of Python.
|
||||||
|
# Close the connection when no data is returned
|
||||||
|
#
|
||||||
|
# This is redundant to what httplib/http.client _should_
|
||||||
|
# already do. However, versions of python released before
|
||||||
|
# December 15, 2012 (http://bugs.python.org/issue16298) do
|
||||||
|
# not properly close the connection in all cases. There is
|
||||||
|
# no harm in redundantly calling close.
|
||||||
|
self._fp.close()
|
||||||
flush_decoder = True
|
flush_decoder = True
|
||||||
else:
|
|
||||||
cache_content = False
|
|
||||||
data = self._fp.read(amt)
|
|
||||||
if amt != 0 and not data: # Platform-specific: Buggy versions of Python.
|
|
||||||
# Close the connection when no data is returned
|
|
||||||
#
|
|
||||||
# This is redundant to what httplib/http.client _should_
|
|
||||||
# already do. However, versions of python released before
|
|
||||||
# December 15, 2012 (http://bugs.python.org/issue16298) do
|
|
||||||
# not properly close the connection in all cases. There is
|
|
||||||
# no harm in redundantly calling close.
|
|
||||||
self._fp.close()
|
|
||||||
flush_decoder = True
|
|
||||||
|
|
||||||
except SocketTimeout:
|
|
||||||
# FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
|
|
||||||
# there is yet no clean way to get at it from this context.
|
|
||||||
raise ReadTimeoutError(self._pool, None, 'Read timed out.')
|
|
||||||
|
|
||||||
except BaseSSLError as e:
|
|
||||||
# FIXME: Is there a better way to differentiate between SSLErrors?
|
|
||||||
if 'read operation timed out' not in str(e): # Defensive:
|
|
||||||
# This shouldn't happen but just in case we're missing an edge
|
|
||||||
# case, let's avoid swallowing SSL errors.
|
|
||||||
raise
|
|
||||||
|
|
||||||
raise ReadTimeoutError(self._pool, None, 'Read timed out.')
|
|
||||||
|
|
||||||
except HTTPException as e:
|
|
||||||
# This includes IncompleteRead.
|
|
||||||
raise ProtocolError('Connection broken: %r' % e, e)
|
|
||||||
|
|
||||||
|
if data:
|
||||||
self._fp_bytes_read += len(data)
|
self._fp_bytes_read += len(data)
|
||||||
|
|
||||||
data = self._decode(data, decode_content, flush_decoder)
|
data = self._decode(data, decode_content, flush_decoder)
|
||||||
|
@ -277,11 +318,7 @@ class HTTPResponse(io.IOBase):
|
||||||
if cache_content:
|
if cache_content:
|
||||||
self._body = data
|
self._body = data
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
finally:
|
|
||||||
if self._original_response and self._original_response.isclosed():
|
|
||||||
self.release_conn()
|
|
||||||
|
|
||||||
def stream(self, amt=2**16, decode_content=None):
|
def stream(self, amt=2**16, decode_content=None):
|
||||||
"""
|
"""
|
||||||
|
@ -319,10 +356,11 @@ class HTTPResponse(io.IOBase):
|
||||||
with ``original_response=r``.
|
with ``original_response=r``.
|
||||||
"""
|
"""
|
||||||
headers = r.msg
|
headers = r.msg
|
||||||
|
|
||||||
if not isinstance(headers, HTTPHeaderDict):
|
if not isinstance(headers, HTTPHeaderDict):
|
||||||
if PY3: # Python 3
|
if PY3: # Python 3
|
||||||
headers = HTTPHeaderDict(headers.items())
|
headers = HTTPHeaderDict(headers.items())
|
||||||
else: # Python 2
|
else: # Python 2
|
||||||
headers = HTTPHeaderDict.from_httplib(headers)
|
headers = HTTPHeaderDict.from_httplib(headers)
|
||||||
|
|
||||||
# HTTPResponse objects in Python 3 don't have a .strict attribute
|
# HTTPResponse objects in Python 3 don't have a .strict attribute
|
||||||
|
@ -434,33 +472,43 @@ class HTTPResponse(io.IOBase):
|
||||||
self._init_decoder()
|
self._init_decoder()
|
||||||
# FIXME: Rewrite this method and make it a class with a better structured logic.
|
# FIXME: Rewrite this method and make it a class with a better structured logic.
|
||||||
if not self.chunked:
|
if not self.chunked:
|
||||||
raise ResponseNotChunked("Response is not chunked. "
|
raise ResponseNotChunked(
|
||||||
|
"Response is not chunked. "
|
||||||
"Header 'transfer-encoding: chunked' is missing.")
|
"Header 'transfer-encoding: chunked' is missing.")
|
||||||
|
|
||||||
if self._original_response and self._original_response._method.upper() == 'HEAD':
|
# Don't bother reading the body of a HEAD request.
|
||||||
# Don't bother reading the body of a HEAD request.
|
if self._original_response and is_response_to_head(self._original_response):
|
||||||
# FIXME: Can we do this somehow without accessing private httplib _method?
|
|
||||||
self._original_response.close()
|
self._original_response.close()
|
||||||
return
|
return
|
||||||
|
|
||||||
while True:
|
with self._error_catcher():
|
||||||
self._update_chunk_length()
|
while True:
|
||||||
if self.chunk_left == 0:
|
self._update_chunk_length()
|
||||||
break
|
if self.chunk_left == 0:
|
||||||
chunk = self._handle_chunk(amt)
|
break
|
||||||
yield self._decode(chunk, decode_content=decode_content,
|
chunk = self._handle_chunk(amt)
|
||||||
flush_decoder=True)
|
decoded = self._decode(chunk, decode_content=decode_content,
|
||||||
|
flush_decoder=False)
|
||||||
|
if decoded:
|
||||||
|
yield decoded
|
||||||
|
|
||||||
# Chunk content ends with \r\n: discard it.
|
if decode_content:
|
||||||
while True:
|
# On CPython and PyPy, we should never need to flush the
|
||||||
line = self._fp.fp.readline()
|
# decoder. However, on Jython we *might* need to, so
|
||||||
if not line:
|
# lets defensively do it anyway.
|
||||||
# Some sites may not end with '\r\n'.
|
decoded = self._flush_decoder()
|
||||||
break
|
if decoded: # Platform-specific: Jython.
|
||||||
if line == b'\r\n':
|
yield decoded
|
||||||
break
|
|
||||||
|
|
||||||
# We read everything; close the "file".
|
# Chunk content ends with \r\n: discard it.
|
||||||
if self._original_response:
|
while True:
|
||||||
self._original_response.close()
|
line = self._fp.fp.readline()
|
||||||
self.release_conn()
|
if not line:
|
||||||
|
# Some sites may not end with '\r\n'.
|
||||||
|
break
|
||||||
|
if line == b'\r\n':
|
||||||
|
break
|
||||||
|
|
||||||
|
# We read everything; close the "file".
|
||||||
|
if self._original_response:
|
||||||
|
self._original_response.close()
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
# For backwards compatibility, provide imports that used to be here.
|
# For backwards compatibility, provide imports that used to be here.
|
||||||
from .connection import is_connection_dropped
|
from .connection import is_connection_dropped
|
||||||
from .request import make_headers
|
from .request import make_headers
|
||||||
|
@ -22,3 +23,22 @@ from .url import (
|
||||||
split_first,
|
split_first,
|
||||||
Url,
|
Url,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
'HAS_SNI',
|
||||||
|
'SSLContext',
|
||||||
|
'Retry',
|
||||||
|
'Timeout',
|
||||||
|
'Url',
|
||||||
|
'assert_fingerprint',
|
||||||
|
'current_time',
|
||||||
|
'is_connection_dropped',
|
||||||
|
'is_fp_closed',
|
||||||
|
'get_host',
|
||||||
|
'parse_url',
|
||||||
|
'make_headers',
|
||||||
|
'resolve_cert_reqs',
|
||||||
|
'resolve_ssl_version',
|
||||||
|
'split_first',
|
||||||
|
'ssl_wrap_socket',
|
||||||
|
)
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
import socket
|
import socket
|
||||||
try:
|
try:
|
||||||
from select import poll, POLLIN
|
from select import poll, POLLIN
|
||||||
|
@ -60,6 +61,8 @@ def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
|
||||||
"""
|
"""
|
||||||
|
|
||||||
host, port = address
|
host, port = address
|
||||||
|
if host.startswith('['):
|
||||||
|
host = host.strip('[]')
|
||||||
err = None
|
err = None
|
||||||
for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
|
for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
|
||||||
af, socktype, proto, canonname, sa = res
|
af, socktype, proto, canonname, sa = res
|
||||||
|
@ -78,16 +81,16 @@ def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
|
||||||
sock.connect(sa)
|
sock.connect(sa)
|
||||||
return sock
|
return sock
|
||||||
|
|
||||||
except socket.error as _:
|
except socket.error as e:
|
||||||
err = _
|
err = e
|
||||||
if sock is not None:
|
if sock is not None:
|
||||||
sock.close()
|
sock.close()
|
||||||
sock = None
|
sock = None
|
||||||
|
|
||||||
if err is not None:
|
if err is not None:
|
||||||
raise err
|
raise err
|
||||||
else:
|
|
||||||
raise socket.error("getaddrinfo returns an empty list")
|
raise socket.error("getaddrinfo returns an empty list")
|
||||||
|
|
||||||
|
|
||||||
def _set_socket_options(sock, options):
|
def _set_socket_options(sock, options):
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
from base64 import b64encode
|
from base64 import b64encode
|
||||||
|
|
||||||
from ..packages.six import b
|
from ..packages.six import b
|
||||||
|
|
|
@ -1,3 +1,9 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
from ..packages.six.moves import http_client as httplib
|
||||||
|
|
||||||
|
from ..exceptions import HeaderParsingError
|
||||||
|
|
||||||
|
|
||||||
def is_fp_closed(obj):
|
def is_fp_closed(obj):
|
||||||
"""
|
"""
|
||||||
Checks whether a given file-like object is closed.
|
Checks whether a given file-like object is closed.
|
||||||
|
@ -20,3 +26,49 @@ def is_fp_closed(obj):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
raise ValueError("Unable to determine whether fp is closed.")
|
raise ValueError("Unable to determine whether fp is closed.")
|
||||||
|
|
||||||
|
|
||||||
|
def assert_header_parsing(headers):
|
||||||
|
"""
|
||||||
|
Asserts whether all headers have been successfully parsed.
|
||||||
|
Extracts encountered errors from the result of parsing headers.
|
||||||
|
|
||||||
|
Only works on Python 3.
|
||||||
|
|
||||||
|
:param headers: Headers to verify.
|
||||||
|
:type headers: `httplib.HTTPMessage`.
|
||||||
|
|
||||||
|
:raises urllib3.exceptions.HeaderParsingError:
|
||||||
|
If parsing errors are found.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# This will fail silently if we pass in the wrong kind of parameter.
|
||||||
|
# To make debugging easier add an explicit check.
|
||||||
|
if not isinstance(headers, httplib.HTTPMessage):
|
||||||
|
raise TypeError('expected httplib.Message, got {0}.'.format(
|
||||||
|
type(headers)))
|
||||||
|
|
||||||
|
defects = getattr(headers, 'defects', None)
|
||||||
|
get_payload = getattr(headers, 'get_payload', None)
|
||||||
|
|
||||||
|
unparsed_data = None
|
||||||
|
if get_payload: # Platform-specific: Python 3.
|
||||||
|
unparsed_data = get_payload()
|
||||||
|
|
||||||
|
if defects or unparsed_data:
|
||||||
|
raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
|
||||||
|
|
||||||
|
|
||||||
|
def is_response_to_head(response):
|
||||||
|
"""
|
||||||
|
Checks, wether a the request of a response has been a HEAD-request.
|
||||||
|
Handles the quirks of AppEngine.
|
||||||
|
|
||||||
|
:param conn:
|
||||||
|
:type conn: :class:`httplib.HTTPResponse`
|
||||||
|
"""
|
||||||
|
# FIXME: Can we do this somehow without accessing private httplib _method?
|
||||||
|
method = response._method
|
||||||
|
if isinstance(method, int): # Platform-specific: Appengine
|
||||||
|
return method == 3
|
||||||
|
return method.upper() == 'HEAD'
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
import time
|
import time
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
@ -94,7 +95,7 @@ class Retry(object):
|
||||||
|
|
||||||
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
|
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
|
||||||
for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer
|
for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer
|
||||||
than :attr:`Retry.MAX_BACKOFF`.
|
than :attr:`Retry.BACKOFF_MAX`.
|
||||||
|
|
||||||
By default, backoff is disabled (set to 0).
|
By default, backoff is disabled (set to 0).
|
||||||
|
|
||||||
|
@ -126,7 +127,7 @@ class Retry(object):
|
||||||
self.method_whitelist = method_whitelist
|
self.method_whitelist = method_whitelist
|
||||||
self.backoff_factor = backoff_factor
|
self.backoff_factor = backoff_factor
|
||||||
self.raise_on_redirect = raise_on_redirect
|
self.raise_on_redirect = raise_on_redirect
|
||||||
self._observed_errors = _observed_errors # TODO: use .history instead?
|
self._observed_errors = _observed_errors # TODO: use .history instead?
|
||||||
|
|
||||||
def new(self, **kw):
|
def new(self, **kw):
|
||||||
params = dict(
|
params = dict(
|
||||||
|
@ -206,7 +207,8 @@ class Retry(object):
|
||||||
|
|
||||||
return min(retry_counts) < 0
|
return min(retry_counts) < 0
|
||||||
|
|
||||||
def increment(self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None):
|
def increment(self, method=None, url=None, response=None, error=None,
|
||||||
|
_pool=None, _stacktrace=None):
|
||||||
""" Return a new Retry object with incremented retry counters.
|
""" Return a new Retry object with incremented retry counters.
|
||||||
|
|
||||||
:param response: A response object, or None, if the server did not
|
:param response: A response object, or None, if the server did not
|
||||||
|
@ -274,7 +276,6 @@ class Retry(object):
|
||||||
|
|
||||||
return new_retry
|
return new_retry
|
||||||
|
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
|
return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
|
||||||
'read={self.read}, redirect={self.redirect})').format(
|
'read={self.read}, redirect={self.redirect})').format(
|
||||||
|
|
|
@ -1,15 +1,42 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
import errno
|
||||||
|
import warnings
|
||||||
|
import hmac
|
||||||
|
|
||||||
from binascii import hexlify, unhexlify
|
from binascii import hexlify, unhexlify
|
||||||
from hashlib import md5, sha1, sha256
|
from hashlib import md5, sha1, sha256
|
||||||
|
|
||||||
from ..exceptions import SSLError, InsecurePlatformWarning
|
from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
|
||||||
|
|
||||||
|
|
||||||
SSLContext = None
|
SSLContext = None
|
||||||
HAS_SNI = False
|
HAS_SNI = False
|
||||||
create_default_context = None
|
create_default_context = None
|
||||||
|
|
||||||
import errno
|
# Maps the length of a digest to a possible hash function producing this digest
|
||||||
import warnings
|
HASHFUNC_MAP = {
|
||||||
|
32: md5,
|
||||||
|
40: sha1,
|
||||||
|
64: sha256,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _const_compare_digest_backport(a, b):
|
||||||
|
"""
|
||||||
|
Compare two digests of equal length in constant time.
|
||||||
|
|
||||||
|
The digests must be of type str/bytes.
|
||||||
|
Returns True if the digests match, and False otherwise.
|
||||||
|
"""
|
||||||
|
result = abs(len(a) - len(b))
|
||||||
|
for l, r in zip(bytearray(a), bytearray(b)):
|
||||||
|
result |= l ^ r
|
||||||
|
return result == 0
|
||||||
|
|
||||||
|
|
||||||
|
_const_compare_digest = getattr(hmac, 'compare_digest',
|
||||||
|
_const_compare_digest_backport)
|
||||||
|
|
||||||
|
|
||||||
try: # Test for SSL features
|
try: # Test for SSL features
|
||||||
import ssl
|
import ssl
|
||||||
|
@ -68,8 +95,11 @@ except ImportError:
|
||||||
self.certfile = certfile
|
self.certfile = certfile
|
||||||
self.keyfile = keyfile
|
self.keyfile = keyfile
|
||||||
|
|
||||||
def load_verify_locations(self, location):
|
def load_verify_locations(self, cafile=None, capath=None):
|
||||||
self.ca_certs = location
|
self.ca_certs = cafile
|
||||||
|
|
||||||
|
if capath is not None:
|
||||||
|
raise SSLError("CA directories not supported in older Pythons")
|
||||||
|
|
||||||
def set_ciphers(self, cipher_suite):
|
def set_ciphers(self, cipher_suite):
|
||||||
if not self.supports_set_ciphers:
|
if not self.supports_set_ciphers:
|
||||||
|
@ -114,31 +144,21 @@ def assert_fingerprint(cert, fingerprint):
|
||||||
Fingerprint as string of hexdigits, can be interspersed by colons.
|
Fingerprint as string of hexdigits, can be interspersed by colons.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Maps the length of a digest to a possible hash function producing
|
|
||||||
# this digest.
|
|
||||||
hashfunc_map = {
|
|
||||||
16: md5,
|
|
||||||
20: sha1,
|
|
||||||
32: sha256,
|
|
||||||
}
|
|
||||||
|
|
||||||
fingerprint = fingerprint.replace(':', '').lower()
|
fingerprint = fingerprint.replace(':', '').lower()
|
||||||
digest_length, odd = divmod(len(fingerprint), 2)
|
digest_length = len(fingerprint)
|
||||||
|
hashfunc = HASHFUNC_MAP.get(digest_length)
|
||||||
if odd or digest_length not in hashfunc_map:
|
if not hashfunc:
|
||||||
raise SSLError('Fingerprint is of invalid length.')
|
raise SSLError(
|
||||||
|
'Fingerprint of invalid length: {0}'.format(fingerprint))
|
||||||
|
|
||||||
# We need encode() here for py32; works on py2 and p33.
|
# We need encode() here for py32; works on py2 and p33.
|
||||||
fingerprint_bytes = unhexlify(fingerprint.encode())
|
fingerprint_bytes = unhexlify(fingerprint.encode())
|
||||||
|
|
||||||
hashfunc = hashfunc_map[digest_length]
|
|
||||||
|
|
||||||
cert_digest = hashfunc(cert).digest()
|
cert_digest = hashfunc(cert).digest()
|
||||||
|
|
||||||
if not cert_digest == fingerprint_bytes:
|
if not _const_compare_digest(cert_digest, fingerprint_bytes):
|
||||||
raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
|
raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
|
||||||
.format(hexlify(fingerprint_bytes),
|
.format(fingerprint, hexlify(cert_digest)))
|
||||||
hexlify(cert_digest)))
|
|
||||||
|
|
||||||
|
|
||||||
def resolve_cert_reqs(candidate):
|
def resolve_cert_reqs(candidate):
|
||||||
|
@ -245,10 +265,11 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None,
|
||||||
|
|
||||||
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
||||||
ca_certs=None, server_hostname=None,
|
ca_certs=None, server_hostname=None,
|
||||||
ssl_version=None, ciphers=None, ssl_context=None):
|
ssl_version=None, ciphers=None, ssl_context=None,
|
||||||
|
ca_cert_dir=None):
|
||||||
"""
|
"""
|
||||||
All arguments except for server_hostname and ssl_context have the same
|
All arguments except for server_hostname, ssl_context, and ca_cert_dir have
|
||||||
meaning as they do when using :func:`ssl.wrap_socket`.
|
the same meaning as they do when using :func:`ssl.wrap_socket`.
|
||||||
|
|
||||||
:param server_hostname:
|
:param server_hostname:
|
||||||
When SNI is supported, the expected hostname of the certificate
|
When SNI is supported, the expected hostname of the certificate
|
||||||
|
@ -258,15 +279,19 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
||||||
:param ciphers:
|
:param ciphers:
|
||||||
A string of ciphers we wish the client to support. This is not
|
A string of ciphers we wish the client to support. This is not
|
||||||
supported on Python 2.6 as the ssl module does not support it.
|
supported on Python 2.6 as the ssl module does not support it.
|
||||||
|
:param ca_cert_dir:
|
||||||
|
A directory containing CA certificates in multiple separate files, as
|
||||||
|
supported by OpenSSL's -CApath flag or the capath argument to
|
||||||
|
SSLContext.load_verify_locations().
|
||||||
"""
|
"""
|
||||||
context = ssl_context
|
context = ssl_context
|
||||||
if context is None:
|
if context is None:
|
||||||
context = create_urllib3_context(ssl_version, cert_reqs,
|
context = create_urllib3_context(ssl_version, cert_reqs,
|
||||||
ciphers=ciphers)
|
ciphers=ciphers)
|
||||||
|
|
||||||
if ca_certs:
|
if ca_certs or ca_cert_dir:
|
||||||
try:
|
try:
|
||||||
context.load_verify_locations(ca_certs)
|
context.load_verify_locations(ca_certs, ca_cert_dir)
|
||||||
except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2
|
except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2
|
||||||
raise SSLError(e)
|
raise SSLError(e)
|
||||||
# Py33 raises FileNotFoundError which subclasses OSError
|
# Py33 raises FileNotFoundError which subclasses OSError
|
||||||
|
@ -275,8 +300,20 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
||||||
if e.errno == errno.ENOENT:
|
if e.errno == errno.ENOENT:
|
||||||
raise SSLError(e)
|
raise SSLError(e)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
if certfile:
|
if certfile:
|
||||||
context.load_cert_chain(certfile, keyfile)
|
context.load_cert_chain(certfile, keyfile)
|
||||||
if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
|
if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
|
||||||
return context.wrap_socket(sock, server_hostname=server_hostname)
|
return context.wrap_socket(sock, server_hostname=server_hostname)
|
||||||
|
|
||||||
|
warnings.warn(
|
||||||
|
'An HTTPS request has been made, but the SNI (Subject Name '
|
||||||
|
'Indication) extension to TLS is not available on this platform. '
|
||||||
|
'This may cause the server to present an incorrect TLS '
|
||||||
|
'certificate, which can cause validation failures. For more '
|
||||||
|
'information, see '
|
||||||
|
'https://urllib3.readthedocs.org/en/latest/security.html'
|
||||||
|
'#snimissingwarning.',
|
||||||
|
SNIMissingWarning
|
||||||
|
)
|
||||||
return context.wrap_socket(sock)
|
return context.wrap_socket(sock)
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
# The default socket timeout, used by httplib to indicate that no timeout was
|
# The default socket timeout, used by httplib to indicate that no timeout was
|
||||||
# specified by the user
|
# specified by the user
|
||||||
from socket import _GLOBAL_DEFAULT_TIMEOUT
|
from socket import _GLOBAL_DEFAULT_TIMEOUT
|
||||||
|
@ -9,6 +10,7 @@ from ..exceptions import TimeoutStateError
|
||||||
# urllib3
|
# urllib3
|
||||||
_Default = object()
|
_Default = object()
|
||||||
|
|
||||||
|
|
||||||
def current_time():
|
def current_time():
|
||||||
"""
|
"""
|
||||||
Retrieve the current time. This function is mocked out in unit testing.
|
Retrieve the current time. This function is mocked out in unit testing.
|
||||||
|
@ -226,9 +228,9 @@ class Timeout(object):
|
||||||
has not yet been called on this object.
|
has not yet been called on this object.
|
||||||
"""
|
"""
|
||||||
if (self.total is not None and
|
if (self.total is not None and
|
||||||
self.total is not self.DEFAULT_TIMEOUT and
|
self.total is not self.DEFAULT_TIMEOUT and
|
||||||
self._read is not None and
|
self._read is not None and
|
||||||
self._read is not self.DEFAULT_TIMEOUT):
|
self._read is not self.DEFAULT_TIMEOUT):
|
||||||
# In case the connect timeout has not yet been established.
|
# In case the connect timeout has not yet been established.
|
||||||
if self._start_connect is None:
|
if self._start_connect is None:
|
||||||
return self._read
|
return self._read
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
|
|
||||||
from ..exceptions import LocationParseError
|
from ..exceptions import LocationParseError
|
||||||
|
@ -85,6 +86,7 @@ class Url(namedtuple('Url', url_attrs)):
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.url
|
return self.url
|
||||||
|
|
||||||
|
|
||||||
def split_first(s, delims):
|
def split_first(s, delims):
|
||||||
"""
|
"""
|
||||||
Given a string and an iterable of delimiters, split on the first found
|
Given a string and an iterable of delimiters, split on the first found
|
||||||
|
@ -115,7 +117,7 @@ def split_first(s, delims):
|
||||||
if min_idx is None or min_idx < 0:
|
if min_idx is None or min_idx < 0:
|
||||||
return s, '', None
|
return s, '', None
|
||||||
|
|
||||||
return s[:min_idx], s[min_idx+1:], min_delim
|
return s[:min_idx], s[min_idx + 1:], min_delim
|
||||||
|
|
||||||
|
|
||||||
def parse_url(url):
|
def parse_url(url):
|
||||||
|
@ -206,6 +208,7 @@ def parse_url(url):
|
||||||
|
|
||||||
return Url(scheme, auth, host, port, path, query, fragment)
|
return Url(scheme, auth, host, port, path, query, fragment)
|
||||||
|
|
||||||
|
|
||||||
def get_host(url):
|
def get_host(url):
|
||||||
"""
|
"""
|
||||||
Deprecated. Use :func:`.parse_url` instead.
|
Deprecated. Use :func:`.parse_url` instead.
|
||||||
|
|
|
@ -273,13 +273,13 @@ class Session(SessionRedirectMixin):
|
||||||
>>> import requests
|
>>> import requests
|
||||||
>>> s = requests.Session()
|
>>> s = requests.Session()
|
||||||
>>> s.get('http://httpbin.org/get')
|
>>> s.get('http://httpbin.org/get')
|
||||||
200
|
<Response [200]>
|
||||||
|
|
||||||
Or as a context manager::
|
Or as a context manager::
|
||||||
|
|
||||||
>>> with requests.Session() as s:
|
>>> with requests.Session() as s:
|
||||||
>>> s.get('http://httpbin.org/get')
|
>>> s.get('http://httpbin.org/get')
|
||||||
200
|
<Response [200]>
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__attrs__ = [
|
__attrs__ = [
|
||||||
|
@ -299,9 +299,9 @@ class Session(SessionRedirectMixin):
|
||||||
#: :class:`Request <Request>`.
|
#: :class:`Request <Request>`.
|
||||||
self.auth = None
|
self.auth = None
|
||||||
|
|
||||||
#: Dictionary mapping protocol to the URL of the proxy (e.g.
|
#: Dictionary mapping protocol or protocol and host to the URL of the proxy
|
||||||
#: {'http': 'foo.bar:3128'}) to be used on each
|
#: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
|
||||||
#: :class:`Request <Request>`.
|
#: be used on each :class:`Request <Request>`.
|
||||||
self.proxies = {}
|
self.proxies = {}
|
||||||
|
|
||||||
#: Event-handling hooks.
|
#: Event-handling hooks.
|
||||||
|
@ -325,7 +325,8 @@ class Session(SessionRedirectMixin):
|
||||||
#: limit, a :class:`TooManyRedirects` exception is raised.
|
#: limit, a :class:`TooManyRedirects` exception is raised.
|
||||||
self.max_redirects = DEFAULT_REDIRECT_LIMIT
|
self.max_redirects = DEFAULT_REDIRECT_LIMIT
|
||||||
|
|
||||||
#: Should we trust the environment?
|
#: Trust environment settings for proxy configuration, default
|
||||||
|
#: authentication and similar.
|
||||||
self.trust_env = True
|
self.trust_env = True
|
||||||
|
|
||||||
#: A CookieJar containing all currently outstanding cookies set on this
|
#: A CookieJar containing all currently outstanding cookies set on this
|
||||||
|
@ -410,8 +411,8 @@ class Session(SessionRedirectMixin):
|
||||||
:param url: URL for the new :class:`Request` object.
|
:param url: URL for the new :class:`Request` object.
|
||||||
:param params: (optional) Dictionary or bytes to be sent in the query
|
:param params: (optional) Dictionary or bytes to be sent in the query
|
||||||
string for the :class:`Request`.
|
string for the :class:`Request`.
|
||||||
:param data: (optional) Dictionary or bytes to send in the body of the
|
:param data: (optional) Dictionary, bytes, or file-like object to send
|
||||||
:class:`Request`.
|
in the body of the :class:`Request`.
|
||||||
:param json: (optional) json to send in the body of the
|
:param json: (optional) json to send in the body of the
|
||||||
:class:`Request`.
|
:class:`Request`.
|
||||||
:param headers: (optional) Dictionary of HTTP Headers to send with the
|
:param headers: (optional) Dictionary of HTTP Headers to send with the
|
||||||
|
@ -428,18 +429,15 @@ class Session(SessionRedirectMixin):
|
||||||
:type timeout: float or tuple
|
:type timeout: float or tuple
|
||||||
:param allow_redirects: (optional) Set to True by default.
|
:param allow_redirects: (optional) Set to True by default.
|
||||||
:type allow_redirects: bool
|
:type allow_redirects: bool
|
||||||
:param proxies: (optional) Dictionary mapping protocol to the URL of
|
:param proxies: (optional) Dictionary mapping protocol or protocol and
|
||||||
the proxy.
|
hostname to the URL of the proxy.
|
||||||
:param stream: (optional) whether to immediately download the response
|
:param stream: (optional) whether to immediately download the response
|
||||||
content. Defaults to ``False``.
|
content. Defaults to ``False``.
|
||||||
:param verify: (optional) if ``True``, the SSL cert will be verified.
|
:param verify: (optional) whether the SSL cert will be verified.
|
||||||
A CA_BUNDLE path can also be provided.
|
A CA_BUNDLE path can also be provided. Defaults to ``True``.
|
||||||
:param cert: (optional) if String, path to ssl client cert file (.pem).
|
:param cert: (optional) if String, path to ssl client cert file (.pem).
|
||||||
If Tuple, ('cert', 'key') pair.
|
If Tuple, ('cert', 'key') pair.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
method = to_native_string(method)
|
|
||||||
|
|
||||||
# Create the Request.
|
# Create the Request.
|
||||||
req = Request(
|
req = Request(
|
||||||
method = method.upper(),
|
method = method.upper(),
|
||||||
|
@ -636,7 +634,7 @@ class Session(SessionRedirectMixin):
|
||||||
'cert': cert}
|
'cert': cert}
|
||||||
|
|
||||||
def get_adapter(self, url):
|
def get_adapter(self, url):
|
||||||
"""Returns the appropriate connnection adapter for the given URL."""
|
"""Returns the appropriate connection adapter for the given URL."""
|
||||||
for (prefix, adapter) in self.adapters.items():
|
for (prefix, adapter) in self.adapters.items():
|
||||||
|
|
||||||
if url.lower().startswith(prefix):
|
if url.lower().startswith(prefix):
|
||||||
|
|
|
@ -78,11 +78,12 @@ _codes = {
|
||||||
507: ('insufficient_storage',),
|
507: ('insufficient_storage',),
|
||||||
509: ('bandwidth_limit_exceeded', 'bandwidth'),
|
509: ('bandwidth_limit_exceeded', 'bandwidth'),
|
||||||
510: ('not_extended',),
|
510: ('not_extended',),
|
||||||
|
511: ('network_authentication_required', 'network_auth', 'network_authentication'),
|
||||||
}
|
}
|
||||||
|
|
||||||
codes = LookupDict(name='status_codes')
|
codes = LookupDict(name='status_codes')
|
||||||
|
|
||||||
for (code, titles) in list(_codes.items()):
|
for code, titles in _codes.items():
|
||||||
for title in titles:
|
for title in titles:
|
||||||
setattr(codes, title, code)
|
setattr(codes, title, code)
|
||||||
if not title.startswith('\\'):
|
if not title.startswith('\\'):
|
||||||
|
|
|
@ -29,7 +29,7 @@ from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2,
|
||||||
basestring)
|
basestring)
|
||||||
from .cookies import RequestsCookieJar, cookiejar_from_dict
|
from .cookies import RequestsCookieJar, cookiejar_from_dict
|
||||||
from .structures import CaseInsensitiveDict
|
from .structures import CaseInsensitiveDict
|
||||||
from .exceptions import InvalidURL
|
from .exceptions import InvalidURL, FileModeWarning
|
||||||
|
|
||||||
_hush_pyflakes = (RequestsCookieJar,)
|
_hush_pyflakes = (RequestsCookieJar,)
|
||||||
|
|
||||||
|
@ -48,23 +48,44 @@ def dict_to_sequence(d):
|
||||||
|
|
||||||
|
|
||||||
def super_len(o):
|
def super_len(o):
|
||||||
|
total_length = 0
|
||||||
|
current_position = 0
|
||||||
|
|
||||||
if hasattr(o, '__len__'):
|
if hasattr(o, '__len__'):
|
||||||
return len(o)
|
total_length = len(o)
|
||||||
|
|
||||||
if hasattr(o, 'len'):
|
elif hasattr(o, 'len'):
|
||||||
return o.len
|
total_length = o.len
|
||||||
|
|
||||||
if hasattr(o, 'fileno'):
|
elif hasattr(o, 'getvalue'):
|
||||||
|
# e.g. BytesIO, cStringIO.StringIO
|
||||||
|
total_length = len(o.getvalue())
|
||||||
|
|
||||||
|
elif hasattr(o, 'fileno'):
|
||||||
try:
|
try:
|
||||||
fileno = o.fileno()
|
fileno = o.fileno()
|
||||||
except io.UnsupportedOperation:
|
except io.UnsupportedOperation:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
return os.fstat(fileno).st_size
|
total_length = os.fstat(fileno).st_size
|
||||||
|
|
||||||
if hasattr(o, 'getvalue'):
|
# Having used fstat to determine the file length, we need to
|
||||||
# e.g. BytesIO, cStringIO.StringIO
|
# confirm that this file was opened up in binary mode.
|
||||||
return len(o.getvalue())
|
if 'b' not in o.mode:
|
||||||
|
warnings.warn((
|
||||||
|
"Requests has determined the content-length for this "
|
||||||
|
"request using the binary size of the file: however, the "
|
||||||
|
"file has been opened in text mode (i.e. without the 'b' "
|
||||||
|
"flag in the mode). This may lead to an incorrect "
|
||||||
|
"content-length. In Requests 3.0, support will be removed "
|
||||||
|
"for files in text mode."),
|
||||||
|
FileModeWarning
|
||||||
|
)
|
||||||
|
|
||||||
|
if hasattr(o, 'tell'):
|
||||||
|
current_position = o.tell()
|
||||||
|
|
||||||
|
return max(0, total_length - current_position)
|
||||||
|
|
||||||
|
|
||||||
def get_netrc_auth(url, raise_errors=False):
|
def get_netrc_auth(url, raise_errors=False):
|
||||||
|
@ -94,8 +115,12 @@ def get_netrc_auth(url, raise_errors=False):
|
||||||
|
|
||||||
ri = urlparse(url)
|
ri = urlparse(url)
|
||||||
|
|
||||||
# Strip port numbers from netloc
|
# Strip port numbers from netloc. This weird `if...encode`` dance is
|
||||||
host = ri.netloc.split(':')[0]
|
# used for Python 3.2, which doesn't support unicode literals.
|
||||||
|
splitstr = b':'
|
||||||
|
if isinstance(url, str):
|
||||||
|
splitstr = splitstr.decode('ascii')
|
||||||
|
host = ri.netloc.split(splitstr)[0]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
_netrc = netrc(netrc_path).authenticators(host)
|
_netrc = netrc(netrc_path).authenticators(host)
|
||||||
|
@ -499,7 +524,9 @@ def should_bypass_proxies(url):
|
||||||
if no_proxy:
|
if no_proxy:
|
||||||
# We need to check whether we match here. We need to see if we match
|
# We need to check whether we match here. We need to see if we match
|
||||||
# the end of the netloc, both with and without the port.
|
# the end of the netloc, both with and without the port.
|
||||||
no_proxy = no_proxy.replace(' ', '').split(',')
|
no_proxy = (
|
||||||
|
host for host in no_proxy.replace(' ', '').split(',') if host
|
||||||
|
)
|
||||||
|
|
||||||
ip = netloc.split(':')[0]
|
ip = netloc.split(':')[0]
|
||||||
if is_ipv4_address(ip):
|
if is_ipv4_address(ip):
|
||||||
|
@ -537,36 +564,22 @@ def get_environ_proxies(url):
|
||||||
else:
|
else:
|
||||||
return getproxies()
|
return getproxies()
|
||||||
|
|
||||||
|
def select_proxy(url, proxies):
|
||||||
|
"""Select a proxy for the url, if applicable.
|
||||||
|
|
||||||
|
:param url: The url being for the request
|
||||||
|
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
|
||||||
|
"""
|
||||||
|
proxies = proxies or {}
|
||||||
|
urlparts = urlparse(url)
|
||||||
|
proxy = proxies.get(urlparts.scheme+'://'+urlparts.hostname)
|
||||||
|
if proxy is None:
|
||||||
|
proxy = proxies.get(urlparts.scheme)
|
||||||
|
return proxy
|
||||||
|
|
||||||
def default_user_agent(name="python-requests"):
|
def default_user_agent(name="python-requests"):
|
||||||
"""Return a string representing the default user agent."""
|
"""Return a string representing the default user agent."""
|
||||||
_implementation = platform.python_implementation()
|
return '%s/%s' % (name, __version__)
|
||||||
|
|
||||||
if _implementation == 'CPython':
|
|
||||||
_implementation_version = platform.python_version()
|
|
||||||
elif _implementation == 'PyPy':
|
|
||||||
_implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
|
|
||||||
sys.pypy_version_info.minor,
|
|
||||||
sys.pypy_version_info.micro)
|
|
||||||
if sys.pypy_version_info.releaselevel != 'final':
|
|
||||||
_implementation_version = ''.join([_implementation_version, sys.pypy_version_info.releaselevel])
|
|
||||||
elif _implementation == 'Jython':
|
|
||||||
_implementation_version = platform.python_version() # Complete Guess
|
|
||||||
elif _implementation == 'IronPython':
|
|
||||||
_implementation_version = platform.python_version() # Complete Guess
|
|
||||||
else:
|
|
||||||
_implementation_version = 'Unknown'
|
|
||||||
|
|
||||||
try:
|
|
||||||
p_system = platform.system()
|
|
||||||
p_release = platform.release()
|
|
||||||
except IOError:
|
|
||||||
p_system = 'Unknown'
|
|
||||||
p_release = 'Unknown'
|
|
||||||
|
|
||||||
return " ".join(['%s/%s' % (name, __version__),
|
|
||||||
'%s/%s' % (_implementation, _implementation_version),
|
|
||||||
'%s/%s' % (p_system, p_release)])
|
|
||||||
|
|
||||||
|
|
||||||
def default_headers():
|
def default_headers():
|
||||||
|
|
Loading…
Reference in a new issue