# -*- coding: utf-8 -*-

"""
requests.adapters
~~~~~~~~~~~~~~~~~

This module contains the transport adapters that Requests uses to define
and maintain connections.
"""

import socket
import copy

from .models import Response
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce
from .compat import urlparse, basestring, urldefrag, unquote
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
                    except_on_missing_scheme, get_auth_from_url)
from .structures import CaseInsensitiveDict
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import TimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3.exceptions import ProxyError as _ProxyError
from .cookies import extract_cookies_to_jar
from .exceptions import ConnectionError, Timeout, SSLError, ProxyError
from .auth import _basic_auth_str

DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0


class BaseAdapter(object):
    """The Base Transport Adapter"""

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self):
        raise NotImplementedError

    def close(self):
        raise NotImplementedError


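# A minimal sketch (not part of this module) of the transport adapter
# interface: an adapter must provide send() and close(). In practice one
# usually subclasses HTTPAdapter below rather than BaseAdapter; the
# LoggingAdapter name here is invented for illustration.
#
#   >>> class LoggingAdapter(HTTPAdapter):
#   ...     def send(self, request, **kwargs):
#   ...         print('-> %s %s' % (request.method, request.url))
#   ...         return super(LoggingAdapter, self).send(request, **kwargs)
#
# Mounted on a Session, it would log every request routed through it.
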
class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param int max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed connections and
        timeouts, never to requests where the server returns a response.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        self.max_retries = max_retries
        self.config = {}
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        return dict((attr, getattr(self, attr, None)) for attr in
                    self.__attrs__)

    def __setstate__(self, state):
        # Can't handle this by adding 'proxy_manager' to self.__attrs__,
        # because self.poolmanager uses a lambda function, which isn't
        # pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK):
        """Initializes a urllib3 PoolManager. This method should not be called
        from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block)
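
    # A hedged sketch of the customary extension point: subclass and override
    # init_poolmanager to pass extra keyword arguments through to urllib3's
    # PoolManager. The TlsV1Adapter name and the choice of ssl_version are
    # illustrative assumptions, not part of this module.
    #
    #   >>> import ssl
    #   >>> class TlsV1Adapter(HTTPAdapter):
    #   ...     def init_poolmanager(self, connections, maxsize,
    #   ...                          block=DEFAULT_POOLBLOCK):
    #   ...         self.poolmanager = PoolManager(
    #   ...             num_pools=connections, maxsize=maxsize, block=block,
    #   ...             ssl_version=ssl.PROTOCOL_TLSv1)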

    def cert_verify(self, conn, url, verify, cert):
        """Verify an SSL certificate. This method should not be called from
        user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Whether we should actually verify the certificate.
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = DEFAULT_CA_BUNDLE_PATH

            if not cert_loc:
                raise Exception("Could not find a suitable SSL CA certificate bundle.")

            conn.cert_reqs = 'CERT_REQUIRED'
            conn.ca_certs = cert_loc
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None

        if cert:
            # cert may be a single path to a combined cert/key file, or a
            # (cert, key) pair of paths.
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
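
    # For orientation only (not part of the module): the verify and cert
    # values handled above originate from user-level calls such as the
    # following; the file paths are made-up placeholders.
    #
    #   >>> s = requests.Session()
    #   >>> s.get('https://example.com', verify='/path/to/ca-bundle.crt')
    #   >>> s.get('https://example.com',
    #   ...       cert=('/path/to/client.crt', '/path/to/client.key'))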

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        """
        proxies = proxies or {}
        proxy = proxies.get(urlparse(url.lower()).scheme)

        if proxy:
            except_on_missing_scheme(proxy)
            proxy_headers = self.proxy_headers(proxy)

            if proxy not in self.proxy_manager:
                self.proxy_manager[proxy] = proxy_from_url(
                    proxy,
                    proxy_headers=proxy_headers,
                    num_pools=self._pool_connections,
                    maxsize=self._pool_maxsize,
                    block=self._pool_block)

            conn = self.proxy_manager[proxy].connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn
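
    # For orientation (not part of the module): the PoolManager caches one
    # connection pool per host, so two URLs on the same host should share a
    # pool. A doctest-style sketch, assuming this behaviour of urllib3:
    #
    #   >>> a = HTTPAdapter()
    #   >>> c1 = a.get_connection('http://example.com/a')
    #   >>> c2 = a.get_connection('http://example.com/b')
    #   >>> c1 is c2
    #   True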

    def close(self):
        """Disposes of any internal state.

        Currently, this just closes the PoolManager, which closes pooled
        connections.
        """
        self.poolmanager.clear()

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through an HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes to proxy URLs.
        """
        proxies = proxies or {}
        scheme = urlparse(request.url).scheme
        proxy = proxies.get(scheme)

        if proxy and scheme != 'https':
            url, _ = urldefrag(request.url)
        else:
            url = request.path_url

        return url
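
    # A hedged doctest-style sketch of the non-proxy branch above, assuming
    # the top-level requests API is importable:
    #
    #   >>> import requests
    #   >>> req = requests.Request('GET', 'http://example.com/path?q=1').prepare()
    #   >>> HTTPAdapter().request_url(req, {})
    #   '/path?q=1'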

    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass
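
    # A hedged sketch of the override this hook exists for; the
    # HostHeaderAdapter name and the fixed Host value are invented for
    # illustration.
    #
    #   >>> class HostHeaderAdapter(HTTPAdapter):
    #   ...     def add_headers(self, request, **kwargs):
    #   ...         request.headers['Host'] = 'internal.example'
    #   >>> s = requests.Session()
    #   >>> s.mount('http://10.', HostHeaderAdapter())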

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The url of the proxy being used for this request.
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers
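
    # For orientation (not part of the module): credentials embedded in a
    # proxy URL end up here as a Proxy-Authorization header. The host and
    # credentials below are placeholders.
    #
    #   >>> s = requests.Session()
    #   >>> s.get('http://example.com',
    #   ...       proxies={'http': 'http://user:pass@10.0.0.1:3128'})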

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) The timeout on the request.
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """

        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        # Use chunked transfer encoding when a body is present but its
        # Content-Length is unknown.
        chunked = not (request.body is None or 'Content-Length' in request.headers)

        timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            else:
                # Send the request chunk by chunk over a low-level connection.
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=timeout)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    # Each chunk is preceded by its size in hex and followed
                    # by CRLF; a zero-length chunk terminates the body.
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    r = low_conn.getresponse()
                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise
                else:
                    # All is well, return the connection to the pool.
                    conn._put_conn(low_conn)

        except socket.error as sockerr:
            raise ConnectionError(sockerr, request=request)

        except MaxRetryError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                raise SSLError(e, request=request)
            elif isinstance(e, TimeoutError):
                raise Timeout(e, request=request)
            else:
                raise

        r = self.build_response(request, resp)

        if not stream:
            # Consume the body now so the connection can be released back to
            # the pool.
            r.content

        return r
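
# A closing usage sketch (illustrative, not part of the module): mounting an
# adapter with retries tuned, then streaming a response whose transport went
# through send() above. The URL is a placeholder.
#
#   >>> s = requests.Session()
#   >>> s.mount('https://', HTTPAdapter(max_retries=5, pool_maxsize=25))
#   >>> r = s.get('https://example.com/big-file', stream=True)
#   >>> for chunk in r.iter_content(8192):
#   ...     pass  # process each chunk without buffering the whole body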