Fix for timeouts on TVDB and TVRage searches.

echel0n 2014-03-15 20:13:30 -07:00
parent 31ccc8ffe2
commit b225794473
2 changed files with 16 additions and 14 deletions


@@ -403,12 +403,6 @@ class Tvdb:
         And only the main language xml is used, the actor and banner xml are lost.
         """
-        global lastTimeout
-
-        # if we're given a lastTimeout that is less than 1 min just give up
-        if not forceConnect and lastTimeout != None and datetime.datetime.now() - lastTimeout < datetime.timedelta(minutes=1):
-            raise tvdb_error("We recently timed out, so giving up early this time")
-
         self.shows = ShowContainer()  # Holds all Show classes
         self.corrections = {}  # Holds show-name to show_id mapping
@@ -520,7 +514,6 @@ class Tvdb:
         return os.path.join(tempfile.gettempdir(), "tvdb_api-%s" % (uid))
 
     def _loadUrl(self, url, params=None, language=None):
-        global lastTimeout
         try:
             log().debug("Retrieving URL %s" % url)
@@ -532,10 +525,17 @@ class Tvdb:
             # get response from TVDB
             resp = sess.get(url, params=params)
 
+        except requests.HTTPError, e:
+            raise tvdb_error("HTTP error " + str(e.errno) + " while loading URL " + str(url))
+
+        except requests.ConnectionError, e:
+            raise tvdb_error("Connection error " + str(e.message) + " while loading URL " + str(url))
+
+        except requests.Timeout, e:
+            raise tvdb_error("Connection timed out " + str(e.message) + " while loading URL " + str(url))
+
         except Exception, e:
-            if not str(e).startswith('HTTP Error'):
-                lastTimeout = datetime.datetime.now()
-            raise tvdb_error("Could not connect to server: %s" % (e))
+            raise tvdb_error("Unknown exception while loading URL " + str(url) + ": " + str(e))
 
         if 'application/zip' in resp.headers.get("Content-Type", ''):
             try:
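
For reference, a minimal standalone sketch of the error-handling pattern the reworked _loadUrl follows: each requests exception type is translated into one library-specific error instead of tracking a module-level lastTimeout. The names fetch_xml and ApiError are illustrative only, and the sketch uses the Python 3 compatible "except ... as e" form rather than the commit's Python 2 syntax.

    import requests

    class ApiError(Exception):
        """Raised for any failure while talking to the remote API."""

    def fetch_xml(url, params=None):
        # One session per call, mirroring the commit's sess.get(...) usage.
        sess = requests.session()
        try:
            resp = sess.get(url, params=params)
        except requests.HTTPError as e:
            raise ApiError("HTTP error %s while loading URL %s" % (e, url))
        except requests.ConnectionError as e:
            raise ApiError("Connection error %s while loading URL %s" % (e, url))
        except requests.Timeout as e:
            raise ApiError("Connection timed out %s while loading URL %s" % (e, url))
        except Exception as e:
            raise ApiError("Unknown exception while loading URL %s: %s" % (url, e))
        return resp.content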


@@ -182,7 +182,8 @@ Returns a byte-string retrieved from the url provider.
         url = urlparse.urlunparse(parsed)
 
         it = iter(req_headers)
-        resp = requests.get(url, params=params, data=post_data, headers=dict(zip(it, it)))
+        sess = requests.session()
+        resp = sess.get(url, params=params, data=post_data, headers=dict(zip(it, it)))
     except requests.HTTPError, e:
         logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
         return None
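
As an aside on the getURL change above, a small illustrative snippet (not part of the commit) of what the dict(zip(it, it)) idiom does: it pairs a flat [name, value, name, value, ...] header list into a dict, which is then passed to a session GET. The header values here are made-up examples.

    import requests

    req_headers = ['User-Agent', 'SickBeard', 'Accept', 'text/html']

    it = iter(req_headers)
    headers = dict(zip(it, it))  # -> {'User-Agent': 'SickBeard', 'Accept': 'text/html'}

    sess = requests.session()
    resp = sess.get('http://example.com/api', headers=headers)
    print(resp.status_code)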
@@ -209,10 +210,11 @@ def _remove_file_failed(file):
 
 def download_file(url, filename):
     try:
-        req = requests.get(url, stream=True)
+        sess = requests.session()
+        req = sess.get(url, stream=True)
         #CHUNK = 16 * 1024
         with open(filename, 'wb') as fp:
-            for chunk in req.iter_content(chunk_size=1024):
+            for chunk in req.iter_content(chunk_size=(16 *1024)):
                 if chunk:
                     fp.write(chunk)
                     fp.flush()
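
Likewise, a brief sketch (not part of the commit) of the streamed-download pattern download_file now uses: a requests session, stream=True, and 16 KB chunks written as they arrive via iter_content. The function name, URL, and target path are placeholders.

    import requests

    def save_to_disk(url, filename):
        sess = requests.session()
        req = sess.get(url, stream=True)
        with open(filename, 'wb') as fp:
            for chunk in req.iter_content(chunk_size=16 * 1024):
                if chunk:  # skip keep-alive chunks
                    fp.write(chunk)
                    fp.flush()

    save_to_disk('http://example.com/banner.jpg', '/tmp/banner.jpg')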