diff --git a/lib/tvdb_api/tvdb_api.py b/lib/tvdb_api/tvdb_api.py
index b4dd7588..bb27aa8d 100644
--- a/lib/tvdb_api/tvdb_api.py
+++ b/lib/tvdb_api/tvdb_api.py
@@ -402,13 +402,7 @@ class Tvdb:
         This is only used when all episodes are pulled.
         And only the main language xml is used, the actor and banner xml are lost.
         """
-
-        global lastTimeout
-
-        # if we're given a lastTimeout that is less than 1 min just give up
-        if not forceConnect and lastTimeout != None and datetime.datetime.now() - lastTimeout < datetime.timedelta(minutes=1):
-            raise tvdb_error("We recently timed out, so giving up early this time")
-
+
         self.shows = ShowContainer() # Holds all Show classes
         self.corrections = {} # Holds show-name to show_id mapping
 
@@ -520,7 +514,6 @@ class Tvdb:
         return os.path.join(tempfile.gettempdir(), "tvdb_api-%s" % (uid))
 
     def _loadUrl(self, url, params=None, language=None):
-        global lastTimeout
         try:
             log().debug("Retrieving URL %s" % url)
 
@@ -532,10 +525,17 @@ class Tvdb:
             # get response from TVDB
             resp = sess.get(url, params=params)
 
+        except requests.HTTPError, e:
+            raise tvdb_error("HTTP error " + str(e.errno) + " while loading URL " + str(url))
+
+        except requests.ConnectionError, e:
+            raise tvdb_error("Connection error " + str(e.message) + " while loading URL " + str(url))
+
+        except requests.Timeout, e:
+            raise tvdb_error("Connection timed out " + str(e.message) + " while loading URL " + str(url))
+
         except Exception, e:
-            if not str(e).startswith('HTTP Error'):
-                lastTimeout = datetime.datetime.now()
-            raise tvdb_error("Could not connect to server: %s" % (e))
+            raise tvdb_error("Unknown exception while loading URL " + str(url) + ": " + str(e))
 
         if 'application/zip' in resp.headers.get("Content-Type", ''):
             try:
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 1bda5f4d..efca5845 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -182,7 +182,8 @@ Returns a byte-string retrieved from the url provider.
         url = urlparse.urlunparse(parsed)
 
         it = iter(req_headers)
-        resp = requests.get(url, params=params, data=post_data, headers=dict(zip(it, it)))
+        sess = requests.session()
+        resp = sess.get(url, params=params, data=post_data, headers=dict(zip(it, it)))
     except requests.HTTPError, e:
         logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
         return None
@@ -209,10 +210,11 @@ def _remove_file_failed(file):
 
 def download_file(url, filename):
     try:
-        req = requests.get(url, stream=True)
+        sess = requests.session()
+        req = sess.get(url, stream=True)
         #CHUNK = 16 * 1024
         with open(filename, 'wb') as fp:
-            for chunk in req.iter_content(chunk_size=1024):
+            for chunk in req.iter_content(chunk_size=(16 *1024)):
                 if chunk:
                     fp.write(chunk)
                     fp.flush()