Mirror of https://github.com/SickGear/SickGear.git (synced 2025-01-05 17:43:37 +00:00)
Logging message tweaks and minor PEP8 tweaks
* Change corrected spelling & better clarified various log messages
* Change minor PEP8 tweaks in sab.py
* Add api disabled error code for newznab providers
parent 7af8f09916
commit 47da38a914
29 changed files with 111 additions and 104 deletions
@@ -20,6 +20,9 @@
 * Change displayShow scene exception separator to a comma for neater appearance
 * Remove non english subtitle providers
 * Fix rename of excluded metadata
+* Change corrected spelling & better clarified various log messages
+* Change minor PEP8 tweaks in sab.py
+* Add api disabled error code for newznab providers

 [develop changelog]

@@ -62,7 +62,7 @@ class GenericClient(object):
 logger.log(self.name + u': Connection Timeout ' + ex(e), logger.ERROR)
 return False
 except Exception, e:
-logger.log(self.name + u': Unknown exception raised when send torrent to ' + self.name + ': ' + ex(e),
+logger.log(self.name + u': Unknown exception raised when sending torrent to ' + self.name + ': ' + ex(e),
 logger.ERROR)
 return False

@@ -483,7 +483,7 @@ class AddShowidTvdbidIndex(db.SchemaUpgrade):
 def execute(self):
 backup_database(self.checkDBVersion())

-logger.log(u'Check for duplicate shows before adding unique index.')
+logger.log(u'Checking for duplicate shows before adding unique index.')
 MainSanityCheck(self.connection).fix_duplicate_shows('tvdb_id')

 logger.log(u'Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.')

@@ -416,7 +416,7 @@ def make_dirs(path):
 # Windows, create all missing folders
 if os.name == 'nt' or os.name == 'ce':
 try:
-logger.log(u"Folder " + path + " didn't exist, creating it", logger.DEBUG)
+logger.log(u"Folder " + path + " doesn't exist, creating it", logger.DEBUG)
 ek.ek(os.makedirs, path)
 except (OSError, IOError), e:
 logger.log(u"Failed creating " + path + " : " + ex(e), logger.ERROR)

@@ -436,7 +436,7 @@ def make_dirs(path):
 continue

 try:
-logger.log(u"Folder " + sofar + " didn't exist, creating it", logger.DEBUG)
+logger.log(u"Folder " + sofar + " doesn't exist, creating it", logger.DEBUG)
 ek.ek(os.mkdir, sofar)
 # use normpath to remove end separator, otherwise checks permissions against itself
 chmodAsParent(ek.ek(os.path.normpath, sofar))

@@ -1464,7 +1464,7 @@ def wait_for_free_port(host, port):
 else:
 return

-raise IOError("Port %r not free on %r" % (port, host))
+raise IOError("Port %r is not free on %r" % (port, host))


 def check_port(host, port, timeout=1.0):

@@ -188,11 +188,11 @@ class ImageCache:

 # make sure the cache folder exists before we try copying to it
 if not ek.ek(os.path.isdir, self._cache_dir()):
-logger.log(u"Image cache dir didn't exist, creating it at " + str(self._cache_dir()))
+logger.log(u"Image cache directory doesn't exist, creating it at " + str(self._cache_dir()))
 ek.ek(os.makedirs, self._cache_dir())

 if not ek.ek(os.path.isdir, self._thumbnails_dir()):
-logger.log(u"Thumbnails cache dir didn't exist, creating it at " + str(self._thumbnails_dir()))
+logger.log(u"Thumbnails cache directory doesn't exist, creating it at " + str(self._thumbnails_dir()))
 ek.ek(os.makedirs, self._thumbnails_dir())

 logger.log(u"Copying from " + image_path + " to " + dest_path)

@@ -276,12 +276,12 @@ class ImageCache:

 if cur_file_type in need_images and need_images[cur_file_type]:
 logger.log(
-u"Found an image in the show dir that doesn't exist in the cache, caching it: " + cur_file_name + ", type " + str(
+u"Found an image in the show directory that doesn't exist in the cache, caching it: " + cur_file_name + ", type " + str(
 cur_file_type), logger.DEBUG)
 self._cache_image_from_file(cur_file_name, cur_file_type, show_obj.indexerid)
 need_images[cur_file_type] = False
 except exceptions.ShowDirNotFoundException:
-logger.log(u"Unable to search for images in show dir because it doesn't exist", logger.WARNING)
+logger.log(u"Unable to search for images in show directory because it doesn't exist", logger.WARNING)

 # download from indexer for missing ones
 for cur_image_type in [self.POSTER, self.BANNER, self.POSTER_THUMB, self.BANNER_THUMB]:

@@ -78,7 +78,7 @@ class Boxcar2Notifier:

 # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters
 elif e.code == 400:
-logger.log("Wrong data send to boxcar2", logger.ERROR)
+logger.log("Wrong data sent to boxcar2", logger.ERROR)
 return False

 logger.log("Boxcar2 notification successful.", logger.DEBUG)

@@ -56,7 +56,7 @@ class EmailNotifier:
 show = self._parseEp(ep_name)
 to = self._generate_recepients(show)
 if len(to) == 0:
-logger.log('Skipping email notify because there are no configured recepients', logger.WARNING)
+logger.log('Skipping email notification because there are no configured recepients', logger.WARNING)
 else:
 try:
 msg = MIMEMultipart('alternative')

@@ -91,7 +91,7 @@ class EmailNotifier:
 show = self._parseEp(ep_name)
 to = self._generate_recepients(show)
 if len(to) == 0:
-logger.log('Skipping email notify because there are no configured recepients', logger.WARNING)
+logger.log('Skipping email notification because there are no configured recepients', logger.WARNING)
 else:
 try:
 msg = MIMEMultipart('alternative')

@@ -126,7 +126,7 @@ class EmailNotifier:
 show = self._parseEp(ep_name)
 to = self._generate_recepients(show)
 if len(to) == 0:
-logger.log('Skipping email notify because there are no configured recepients', logger.WARNING)
+logger.log('Skipping email notification because there are no configured recepients', logger.WARNING)
 else:
 try:
 msg = MIMEMultipart('alternative')

@@ -98,7 +98,7 @@ class ProwlNotifier:
 logger.log(u"Prowl notifications sent.", logger.MESSAGE)
 return True
 elif request_status == 401:
-logger.log(u"Prowl auth failed: %s" % response.reason, logger.ERROR)
+logger.log(u"Prowl authentication failed: %s" % response.reason, logger.ERROR)
 return False
 else:
 logger.log(u"Prowl notification failed.", logger.ERROR)

@@ -88,7 +88,7 @@ class PushalotNotifier:
 logger.log(u"Pushalot notifications sent.", logger.DEBUG)
 return True
 elif request_status == 410:
-logger.log(u"Pushalot auth failed: %s" % response.reason, logger.ERROR)
+logger.log(u"Pushalot authentication failed: %s" % response.reason, logger.ERROR)
 return False
 else:
 logger.log(u"Pushalot notification failed.", logger.ERROR)

@@ -114,7 +114,7 @@ class PushbulletNotifier:
 logger.log(u"Pushbullet notifications sent.", logger.DEBUG)
 return True
 elif request_status == 410:
-logger.log(u"Pushbullet auth failed: %s" % response.reason, logger.ERROR)
+logger.log(u"Pushbullet authentication failed: %s" % response.reason, logger.ERROR)
 return False
 else:
 logger.log(u"Pushbullet notification failed.", logger.ERROR)

@@ -52,10 +52,10 @@ def sendNZB(nzb, proper=False):

 nzbGetRPC = xmlrpclib.ServerProxy(url)
 try:
-if nzbGetRPC.writelog("INFO", "SickGear connected to drop of %s any moment now." % (nzb.name + ".nzb")):
-logger.log(u"Successful connected to NZBget", logger.DEBUG)
+if nzbGetRPC.writelog("INFO", "SickGear connected to drop off %s any moment now." % (nzb.name + ".nzb")):
+logger.log(u"Successfully connected to NZBget", logger.DEBUG)
 else:
-logger.log(u"Successful connected to NZBget, but unable to send a message", logger.ERROR)
+logger.log(u"Successfully connected to NZBget, but unable to send a message", logger.ERROR)

 except httplib.socket.error:
 logger.log(

@@ -94,8 +94,8 @@ def sendNZB(nzb, proper=False):
 data = nzb.extraInfo[0]
 nzbcontent64 = standard_b64encode(data)

-logger.log(u"Sending NZB to NZBget")
-logger.log(u"URL: " + url, logger.DEBUG)
+logger.log(u"Sending NZB to NZBGet: %s" % nzb.name)
+logger.log(u"NZBGet URL: " + url, logger.DEBUG)

 try:
 # Find out if nzbget supports priority (Version 9.0+), old versions beginning with a 0.x will use the old command

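For context, the log messages above wrap NZBGet's XML-RPC interface. A minimal standalone sketch of that interaction (not code from the commit; the host, port and credentials are placeholders, with tegbzn6789 being NZBGet's shipped default password):

    # Python 2, matching the module above: connect to NZBGet's XML-RPC API and
    # write an INFO line to its log; writelog() returns True on success.
    import xmlrpclib

    url = 'http://nzbget:tegbzn6789@localhost:6789/xmlrpc'  # placeholder host and credentials
    nzbget_rpc = xmlrpclib.ServerProxy(url)
    if nzbget_rpc.writelog('INFO', 'SickGear connected to drop off example.nzb any moment now.'):
        print('Successfully connected to NZBget')
    else:
        print('Successfully connected to NZBget, but unable to send a message')
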
@@ -98,7 +98,7 @@ class BitSoupProvider(generic.TorrentProvider):
 return False

 if re.search('Username or password incorrect', response.text):
-logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+logger.log(u'Your authentication credentials for ' + self.name + ' are incorrect, check your config.', logger.ERROR)
 return False

 return True

@@ -180,7 +180,7 @@ class BitSoupProvider(generic.TorrentProvider):

 #Continue only if one Release is found
 if len(torrent_rows) < 2:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+logger.log(u"The data returned from " + self.name + " does not contain any torrents",
 logger.DEBUG)
 continue

@@ -89,7 +89,7 @@ class Fanzub(generic.NZBProvider):
 results.append(curItem)
 else:
 logger.log(
-u"The data returned from the " + self.name + " is incomplete, this result is unusable",
+u"The data returned from " + self.name + " is incomplete, this result is unusable",
 logger.DEBUG)

 return results

@@ -109,7 +109,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
 return False

 if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response.text):
-logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+logger.log(u'Your authentication credentials for ' + self.name + ' are incorrect, check your config.', logger.ERROR)
 return False

 if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:

@@ -205,7 +205,7 @@ class FreshOnTVProvider(generic.TorrentProvider):

 #Continue only if one Release is found
 if len(torrent_rows) < 2:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+logger.log(u"The data returned from " + self.name + " does not contain any torrents",
 logger.DEBUG)
 continue

@@ -113,7 +113,7 @@ class HDTorrentsProvider(generic.TorrentProvider):

 if re.search('You need cookies enabled to log in.', response.text) \
 or response.status_code == 401:
-logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+logger.log(u'Your authentication credentials for ' + self.name + ' are incorrect, check your config.', logger.ERROR)
 return False

 self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']

@@ -212,7 +212,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
 continue

 if not entries:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+logger.log(u"The data returned from " + self.name + " does not contain any torrents",
 logger.DEBUG)
 continue

@@ -97,7 +97,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
 if re.search('tries left', response.text) \
 or re.search('<title>IPT</title>', response.text) \
 or response.status_code == 401:
-logger.log(u'Invalid username or password for ' + self.name + ', Check your settings!', logger.ERROR)
+logger.log(u'Your authentication credentials for ' + self.name + ' are incorrect, check your config.', logger.ERROR)
 return False

 return True

@@ -189,7 +189,7 @@ class IPTorrentsProvider(generic.TorrentProvider):

 #Continue only if one Release is found
 if len(torrents) < 2:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+logger.log(u"The data returned from " + self.name + " does not contain any torrents",
 logger.WARNING)
 continue

@@ -250,6 +250,9 @@ class NewznabProvider(generic.NZBProvider):
 elif code == '102':
 raise AuthException(
 "Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
+elif code == '910':
+logger.log(u"" + self.name + " currently has their API disabled, please check with provider.", logger.WARNING)
+return False
 else:
 logger.log(u"Unknown error given from " + self.name + ": " + data.feed['error']['description'],
 logger.ERROR)

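The new '910' branch extends the provider's dispatch on newznab error codes; in the newznab API convention 910 means the indexer has disabled its API, so it is now logged as a warning and the search backs off instead of being treated as an unknown error. A rough standalone sketch of that dispatch pattern (the function name, exception class and plain print logging are illustrative stand-ins, not the provider's actual method):

    # Illustrative sketch of dispatching on newznab error codes; not the provider class itself.
    class AuthException(Exception):
        pass

    def handle_error_code(name, code):
        if code == '102':
            raise AuthException("Your account isn't allowed to use the API on " + name + ", contact the administrator")
        elif code == '910':
            # API disabled by the indexer: warn and stop, rather than treating it as an auth failure
            print('WARNING: ' + name + ' currently has their API disabled, please check with provider.')
            return False
        else:
            print('ERROR: Unknown error given from ' + name)
            return False
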
@@ -308,7 +311,7 @@ class NewznabProvider(generic.NZBProvider):
 results.append(item)
 else:
 logger.log(
-u"The data returned from the " + self.name + " is incomplete, this result is unusable",
+u"The data returned from " + self.name + " is incomplete, this result is unusable",
 logger.DEBUG)

 # get total and offset attribs

@@ -336,7 +339,7 @@ class NewznabProvider(generic.NZBProvider):
 params['limit']) + " items.", logger.DEBUG)
 else:
 logger.log(str(
-total - int(params['offset'])) + " No more searches needed, could find anything I was looking for! " + str(
+total - int(params['offset'])) + " No more searches needed, couldn't find anything I was looking for! " + str(
 params['limit']) + " items.", logger.DEBUG)
 break

@@ -208,7 +208,7 @@ class NextGenProvider(generic.TorrentProvider):
 resultsTable = html.find('div', attrs={'id': 'torrent-table-wrapper'})

 if not resultsTable:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+logger.log(u"The data returned from " + self.name + " does not contain any torrents",
 logger.DEBUG)
 continue

@@ -253,7 +253,7 @@ class NextGenProvider(generic.TorrentProvider):
 items[mode].append(item)

 else:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+logger.log(u"The data returned from " + self.name + " does not contain any torrents",
 logger.WARNING)
 continue

@@ -96,7 +96,7 @@ class NyaaProvider(generic.TorrentProvider):
 results.append(curItem)
 else:
 logger.log(
-u"The data returned from the " + self.name + " is incomplete, this result is unusable",
+u"The data returned from " + self.name + " is incomplete, this result is unusable",
 logger.DEBUG)

 return results

@@ -98,7 +98,7 @@ class SCCProvider(generic.TorrentProvider):
 if re.search('Username or password incorrect', response.text) \
 or re.search('<title>SceneAccess \| Login</title>', response.text) \
 or response.status_code == 401:
-logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+logger.log(u'Your authentication credentials for ' + self.name + ' are incorrect, check your config.', logger.ERROR)
 return False

 return True

@@ -204,7 +204,7 @@ class SCCProvider(generic.TorrentProvider):
 source = self.name + " (" + html.title.string + ")"
 else:
 source = self.name
-logger.log(u"The Data returned from " + source + " does not contain any torrent", logger.DEBUG)
+logger.log(u"The data returned from " + source + " does not contain any torrents", logger.DEBUG)
 continue

 for result in torrent_table.find_all('tr')[1:]:

@@ -91,7 +91,7 @@ class SpeedCDProvider(generic.TorrentProvider):

 if re.search('Incorrect username or Password. Please try again.', response.text) \
 or response.status_code == 401:
-logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+logger.log(u'Your authentication credentials for ' + self.name + ' are incorrect, check your config.', logger.ERROR)
 return False

 return True

@@ -92,7 +92,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
 return False

 if re.search('Username or password incorrect', response.text):
-logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+logger.log(u'Your authentication credentials for ' + self.name + ' are incorrect, check your config.', logger.ERROR)
 return False

 return True

@@ -174,7 +174,7 @@ class TorrentBytesProvider(generic.TorrentProvider):

 #Continue only if one Release is found
 if len(torrent_rows) < 2:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+logger.log(u"The data returned from " + self.name + " does not contain any torrents",
 logger.DEBUG)
 continue

@@ -101,11 +101,11 @@ class TorrentDayProvider(generic.TorrentProvider):
 return False

 if re.search('You tried too often', response.text):
-logger.log(u'Too many login access for ' + self.name + ', can''t retrive any data', logger.ERROR)
+logger.log(u'Too many login attempts for ' + self.name + ', can\'t retrive any data', logger.ERROR)
 return False

 if response.status_code == 401:
-logger.log(u'Invalid username or password for ' + self.name + ', Check your settings!', logger.ERROR)
+logger.log(u'Your authentication credentials for ' + self.name + ' are incorrect, check your config.', logger.ERROR)
 return False

 if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:

@@ -118,7 +118,7 @@ class TorrentDayProvider(generic.TorrentProvider):
 return True

 else:
-logger.log(u'Unable to obtain cookie for TorrentDay', logger.ERROR)
+logger.log(u'Unable to obtain a cookie for TorrentDay', logger.ERROR)
 return False

@@ -97,7 +97,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
 if re.search('Invalid Username/password', response.text) \
 or re.search('<title>Login :: TorrentLeech.org</title>', response.text) \
 or response.status_code == 401:
-logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+logger.log(u'Your authentication credentials for ' + self.name + ' are incorrect, check your config.', logger.ERROR)
 return False

 return True

@@ -182,7 +182,7 @@ class TorrentLeechProvider(generic.TorrentProvider):

 #Continue only if one Release is found
 if len(torrent_rows) < 2:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+logger.log(u"The data returned from " + self.name + " does not contain any torrents",
 logger.DEBUG)
 continue

@@ -17,14 +17,15 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.


-
-import urllib, httplib
+import urllib
+import httplib
 import datetime

 import sickbeard

 from lib import MultipartPostHandler
-import urllib2, cookielib
+import urllib2
+import cookielib

 try:
 import json

@@ -45,13 +46,13 @@ def sendNZB(nzb):

 # set up a dict with the URL params in it
 params = {}
-if sickbeard.SAB_USERNAME != None:
+if sickbeard.SAB_USERNAME is not None:
 params['ma_username'] = sickbeard.SAB_USERNAME
-if sickbeard.SAB_PASSWORD != None:
+if sickbeard.SAB_PASSWORD is not None:
 params['ma_password'] = sickbeard.SAB_PASSWORD
-if sickbeard.SAB_APIKEY != None:
+if sickbeard.SAB_APIKEY is not None:
 params['apikey'] = sickbeard.SAB_APIKEY
-if sickbeard.SAB_CATEGORY != None:
+if sickbeard.SAB_CATEGORY is not None:
 params['cat'] = sickbeard.SAB_CATEGORY

 # use high priority if specified (recently aired episode)

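The PEP8 tweaks in the two sab.py hunks above split combined imports onto separate lines and replace equality comparisons against None with identity checks. A small standalone illustration of the identity-check idiom (the variable names here are placeholders, not the module's settings):

    # PEP8 prefers 'is not None' because it tests identity and is unaffected by
    # objects that define a custom __eq__; '!= None' relies on equality semantics.
    sab_username = None

    params = {}
    if sab_username is not None:  # preferred form used in the hunk above
        params['ma_username'] = sab_username
    print(params)
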
@@ -64,7 +65,7 @@ def sendNZB(nzb):
 if nzb.provider.getID() == 'newzbin':
 id = nzb.provider.getIDFromURL(nzb.url)
 if not id:
-logger.log("Unable to send NZB to sab, can't find ID in URL " + str(nzb.url), logger.ERROR)
+logger.log("Unable to send NZB to SABnzbd, can't find ID in URL " + str(nzb.url), logger.ERROR)
 return False
 params['mode'] = 'addid'
 params['name'] = id

@@ -79,8 +80,8 @@ def sendNZB(nzb):

 url = sickbeard.SAB_HOST + "api?" + urllib.urlencode(params)

-logger.log(u"Sending NZB to SABnzbd")
-logger.log(u"URL: " + url, logger.DEBUG)
+logger.log(u"Sending NZB to SABnzbd: %s" % nzb.name)
+logger.log(u"SABnzbd URL: " + url, logger.DEBUG)

 try:
 # if we have the URL to an NZB then we've built up the SAB API URL already so just call it

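The URL logged above is SABnzbd's HTTP API endpoint built with urllib.urlencode from the params dict assembled earlier. A hedged sketch of what such a request URL looks like (the host, API key and category are placeholders, and 'addurl' is just one of the API's add modes):

    # Build a SABnzbd API call of the same shape as the one logged in the hunk above.
    import urllib

    params = {'mode': 'addurl', 'name': 'http://example.com/release.nzb',
              'apikey': 'YOUR_SAB_APIKEY', 'cat': 'tv'}
    url = 'http://localhost:8080/sabnzbd/' + 'api?' + urllib.urlencode(params)
    print(url)
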
@@ -99,15 +100,15 @@ def sendNZB(nzb):
 f = opener.open(req)

 except (EOFError, IOError), e:
-logger.log(u"Unable to connect to SAB: " + ex(e), logger.ERROR)
+logger.log(u"Unable to connect to SABnzbd: " + ex(e), logger.ERROR)
 return False

 except httplib.InvalidURL, e:
-logger.log(u"Invalid SAB host, check your config: " + ex(e), logger.ERROR)
+logger.log(u"Invalid SABnzbd host, check your config: " + ex(e), logger.ERROR)
 return False

 # this means we couldn't open the connection or something just as bad
-if f == None:
+if f is None:
 logger.log(u"No data returned from SABnzbd, NZB not sent", logger.ERROR)
 return False

@@ -115,7 +116,7 @@ def sendNZB(nzb):
 try:
 result = f.readlines()
 except Exception, e:
-logger.log(u"Error trying to get result from SAB, NZB not sent: " + ex(e), logger.ERROR)
+logger.log(u"Error trying to get result from SABnzbd, NZB not sent: " + ex(e), logger.ERROR)
 return False

 # SAB shouldn't return a blank result, this most likely (but not always) means that it timed out and didn't recieve the NZB

@@ -126,17 +127,17 @@ def sendNZB(nzb):
 # massage the result a little bit
 sabText = result[0].strip()

-logger.log(u"Result text from SAB: " + sabText, logger.DEBUG)
+logger.log(u"Result text from SABnzbd: " + sabText, logger.DEBUG)

 # do some crude parsing of the result text to determine what SAB said
 if sabText == "ok":
-logger.log(u"NZB sent to SAB successfully", logger.DEBUG)
+logger.log(u"NZB sent to SABnzbd successfully", logger.DEBUG)
 return True
 elif sabText == "Missing authentication":
-logger.log(u"Incorrect username/password sent to SAB, NZB not sent", logger.ERROR)
+logger.log(u"Incorrect username/password sent to SABnzbd, NZB not sent", logger.ERROR)
 return False
 else:
-logger.log(u"Unknown failure sending NZB to sab. Return text is: " + sabText, logger.ERROR)
+logger.log(u"Unknown failure sending NZB to SABnzbd. Return text is: " + sabText, logger.ERROR)
 return False

@@ -144,12 +145,12 @@ def _checkSabResponse(f):
 try:
 result = f.readlines()
 except Exception, e:
-logger.log(u"Error trying to get result from SAB" + ex(e), logger.ERROR)
-return False, "Error from SAB"
+logger.log(u"Error trying to get result from SABnzbd" + ex(e), logger.ERROR)
+return False, "Error from SABnzbd"

 if len(result) == 0:
 logger.log(u"No data returned from SABnzbd, NZB not sent", logger.ERROR)
-return False, "No data from SAB"
+return False, "No data from SABnzbd"

 sabText = result[0].strip()
 sabJson = {}

@@ -159,8 +160,8 @@ def _checkSabResponse(f):
 pass

 if sabText == "Missing authentication":
-logger.log(u"Incorrect username/password sent to SAB", logger.ERROR)
-return False, "Incorrect username/password sent to SAB"
+logger.log(u"Incorrect username/password sent to SABnzbd", logger.ERROR)
+return False, "Incorrect username/password sent to SABnzbd"
 elif 'error' in sabJson:
 logger.log(sabJson['error'], logger.ERROR)
 return False, sabJson['error']

@@ -172,12 +173,12 @@ def _sabURLOpenSimple(url):
 try:
 f = urllib.urlopen(url)
 except (EOFError, IOError), e:
-logger.log(u"Unable to connect to SAB: " + ex(e), logger.ERROR)
+logger.log(u"Unable to connect to SABnzbd: " + ex(e), logger.ERROR)
 return False, "Unable to connect"
 except httplib.InvalidURL, e:
-logger.log(u"Invalid SAB host, check your config: " + ex(e), logger.ERROR)
-return False, "Invalid SAB host"
-if f == None:
+logger.log(u"Invalid SABnzbd host, check your config: " + ex(e), logger.ERROR)
+return False, "Invalid SABnzbd host"
+if f is None:
 logger.log(u"No data returned from SABnzbd", logger.ERROR)
 return False, "No data returned from SABnzbd"
 else:

@@ -481,7 +481,7 @@ def xem_refresh(indexer_id, indexer, force=False):

 if refresh or force:
 logger.log(
-u'Looking up XEM scene mapping using for show %s on %s' % (indexer_id, sickbeard.indexerApi(indexer).name,),
+u'Looking up XEM scene mapping for show %s on %s' % (indexer_id, sickbeard.indexerApi(indexer).name,),
 logger.DEBUG)

 # mark refreshed

@@ -493,7 +493,7 @@ def xem_refresh(indexer_id, indexer, force=False):
 try:
 parsedJSON = sickbeard.helpers.getURL(url, json=True)
 if not parsedJSON or parsedJSON == '':
-logger.log(u'No XEN data for show "%s on %s"' % (indexer_id, sickbeard.indexerApi(indexer).name,), logger.MESSAGE)
+logger.log(u'No XEM data for show "%s on %s"' % (indexer_id, sickbeard.indexerApi(indexer).name,), logger.MESSAGE)
 return

 if 'success' in parsedJSON['result']:

@@ -296,7 +296,7 @@ def isFirstBestMatch(result):
 Checks if the given result is a best quality match and if we want to archive the episode on first match.
 """

-logger.log(u"Checking if we should archive our first best quality match for for episode " + result.name,
+logger.log(u"Checking if we should archive our first best quality match for episode " + result.name,
 logger.DEBUG)

 show_obj = result.episodes[0].show

@@ -550,7 +550,7 @@ def searchProviders(show, episodes, manualSearch=False):
 # if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
 if allWanted and bestSeasonResult.quality == highest_quality_overall:
 logger.log(
-u"Every ep in this season is needed, downloading the whole " + bestSeasonResult.provider.providerType + " " + bestSeasonResult.name)
+u"Every episode in this season is needed, downloading the whole " + bestSeasonResult.provider.providerType + " " + bestSeasonResult.name)
 epObjs = []
 for curEpNum in allEps:
 epObjs.append(show.getEpisode(season, curEpNum))

@@ -560,7 +560,7 @@ def searchProviders(show, episodes, manualSearch=False):

 elif not anyWanted:
 logger.log(
-u"No eps from this season are wanted at this quality, ignoring the result of " + bestSeasonResult.name,
+u"No episodes from this season are wanted at this quality, ignoring the result of " + bestSeasonResult.name,
 logger.DEBUG)

 else:

@@ -590,7 +590,7 @@ def searchProviders(show, episodes, manualSearch=False):

 # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it.
 logger.log(
-u"Adding multi-ep result for full-season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!")
+u"Adding multi episode result for full season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!")
 epObjs = []
 for curEpNum in allEps:
 epObjs.append(show.getEpisode(season, curEpNum))

@@ -607,11 +607,11 @@ def searchProviders(show, episodes, manualSearch=False):
 if MULTI_EP_RESULT in foundResults[curProvider.name]:
 for multiResult in foundResults[curProvider.name][MULTI_EP_RESULT]:

-logger.log(u"Seeing if we want to bother with multi-episode result " + multiResult.name, logger.DEBUG)
+logger.log(u"Seeing if we want to bother with multi episode result " + multiResult.name, logger.DEBUG)

 if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multiResult.name, multiResult.size,
 multiResult.provider.name):
-logger.log(multiResult.name + u" has previously failed, rejecting this multi-ep result")
+logger.log(multiResult.name + u" has previously failed, rejecting this multi episode result")
 continue

 # see how many of the eps that this result covers aren't covered by single results

@@ -626,11 +626,11 @@ def searchProviders(show, episodes, manualSearch=False):
 notNeededEps.append(epNum)

 logger.log(
-u"Single-ep check result is neededEps: " + str(neededEps) + ", notNeededEps: " + str(notNeededEps),
+u"Single episode check result is needed episodes: " + str(neededEps) + ", not needed episodes: " + str(notNeededEps),
 logger.DEBUG)

 if not notNeededEps:
-logger.log(u"All of these episodes were covered by single episode results, ignoring this multi-episode result", logger.DEBUG)
+logger.log(u"All of these episodes were covered by single episode results, ignoring this multi episode result", logger.DEBUG)
 continue

 # check if these eps are already covered by another multi-result

@@ -644,12 +644,12 @@ def searchProviders(show, episodes, manualSearch=False):
 multiNeededEps.append(epNum)

 logger.log(
-u"Multi-ep check result is multiNeededEps: " + str(multiNeededEps) + ", multiNotNeededEps: " + str(
+u"Multi episode check result is multi needed episodes: " + str(multiNeededEps) + ", multi not needed episodes: " + str(
 multiNotNeededEps), logger.DEBUG)

 if not multiNeededEps:
 logger.log(
-u"All of these episodes were covered by another multi-episode nzbs, ignoring this multi-ep result",
+u"All of these episodes were covered by another multi episode nzb, ignoring this multi episode result",
 logger.DEBUG)
 continue

@@ -662,8 +662,8 @@ def searchProviders(show, episodes, manualSearch=False):
 epNum = epObj.episode
 if epNum in foundResults[curProvider.name]:
 logger.log(
-u"A needed multi-episode result overlaps with a single-episode result for ep #" + str(
-epNum) + ", removing the single-episode results from the list", logger.DEBUG)
+u"A needed multi episode result overlaps with a single episode result for episode #" + str(
+epNum) + ", removing the single episode results from the list", logger.DEBUG)
 del foundResults[curProvider.name][epNum]

 # of all the single ep results narrow it down to the best one for each episode

@@ -106,7 +106,7 @@ class ShowQueue(generic_queue.GenericQueue):

 if (self.isBeingUpdated(show) or self.isInUpdateQueue(show)) and not force:
 logger.log(
-u"A refresh was attempted but there is already an update queued or in progress. Since updates do a refres at the end anyway I'm skipping this request.",
+u"A refresh was attempted but there is already an update queued or in progress. Since updates do a refresh at the end anyway I'm skipping this request.",
 logger.DEBUG)
 return

@@ -336,7 +336,7 @@ class QueueItemAdd(ShowQueueItem):
 self.show.loadIMDbInfo()
 except imdb_exceptions.IMDbError, e:
 #todo Insert UI notification
-logger.log(u" Something wrong on IMDb api: " + ex(e), logger.WARNING)
+logger.log(u"Something is wrong with IMDb api: " + ex(e), logger.WARNING)
 except Exception, e:
 logger.log(u"Error loading IMDb info: " + ex(e), logger.ERROR)

@@ -365,7 +365,7 @@ class QueueItemAdd(ShowQueueItem):
 try:
 self.show.loadEpisodesFromDir()
 except Exception, e:
-logger.log(u"Error searching dir for episodes: " + ex(e), logger.ERROR)
+logger.log(u"Error searching directory for episodes: " + ex(e), logger.ERROR)
 logger.log(traceback.format_exc(), logger.DEBUG)

 # if they gave a custom status then change all the eps to it

@@ -450,7 +450,7 @@ class QueueItemRename(ShowQueueItem):
 try:
 show_loc = self.show.location
 except exceptions.ShowDirNotFoundException:
-logger.log(u"Can't perform rename on " + self.show.name + " when the show dir is missing.", logger.WARNING)
+logger.log(u"Can't perform rename on " + self.show.name + " when the show directory is missing.", logger.WARNING)
 return

 ep_obj_rename_list = []

@@ -519,7 +519,7 @@ class QueueItemUpdate(ShowQueueItem):
 try:
 self.show.loadIMDbInfo()
 except imdb_exceptions.IMDbError, e:
-logger.log(u" Something wrong on IMDb api: " + ex(e), logger.WARNING)
+logger.log(u"Something is wrong with IMDb api: " + ex(e), logger.WARNING)
 except Exception, e:
 logger.log(u"Error loading IMDb info: " + ex(e), logger.ERROR)
 logger.log(traceback.format_exc(), logger.DEBUG)

@@ -333,7 +333,7 @@ class TVShow(object):
 result = False

 if not ek.ek(os.path.isdir, self._location):
-logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation")
+logger.log(str(self.indexerid) + u": Show directory doesn't exist, skipping NFO generation")
 return False

 logger.log(str(self.indexerid) + u": Writing NFOs for show")

@@ -345,7 +345,7 @@ class TVShow(object):
 def writeMetadata(self, show_only=False):

 if not ek.ek(os.path.isdir, self._location):
-logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation")
+logger.log(str(self.indexerid) + u": Show directory doesn't exist, skipping NFO generation")
 return

 self.getImages()

@@ -358,7 +358,7 @@ class TVShow(object):
 def writeEpisodeNFOs(self):

 if not ek.ek(os.path.isdir, self._location):
-logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation")
+logger.log(str(self.indexerid) + u": Show directory doesn't exist, skipping NFO generation")
 return

 logger.log(str(self.indexerid) + u": Writing NFOs for all episodes")

@@ -376,7 +376,7 @@ class TVShow(object):
 def updateMetadata(self):

 if not ek.ek(os.path.isdir, self._location):
-logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation")
+logger.log(str(self.indexerid) + u": Show directory doesn't exist, skipping NFO generation")
 return

 self.updateShowNFO()

@@ -386,7 +386,7 @@ class TVShow(object):
 result = False

 if not ek.ek(os.path.isdir, self._location):
-logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation")
+logger.log(str(self.indexerid) + u": Show directory doesn't exist, skipping NFO generation")
 return False

 logger.log(str(self.indexerid) + u": Updating NFOs for show with new indexer info")

@@ -399,7 +399,7 @@ class TVShow(object):
 def loadEpisodesFromDir(self):

 if not ek.ek(os.path.isdir, self._location):
-logger.log(str(self.indexerid) + u": Show dir doesn't exist, not loading episodes from disk")
+logger.log(str(self.indexerid) + u": Show directory doesn't exist, not loading episodes from disk")
 return

 logger.log(str(self.indexerid) + u": Loading all episodes from the show directory " + self._location)

@@ -706,14 +706,14 @@ class TVShow(object):

 # if it was snatched and now exists then set the status correctly
 if oldStatus == SNATCHED and oldQuality <= newQuality:
-logger.log(u"STATUS: this ep used to be snatched with quality " + Quality.qualityStrings[
+logger.log(u"STATUS: this episode used to be snatched with quality " + Quality.qualityStrings[
 oldQuality] + u" but a file exists with quality " + Quality.qualityStrings[
 newQuality] + u" so I'm setting the status to DOWNLOADED", logger.DEBUG)
 newStatus = DOWNLOADED

 # if it was snatched proper and we found a higher quality one then allow the status change
 elif oldStatus == SNATCHED_PROPER and oldQuality < newQuality:
-logger.log(u"STATUS: this ep used to be snatched proper with quality " + Quality.qualityStrings[
+logger.log(u"STATUS: this episode used to be snatched proper with quality " + Quality.qualityStrings[
 oldQuality] + u" but a file exists with quality " + Quality.qualityStrings[
 newQuality] + u" so I'm setting the status to DOWNLOADED", logger.DEBUG)
 newStatus = DOWNLOADED

@@ -1110,7 +1110,7 @@ class TVShow(object):
 def downloadSubtitles(self, force=False):
 # TODO: Add support for force option
 if not ek.ek(os.path.isdir, self._location):
-logger.log(str(self.indexerid) + ": Show dir doesn't exist, can't download subtitles", logger.DEBUG)
+logger.log(str(self.indexerid) + ": Show directory doesn't exist, can't download subtitles", logger.DEBUG)
 return
 logger.log(str(self.indexerid) + ": Downloading subtitles", logger.DEBUG)

@@ -1718,7 +1718,7 @@ class TVEpisode(object):
 if not ek.ek(os.path.isdir,
 self.show._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS and not sickbeard.ADD_SHOWS_WO_DIR:
 logger.log(
-u"The show dir is missing, not bothering to change the episode statuses since it'd probably be invalid")
+u"The show directory is missing, not bothering to change the episode statuses since it'd probably be invalid")
 return

 if self.location:

@@ -1752,7 +1752,7 @@ class TVEpisode(object):

 else:
 logger.log(
-u"Not touching status because we have no ep file, the airdate is in the past, and the status is " + str(
+u"Not touching status because we have no episode file, the airdate is in the past, and the status is " + str(
 self.status), logger.DEBUG)

 # if we have a media file then it's downloaded

@@ -1773,7 +1773,7 @@ class TVEpisode(object):

 if not ek.ek(os.path.isdir, self.show._location):
 logger.log(
-str(self.show.indexerid) + u": The show dir is missing, not bothering to try loading the episode NFO")
+str(self.show.indexerid) + u": The show directory is missing, not bothering to try loading the episode NFO")
 return

 logger.log(

@@ -1876,7 +1876,7 @@ class TVEpisode(object):
 def createMetaFiles(self):

 if not ek.ek(os.path.isdir, self.show._location):
-logger.log(str(self.show.indexerid) + u": The show dir is missing, not bothering to try to create metadata")
+logger.log(str(self.show.indexerid) + u": The show directory is missing, not bothering to try to create metadata")
 return

 self.createNFO()