Merge branch 'origin/dev'
33
SickBeard.py
|
@ -19,12 +19,14 @@
|
||||||
|
|
||||||
# Check needed software dependencies to nudge users to fix their setup
|
# Check needed software dependencies to nudge users to fix their setup
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
if sys.version_info < (2, 6):
|
if sys.version_info < (2, 6):
|
||||||
print "Sorry, requires Python 2.6 or 2.7."
|
print "Sorry, requires Python 2.6 or 2.7."
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import Cheetah
|
import Cheetah
|
||||||
|
|
||||||
if Cheetah.Version[0] != '2':
|
if Cheetah.Version[0] != '2':
|
||||||
raise ValueError
|
raise ValueError
|
||||||
except ValueError:
|
except ValueError:
|
||||||
|
@ -35,6 +37,7 @@ except:
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'lib')))
|
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'lib')))
|
||||||
|
|
||||||
# We only need this for compiling an EXE and I will just always do that on 2.6+
|
# We only need this for compiling an EXE and I will just always do that on 2.6+
|
||||||
|
@ -67,6 +70,7 @@ signal.signal(signal.SIGTERM, sickbeard.sig_handler)
|
||||||
|
|
||||||
throwaway = datetime.datetime.strptime('20110101', '%Y%m%d')
|
throwaway = datetime.datetime.strptime('20110101', '%Y%m%d')
|
||||||
|
|
||||||
|
|
||||||
def loadShowsFromDB():
|
def loadShowsFromDB():
|
||||||
"""
|
"""
|
||||||
Populates the showList with shows from the database
|
Populates the showList with shows from the database
|
||||||
|
@ -80,7 +84,9 @@ def loadShowsFromDB():
|
||||||
curShow = TVShow(int(sqlShow["indexer"]), int(sqlShow["indexer_id"]))
|
curShow = TVShow(int(sqlShow["indexer"]), int(sqlShow["indexer_id"]))
|
||||||
sickbeard.showList.append(curShow)
|
sickbeard.showList.append(curShow)
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
logger.log(u"There was an error creating the show in " + sqlShow["location"] + ": " + str(e).decode('utf-8'), logger.ERROR)
|
logger.log(
|
||||||
|
u"There was an error creating the show in " + sqlShow["location"] + ": " + str(e).decode('utf-8'),
|
||||||
|
logger.ERROR)
|
||||||
logger.log(traceback.format_exc(), logger.DEBUG)
|
logger.log(traceback.format_exc(), logger.DEBUG)
|
||||||
|
|
||||||
# TODO: update the existing shows if the showlist has something in it
|
# TODO: update the existing shows if the showlist has something in it
|
||||||
|
@ -101,7 +107,7 @@ def daemonize():
|
||||||
sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
|
sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
os.setsid() # @UndefinedVariable - only available in UNIX
|
os.setsid() # unix
|
||||||
|
|
||||||
# Make sure I can read my own files and shut out others
|
# Make sure I can read my own files and shut out others
|
||||||
prev = os.umask(0)
|
prev = os.umask(0)
|
||||||
|
@ -139,6 +145,7 @@ def daemonize():
|
||||||
os.dup2(stdout.fileno(), sys.stdout.fileno())
|
os.dup2(stdout.fileno(), sys.stdout.fileno())
|
||||||
os.dup2(stderr.fileno(), sys.stderr.fileno())
|
os.dup2(stderr.fileno(), sys.stderr.fileno())
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
"""
|
"""
|
||||||
TV for me
|
TV for me
|
||||||
|
@ -180,11 +187,23 @@ def main():
|
||||||
# Need console logging for SickBeard.py and SickBeard-console.exe
|
# Need console logging for SickBeard.py and SickBeard-console.exe
|
||||||
consoleLogging = (not hasattr(sys, "frozen")) or (sickbeard.MY_NAME.lower().find('-console') > 0)
|
consoleLogging = (not hasattr(sys, "frozen")) or (sickbeard.MY_NAME.lower().find('-console') > 0)
|
||||||
|
|
||||||
|
# Attempt to rename the process for easier debugging
|
||||||
|
try:
|
||||||
|
from setproctitle import setproctitle
|
||||||
|
except ImportError:
|
||||||
|
if consoleLogging:
|
||||||
|
sys.stderr.write(u"setproctitle module is not available.\n")
|
||||||
|
setproctitle = lambda t: None
|
||||||
|
|
||||||
|
setproctitle(sickbeard.MY_NAME)
|
||||||
|
|
||||||
# Rename the main thread
|
# Rename the main thread
|
||||||
threading.currentThread().name = "MAIN"
|
threading.currentThread().name = "MAIN"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
opts, args = getopt.getopt(sys.argv[1:], "qfdp::", ['quiet', 'forceupdate', 'daemon', 'port=', 'pidfile=', 'nolaunch', 'config=', 'datadir=']) # @UnusedVariable
|
opts, args = getopt.getopt(sys.argv[1:], "qfdp::",
|
||||||
|
['quiet', 'forceupdate', 'daemon', 'port=', 'pidfile=', 'nolaunch', 'config=',
|
||||||
|
'datadir=']) # @UnusedVariable
|
||||||
except getopt.GetoptError:
|
except getopt.GetoptError:
|
||||||
print "Available Options: --quiet, --forceupdate, --port, --daemon, --pidfile, --config, --datadir"
|
print "Available Options: --quiet, --forceupdate, --port, --daemon, --pidfile, --config, --datadir"
|
||||||
sys.exit()
|
sys.exit()
|
||||||
|
@ -290,10 +309,14 @@ def main():
|
||||||
CUR_DB_VERSION = db.DBConnection().checkDBVersion()
|
CUR_DB_VERSION = db.DBConnection().checkDBVersion()
|
||||||
if CUR_DB_VERSION > 0:
|
if CUR_DB_VERSION > 0:
|
||||||
if CUR_DB_VERSION < MIN_DB_VERSION:
|
if CUR_DB_VERSION < MIN_DB_VERSION:
|
||||||
raise SystemExit("Your database version (" + str(db.DBConnection().checkDBVersion()) + ") is too old to migrate from with this version of SickRage (" + str(MIN_DB_VERSION) + ").\n" + \
|
raise SystemExit("Your database version (" + str(
|
||||||
|
db.DBConnection().checkDBVersion()) + ") is too old to migrate from with this version of SickRage (" + str(
|
||||||
|
MIN_DB_VERSION) + ").\n" + \
|
||||||
"Upgrade using a previous version of SB first, or start with no database file to begin fresh.")
|
"Upgrade using a previous version of SB first, or start with no database file to begin fresh.")
|
||||||
if CUR_DB_VERSION > MAX_DB_VERSION:
|
if CUR_DB_VERSION > MAX_DB_VERSION:
|
||||||
raise SystemExit("Your database version (" + str(db.DBConnection().checkDBVersion()) + ") has been incremented past what this version of SickRage supports (" + str(MAX_DB_VERSION) + ").\n" + \
|
raise SystemExit("Your database version (" + str(
|
||||||
|
db.DBConnection().checkDBVersion()) + ") has been incremented past what this version of SickRage supports (" + str(
|
||||||
|
MAX_DB_VERSION) + ").\n" + \
|
||||||
"If you have used other forks of SB, your database may be unusable due to their modifications.")
|
"If you have used other forks of SB, your database may be unusable due to their modifications.")
|
||||||
|
|
||||||
# Initialize the config and our threads
|
# Initialize the config and our threads
|
||||||
|
|
|
@ -445,7 +445,8 @@ input:not(.btn){margin-right:6px;margin-top:5px;padding-top:4px;padding-bottom:4
|
||||||
}
|
}
|
||||||
|
|
||||||
.sickbeardTable td.filename {
|
.sickbeardTable td.filename {
|
||||||
width: 30%;
|
width: 10%;
|
||||||
|
text-align: center;
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -471,6 +472,10 @@ width: 30%;
|
||||||
.sickbeardTable td.search img {
|
.sickbeardTable td.search img {
|
||||||
padding-right: 2px;
|
padding-right: 2px;
|
||||||
}
|
}
|
||||||
|
.sickbeardTable td.search {
|
||||||
|
text-align: center;
|
||||||
|
width:5%
|
||||||
|
}
|
||||||
|
|
||||||
.sickbeardTable td small {
|
.sickbeardTable td small {
|
||||||
font-size: 11px;
|
font-size: 11px;
|
||||||
|
@ -729,7 +734,7 @@ div#summary tr td {
|
||||||
#donate a,
|
#donate a,
|
||||||
#donate a:hover {
|
#donate a:hover {
|
||||||
border: 0 ;
|
border: 0 ;
|
||||||
padding: 4px 15px 4px;
|
padding: 8px 15px 8px;
|
||||||
}
|
}
|
||||||
|
|
||||||
#contentWrapper {
|
#contentWrapper {
|
||||||
|
|
BIN
gui/slick/images/btn-google.jpg
Normal file
After Width: | Height: | Size: 1.5 KiB |
Before Width: | Height: | Size: 2.3 KiB |
Before Width: | Height: | Size: 1.7 KiB |
BIN
gui/slick/images/providers/anidb.gif
Normal file
After Width: | Height: | Size: 1.1 KiB |
BIN
gui/slick/images/providers/fanzub.gif
Normal file
After Width: | Height: | Size: 1.4 KiB |
BIN
gui/slick/images/providers/nyaatorrents.png
Normal file
After Width: | Height: | Size: 3.6 KiB |
BIN
gui/slick/images/xem.png
Normal file
After Width: | Height: | Size: 6.7 KiB |
|
@ -107,6 +107,17 @@
|
||||||
|
|
||||||
\$('#sbRoot').ajaxEpSearch();
|
\$('#sbRoot').ajaxEpSearch();
|
||||||
|
|
||||||
|
#set $fuzzydate = 'airdate'
|
||||||
|
#if $sickbeard.FUZZY_DATING:
|
||||||
|
fuzzyMoment({
|
||||||
|
containerClass : '.${fuzzydate}',
|
||||||
|
dateHasTime : true,
|
||||||
|
dateFormat : '${sickbeard.DATE_PRESET}',
|
||||||
|
timeFormat : '${sickbeard.TIME_PRESET}',
|
||||||
|
trimZero : #if $sickbeard.TRIM_ZERO then "true" else "false"#
|
||||||
|
});
|
||||||
|
#end if
|
||||||
|
|
||||||
});
|
});
|
||||||
//-->
|
//-->
|
||||||
</script>
|
</script>
|
||||||
|
@ -146,7 +157,8 @@
|
||||||
|
|
||||||
<!-- start $cur_result["show_name"] //-->
|
<!-- start $cur_result["show_name"] //-->
|
||||||
<tr class="$show_div">
|
<tr class="$show_div">
|
||||||
<td align="center" class="nowrap">$sbdatetime.sbdatetime.sbfdatetime($cur_result["localtime"]).decode($sickbeard.SYS_ENCODING)<span class="sort_data">$time.mktime($cur_result["localtime"].timetuple())</span></td>
|
## forced to use a div to wrap airdate, the column sort went crazy with a span
|
||||||
|
<td align="center" class="nowrap"><div class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdatetime($cur_result["localtime"]).decode($sickbeard.SYS_ENCODING)</div><span class="sort_data">$time.mktime($cur_result["localtime"].timetuple())</span></td>
|
||||||
<td class="tvShow"><a href="$sbRoot/home/displayShow?show=${cur_result["showid"]}">$cur_result["show_name"]</a>
|
<td class="tvShow"><a href="$sbRoot/home/displayShow?show=${cur_result["showid"]}">$cur_result["show_name"]</a>
|
||||||
#if int($cur_result["paused"]):
|
#if int($cur_result["paused"]):
|
||||||
<span class="pause">[paused]</span>
|
<span class="pause">[paused]</span>
|
||||||
|
|
102
gui/slick/interfaces/default/config_anime.tmpl
Normal file
|
@ -0,0 +1,102 @@
|
||||||
|
#import sickbeard
|
||||||
|
#set global $title="Config - Anime"
|
||||||
|
#set global $header="Anime"
|
||||||
|
|
||||||
|
#set global $sbPath="../.."
|
||||||
|
|
||||||
|
#set global $topmenu="config"#
|
||||||
|
#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl")
|
||||||
|
|
||||||
|
<script type="text/javascript" src="$sbRoot/js/configAnime.js?$sbPID"></script>
|
||||||
|
<script type="text/javascript" src="$sbRoot/js/config.js?$sbPID"></script>
|
||||||
|
#if $varExists('header')
|
||||||
|
<h1 class="header">$header</h1>
|
||||||
|
#else
|
||||||
|
<h1 class="title">$title</h1>
|
||||||
|
#end if
|
||||||
|
<div id="config">
|
||||||
|
<div id="config-content">
|
||||||
|
|
||||||
|
<form id="configForm" action="saveAnime" method="post">
|
||||||
|
|
||||||
|
<div id="config-components">
|
||||||
|
|
||||||
|
<div id="core-component-group1" class="component-group clearfix">
|
||||||
|
<div class="component-group-desc">
|
||||||
|
<h3><a href="http://anidb.info" onclick="window.open(this.href, '_blank'); return false;"><img src="$sbRoot/images/providers/anidb.gif" alt="AniDB" title="AniDB" width="16" height="16" /> AniDB</a></h3>
|
||||||
|
<p>AniDB is non-profit database of anime information that is freely open to the public</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<fieldset class="component-group-list">
|
||||||
|
<div class="field-pair">
|
||||||
|
<input type="checkbox" class="enabler" name="use_anidb" id="use_anidb" #if $sickbeard.USE_ANIDB then "checked=\"checked\"" else ""# />
|
||||||
|
<label class="clearfix" for="use_notifo">
|
||||||
|
<span class="component-title">Enable</span>
|
||||||
|
<span class="component-desc">Should Sick Beard use data from AniDB?</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div id="content_use_anidb">
|
||||||
|
<div class="field-pair">
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title">AniDB Username</span>
|
||||||
|
<input type="text" name="anidb_username" id="anidb_username" value="$sickbeard.ANIDB_USERNAME" size="35" />
|
||||||
|
</label>
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title"> </span>
|
||||||
|
<span class="component-desc">Username of your AniDB account</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="field-pair">
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title">AniDB Password</span>
|
||||||
|
<input type="password" name="anidb_password" id="anidb_password" value="$sickbeard.ANIDB_PASSWORD" size="35" />
|
||||||
|
</label>
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title"> </span>
|
||||||
|
<span class="component-desc">Password of your AniDB account</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
<div class="field-pair">
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title">AniDB MyList</span>
|
||||||
|
<input type="checkbox" name="anidb_use_mylist" id="anidb_use_mylist" #if $sickbeard.ANIDB_USE_MYLIST then "checked=\"checked\"" else ""# />
|
||||||
|
</label>
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title"> </span>
|
||||||
|
<span class="component-desc">Do you want to add the PostProcessed Episodes to the MyList ?</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<input type="submit" class="config_submitter" value="Save Changes" />
|
||||||
|
</fieldset>
|
||||||
|
|
||||||
|
</div><!-- /component-group //-->
|
||||||
|
<div id="core-component-group2" class="component-group clearfix">
|
||||||
|
|
||||||
|
<div class="component-group-desc">
|
||||||
|
<h3>Look and Feel</h3>
|
||||||
|
</div>
|
||||||
|
<fieldset class="component-group-list">
|
||||||
|
<div class="field-pair">
|
||||||
|
<input type="checkbox" class="enabler" name="split_home" id="split_home" #if $sickbeard.ANIME_SPLIT_HOME then "checked=\"checked\"" else ""# />
|
||||||
|
<label class="clearfix" for="use_notifo">
|
||||||
|
<span class="component-title">Split show lists</span>
|
||||||
|
<span class="component-desc">Separate anime and normal shows in groups</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
<input type="submit" class="config_submitter" value="Save Changes" />
|
||||||
|
</fieldset>
|
||||||
|
</div><!-- /component-group //-->
|
||||||
|
<br/><input type="submit" class="config_submitter" value="Save Changes" /><br/>
|
||||||
|
|
||||||
|
</div><!-- /config-components //-->
|
||||||
|
|
||||||
|
</form>
|
||||||
|
|
||||||
|
|
||||||
|
</div></div>
|
||||||
|
<div class="clearfix"></div>
|
||||||
|
|
||||||
|
#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_bottom.tmpl")
|
|
@ -48,7 +48,7 @@
|
||||||
<p><b>Some options may require a manual restart to take effect.</b></p>
|
<p><b>Some options may require a manual restart to take effect.</b></p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<fieldset class="component-group-list">
|
<fieldset class="component-group-list" style="width:670px">
|
||||||
<div class="field-pair">
|
<div class="field-pair">
|
||||||
<input type="checkbox" name="launch_browser" id="launch_browser" #if $sickbeard.LAUNCH_BROWSER then "checked=\"checked\"" else ""#/>
|
<input type="checkbox" name="launch_browser" id="launch_browser" #if $sickbeard.LAUNCH_BROWSER then "checked=\"checked\"" else ""#/>
|
||||||
<label class="clearfix" for="launch_browser">
|
<label class="clearfix" for="launch_browser">
|
||||||
|
@ -92,18 +92,18 @@
|
||||||
<input type="checkbox" name="auto_update" id="auto_update" #if $sickbeard.AUTO_UPDATE then "checked=\"checked\"" else ""#/>
|
<input type="checkbox" name="auto_update" id="auto_update" #if $sickbeard.AUTO_UPDATE then "checked=\"checked\"" else ""#/>
|
||||||
<label class="clearfix" for="auto_update">
|
<label class="clearfix" for="auto_update">
|
||||||
<span class="component-title">Automatic Updates</span>
|
<span class="component-title">Automatic Updates</span>
|
||||||
<span class="component-desc">Automatically get and install updates for SickRage when available.</span>
|
<span class="component-desc">Automatically get and install updates for SickRage when available. These</span>
|
||||||
</label>
|
</label>
|
||||||
<label class="nocheck clearfix">
|
<label class="nocheck clearfix">
|
||||||
<span class="component-title"> </span>
|
<span class="component-title"> </span>
|
||||||
<span class="component-desc">Automatic Updates run on startup and in the background on the interval specified above.</span>
|
<span class="component-desc">updates run on startup and in the background on the interval specified above.</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="field-pair">
|
<div class="field-pair">
|
||||||
<input type="checkbox" name="sort_article" id="sort_article" #if $sickbeard.SORT_ARTICLE then "checked=\"checked\"" else ""#/>
|
<input type="checkbox" name="sort_article" id="sort_article" #if $sickbeard.SORT_ARTICLE then "checked=\"checked\"" else ""#/>
|
||||||
<label class="clearfix" for="sort_article">
|
<label class="clearfix" for="sort_article">
|
||||||
<span class="component-title">Sort articles</span>
|
<span class="component-title">Sort Articles</span>
|
||||||
<span class="component-desc">Include articles (The, A, An) when sorting show lists.</span>
|
<span class="component-desc">Include articles (The, A, An) when sorting show lists.</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
@ -124,7 +124,7 @@
|
||||||
|
|
||||||
<div class="field-pair">
|
<div class="field-pair">
|
||||||
<label class="nocheck clearfix">
|
<label class="nocheck clearfix">
|
||||||
<span class="component-title">Indexer Timeout:</span>
|
<span class="component-title">Indexer Timeout</span>
|
||||||
<input type="text" name="indexer_timeout" id="indexer_timeout" value="$sickbeard.INDEXER_TIMEOUT" size="5" />
|
<input type="text" name="indexer_timeout" id="indexer_timeout" value="$sickbeard.INDEXER_TIMEOUT" size="5" />
|
||||||
</label>
|
</label>
|
||||||
<label class="nocheck clearfix">
|
<label class="nocheck clearfix">
|
||||||
|
@ -135,7 +135,7 @@
|
||||||
|
|
||||||
<div class="field-pair">
|
<div class="field-pair">
|
||||||
<label class="nocheck clearfix" for="log_dir">
|
<label class="nocheck clearfix" for="log_dir">
|
||||||
<span class="component-title">Logging Director:y</span>
|
<span class="component-title">Logging Directory</span>
|
||||||
<input type="text" name="log_dir" id="log_dir" value="$sickbeard.ACTUAL_LOG_DIR" size="40" />
|
<input type="text" name="log_dir" id="log_dir" value="$sickbeard.ACTUAL_LOG_DIR" size="40" />
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
@ -143,8 +143,12 @@
|
||||||
<div class="field-pair">
|
<div class="field-pair">
|
||||||
<input type="checkbox" name="handle_reverse_proxy" id="handle_reverse_proxy" #if $sickbeard.HANDLE_REVERSE_PROXY then "checked=\"checked\"" else ""#/>
|
<input type="checkbox" name="handle_reverse_proxy" id="handle_reverse_proxy" #if $sickbeard.HANDLE_REVERSE_PROXY then "checked=\"checked\"" else ""#/>
|
||||||
<label class="clearfix" for="handle_reverse_proxy">
|
<label class="clearfix" for="handle_reverse_proxy">
|
||||||
<span class="component-title">Handle reverse proxies:</span>
|
<span class="component-title">Reverse Proxy Headers</span>
|
||||||
<span class="component-desc">Should SickRage accept reverse proxy headers? (X-Forwarded-Host, X-Forwarded-For, X-Forwarded-Proto)</span>
|
<span class="component-desc">Should SickRage accept the following reverse proxy headers?</span>
|
||||||
|
</label>
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title"> </span>
|
||||||
|
<span class="component-desc">(X-Forwarded-Host, X-Forwarded-For, X-Forwarded-Proto)</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -255,18 +259,22 @@
|
||||||
<p>You need to know what you are doing here!!!</p>
|
<p>You need to know what you are doing here!!!</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<fieldset class="component-group-list">
|
<fieldset class="component-group-list" style="width:670px">
|
||||||
|
|
||||||
<div class="field-pair">
|
<div class="field-pair">
|
||||||
<label class="nocheck clearfix">
|
<label class="nocheck clearfix">
|
||||||
<span class="component-title">CPU Throttling:
|
<span class="component-title">CPU Throttling:</span>
|
||||||
|
<span class="component-desc">
|
||||||
<select id="cpu_presets" name="cpu_preset">
|
<select id="cpu_presets" name="cpu_preset">
|
||||||
#for $cur_preset in $cpu_presets:
|
#for $cur_preset in $cpu_presets:
|
||||||
<option value="$cur_preset" #if $cur_preset == $sickbeard.CPU_PRESET then "selected=\"selected\"" else ""#>$cur_preset</option>
|
<option value="$cur_preset" #if $cur_preset == $sickbeard.CPU_PRESET then "selected=\"selected\"" else ""#>$cur_preset.capitalize()</option>
|
||||||
#end for
|
#end for
|
||||||
</select>
|
</select>
|
||||||
</span>
|
</span>
|
||||||
<span class="component-desc">HIGH = Lower CPU usage<br>NORMAL = Defaults<br>LOW = Higher CPU usage</span>
|
</label>
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title"> </span>
|
||||||
|
<span class="component-desc">Normal = Default. High = Lower CPU usage. Low = Higher CPU usage.</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -288,7 +296,7 @@
|
||||||
</label>
|
</label>
|
||||||
<label class="nocheck clearfix">
|
<label class="nocheck clearfix">
|
||||||
<span class="component-title"> </span>
|
<span class="component-title"> </span>
|
||||||
<span class="component-desc">Specify path to Git binary, only use if OS is unable to find the path.</span>
|
<span class="component-desc">Specify path to Git binary, use only if OS is unable to find the path.</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -318,11 +326,11 @@
|
||||||
<input type="checkbox" name="encryption_version" id="encryption_version" #if $sickbeard.ENCRYPTION_VERSION then "checked=\"checked\"" else ""#/>
|
<input type="checkbox" name="encryption_version" id="encryption_version" #if $sickbeard.ENCRYPTION_VERSION then "checked=\"checked\"" else ""#/>
|
||||||
<label class="clearfix" for="encryption_version">
|
<label class="clearfix" for="encryption_version">
|
||||||
<span class="component-title">Encrypt Passwords</span>
|
<span class="component-title">Encrypt Passwords</span>
|
||||||
<span class="component-desc">Should SickRage encrypt the passwords in <code>config.ini</code> file?</span>
|
<span class="component-desc">Should SickRage encrypt passwords in <code>config.ini</code> file?</span>
|
||||||
</label>
|
</label>
|
||||||
<label class="nocheck clearfix">
|
<label class="nocheck clearfix">
|
||||||
<span class="component-title"> </span>
|
<span class="component-title"> </span>
|
||||||
<span class="component-desc"><b>Warning</b>: Password must be only with <a style="color: blue;" href="http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters">ASCII characters</a></span>
|
<span class="component-desc"><b>Warning</b>: Passwords must only contain <a style="color: blue;" href="http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters">ASCII characters</a></span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -330,7 +338,7 @@
|
||||||
<input type="checkbox" name="calendar_unprotected" id="calendar_unprotected" #if $sickbeard.CALENDAR_UNPROTECTED then "checked=\"checked\"" else ""#/>
|
<input type="checkbox" name="calendar_unprotected" id="calendar_unprotected" #if $sickbeard.CALENDAR_UNPROTECTED then "checked=\"checked\"" else ""#/>
|
||||||
<label class="clearfix" for="calendar_unprotected">
|
<label class="clearfix" for="calendar_unprotected">
|
||||||
<span class="component-title">Unprotected Calendar</span>
|
<span class="component-title">Unprotected Calendar</span>
|
||||||
<span class="component-desc">This allows to subscribe to the calendar without user and password.</span>
|
<span class="component-desc">Allow subscribing to the calendar without user and password.</span>
|
||||||
</label>
|
</label>
|
||||||
<label class="nocheck clearfix">
|
<label class="nocheck clearfix">
|
||||||
<span class="component-title"> </span>
|
<span class="component-title"> </span>
|
||||||
|
@ -345,7 +353,7 @@
|
||||||
</label>
|
</label>
|
||||||
<label class="nocheck clearfix">
|
<label class="nocheck clearfix">
|
||||||
<span class="component-title"> </span>
|
<span class="component-title"> </span>
|
||||||
<span class="component-desc">Proxy to use for connecting to providers. Leave empty to not use proxy</b></span>
|
<span class="component-desc">Proxy to use for connecting to providers. Leave empty to not use proxy.</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -359,25 +367,52 @@
|
||||||
<h3>Date and Time</h3>
|
<h3>Date and Time</h3>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<fieldset class="component-group-list">
|
<fieldset class="component-group-list" style="width:670px">
|
||||||
|
|
||||||
|
<div class="field-pair">
|
||||||
|
<input type="checkbox" name="fuzzy_dating" id="fuzzy_dating" class="viewIf datePresets" #if $sickbeard.FUZZY_DATING == True then "checked=\"checked\"" else ""#/>
|
||||||
|
<label class="clearfix" for="fuzzy_dating">
|
||||||
|
<span class="component-title">Display Fuzzy Dates</span>
|
||||||
|
<span class="component-desc">E.g "Last Thu", "On Tue" and move the absolute date into time tooltips?</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
<div class="show_if_fuzzy_dating#if True == $sickbeard.FUZZY_DATING then '' else ' metadataDiv'#">
|
||||||
|
<div class="field-pair">
|
||||||
|
<input type="checkbox" name="trim_zero" id="trim_zero" #if $sickbeard.TRIM_ZERO == True then "checked=\"checked\"" else ""#/>
|
||||||
|
<label class="clearfix" for="trim_zero">
|
||||||
|
<span class="component-title">Trim Zero Padding</span>
|
||||||
|
<span class="component-desc">Trim leading number "0" shown on hour of day and date of month?</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div class="field-pair">
|
<div class="field-pair">
|
||||||
<label class="nocheck clearfix" for="date_presets">
|
<label class="nocheck clearfix" for="date_presets">
|
||||||
<span class="component-title">Date Format:</span>
|
<span class="component-title">Date Style:</span>
|
||||||
<span class="component-desc">
|
<span class="component-desc">
|
||||||
<select id="date_presets" name="date_preset">
|
<div class="show_if_fuzzy_dating#if True == $sickbeard.FUZZY_DATING then '' else ' metadataDiv'#">
|
||||||
<option value="%x" #if "%x" == $sickbeard.DATE_PRESET then "selected=\"selected\"" else ""#>Use System Default</option>
|
<select id="date_presets#if True == $sickbeard.FUZZY_DATING then '' else '_na'#" name="date_preset#if True == $sickbeard.FUZZY_DATING then '' else '_na'#">
|
||||||
#for $cur_preset in $date_presets:
|
#for $cur_preset in $date_presets:
|
||||||
<option value="$cur_preset" #if $cur_preset == $sickbeard.DATE_PRESET then "selected=\"selected\"" else ""#>$datetime.datetime.now().strftime($cur_preset)</option>
|
<option value="$cur_preset" #if $cur_preset == $sickbeard.DATE_PRESET or ("%x" == $sickbeard.DATE_PRESET and "$cur_preset" == '%a, %b %d, %Y') then "selected=\"selected\"" else ""#>$datetime.datetime($datetime.datetime.now().year, 12, 31, 14, 30, 47).strftime($cur_preset)</option>
|
||||||
#end for
|
#end for
|
||||||
</select>
|
</select>
|
||||||
|
</div>
|
||||||
|
<div class="hide_if_fuzzy_dating#if True != $sickbeard.FUZZY_DATING then '' else ' metadataDiv'#">
|
||||||
|
<select id="date_presets#if True != $sickbeard.FUZZY_DATING then '' else '_na'#" name="date_preset#if True != $sickbeard.FUZZY_DATING then '' else '_na'#">
|
||||||
|
<option value="%x" #if "%x" == $sickbeard.DATE_PRESET then "selected=\"selected\"" else ""#>Use System Default</option>
|
||||||
|
#for $cur_preset in $date_presets:
|
||||||
|
<option value="$cur_preset" #if $cur_preset == $sickbeard.DATE_PRESET then "selected=\"selected\"" else ""#>$datetime.datetime($datetime.datetime.now().year, 12, 31, 14, 30, 47).strftime($cur_preset)</option>
|
||||||
|
#end for
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
<div id="date_use_system_default" class="metadataDiv"></div>
|
||||||
</span>
|
</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="field-pair">
|
<div class="field-pair">
|
||||||
<label class="nocheck clearfix" for="time_presets">
|
<label class="nocheck clearfix" for="time_presets">
|
||||||
<span class="component-title">Time Format:</span>
|
<span class="component-title">Time Style:</span>
|
||||||
<span class="component-desc">
|
<span class="component-desc">
|
||||||
<select id="time_presets" name="time_preset">
|
<select id="time_presets" name="time_preset">
|
||||||
#for $cur_preset in $time_presets:
|
#for $cur_preset in $time_presets:
|
||||||
|
@ -385,16 +420,26 @@
|
||||||
#end for
|
#end for
|
||||||
</select>
|
</select>
|
||||||
</span>
|
</span>
|
||||||
<span class="component-desc"><b>Note:</b> Seconds are only shown on the History Page.</span>
|
|
||||||
</label>
|
</label>
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title"> </span>
|
||||||
|
<span class="component-desc"><b>Note:</b> Seconds are only shown on the History page.</span>
|
||||||
|
</label>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="field-pair">
|
<div class="field-pair">
|
||||||
<label class="nocheck clearfix">
|
<label class="nocheck clearfix">
|
||||||
<span class="component-title">Timezones:</span>
|
<span class="component-title">Timezones:</span>
|
||||||
<span class="component-desc">Display Dates and Times in either your timezone or the shows network timezone?</span>
|
<span class="component-desc">Display dates and times in either your timezone or the shows network timezone?</span>
|
||||||
|
</label>
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title"> </span>
|
||||||
<span class="component-desc">
|
<span class="component-desc">
|
||||||
<input type="radio" name="timezone_display" id="local" value="local" class="radio" #if $sickbeard.TIMEZONE_DISPLAY=="local" then "checked=\"checked\"" else ""# />Local Timezone<br />
|
<input type="radio" name="timezone_display" id="local" value="local" class="radio" #if $sickbeard.TIMEZONE_DISPLAY=="local" then "checked=\"checked\"" else ""# />Local Timezone<br />
|
||||||
|
</label>
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title"> </span>
|
||||||
<input type="radio" name="timezone_display" id="network" value="network" class="radio" #if $sickbeard.TIMEZONE_DISPLAY=="network" then "checked=\"checked\"" else ""# />Network Timezone<br />
|
<input type="radio" name="timezone_display" id="network" value="network" class="radio" #if $sickbeard.TIMEZONE_DISPLAY=="network" then "checked=\"checked\"" else ""# />Network Timezone<br />
|
||||||
</span>
|
</span>
|
||||||
</label>
|
</label>
|
||||||
|
|
|
@ -19,9 +19,9 @@
|
||||||
<form id="configForm" action="saveNotifications" method="post">
|
<form id="configForm" action="saveNotifications" method="post">
|
||||||
<div id="config-components">
|
<div id="config-components">
|
||||||
<ul>
|
<ul>
|
||||||
<li><a href="#tabs-1">Home Theater</a></li>
|
<li><a href="#tabs-1">Home Theater / NAS</a></li>
|
||||||
<li><a href="#tabs-2">Devices</a></li>
|
<li><a href="#tabs-2">Devices</a></li>
|
||||||
<li><a href="#tabs-3">Online</a></li>
|
<li><a href="#tabs-3">Social</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
|
|
||||||
<div id="tabs-1">
|
<div id="tabs-1">
|
||||||
|
@ -381,7 +381,8 @@
|
||||||
|
|
||||||
<div class="component-group clearfix">
|
<div class="component-group clearfix">
|
||||||
<div class="component-group-desc">
|
<div class="component-group-desc">
|
||||||
<h3><a href="http://synology.com/" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;"><img src="$sbRoot/images/notifiers/synoindex.png" alt="" title="Synology Indexer"/> Synology Indexer </a></h3>
|
<h3><a href="http://synology.com/" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;"><img src="$sbRoot/images/notifiers/synoindex.png" alt="" title="Synology"/> Synology </a></h3>
|
||||||
|
<p>The Synology DiskStation NAS.</p>
|
||||||
<p>Synology Indexer is the daemon running on the Synology NAS to build its media database.</p>
|
<p>Synology Indexer is the daemon running on the Synology NAS to build its media database.</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -390,7 +391,7 @@
|
||||||
<input type="checkbox" class="enabler" name="use_synoindex" id="use_synoindex" #if $sickbeard.USE_SYNOINDEX then "checked=\"checked\"" else ""# />
|
<input type="checkbox" class="enabler" name="use_synoindex" id="use_synoindex" #if $sickbeard.USE_SYNOINDEX then "checked=\"checked\"" else ""# />
|
||||||
<label class="clearfix" for="use_synoindex">
|
<label class="clearfix" for="use_synoindex">
|
||||||
<span class="component-title">Enable</span>
|
<span class="component-title">Enable</span>
|
||||||
<span class="component-desc">Should SickRage send notifications to the synoindex daemon?<br /><br />
|
<span class="component-desc">Should SickRage send Synology notifications?<br /><br />
|
||||||
</span>
|
</span>
|
||||||
</label>
|
</label>
|
||||||
<label class="nocheck clearfix" for="use_synoindex">
|
<label class="nocheck clearfix" for="use_synoindex">
|
||||||
|
@ -708,7 +709,7 @@
|
||||||
<input type="checkbox" class="enabler" name="use_pushover" id="use_pushover" #if $sickbeard.USE_PUSHOVER then "checked=\"checked\"" else ""# />
|
<input type="checkbox" class="enabler" name="use_pushover" id="use_pushover" #if $sickbeard.USE_PUSHOVER then "checked=\"checked\"" else ""# />
|
||||||
<label class="clearfix" for="use_pushover">
|
<label class="clearfix" for="use_pushover">
|
||||||
<span class="component-title">Enable</span>
|
<span class="component-title">Enable</span>
|
||||||
<span class="component-desc">Should SickRage send notifications through Pushover?</span>
|
<span class="component-desc">Should SickRage send Pushover notifications?</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -755,14 +756,14 @@
|
||||||
<div class="component-group clearfix">
|
<div class="component-group clearfix">
|
||||||
<div class="component-group-desc">
|
<div class="component-group-desc">
|
||||||
<h3><a href="http://boxcar.io/" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;"><img src="$sbRoot/images/notifiers/boxcar.png" alt="" title="Boxcar"/> Boxcar </a></h3>
|
<h3><a href="http://boxcar.io/" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;"><img src="$sbRoot/images/notifiers/boxcar.png" alt="" title="Boxcar"/> Boxcar </a></h3>
|
||||||
<p>Read your messages where and when you want them! A subscription will be send if needed.</p>
|
<p>Universal push notification for iOS. Read your messages where and when you want them! A subscription will be sent if needed.</p>
|
||||||
</div>
|
</div>
|
||||||
<fieldset class="component-group-list">
|
<fieldset class="component-group-list">
|
||||||
<div class="field-pair">
|
<div class="field-pair">
|
||||||
<input type="checkbox" class="enabler" name="use_boxcar" id="use_boxcar" #if $sickbeard.USE_BOXCAR then "checked=\"checked\"" else ""# />
|
<input type="checkbox" class="enabler" name="use_boxcar" id="use_boxcar" #if $sickbeard.USE_BOXCAR then "checked=\"checked\"" else ""# />
|
||||||
<label class="clearfix" for="use_boxcar">
|
<label class="clearfix" for="use_boxcar">
|
||||||
<span class="component-title">Enable</span>
|
<span class="component-title">Enable</span>
|
||||||
<span class="component-desc">Should SickRage send notifications through Boxcar?</span>
|
<span class="component-desc">Should SickRage send Boxcar notifications?</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -816,7 +817,7 @@
|
||||||
<input type="checkbox" class="enabler" name="use_boxcar2" id="use_boxcar2" #if $sickbeard.USE_BOXCAR2 then "checked=\"checked\"" else ""# />
|
<input type="checkbox" class="enabler" name="use_boxcar2" id="use_boxcar2" #if $sickbeard.USE_BOXCAR2 then "checked=\"checked\"" else ""# />
|
||||||
<label class="clearfix" for="use_boxcar2">
|
<label class="clearfix" for="use_boxcar2">
|
||||||
<span class="component-title">Enable</span>
|
<span class="component-title">Enable</span>
|
||||||
<span class="component-desc">Should SickRage send notifications through Boxcar2?</span>
|
<span class="component-desc">Should SickRage Boxcar2 notifications?</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -940,7 +941,7 @@
|
||||||
<input type="checkbox" class="enabler" name="use_pushalot" id="use_pushalot" #if $sickbeard.USE_PUSHALOT then "checked=\"checked\"" else ""# />
|
<input type="checkbox" class="enabler" name="use_pushalot" id="use_pushalot" #if $sickbeard.USE_PUSHALOT then "checked=\"checked\"" else ""# />
|
||||||
<label class="clearfix" for="use_pushalot">
|
<label class="clearfix" for="use_pushalot">
|
||||||
<span class="component-title">Enable</span>
|
<span class="component-title">Enable</span>
|
||||||
<span class="component-desc">Should SickRage send notifications through Pushalot?</span>
|
<span class="component-desc">Should SickRage send Pushalot notifications?</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -994,7 +995,7 @@
|
||||||
<input type="checkbox" class="enabler" name="use_pushbullet" id="use_pushbullet" #if $sickbeard.USE_PUSHBULLET then "checked=\"checked\"" else ""# />
|
<input type="checkbox" class="enabler" name="use_pushbullet" id="use_pushbullet" #if $sickbeard.USE_PUSHBULLET then "checked=\"checked\"" else ""# />
|
||||||
<label class="clearfix" for="use_pushbullet">
|
<label class="clearfix" for="use_pushbullet">
|
||||||
<span class="component-title">Enable</span>
|
<span class="component-title">Enable</span>
|
||||||
<span class="component-desc">Should SickRage send notifications through Pushbullet?</span>
|
<span class="component-desc">Should SickRage send Pushbullet notifications?</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|
|
@ -423,7 +423,7 @@
|
||||||
<td>Show.Name.S02E03.HDTV.XviD-RLSGROUP</td>
|
<td>Show.Name.S02E03.HDTV.XviD-RLSGROUP</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td class="align-right"><i class="icon-info-sign" title="'SiCKBEARD' is used in place of RLSGROUP if it could not be properly detected"></i> <b>Release Group:</b></td>
|
<td class="align-right"><i class="icon-info-sign" title="'SiCKRAGE' is used in place of RLSGROUP if it could not be properly detected"></i> <b>Release Group:</b></td>
|
||||||
<td>%RG</td>
|
<td>%RG</td>
|
||||||
<td>RLSGROUP</td>
|
<td>RLSGROUP</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
@ -467,6 +467,42 @@
|
||||||
<br/>
|
<br/>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div class="field-pair">
|
||||||
|
<input type="radio" name="naming_anime" id="naming_anime" value="1" #if $sickbeard.NAMING_ANIME == 1then "checked=\"checked\"" else ""#/>
|
||||||
|
<label class="clearfix" for="naming_anime">
|
||||||
|
<span class="component-title">Add Absolute Number</span>
|
||||||
|
<span class="component-desc">Add the absolute number to the season/episode format?</span>
|
||||||
|
</label>
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title"> </span>
|
||||||
|
<span class="component-desc">Only applies to animes. (eg. S15E45 - 310 vs S15E45)</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="field-pair">
|
||||||
|
<input type="radio" name="naming_anime" id="naming_anime_only" value="2" #if $sickbeard.NAMING_ANIME == 2 then "checked=\"checked\"" else ""#/>
|
||||||
|
<label class="clearfix" for="naming_anime_only">
|
||||||
|
<span class="component-title">Only Absolute Number</span>
|
||||||
|
<span class="component-desc">Replace season/episode format with absolute number</span>
|
||||||
|
</label>
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title"> </span>
|
||||||
|
<span class="component-desc">Only applies to animes.</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="field-pair">
|
||||||
|
<input type="radio" name="naming_anime" id="naming_anime_none" value="3" #if $sickbeard.NAMING_ANIME == 3 then "checked=\"checked\"" else ""#/>
|
||||||
|
<label class="clearfix" for="naming_anime_none">
|
||||||
|
<span class="component-title">No Absolute Number</span>
|
||||||
|
<span class="component-desc">Dont include the absolute number</span>
|
||||||
|
</label>
|
||||||
|
<label class="nocheck clearfix">
|
||||||
|
<span class="component-title"> </span>
|
||||||
|
<span class="component-desc">Only applies to animes.</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div class="field-pair">
|
<div class="field-pair">
|
||||||
<input type="checkbox" id="naming_strip_year" name="naming_strip_year" #if $sickbeard.NAMING_STRIP_YEAR then "checked=\"checked\"" else ""#/>
|
<input type="checkbox" id="naming_strip_year" name="naming_strip_year" #if $sickbeard.NAMING_STRIP_YEAR then "checked=\"checked\"" else ""#/>
|
||||||
<label class="clearfix" for="naming_strip_year">
|
<label class="clearfix" for="naming_strip_year">
|
||||||
|
@ -631,7 +667,7 @@
|
||||||
<td>Show.Name.2010.03.09.HDTV.XviD-RLSGROUP</td>
|
<td>Show.Name.2010.03.09.HDTV.XviD-RLSGROUP</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr class="even">
|
<tr class="even">
|
||||||
<td class="align-right"><i class="icon-info-sign" title="'SiCKBEARD' is used in place of RLSGROUP if it could not be properly detected"></i> <b>Release Group:</b></td>
|
<td class="align-right"><i class="icon-info-sign" title="'SiCKRAGE' is used in place of RLSGROUP if it could not be properly detected"></i> <b>Release Group:</b></td>
|
||||||
<td>%RG</td>
|
<td>%RG</td>
|
||||||
<td>RLSGROUP</td>
|
<td>RLSGROUP</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
@ -808,7 +844,7 @@
|
||||||
<td>Show.Name.9th.Mar.2011.HDTV.XviD-RLSGROUP</td>
|
<td>Show.Name.9th.Mar.2011.HDTV.XviD-RLSGROUP</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr class="even">
|
<tr class="even">
|
||||||
<td class="align-right"><i class="icon-info-sign" title="'SiCKBEARD' is used in place of RLSGROUP if it could not be properly detected"></i> <b>Release Group:</b></td>
|
<td class="align-right"><i class="icon-info-sign" title="'SiCKRAGE' is used in place of RLSGROUP if it could not be properly detected"></i> <b>Release Group:</b></td>
|
||||||
<td>%RG</td>
|
<td>%RG</td>
|
||||||
<td>RLSGROUP</td>
|
<td>RLSGROUP</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
|
|
@ -372,7 +372,8 @@ var show_nzb_providers = #if $sickbeard.USE_NZBS then "true" else "false"#;
|
||||||
<span class="component-title"> </span>
|
<span class="component-title"> </span>
|
||||||
<span class="component-desc">
|
<span class="component-desc">
|
||||||
Stop transfer when reaching ratio<br>
|
Stop transfer when reaching ratio<br>
|
||||||
(blank for default)</span>
|
(-1 SickRage default to seed forever)<br>
|
||||||
|
(leave blank for downloader default)</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
#end if
|
#end if
|
||||||
|
|
|
@ -457,7 +457,8 @@
|
||||||
</label>
|
</label>
|
||||||
<label class="nocheck clearfix">
|
<label class="nocheck clearfix">
|
||||||
<span class="component-title"> </span>
|
<span class="component-title"> </span>
|
||||||
<span class="component-desc">Duration (in hours) to seed for (blank for default)</span>
|
<span class="component-desc">Duration (in hours) to seed for<br>
|
||||||
|
(SickRage default '0' passes blank to downloader)</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|
|
@ -22,18 +22,55 @@
|
||||||
|
|
||||||
<script type="text/javascript" src="$sbRoot/js/displayShow.js?$sbPID"></script>
|
<script type="text/javascript" src="$sbRoot/js/displayShow.js?$sbPID"></script>
|
||||||
<script type="text/javascript" src="$sbRoot/js/plotTooltip.js?$sbPID"></script>
|
<script type="text/javascript" src="$sbRoot/js/plotTooltip.js?$sbPID"></script>
|
||||||
|
<script type="text/javascript" src="$sbRoot/js/sceneExceptionsTooltip.js?$sbPID"></script>
|
||||||
<script type="text/javascript" src="$sbRoot/js/ajaxEpSearch.js?$sbPID"></script>
|
<script type="text/javascript" src="$sbRoot/js/ajaxEpSearch.js?$sbPID"></script>
|
||||||
<script type="text/javascript" src="$sbRoot/js/ajaxEpSubtitles.js?$sbPID"></script>
|
<script type="text/javascript" src="$sbRoot/js/ajaxEpSubtitles.js?$sbPID"></script>
|
||||||
<script type="text/javascript" src="$sbRoot/js/ajaxEpRetry.js?$sbPID"></script>
|
<script type="text/javascript" src="$sbRoot/js/ajaxEpRetry.js?$sbPID"></script>
|
||||||
|
<script type="text/javascript" charset="utf-8">
|
||||||
|
<!--
|
||||||
|
\$(document).ready(function(){
|
||||||
|
#set $fuzzydate = 'airdate'
|
||||||
|
#if $sickbeard.FUZZY_DATING:
|
||||||
|
fuzzyMoment({
|
||||||
|
containerClass : '.${fuzzydate}',
|
||||||
|
dateHasTime : false,
|
||||||
|
dateFormat : '${sickbeard.DATE_PRESET}',
|
||||||
|
timeFormat : '${sickbeard.TIME_PRESET}',
|
||||||
|
trimZero : #if $sickbeard.TRIM_ZERO then "true" else "false"#
|
||||||
|
});
|
||||||
|
#end if
|
||||||
|
#raw
|
||||||
|
$('.addQTip').each(function () {
|
||||||
|
$(this).css({'cursor':'help', 'font-weight':'800'});
|
||||||
|
$(this).qtip({
|
||||||
|
show: {solo:true},
|
||||||
|
position: {viewport:$(window), my:'left center', adjust:{ y: -10, x: 2 }},
|
||||||
|
style: {tip:{corner:true, method:'polygon'}, classes:'qtip-rounded qtip-dark qtip-shadow ui-tooltip-sb'}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
#end raw
|
||||||
|
});
|
||||||
|
//-->
|
||||||
|
</script>
|
||||||
<div class="navShows">
|
<div class="navShows">
|
||||||
</br>
|
</br>
|
||||||
<div class="align-left"><b>Change Show:</b>
|
<div class="align-left"><b>Change Show:</b>
|
||||||
<div class="navShow"><img id="prevShow" src="$sbRoot/images/prev.gif" alt="<<" title="Prev Show" /></div>
|
<div class="navShow"><img id="prevShow" src="$sbRoot/images/prev.gif" alt="<<" title="Prev Show" /></div>
|
||||||
<select id="pickShow">
|
<select id="pickShow">
|
||||||
#for $curShow in $sortedShowList:
|
#for $curShowList in $sortedShowLists:
|
||||||
|
#set $curShowType = $curShowList[0]
|
||||||
|
#set $curShowList = $curShowList[1]
|
||||||
|
|
||||||
|
#if len($sortedShowLists) > 1:
|
||||||
|
<optgroup label="$curShowType">
|
||||||
|
#end if
|
||||||
|
#for $curShow in $curShowList:
|
||||||
<option value="$curShow.indexerid" #if $curShow == $show then "selected=\"selected\"" else ""#>$curShow.name</option>
|
<option value="$curShow.indexerid" #if $curShow == $show then "selected=\"selected\"" else ""#>$curShow.name</option>
|
||||||
#end for
|
#end for
|
||||||
|
#if len($sortedShowLists) > 1:
|
||||||
|
</optgroup>
|
||||||
|
#end if
|
||||||
|
#end for
|
||||||
</select>
|
</select>
|
||||||
<div class="navShow"><img id="nextShow" src="$sbRoot/images/next.gif" alt=">>" title="Next Show" /></div>
|
<div class="navShow"><img id="nextShow" src="$sbRoot/images/next.gif" alt=">>" title="Next Show" /></div>
|
||||||
</div></div>
|
</div></div>
|
||||||
|
@ -42,16 +79,14 @@
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="showInfo">
|
<div class="showInfo">
|
||||||
<h1 class="title"><a>$show.name</a></h1>
|
<h1 class="title" id="scene_exception_$show.indexerid"><a>$show.name</a></h1>
|
||||||
<span class="headerInfo" style="color: #b7b7b7; line-height: 16px;">
|
<span class="headerInfo" style="color: #b7b7b7; line-height: 16px;">
|
||||||
|
<span class="tvshowLink" style="vertical-align: text-top">
|
||||||
#if not $show.imdbid
|
#if not $show.imdbid
|
||||||
($show.startyear) - $show.runtime min
|
($show.startyear) - $show.runtime min
|
||||||
#if $show.genre:
|
#if $show.genre:
|
||||||
- $show.genre[1:-1].replace('|',' | ')
|
- $show.genre[1:-1].replace('|',' | ')
|
||||||
#end if
|
#end if
|
||||||
<span class="tvshowLink" style="vertical-align: text-top">
|
|
||||||
<a href="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid" onclick="window.open(this.href, '_blank'); return false;" title="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid"><img alt="$sickbeard.indexerApi($show.indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($show.indexer).config["icon"] "style="margin-top: -1px;"/></a>
|
|
||||||
</span>
|
|
||||||
#else
|
#else
|
||||||
#if 'country_codes' in $show.imdb_info:
|
#if 'country_codes' in $show.imdb_info:
|
||||||
#for $country in $show.imdb_info['country_codes'].split('|')
|
#for $country in $show.imdb_info['country_codes'].split('|')
|
||||||
|
@ -61,11 +96,13 @@
|
||||||
#if 'year' in $show.imdb_info:
|
#if 'year' in $show.imdb_info:
|
||||||
($show.imdb_info['year']) - $show.imdb_info['runtimes'] min - $show.imdb_info['genres'].replace('|',' | ')
|
($show.imdb_info['year']) - $show.imdb_info['runtimes'] min - $show.imdb_info['genres'].replace('|',' | ')
|
||||||
#end if
|
#end if
|
||||||
<span class="tvshowLink" style="vertical-align: text-top">
|
|
||||||
<a href="http://www.imdb.com/title/$show.imdbid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.imdb.com/title/$show.imdbid"><img alt="[imdb]" height="16" width="16" src="$sbRoot/images/imdb.png" style="margin-top: -1px;"/>
|
<a href="http://www.imdb.com/title/$show.imdbid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.imdb.com/title/$show.imdbid"><img alt="[imdb]" height="16" width="16" src="$sbRoot/images/imdb.png" style="margin-top: -1px;"/>
|
||||||
<a href="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid"><img alt="$sickbeard.indexerApi($show.indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($show.indexer).config["icon"] "style="margin-top: -1px;"/></a>
|
|
||||||
</span>
|
|
||||||
#end if
|
#end if
|
||||||
|
<a href="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid" onclick="window.open(this.href, '_blank'); return false;" title="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid"><img alt="$sickbeard.indexerApi($show.indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($show.indexer).config["icon"] "style="margin-top: -1px;"/></a>
|
||||||
|
#if $xem_numbering or $xem_absolute_numbering:
|
||||||
|
<a href="http://thexem.de/search?q=$show.name" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://thexem.de/search?q-$show.name"><img alt="[xem]" height="16" width="16" src="$sbRoot/images/xem.png" style="margin-top: -1px;"/>
|
||||||
|
#end if
|
||||||
|
</span>
|
||||||
</span>
|
</span>
|
||||||
#if $seasonResults:
|
#if $seasonResults:
|
||||||
##There is a special/season_0?##
|
##There is a special/season_0?##
|
||||||
|
@ -158,11 +195,32 @@
|
||||||
<tr><td class="showLegend">Flat Folders: </td><td><img src="$sbRoot/images/#if $show.flatten_folders == 1 or $sickbeard.NAMING_FORCE_FOLDERS then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
<tr><td class="showLegend">Flat Folders: </td><td><img src="$sbRoot/images/#if $show.flatten_folders == 1 or $sickbeard.NAMING_FORCE_FOLDERS then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
||||||
<tr><td class="showLegend">Paused: </td><td><img src="$sbRoot/images/#if int($show.paused) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
<tr><td class="showLegend">Paused: </td><td><img src="$sbRoot/images/#if int($show.paused) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
||||||
<tr><td class="showLegend">Air-by-Date: </td><td><img src="$sbRoot/images/#if int($show.air_by_date) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
<tr><td class="showLegend">Air-by-Date: </td><td><img src="$sbRoot/images/#if int($show.air_by_date) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
||||||
<tr><td class="showLegend">Sports: </td><td><img src="$sbRoot/images/#if int($show.sports) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
<tr><td class="showLegend">Sports: </td><td><img src="$sbRoot/images/#if int($show.is_sports) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
||||||
|
<tr><td class="showLegend">Anime: </td><td><img src="$sbRoot/images/#if int($show.is_anime) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
||||||
<tr><td class="showLegend">DVD Order: </td><td><img src="$sbRoot/images/#if int($show.dvdorder) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
<tr><td class="showLegend">DVD Order: </td><td><img src="$sbRoot/images/#if int($show.dvdorder) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
||||||
|
<tr><td class="showLegend">Scene Numbering: </td><td><img src="$sbRoot/images/#if int($show.scene) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
||||||
#if $anyQualities + $bestQualities
|
#if $anyQualities + $bestQualities
|
||||||
<tr><td class="showLegend">Archive First Match: </td><td><img src="$sbRoot/images/#if int($show.archive_firstmatch) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
<tr><td class="showLegend">Archive First Match: </td><td><img src="$sbRoot/images/#if int($show.archive_firstmatch) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
||||||
#end if
|
#end if
|
||||||
|
|
||||||
|
#if $bwl.get_white_keywords_for("gloabl"):
|
||||||
|
<tr><td class="showLegend">Whitelist: </td><td>#echo ', '.join($bwl.get_white_keywords_for("gloabl"))#</td></tr>
|
||||||
|
#end if
|
||||||
|
#if $bwl.get_black_keywords_for("gloabl"):
|
||||||
|
<tr><td class="showLegend">Blacklist: </td><td>#echo ', '.join($bwl.get_black_keywords_for("gloabl"))#</td></tr>
|
||||||
|
#end if
|
||||||
|
#if $bwl.get_white_keywords_for("release_group"):
|
||||||
|
<tr>
|
||||||
|
<td class="showLegend">Wanted Group#if len($bwl.get_white_keywords_for("release_group"))>1 then "s" else ""#:</td>
|
||||||
|
<td>#echo ', '.join($bwl.get_white_keywords_for("release_group"))#</td>
|
||||||
|
</tr>
|
||||||
|
#end if
|
||||||
|
#if $bwl.get_black_keywords_for("release_group"):
|
||||||
|
<tr>
|
||||||
|
<td class="showLegend">Unwanted Group#if len($bwl.get_black_keywords_for("release_group"))>1 then "s" else ""#:</td>
|
||||||
|
<td>#echo ', '.join($bwl.get_black_keywords_for("release_group"))#</td>
|
||||||
|
</tr>
|
||||||
|
#end if
|
||||||
</table>
|
</table>
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
@ -212,14 +270,48 @@
|
||||||
#continue
|
#continue
|
||||||
#end if
|
#end if
|
||||||
|
|
||||||
|
#set $scene = False
|
||||||
|
#set $scene_anime = False
|
||||||
|
#if not $show.air_by_date and not $show.is_sports and not $show.is_anime and $show.is_scene:
|
||||||
|
#set $scene = True
|
||||||
|
#elif not $show.air_by_date and not $show.is_sports and $show.is_anime and $show.is_scene:
|
||||||
|
#set $scene_anime = True
|
||||||
|
#end if
|
||||||
|
|
||||||
|
#set ($dfltSeas, $dfltEpis, $dfltAbsolute) = (0, 0, 0)
|
||||||
|
|
||||||
|
#if (epResult["season"], epResult["episode"]) in $xem_numbering:
|
||||||
|
#set ($dfltSeas, $dfltEpis) = $xem_numbering[(epResult["season"], epResult["episode"])]
|
||||||
|
#end if
|
||||||
|
|
||||||
|
#if epResult["absolute_number"] in $xem_absolute_numbering:
|
||||||
|
#set $dfltAbsolute = $xem_absolute_numbering[epResult["absolute_number"]]
|
||||||
|
#end if
|
||||||
|
|
||||||
|
#if epResult["absolute_number"] in $scene_absolute_numbering:
|
||||||
|
#set $scAbsolute = $scene_absolute_numbering[epResult["absolute_number"]]
|
||||||
|
#set $dfltAbsNumbering = False
|
||||||
|
#else
|
||||||
|
#set $scAbsolute = $dfltAbsolute
|
||||||
|
#set $dfltAbsNumbering = True
|
||||||
|
#end if
|
||||||
|
|
||||||
|
#if (epResult["season"], epResult["episode"]) in $scene_numbering:
|
||||||
|
#set ($scSeas, $scEpis) = $scene_numbering[(epResult["season"], epResult["episode"])]
|
||||||
|
#set $dfltEpNumbering = False
|
||||||
|
#else
|
||||||
|
#set ($scSeas, $scEpis) = ($dfltSeas, $dfltEpis)
|
||||||
|
#set $dfltEpNumbering = True
|
||||||
|
#end if
|
||||||
|
|
||||||
#if int($epResult["season"]) != $curSeason:
|
#if int($epResult["season"]) != $curSeason:
|
||||||
<tr><td colspan="10" style="height: 0px; padding:0; margin:0;"><a name="season-$epResult["season"]"></a></td></tr>
|
<tr><td colspan="11" style="height: 0px; padding:0; margin:0;"><a name="season-$epResult["season"]"></a></td></tr>
|
||||||
<tr class="seasonheader" id="season-$epResult["season"]" >
|
<tr class="seasonheader" id="season-$epResult["season"]" >
|
||||||
<td colspan="10">
|
<td colspan="11">
|
||||||
<h2>#if int($epResult["season"]) == 0 then "Specials" else "Season "+str($epResult["season"])#</h2>
|
<h2>#if int($epResult["season"]) == 0 then "Specials" else "Season "+str($epResult["season"])#</h2>
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr id="season-$epResult["season"]-cols"><th width="1%"><input type="checkbox" class="seasonCheck" id="$epResult["season"]" /></th><th>NFO</th><th>TBN</th><th>Episode</th><th>Scene #</th><th>Name</th><th class="nowrap">Airdate</th><th>Filename</th>#if $sickbeard.USE_SUBTITLES and $show.subtitles then "<th>Subtitles</th>" else ""#<th>Status</th><th>Search</th></tr>
|
<tr id="season-$epResult["season"]-cols"><th width="1%"><input type="checkbox" class="seasonCheck" id="$epResult["season"]" /></th><th>NFO</th><th>TBN</th><th>Episode</th>#if $show.is_anime then "<th>Absolute</th>" else ""# #if $scene then "<th>Scene #</th>" else ""# #if $scene_anime then "<th>Scene Absolute</th>" else ""#<th>Name</th><th class="nowrap">Airdate</th>#if $sickbeard.USE_SUBTITLES and $show.subtitles then "<th>Subtitles</th>" else ""#<th>Status</th><th>Search</th></tr>
|
||||||
#set $curSeason = int($epResult["season"])
|
#set $curSeason = int($epResult["season"])
|
||||||
#end if
|
#end if
|
||||||
|
|
||||||
|
@ -233,23 +325,22 @@
|
||||||
</td>
|
</td>
|
||||||
<td align="center"><img src="$sbRoot/images/#if $epResult["hasnfo"] == 1 then "nfo.gif\" alt=\"Y" else "nfo-no.gif\" alt=\"N"#" width="23" height="11" /></td>
|
<td align="center"><img src="$sbRoot/images/#if $epResult["hasnfo"] == 1 then "nfo.gif\" alt=\"Y" else "nfo-no.gif\" alt=\"N"#" width="23" height="11" /></td>
|
||||||
<td align="center"><img src="$sbRoot/images/#if $epResult["hastbn"] == 1 then "tbn.gif\" alt=\"Y" else "tbn-no.gif\" alt=\"N"#" width="23" height="11" /></td>
|
<td align="center"><img src="$sbRoot/images/#if $epResult["hastbn"] == 1 then "tbn.gif\" alt=\"Y" else "tbn-no.gif\" alt=\"N"#" width="23" height="11" /></td>
|
||||||
<td align="center">$epResult["episode"]</td>
|
|
||||||
<td align="center">
|
<td align="center">
|
||||||
#if int($show.air_by_date) != 1 and int($show.sports) != 1
|
#if $epLoc and $show._location and $epLoc.lower().startswith($show._location.lower()):
|
||||||
#if (epResult["season"], epResult["episode"]) in $xem_numbering:
|
#set $epLoc = os.path.basename($epLoc[len($show._location)+1:])
|
||||||
#set ($dfltSeas, $dfltEpis) = $xem_numbering[(epResult["season"], epResult["episode"])]
|
#elif $epLoc and (not $epLoc.lower().startswith($show._location.lower()) or not $show._location):
|
||||||
#elif $xem_numbering and (epResult["season"], epResult["episode"]) not in $xem_numbering:
|
#set $epLoc = os.path.basename($epLoc)
|
||||||
#set ($dfltSeas, $dfltEpis) = (0,0)
|
|
||||||
#else:
|
|
||||||
#set ($dfltSeas, $dfltEpis) = (epResult["season"], epResult["episode"])
|
|
||||||
#end if
|
#end if
|
||||||
#if (epResult["season"], epResult["episode"]) in $scene_numbering:
|
#if $epLoc != "" and $epLoc != None:
|
||||||
#set ($scSeas, $scEpis) = $scene_numbering[(epResult["season"], epResult["episode"])]
|
<span title="$epLoc" class="addQTip">$epResult["episode"]</span>
|
||||||
#set $dfltEpNumbering = False
|
|
||||||
#else
|
#else
|
||||||
#set ($scSeas, $scEpis) = ($dfltSeas, $dfltEpis)
|
$epResult["episode"]#end if#</td>
|
||||||
#set $dfltEpNumbering = True
|
#if $show.is_anime:
|
||||||
|
<td align="center">$epResult["absolute_number"]</td>
|
||||||
#end if
|
#end if
|
||||||
|
|
||||||
|
#if $scene:
|
||||||
|
<td align="center">
|
||||||
<input type="text" placeholder="<%=str(dfltSeas) + 'x' + str(dfltEpis)%>" size="6" maxlength="8"
|
<input type="text" placeholder="<%=str(dfltSeas) + 'x' + str(dfltEpis)%>" size="6" maxlength="8"
|
||||||
class="sceneSeasonXEpisode" data-for-season="$epResult["season"]" data-for-episode="$epResult["episode"]"
|
class="sceneSeasonXEpisode" data-for-season="$epResult["season"]" data-for-episode="$epResult["episode"]"
|
||||||
id="sceneSeasonXEpisode_$show.indexerid<%="_"+str(epResult["season"])+"_"+str(epResult["episode"])%>"
|
id="sceneSeasonXEpisode_$show.indexerid<%="_"+str(epResult["season"])+"_"+str(epResult["episode"])%>"
|
||||||
|
@ -261,26 +352,30 @@
|
||||||
#end if
|
#end if
|
||||||
style="padding: 0; text-align: center; max-width: 60px;"
|
style="padding: 0; text-align: center; max-width: 60px;"
|
||||||
/>
|
/>
|
||||||
#else
|
|
||||||
N/A
|
|
||||||
#end if
|
|
||||||
</td>
|
</td>
|
||||||
|
#elif $scene_anime:
|
||||||
|
<td align="center">
|
||||||
|
<input type="text" placeholder="<%=str(dfltAbsolute)%>" size="6" maxlength="8"
|
||||||
|
class="sceneAbsolute" data-for-absolute="$epResult["absolute_number"]"
|
||||||
|
id="sceneAbsolute_$show.indexerid<%="_"+str(epResult["absolute_number"])%>"
|
||||||
|
title="Change the value here if scene absolute numbering differs from the indexer absolute numbering"
|
||||||
|
#if $dfltAbsNumbering:
|
||||||
|
value=""
|
||||||
|
#else
|
||||||
|
value="<%=str(scAbsolute)%>"
|
||||||
|
#end if
|
||||||
|
style="padding: 0; text-align: center; max-width: 60px;"
|
||||||
|
/>
|
||||||
|
</td>
|
||||||
|
#end if
|
||||||
|
|
||||||
<td class="title">
|
<td class="title">
|
||||||
#if $epResult["description"] != "" and $epResult["description"] != None:
|
#if $epResult["description"] != "" and $epResult["description"] != None:
|
||||||
<img style="padding-top: 3px;" src="$sbRoot/images/info32.png" width="16" height="16" class="plotInfo" alt="" id="plot_info_$show.indexerid<%="_"+str(epResult["season"])+"_"+str(epResult["episode"])%>" />
|
<img style="padding-top: 3px;" src="$sbRoot/images/info32.png" width="16" height="16" class="plotInfo" alt="" id="plot_info_$show.indexerid<%="_"+str(epResult["season"])+"_"+str(epResult["episode"])%>" />
|
||||||
#end if
|
#end if
|
||||||
$epResult["name"]
|
$epResult["name"]
|
||||||
</td>
|
</td>
|
||||||
<td align="center" class="nowrap">#if int($epResult["airdate"]) == 1 then "never" else $sbdatetime.sbdatetime.sbfdate($network_timezones.parse_date_time($epResult["airdate"],$show.airs,$show.network))#</td>
|
<td align="center" class="nowrap"><span class="${fuzzydate}">#if int($epResult["airdate"]) == 1 then "never" else $sbdatetime.sbdatetime.sbfdate($network_timezones.parse_date_time($epResult["airdate"],$show.airs,$show.network))#</span></td>
|
||||||
<td class="filename"><small>
|
|
||||||
#if $epLoc and $show._location and $epLoc.lower().startswith($show._location.lower()):
|
|
||||||
#set $epLoc = os.path.basename($epLoc[len($show._location)+1:])
|
|
||||||
#elif $epLoc and (not $epLoc.lower().startswith($show._location.lower()) or not $show._location):
|
|
||||||
#set $epLoc = os.path.basename($epLoc)
|
|
||||||
#end if
|
|
||||||
$epLoc
|
|
||||||
</small>
|
|
||||||
</td>
|
|
||||||
#if $sickbeard.USE_SUBTITLES and $show.subtitles:
|
#if $sickbeard.USE_SUBTITLES and $show.subtitles:
|
||||||
<td id="subtitles_column" class="subtitles_column" align="left">
|
<td id="subtitles_column" class="subtitles_column" align="left">
|
||||||
#if $epResult["subtitles"]:
|
#if $epResult["subtitles"]:
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
#import sickbeard
|
#import sickbeard
|
||||||
|
#import lib.adba as adba
|
||||||
#from sickbeard import common
|
#from sickbeard import common
|
||||||
#from sickbeard import exceptions
|
#from sickbeard import exceptions
|
||||||
#from sickbeard import scene_exceptions
|
#from sickbeard import scene_exceptions
|
||||||
|
#from sickbeard.blackandwhitelist import *
|
||||||
#set global $title="Edit " + $show.name
|
#set global $title="Edit " + $show.name
|
||||||
#set global $header=$show.name
|
#set global $header=$show.name
|
||||||
|
|
||||||
|
@ -112,6 +114,10 @@ This <b>DOES NOT</b> allow SickRage to download non-english TV episodes!<br />
|
||||||
<b>Paused:</b> <input type="checkbox" name="paused" #if $show.paused == 1 then "checked=\"checked\"" else ""# /><br /><br />
|
<b>Paused:</b> <input type="checkbox" name="paused" #if $show.paused == 1 then "checked=\"checked\"" else ""# /><br /><br />
|
||||||
<b>Subtitles:</b> <input type="checkbox" name="subtitles"#if $show.subtitles == 1 and $sickbeard.USE_SUBTITLES then " checked=\"checked\"" else ""##if not $sickbeard.USE_SUBTITLES then " disabled=\"disabled\"" else ""#/><br /><br />
|
<b>Subtitles:</b> <input type="checkbox" name="subtitles"#if $show.subtitles == 1 and $sickbeard.USE_SUBTITLES then " checked=\"checked\"" else ""##if not $sickbeard.USE_SUBTITLES then " disabled=\"disabled\"" else ""#/><br /><br />
|
||||||
|
|
||||||
|
<b>Scene Numbering: </b>
|
||||||
|
<input type="checkbox" name="scene" #if $show.scene == 1 then "checked=\"checked\"" else ""# /><br/>
|
||||||
|
(check this if you wish to search by scene numbering, uncheck to search by indexer numbering)
|
||||||
|
<br/><br/>
|
||||||
<b>Air by date: </b>
|
<b>Air by date: </b>
|
||||||
<input type="checkbox" name="air_by_date" #if $show.air_by_date == 1 then "checked=\"checked\"" else ""# /><br />
|
<input type="checkbox" name="air_by_date" #if $show.air_by_date == 1 then "checked=\"checked\"" else ""# /><br />
|
||||||
(check this if the show is released as Show.03.02.2010 rather than Show.S02E03)
|
(check this if the show is released as Show.03.02.2010 rather than Show.S02E03)
|
||||||
|
@ -120,6 +126,10 @@ This <b>DOES NOT</b> allow SickRage to download non-english TV episodes!<br />
|
||||||
<input type="checkbox" name="sports" #if $show.sports == 1 then "checked=\"checked\"" else ""# /><br />
|
<input type="checkbox" name="sports" #if $show.sports == 1 then "checked=\"checked\"" else ""# /><br />
|
||||||
(check this if the show is a sporting or MMA event)
|
(check this if the show is a sporting or MMA event)
|
||||||
<br /><br />
|
<br /><br />
|
||||||
|
<b>Anime: </b>
|
||||||
|
<input type="checkbox" name="anime" #if $show.is_anime then "CHECKED" else ""#><br />
|
||||||
|
(check this if the show is released as Show.265 rather than Show.S02E03, this show is an anime)
|
||||||
|
<br /><br />
|
||||||
<b>DVD Order: </b>
|
<b>DVD Order: </b>
|
||||||
<input type="checkbox" name="dvdorder" #if $show.dvdorder == 1 then "checked=\"checked\"" else ""# /><br/>
|
<input type="checkbox" name="dvdorder" #if $show.dvdorder == 1 then "checked=\"checked\"" else ""# /><br/>
|
||||||
(check this if you wish to use the DVD order instead of the Airing order)
|
(check this if you wish to use the DVD order instead of the Airing order)
|
||||||
|
@ -135,6 +145,54 @@ Results without one of these words in the title will be filtered out <br />
|
||||||
Separate words with a comma, e.g. "word1,word2,word3"
|
Separate words with a comma, e.g. "word1,word2,word3"
|
||||||
<br /><br />
|
<br /><br />
|
||||||
|
|
||||||
|
#if $show.is_anime
|
||||||
|
<p>
|
||||||
|
Realease Groups:
|
||||||
|
</p>
|
||||||
|
<input type="text" id="addToPoolText"/>
|
||||||
|
<input type="button" value="Add to White" id="addToWhite">
|
||||||
|
<input type="button" value="Add to Black" id="addToBlack"><br/>
|
||||||
|
<div class="blackwhiteliste white">
|
||||||
|
<span>White:</span>
|
||||||
|
<select multiple id="white">
|
||||||
|
#for $keyword in $whitelist:
|
||||||
|
<option value="$keyword">$keyword</option>
|
||||||
|
#end for
|
||||||
|
</select>
|
||||||
|
<br/>
|
||||||
|
<input id="removeW" value="Remove >>" type="button"/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="blackwhiteliste pool">
|
||||||
|
<span>Pool (Name|Rating|Subed Ep):</span>
|
||||||
|
<select multiple id="pool">
|
||||||
|
#for $group in $groups
|
||||||
|
#if $group not in $whitelist and $group['name'] not in $blacklist:
|
||||||
|
<option value="$group['name']">$group['name'] | $group['rating'] | $group['range']</option>
|
||||||
|
#end if
|
||||||
|
#end for
|
||||||
|
</select>
|
||||||
|
<br/>
|
||||||
|
<input id="addW" value="<< Add" type="button"/>
|
||||||
|
<input id="addB" value="Add >>" type="button"/>
|
||||||
|
<input id="removeP" value="Remove" type="button"/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="blackwhiteliste black">
|
||||||
|
<span>Black:</span>
|
||||||
|
<select multiple id="black">
|
||||||
|
#for $keyword in $blacklist:
|
||||||
|
<option value="$keyword">$keyword</option>
|
||||||
|
#end for
|
||||||
|
</select>
|
||||||
|
<br/>
|
||||||
|
<input id="removeB" value="<< Remove" type="button"/>
|
||||||
|
</div>
|
||||||
|
<br style="clear:both;"/>
|
||||||
|
#end if
|
||||||
|
<input type="hidden" name="whitelist" id="whitelist"/>
|
||||||
|
<input type="hidden" name="blacklist" id="blacklist"/>
|
||||||
|
|
||||||
<input type="submit" id="submit" value="Submit" class="btn btn-primary" />
|
<input type="submit" id="submit" value="Submit" class="btn btn-primary" />
|
||||||
</form>
|
</form>
|
||||||
|
|
||||||
|
@ -153,6 +211,19 @@ Separate words with a comma, e.g. "word1,word2,word3"
|
||||||
|
|
||||||
\$("#exceptions_list").val(all_exceptions);
|
\$("#exceptions_list").val(all_exceptions);
|
||||||
|
|
||||||
|
var realvalues = [];
|
||||||
|
|
||||||
|
\$('#white option').each(function(i, selected) {
|
||||||
|
realvalues[i] = \$(selected).val();
|
||||||
|
});
|
||||||
|
\$("#whitelist").val(realvalues.join(","));
|
||||||
|
|
||||||
|
realvalues = [];
|
||||||
|
\$('#black option').each(function(i, selected) {
|
||||||
|
realvalues[i] = \$(selected).val();
|
||||||
|
});
|
||||||
|
\$("#blacklist").val(realvalues.join(","));
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
\$('#addSceneName').click(function() {
|
\$('#addSceneName').click(function() {
|
||||||
|
@ -197,6 +268,43 @@ Separate words with a comma, e.g. "word1,word2,word3"
|
||||||
|
|
||||||
\$(this).toggle_SceneException();
|
\$(this).toggle_SceneException();
|
||||||
|
|
||||||
|
\$('#removeW').click(function() {
|
||||||
|
return !\$('#white option:selected').remove().appendTo('#pool');
|
||||||
|
});
|
||||||
|
\$('#addW').click(function() {
|
||||||
|
return !\$('#pool option:selected').remove().appendTo('#white');
|
||||||
|
});
|
||||||
|
\$('#addB').click(function() {
|
||||||
|
return !\$('#pool option:selected').remove().appendTo('#black');
|
||||||
|
});
|
||||||
|
\$('#removeP').click(function() {
|
||||||
|
return !\$('#pool option:selected').remove();
|
||||||
|
});
|
||||||
|
\$('#removeB').click(function() {
|
||||||
|
return !\$('#black option:selected').remove().appendTo('#pool');
|
||||||
|
});
|
||||||
|
|
||||||
|
\$('#addToWhite').click(function() {
|
||||||
|
var group = \$('#addToPoolText').attr("value")
|
||||||
|
if(group == "")
|
||||||
|
return
|
||||||
|
\$('#addToPoolText').attr("value", "")
|
||||||
|
var option = \$("<option>")
|
||||||
|
option.attr("value",group)
|
||||||
|
option.html(group)
|
||||||
|
return option.appendTo('#white');
|
||||||
|
});
|
||||||
|
\$('#addToBlack').click(function() {
|
||||||
|
var group = \$('#addToPoolText').attr("value")
|
||||||
|
if(group == "")
|
||||||
|
return
|
||||||
|
\$('#addToPoolText').attr("value", "")
|
||||||
|
var option = \$("<option>")
|
||||||
|
option.attr("value",group)
|
||||||
|
option.html(group)
|
||||||
|
return option.appendTo('#black');
|
||||||
|
});
|
||||||
|
|
||||||
//-->
|
//-->
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
|
|
|
@ -65,6 +65,19 @@
|
||||||
url = '$sbRoot/history/?limit='+\$(this).val()
|
url = '$sbRoot/history/?limit='+\$(this).val()
|
||||||
window.location.href = url
|
window.location.href = url
|
||||||
});
|
});
|
||||||
|
|
||||||
|
#set $fuzzydate = 'airdate'
|
||||||
|
#if $sickbeard.FUZZY_DATING:
|
||||||
|
fuzzyMoment({
|
||||||
|
containerClass : '.${fuzzydate}',
|
||||||
|
dateHasTime : true,
|
||||||
|
dateFormat : '${sickbeard.DATE_PRESET}',
|
||||||
|
timeFormat : '${sickbeard.TIME_PRESET_W_SECONDS}',
|
||||||
|
trimZero : #if $sickbeard.TRIM_ZERO then "true" else "false"#,
|
||||||
|
dtGlue : ', ',
|
||||||
|
});
|
||||||
|
#end if
|
||||||
|
|
||||||
});
|
});
|
||||||
//-->
|
//-->
|
||||||
</script>
|
</script>
|
||||||
|
@ -97,7 +110,7 @@
|
||||||
#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($hItem["action"]))
|
#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($hItem["action"]))
|
||||||
<tr>
|
<tr>
|
||||||
#set $curdatetime = $datetime.datetime.strptime(str($hItem["date"]), $history.dateFormat)
|
#set $curdatetime = $datetime.datetime.strptime(str($hItem["date"]), $history.dateFormat)
|
||||||
<td class="nowrap">$sbdatetime.sbdatetime.sbfdatetime($curdatetime, show_seconds=True)<span class="sort_data">$time.mktime($curdatetime.timetuple())</span></td>
|
<td class="nowrap"><div class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdatetime($curdatetime, show_seconds=True)</div><span class="sort_data">$time.mktime($curdatetime.timetuple())</span></td>
|
||||||
<td width="35%"><a style="color: #000000; text-align: center;" href="$sbRoot/home/displayShow?show=$hItem["showid"]#season-$hItem["season"]">$hItem["show_name"] - <%=str(hItem["season"]) +"x"+ "%02i" % int(hItem["episode"]) %>#if "proper" in $hItem["resource"].lower or "repack" in $hItem["resource"].lower then ' <span class="quality Proper">Proper</span>' else ""#</a></td>
|
<td width="35%"><a style="color: #000000; text-align: center;" href="$sbRoot/home/displayShow?show=$hItem["showid"]#season-$hItem["season"]">$hItem["show_name"] - <%=str(hItem["season"]) +"x"+ "%02i" % int(hItem["episode"]) %>#if "proper" in $hItem["resource"].lower or "repack" in $hItem["resource"].lower then ' <span class="quality Proper">Proper</span>' else ""#</a></td>
|
||||||
<td align="center" #if $curStatus == SUBTITLED then 'class="subtitles_column"' else ''#><span style="cursor: help;" title="$os.path.basename($hItem["resource"])">$statusStrings[$curStatus]</span>
|
<td align="center" #if $curStatus == SUBTITLED then 'class="subtitles_column"' else ''#><span style="cursor: help;" title="$os.path.basename($hItem["resource"])">$statusStrings[$curStatus]</span>
|
||||||
#if $curStatus == SUBTITLED:
|
#if $curStatus == SUBTITLED:
|
||||||
|
|
|
@ -98,7 +98,7 @@
|
||||||
\$(this).remove();
|
\$(this).remove();
|
||||||
});
|
});
|
||||||
|
|
||||||
\$("#showListTable:has(tbody tr)").tablesorter({
|
\$("#showListTableShows:has(tbody tr)").tablesorter({
|
||||||
|
|
||||||
sortList: [[6,1],[2,0]],
|
sortList: [[6,1],[2,0]],
|
||||||
textExtraction: {
|
textExtraction: {
|
||||||
|
@ -119,6 +119,38 @@
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
\$("#showListTableAnime:has(tbody tr)").tablesorter({
|
||||||
|
|
||||||
|
sortList: [[6,1],[2,0]],
|
||||||
|
textExtraction: {
|
||||||
|
0: function(node) { return \$(node).find("span").text().toLowerCase(); },
|
||||||
|
#if ( $layout != 'simple'):
|
||||||
|
3: function(node) { return \$(node).find("img").attr("alt"); },
|
||||||
|
#end if
|
||||||
|
4: function(node) { return \$(node).find("span").text(); },
|
||||||
|
6: function(node) { return \$(node).find("img").attr("alt"); }
|
||||||
|
},
|
||||||
|
widgets: ['saveSort', 'zebra'],
|
||||||
|
headers: {
|
||||||
|
0: { sorter: 'cDate' },
|
||||||
|
2: { sorter: 'loadingNames' },
|
||||||
|
3: { sorter: 'network' },
|
||||||
|
4: { sorter: 'quality' },
|
||||||
|
5: { sorter: 'eps' },
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
#set $fuzzydate = 'airdate'
|
||||||
|
#if $sickbeard.FUZZY_DATING:
|
||||||
|
fuzzyMoment({
|
||||||
|
containerClass : '.${fuzzydate}',
|
||||||
|
dateHasTime : false,
|
||||||
|
dateFormat : '${sickbeard.DATE_PRESET}',
|
||||||
|
timeFormat : '${sickbeard.TIME_PRESET}',
|
||||||
|
trimZero : #if $sickbeard.TRIM_ZERO then "true" else "false"#
|
||||||
|
});
|
||||||
|
#end if
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
//-->
|
//-->
|
||||||
|
@ -138,7 +170,14 @@
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<table id="showListTable" class="sickbeardTable tablesorter" cellspacing="1" border="0" cellpadding="0">
|
#for $curShowlist in $showlists:
|
||||||
|
#set $curListType = $curShowlist[0]
|
||||||
|
#set $myShowList = $list($curShowlist[1])
|
||||||
|
#if $curListType == "Anime":
|
||||||
|
<h2>Anime List</h2>
|
||||||
|
#end if
|
||||||
|
|
||||||
|
<table id="showListTable$curListType" class="sickbeardTable tablesorter" cellspacing="1" border="0" cellpadding="0">
|
||||||
|
|
||||||
<thead><tr><th class="nowrap">Next Ep</th>#if $layout=="poster" then "<th>Poster</th>" else "<th style='display: none;'></th>"#<th>Show</th><th>Network</th><th>Quality</th><th>Downloads</th><th>Active</th><th>Status</th></tr></thead>
|
<thead><tr><th class="nowrap">Next Ep</th>#if $layout=="poster" then "<th>Poster</th>" else "<th style='display: none;'></th>"#<th>Show</th><th>Network</th><th>Quality</th><th>Downloads</th><th>Active</th><th>Status</th></tr></thead>
|
||||||
<tfoot>
|
<tfoot>
|
||||||
|
@ -173,7 +212,6 @@
|
||||||
</tr>
|
</tr>
|
||||||
#end for
|
#end for
|
||||||
|
|
||||||
#set $myShowList = $list($sickbeard.showList)
|
|
||||||
$myShowList.sort(lambda x, y: cmp(x.name, y.name))
|
$myShowList.sort(lambda x, y: cmp(x.name, y.name))
|
||||||
#for $curShow in $myShowList:
|
#for $curShow in $myShowList:
|
||||||
#set $curEp = $curShow.nextEpisode()
|
#set $curEp = $curShow.nextEpisode()
|
||||||
|
@ -201,7 +239,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
|
||||||
<tr>
|
<tr>
|
||||||
#if len($curEp) != 0:
|
#if len($curEp) != 0:
|
||||||
#set $ldatetime = $network_timezones.parse_date_time($curEp[0].airdate.toordinal(),$curShow.airs,$curShow.network)
|
#set $ldatetime = $network_timezones.parse_date_time($curEp[0].airdate.toordinal(),$curShow.airs,$curShow.network)
|
||||||
<td align="center" class="nowrap" style="color: #555555;font-weight:bold;">$sbdatetime.sbdatetime.sbfdate($ldatetime)<span class="sort_data">$time.mktime($ldatetime.timetuple())</span> </td>
|
<td align="center" class="nowrap" style="color: #555555;font-weight:bold;"><div class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdate($ldatetime)</div><span class="sort_data">$time.mktime($ldatetime.timetuple())</span> </td>
|
||||||
#else:
|
#else:
|
||||||
<td align="center" class="nowrap" style="color: #555555;font-weight:bold;"></td>
|
<td align="center" class="nowrap" style="color: #555555;font-weight:bold;"></td>
|
||||||
#end if
|
#end if
|
||||||
|
@ -258,7 +296,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
|
||||||
//-->
|
//-->
|
||||||
</script>
|
</script>
|
||||||
</td>
|
</td>
|
||||||
<td align="center"><img src="$sbRoot/images/#if int($curShow.paused) == 0 and $curShow.status != "Ended" then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
<td align="center"><img src="$sbRoot/images/#if int($curShow.paused) == 0 and "Ended" not in $curShow.status then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||||
<td align="center" style="color: #555555; font-weight: bold;">$curShow.status</td>
|
<td align="center" style="color: #555555; font-weight: bold;">$curShow.status</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
|
||||||
|
@ -266,5 +304,8 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
|
||||||
#end for
|
#end for
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
|
#end for
|
||||||
|
|
||||||
|
<script type="text/javascript" src="$sbRoot/js/tableClick.js"></script>
|
||||||
|
|
||||||
#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl")
|
#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl")
|
||||||
|
|
|
@ -33,6 +33,22 @@
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div class="field-pair alt">
|
||||||
|
<input type="checkbox" name="anime" id="anime" #if $sickbeard.ANIME_DEFAULT then "checked=\"checked\"" else ""# />
|
||||||
|
<label for="anime" class="clearfix">
|
||||||
|
<span class="component-title">Anime</span>
|
||||||
|
<span class="component-desc">Is this show an Anime?</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="field-pair alt">
|
||||||
|
<input type="checkbox" name="scene" id="scene" #if $sickbeard.SCENE_DEFAULT then "checked=\"checked\"" else ""# />
|
||||||
|
<label for="scene" class="clearfix">
|
||||||
|
<span class="component-title">Scene Numbering</span>
|
||||||
|
<span class="component-desc">Is this show scene numbered?</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
#set $qualities = $Quality.splitQuality($sickbeard.QUALITY_DEFAULT)
|
#set $qualities = $Quality.splitQuality($sickbeard.QUALITY_DEFAULT)
|
||||||
#set global $anyQualities = $qualities[0]
|
#set global $anyQualities = $qualities[0]
|
||||||
#set global $bestQualities = $qualities[1]
|
#set global $bestQualities = $qualities[1]
|
||||||
|
|
|
@ -125,7 +125,10 @@ a > i.icon-question-sign { background-image: url("$sbRoot/images/glyphicons-half
|
||||||
<script type="text/javascript" src="$sbRoot/js/lib/jquery.form-3.35.js?$sbPID"></script>
|
<script type="text/javascript" src="$sbRoot/js/lib/jquery.form-3.35.js?$sbPID"></script>
|
||||||
<script type="text/javascript" src="$sbRoot/js/lib/jquery.ui.touch-punch-0.2.2.min.js?$sbPID"></script>
|
<script type="text/javascript" src="$sbRoot/js/lib/jquery.ui.touch-punch-0.2.2.min.js?$sbPID"></script>
|
||||||
<script type="text/javascript" src="$sbRoot/js/script.js?$sbPID"></script>
|
<script type="text/javascript" src="$sbRoot/js/script.js?$sbPID"></script>
|
||||||
|
#if $sickbeard.FUZZY_DATING:
|
||||||
|
<script type="text/javascript" src="$sbRoot/js/moment/moment.min.js?$sbPID"></script>
|
||||||
|
<script type="text/javascript" src="$sbRoot/js/fuzzyMoment.js?$sbPID"></script>
|
||||||
|
#end if
|
||||||
<script type="text/javascript" charset="utf-8">
|
<script type="text/javascript" charset="utf-8">
|
||||||
<!--
|
<!--
|
||||||
sbRoot = "$sbRoot"; // needed for browser.js & ajaxNotifications.js
|
sbRoot = "$sbRoot"; // needed for browser.js & ajaxNotifications.js
|
||||||
|
@ -272,9 +275,10 @@ a > i.icon-question-sign { background-image: url("$sbRoot/images/glyphicons-half
|
||||||
<li><a href="$sbRoot/config/subtitles/"><img src="$sbRoot/images/menu/config16.png" alt="" width="16" height="16" />Subtitles Settings</a></li>
|
<li><a href="$sbRoot/config/subtitles/"><img src="$sbRoot/images/menu/config16.png" alt="" width="16" height="16" />Subtitles Settings</a></li>
|
||||||
<li><a href="$sbRoot/config/postProcessing/"><img src="$sbRoot/images/menu/config16.png" alt="" width="16" height="16" />Post Processing</a></li>
|
<li><a href="$sbRoot/config/postProcessing/"><img src="$sbRoot/images/menu/config16.png" alt="" width="16" height="16" />Post Processing</a></li>
|
||||||
<li><a href="$sbRoot/config/notifications/"><img src="$sbRoot/images/menu/config16.png" alt="" width="16" height="16" />Notifications</a></li>
|
<li><a href="$sbRoot/config/notifications/"><img src="$sbRoot/images/menu/config16.png" alt="" width="16" height="16" />Notifications</a></li>
|
||||||
|
<li><a href="$sbRoot/config/anime/"><img src="$sbRoot/images/menu/config16.png" alt="" width="16" height="16" />Anime</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
</li>
|
</li>
|
||||||
<li id="donate"><a href="https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=YCTA5TEN2JE2J" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href); return false;"><img src="$sbRoot/images/paypal/btn_donate_LG.gif" alt="[donate]" /></a></li>
|
<li id="donate"><a href="http://sr-upgrade.appspot.com" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href); return false;"><img src="$sbRoot/images/btn-google.jpg" alt="[donate]" /></a></li>
|
||||||
</ul>
|
</ul>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|
|
@ -56,9 +56,12 @@
|
||||||
6: { sorter: false},
|
6: { sorter: false},
|
||||||
7: { sorter: false},
|
7: { sorter: false},
|
||||||
8: { sorter: false},
|
8: { sorter: false},
|
||||||
9: { sorter: false}
|
9: { sorter: false},
|
||||||
|
10: { sorter: false},
|
||||||
|
11: { sorter: false},
|
||||||
|
12: { sorter: false}
|
||||||
#if $sickbeard.USE_SUBTITLES
|
#if $sickbeard.USE_SUBTITLES
|
||||||
, 10: { sorter: false}
|
, 13: { sorter: false}
|
||||||
#end if
|
#end if
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
@ -79,9 +82,12 @@
|
||||||
<th width="1%">Edit<br/>
|
<th width="1%">Edit<br/>
|
||||||
<input type="checkbox" class="bulkCheck" id="editCheck" />
|
<input type="checkbox" class="bulkCheck" id="editCheck" />
|
||||||
</th>
|
</th>
|
||||||
<!-- <th>Lang</th>//-->
|
|
||||||
<th class="nowrap" style="text-align: left;">Show Name</th>
|
<th class="nowrap" style="text-align: left;">Show Name</th>
|
||||||
<th>Quality</th>
|
<th>Quality</th>
|
||||||
|
<th>Sports</th>
|
||||||
|
<th>Scene</th>
|
||||||
|
<th>Anime</th>
|
||||||
<th>Flat Folders</th>
|
<th>Flat Folders</th>
|
||||||
<th>Paused</th>
|
<th>Paused</th>
|
||||||
<th>Status</th>
|
<th>Status</th>
|
||||||
|
@ -98,7 +104,7 @@
|
||||||
<tfoot>
|
<tfoot>
|
||||||
<tr>
|
<tr>
|
||||||
<td rowspan="1" colspan="1" class="align-center alt"><input class="btn" type="button" value="Edit Selected" id="submitMassEdit" /></td>
|
<td rowspan="1" colspan="1" class="align-center alt"><input class="btn" type="button" value="Edit Selected" id="submitMassEdit" /></td>
|
||||||
<td rowspan="1" colspan="#if $sickbeard.USE_SUBTITLES then 11 else 10#" class="align-right alt"><input class="btn btn-primary" type="button" value="Submit" id="submitMassUpdate" /></td>
|
<td rowspan="1" colspan="#if $sickbeard.USE_SUBTITLES then 13 else 12#" class="align-right alt"><input class="btn btn-primary" type="button" value="Submit" id="submitMassUpdate" /></td>
|
||||||
</tr>
|
</tr>
|
||||||
</tfoot>
|
</tfoot>
|
||||||
<tbody>
|
<tbody>
|
||||||
|
@ -135,13 +141,15 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
|
||||||
|
|
||||||
<tr>
|
<tr>
|
||||||
<td align="center"><input type="checkbox" class="editCheck" id="edit-$curShow.indexerid" /></td>
|
<td align="center"><input type="checkbox" class="editCheck" id="edit-$curShow.indexerid" /></td>
|
||||||
<!-- <td align="center"><img src="$sbRoot/images/flags/${curShow.lang}.png" width="16" height="11" alt="$curShow.lang" /></td>//-->
|
|
||||||
<td class="tvShow"><a href="$sbRoot/home/displayShow?show=$curShow.indexerid">$curShow.name</a></td>
|
<td class="tvShow"><a href="$sbRoot/home/displayShow?show=$curShow.indexerid">$curShow.name</a></td>
|
||||||
#if $curShow.quality in $qualityPresets:
|
#if $curShow.quality in $qualityPresets:
|
||||||
<td align="center"><span class="quality $qualityPresetStrings[$curShow.quality]">$qualityPresetStrings[$curShow.quality]</span></td>
|
<td align="center"><span class="quality $qualityPresetStrings[$curShow.quality]">$qualityPresetStrings[$curShow.quality]</span></td>
|
||||||
#else:
|
#else:
|
||||||
<td align="center"><span class="quality Custom">Custom</span></td>
|
<td align="center"><span class="quality Custom">Custom</span></td>
|
||||||
#end if
|
#end if
|
||||||
|
<td align="center"><img src="$sbRoot/images/#if int($curShow.is_sports) == 1 then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||||
|
<td align="center"><img src="$sbRoot/images/#if int($curShow.is_scene) == 1 then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||||
|
<td align="center"><img src="$sbRoot/images/#if int($curShow.is_anime) == 1 then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||||
<td align="center"><img src="$sbRoot/images/#if int($curShow.flatten_folders) == 1 then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
<td align="center"><img src="$sbRoot/images/#if int($curShow.flatten_folders) == 1 then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||||
<td align="center"><img src="$sbRoot/images/#if int($curShow.paused) == 1 then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
<td align="center"><img src="$sbRoot/images/#if int($curShow.paused) == 1 then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||||
<td align="center">$curShow.status</td>
|
<td align="center">$curShow.status</td>
|
||||||
|
@ -151,7 +159,6 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
|
||||||
#if $sickbeard.USE_SUBTITLES:
|
#if $sickbeard.USE_SUBTITLES:
|
||||||
<td align="center">$curSubtitle</td>
|
<td align="center">$curSubtitle</td>
|
||||||
#end if
|
#end if
|
||||||
<!-- <td align="center"><input type="checkbox" class="metadataCheck" id="metadata-$curShow.indexerid" /></td>//-->
|
|
||||||
<td align="center">$curDelete</td>
|
<td align="center">$curDelete</td>
|
||||||
</tr>
|
</tr>
|
||||||
#end for
|
#end for
|
||||||
|
|
|
@ -21,6 +21,18 @@
|
||||||
\$('html,body').animate({scrollTop: \$("#show-"+id).offset().top -25},'slow');
|
\$('html,body').animate({scrollTop: \$("#show-"+id).offset().top -25},'slow');
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
#set $fuzzydate = 'airdate'
|
||||||
|
#if $sickbeard.FUZZY_DATING:
|
||||||
|
fuzzyMoment({
|
||||||
|
containerClass : '.${fuzzydate}',
|
||||||
|
dateHasTime : false,
|
||||||
|
dateFormat : '${sickbeard.DATE_PRESET}',
|
||||||
|
timeFormat : '${sickbeard.TIME_PRESET}',
|
||||||
|
trimZero : #if $sickbeard.TRIM_ZERO then "true" else "false"#
|
||||||
|
});
|
||||||
|
#end if
|
||||||
|
|
||||||
});
|
});
|
||||||
//-->
|
//-->
|
||||||
</script>
|
</script>
|
||||||
|
@ -84,7 +96,7 @@ Jump to Show
|
||||||
<tr class="$Overview.overviewStrings[$showCats[$curShow.indexerid][$whichStr]]">
|
<tr class="$Overview.overviewStrings[$showCats[$curShow.indexerid][$whichStr]]">
|
||||||
<td align="center">$whichStr</td>
|
<td align="center">$whichStr</td>
|
||||||
<td>$curResult["name"]</td>
|
<td>$curResult["name"]</td>
|
||||||
<td align="center" class="nowrap">#if int($curResult["airdate"]) == 1 then "never" else $sbdatetime.sbdatetime.sbfdate($network_timezones.parse_date_time($curResult["airdate"],$curShow.airs,$curShow.network))#</td>
|
<td align="center" class="nowrap"><div class="${fuzzydate}">#if int($curResult["airdate"]) == 1 then "never" else $sbdatetime.sbdatetime.sbfdate($network_timezones.parse_date_time($curResult["airdate"],$curShow.airs,$curShow.network))#</div></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
|
||||||
#end for
|
#end for
|
||||||
|
|
|
@ -93,6 +93,28 @@
|
||||||
</div><br />
|
</div><br />
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div class="optionWrapper">
|
||||||
|
<span class="selectTitle">Scene Numbering</span>
|
||||||
|
<div class="selectChoices">
|
||||||
|
<select id="edit_scene" name="scene">
|
||||||
|
<option value="keep">< keep ></option>
|
||||||
|
<option value="enable" #if $scene_value then "selected=\"selected\"" else ""#>enable</option>
|
||||||
|
<option value="disable" #if $scene_value == False then "selected=\"selected\"" else ""#>disable</option>
|
||||||
|
</select>
|
||||||
|
</div><br />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="optionWrapper">
|
||||||
|
<span class="selectTitle">Anime</span>
|
||||||
|
<div class="selectChoices">
|
||||||
|
<select id="edit_anime" name="anime">
|
||||||
|
<option value="keep">< keep ></option>
|
||||||
|
<option value="enable" #if $anime_value then "selected=\"selected\"" else ""#>enable</option>
|
||||||
|
<option value="disable" #if $anime_value == False then "selected=\"selected\"" else ""#>disable</option>
|
||||||
|
</select>
|
||||||
|
</div><br />
|
||||||
|
</div>
|
||||||
|
|
||||||
<div class="optionWrapper">
|
<div class="optionWrapper">
|
||||||
<span class="selectTitle">Subtitles<span class="separator"></span></span>
|
<span class="selectTitle">Subtitles<span class="separator"></span></span>
|
||||||
<div class="selectChoices">
|
<div class="selectChoices">
|
||||||
|
|
|
@ -3,14 +3,22 @@ $(document).ready(function () {
|
||||||
$('#saveDefaultsButton').click(function () {
|
$('#saveDefaultsButton').click(function () {
|
||||||
var anyQualArray = [];
|
var anyQualArray = [];
|
||||||
var bestQualArray = [];
|
var bestQualArray = [];
|
||||||
$('#anyQualities option:selected').each(function (i, d) {anyQualArray.push($(d).val()); });
|
$('#anyQualities option:selected').each(function (i, d) {
|
||||||
$('#bestQualities option:selected').each(function (i, d) {bestQualArray.push($(d).val()); });
|
anyQualArray.push($(d).val());
|
||||||
|
});
|
||||||
|
$('#bestQualities option:selected').each(function (i, d) {
|
||||||
|
bestQualArray.push($(d).val());
|
||||||
|
});
|
||||||
|
|
||||||
$.get(sbRoot + '/config/general/saveAddShowDefaults', {defaultStatus: $('#statusSelect').val(),
|
$.get(sbRoot + '/config/general/saveAddShowDefaults', {
|
||||||
|
defaultStatus: $('#statusSelect').val(),
|
||||||
anyQualities: anyQualArray.join(','),
|
anyQualities: anyQualArray.join(','),
|
||||||
bestQualities: bestQualArray.join(','),
|
bestQualities: bestQualArray.join(','),
|
||||||
defaultFlattenFolders: $('#flatten_folders').prop('checked'),
|
defaultFlattenFolders: $('#flatten_folders').prop('checked'),
|
||||||
subtitles: $('#subtitles').prop('checked') });
|
subtitles: $('#subtitles').prop('checked'),
|
||||||
|
anime: $('#anime').prop('checked'),
|
||||||
|
scene: $('#scene').prop('checked')
|
||||||
|
});
|
||||||
|
|
||||||
$(this).attr('disabled', true);
|
$(this).attr('disabled', true);
|
||||||
$.pnotify({
|
$.pnotify({
|
||||||
|
@ -20,7 +28,7 @@ $(document).ready(function () {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
$('#statusSelect, #qualityPreset, #flatten_folders, #anyQualities, #bestQualities, #subtitles').change(function () {
|
$('#statusSelect, #qualityPreset, #flatten_folders, #anyQualities, #bestQualities, #subtitles, #scene, #anime').change(function () {
|
||||||
$('#saveDefaultsButton').attr('disabled', false);
|
$('#saveDefaultsButton').attr('disabled', false);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
@ -11,6 +11,37 @@ $(document).ready(function(){
|
||||||
$('#content_'+$(this).attr('id')).fadeOut("fast", "linear");
|
$('#content_'+$(this).attr('id')).fadeOut("fast", "linear");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
$(".viewIf").click(function() {
|
||||||
|
if ($(this).prop('checked')) {
|
||||||
|
$('.hide_if_'+$(this).attr('id')).css('display','none');
|
||||||
|
$('.show_if_'+$(this).attr('id')).fadeIn("fast", "linear");
|
||||||
|
} else {
|
||||||
|
$('.show_if_'+$(this).attr('id')).css('display','none');
|
||||||
|
$('.hide_if_'+$(this).attr('id')).fadeIn("fast", "linear");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
$(".datePresets").click(function() {
|
||||||
|
var def = $('#date_presets').val()
|
||||||
|
if ($(this).prop('checked') && '%x' == def) {
|
||||||
|
def = '%a, %b %d, %Y'
|
||||||
|
$('#date_use_system_default').html('1')
|
||||||
|
} else if (!$(this).prop('checked') && '1' == $('#date_use_system_default').html())
|
||||||
|
def = '%x'
|
||||||
|
|
||||||
|
$('#date_presets').attr('name', 'date_preset_old')
|
||||||
|
$('#date_presets').attr('id', 'date_presets_old')
|
||||||
|
|
||||||
|
$('#date_presets_na').attr('name', 'date_preset')
|
||||||
|
$('#date_presets_na').attr('id', 'date_presets')
|
||||||
|
|
||||||
|
$('#date_presets_old').attr('name', 'date_preset_na')
|
||||||
|
$('#date_presets_old').attr('id', 'date_presets_na')
|
||||||
|
|
||||||
|
if (def)
|
||||||
|
$('#date_presets').val(def)
|
||||||
|
});
|
||||||
|
|
||||||
// bind 'myForm' and provide a simple callback function
|
// bind 'myForm' and provide a simple callback function
|
||||||
$('#configForm').ajaxForm({
|
$('#configForm').ajaxForm({
|
||||||
beforeSubmit: function(){
|
beforeSubmit: function(){
|
||||||
|
|
|
@ -170,7 +170,7 @@ $(document).ready(function(){
|
||||||
if (sceneSeason === '') sceneSeason = null;
|
if (sceneSeason === '') sceneSeason = null;
|
||||||
if (sceneEpisode === '') sceneEpisode = null;
|
if (sceneEpisode === '') sceneEpisode = null;
|
||||||
|
|
||||||
$.getJSON(sbRoot + '/home/setEpisodeSceneNumbering',
|
$.getJSON(sbRoot + '/home/setSceneNumbering',
|
||||||
{
|
{
|
||||||
'show': showId,
|
'show': showId,
|
||||||
'indexer': indexer,
|
'indexer': indexer,
|
||||||
|
@ -201,6 +201,42 @@ $(document).ready(function(){
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function setAbsoluteSceneNumbering(forAbsolute, sceneAbsolute) {
|
||||||
|
var sbRoot = $('#sbRoot').val();
|
||||||
|
var showId = $('#showID').val();
|
||||||
|
var indexer = $('#indexer').val();
|
||||||
|
|
||||||
|
if (sceneAbsolute === '') sceneAbsolute = null;
|
||||||
|
|
||||||
|
$.getJSON(sbRoot + '/home/setSceneNumbering',
|
||||||
|
{
|
||||||
|
'show': showId,
|
||||||
|
'indexer': indexer,
|
||||||
|
'forAbsolute': forAbsolute,
|
||||||
|
'sceneAbsolute': sceneAbsolute
|
||||||
|
},
|
||||||
|
function(data) {
|
||||||
|
// Set the values we get back
|
||||||
|
if (data.sceneAbsolute === null)
|
||||||
|
{
|
||||||
|
$('#sceneAbsolute_' + showId +'_' + forAbsolute).val('');
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
$('#sceneAbsolute_' + showId +'_' + forAbsolute).val(data.sceneAbsolute);
|
||||||
|
}
|
||||||
|
if (!data.success)
|
||||||
|
{
|
||||||
|
if (data.errorMessage) {
|
||||||
|
alert(data.errorMessage);
|
||||||
|
} else {
|
||||||
|
alert('Update failed.');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
$('.sceneSeasonXEpisode').change(function() {
|
$('.sceneSeasonXEpisode').change(function() {
|
||||||
// Strip non-numeric characters
|
// Strip non-numeric characters
|
||||||
$(this).val($(this).val().replace(/[^0-9xX]*/g,''));
|
$(this).val($(this).val().replace(/[^0-9xX]*/g,''));
|
||||||
|
@ -219,4 +255,20 @@ $(document).ready(function(){
|
||||||
}
|
}
|
||||||
setEpisodeSceneNumbering(forSeason, forEpisode, sceneSeason, sceneEpisode);
|
setEpisodeSceneNumbering(forSeason, forEpisode, sceneSeason, sceneEpisode);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
$('.sceneAbsolute').change(function() {
|
||||||
|
// Strip non-numeric characters
|
||||||
|
$(this).val($(this).val().replace(/[^0-9xX]*/g,''));
|
||||||
|
var forAbsolute = $(this).attr('data-for-absolute');
|
||||||
|
var showId = $('#showID').val();
|
||||||
|
var indexer = $('#indexer').val();
|
||||||
|
|
||||||
|
var m = $(this).val().match(/^(\d{1,3})$/i);
|
||||||
|
var sceneAbsolute = null;
|
||||||
|
if (m)
|
||||||
|
{
|
||||||
|
sceneAbsolute = m[1];
|
||||||
|
}
|
||||||
|
setAbsoluteSceneNumbering(forAbsolute, sceneAbsolute);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
164
gui/slick/js/fuzzyMoment.js
Normal file
|
@ -0,0 +1,164 @@
|
||||||
|
/**
|
||||||
|
* Fuzzy Moment - convert an absolute date text into a fuzzy moment
|
||||||
|
*
|
||||||
|
* containerClass string The class name of dom element to convert (default: 'fuzzydate')
|
||||||
|
* dateHasTime boolean Whether containerClass contains a time (default: false)
|
||||||
|
* dateFormat string The python token date formatting
|
||||||
|
* timeFormat string The python token time formatting
|
||||||
|
* trimZero Whether to trim leading "0"s (default : false)
|
||||||
|
* dtGlue string To insert between the output of date and time (default: '<br />')
|
||||||
|
*/
|
||||||
|
function fuzzyMoment(fmConfig) {
|
||||||
|
|
||||||
|
var containerClass = (/undefined/i.test(typeof(fmConfig)) || /undefined/i.test(typeof(fmConfig.containerClass)) ? '.fuzzydate' : fmConfig.containerClass),
|
||||||
|
dateWithTime = (/undefined/i.test(typeof(fmConfig)) || /undefined/i.test(typeof(fmConfig.dateHasTime)) ? false : !!fmConfig.dateHasTime),
|
||||||
|
dateFormat = (/undefined/i.test(typeof(fmConfig)) || /undefined/i.test(typeof(fmConfig.dateFormat)) ? '' : fmConfig.dateFormat),
|
||||||
|
timeFormat = (/undefined/i.test(typeof(fmConfig)) || /undefined/i.test(typeof(fmConfig.timeFormat)) ? '' : fmConfig.timeFormat),
|
||||||
|
trimZero = (/undefined/i.test(typeof(fmConfig)) || /undefined/i.test(typeof(fmConfig.trimZero)) ? false : !!fmConfig.trimZero),
|
||||||
|
dtGlue = (/undefined/i.test(typeof(fmConfig)) || /undefined/i.test(typeof(fmConfig.dtGlue)) ? '<br />' : fmConfig.dtGlue),
|
||||||
|
|
||||||
|
jd = (function (str) {
|
||||||
|
var token_map = ['a', 'ddd', 'A', 'dddd', 'b', 'MMM', 'B', 'MMMM', 'd', 'DD', 'm', 'MM', 'y', 'YY', 'Y', 'YYYY', 'x', 'L',
|
||||||
|
'H', 'HH', 'I', 'hh', 'M', 'mm', 'S', 'ss', 'p', 'A'],
|
||||||
|
result = '';
|
||||||
|
|
||||||
|
for (var i = 0; i < str.length; i++)
|
||||||
|
if (/[aAbBdmyYxHIMSp]/.test(str[i])) {
|
||||||
|
for (var t = 0; t < token_map.length; t = t + 2)
|
||||||
|
if (str[i] == token_map[t]) {
|
||||||
|
result += token_map[t + 1];
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
} else if ('%' != str[i])
|
||||||
|
result += str[i];
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}),
|
||||||
|
dateToken = jd(dateFormat),
|
||||||
|
timeToken = jd(timeFormat),
|
||||||
|
|
||||||
|
addQTip = (function() {
|
||||||
|
$(this).css('cursor', 'help');
|
||||||
|
$(this).qtip({
|
||||||
|
show: {
|
||||||
|
solo: true
|
||||||
|
},
|
||||||
|
position: {
|
||||||
|
viewport: $(window),
|
||||||
|
my: 'left center',
|
||||||
|
adjust: {
|
||||||
|
y: -10,
|
||||||
|
x: 2
|
||||||
|
}
|
||||||
|
},
|
||||||
|
style: {
|
||||||
|
tip: {
|
||||||
|
corner: true,
|
||||||
|
method: 'polygon'
|
||||||
|
},
|
||||||
|
classes: 'qtip-rounded qtip-dark qtip-shadow ui-tooltip-sb'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
if (trimZero) {
|
||||||
|
timeToken = timeToken.replace(/hh/ig, 'h');
|
||||||
|
dateToken = dateToken.replace(/\bDD\b/g, 'D');
|
||||||
|
}
|
||||||
|
|
||||||
|
$(containerClass).each(function() {
|
||||||
|
var input = $(this).text(),
|
||||||
|
dateA = '[<span class="fd">',
|
||||||
|
dtSeparator = ' ',
|
||||||
|
timeA = '</span>]', timeB = '[' + timeA;
|
||||||
|
|
||||||
|
if (dateWithTime) {
|
||||||
|
var timeMeta = input.match(/^.{6,}?([,\s]+)(\d{1,2}).(?:\d{2,2})(?:.(\d{2,2}))?(?:\s([ap]m))?$/im);
|
||||||
|
if (null != timeMeta) {
|
||||||
|
dtSeparator = (! /undefined/i.test(typeof(timeMeta[1])) ? timeMeta[1] : dtSeparator);
|
||||||
|
// adjust timeToken to num digits of input hours
|
||||||
|
timeToken = (! /undefined/i.test(typeof(timeMeta[2])) && 1 == timeMeta[2].length ? timeToken.replace(/hh/ig, 'h') : timeToken);
|
||||||
|
// adjust timeToken to use seconds if input has them
|
||||||
|
timeToken = (! /undefined/i.test(typeof(timeMeta[3])) && 2 == timeMeta[3].length ? timeToken : timeToken.replace(/.ss/, ''));
|
||||||
|
// adjust timeToken to am/pm or AM/PM if input has it
|
||||||
|
timeToken = (! /undefined/i.test(typeof(timeMeta[4])) && 2 == timeMeta[4].length ? timeToken.replace(/A$/, (/[ap]m/.test(timeMeta[4]) ? 'a' : 'A')) : timeToken);
|
||||||
|
}
|
||||||
|
timeA = '</span>' + dtGlue + '<span class="ft">]' + timeToken + '[' + timeA;
|
||||||
|
timeB = '[</span>' + dtGlue + '<span class="ft">]' + timeToken + timeB;
|
||||||
|
}
|
||||||
|
|
||||||
|
var inputTokens = dateToken + dtSeparator + (dateWithTime ? timeToken : 'HH:mm:ss');
|
||||||
|
|
||||||
|
if (! moment(input + (dateWithTime ? '' : dtSeparator + '00:00:00'), inputTokens).isValid())
|
||||||
|
return;
|
||||||
|
|
||||||
|
moment.lang('en', {
|
||||||
|
calendar: {
|
||||||
|
lastDay:dateA + 'Yesterday' + timeA, sameDay:dateA + 'Today' + timeA, nextDay:dateA + 'Tomorrow' + timeA,
|
||||||
|
lastWeek:dateA + 'last] ddd' + timeB, nextWeek:dateA + 'on] ddd' + timeB,
|
||||||
|
sameElse:dateA + ']ddd, MMM D YYYY[' + timeA
|
||||||
|
},
|
||||||
|
relativeTime: {
|
||||||
|
future:'in %s', past:'%s ago', s:'seconds', m:'a minute', mm:'%d minutes', h:'an hour', hh:'%d hours',
|
||||||
|
d:'a day', dd:'%d days', M:'a month', MM:'%d months', y:'a year', yy:'%d years'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
var airdatetime = moment(input + (dateWithTime ? '' : dtSeparator + '00:00:00'), inputTokens),
|
||||||
|
airdate = airdatetime.clone().hour(0).minute(0).second(0).millisecond(0),
|
||||||
|
today = moment({}),
|
||||||
|
day = Math.abs(airdate.diff(today, 'days')),
|
||||||
|
week = Math.abs(weekdiff = airdate.diff(today, 'week')), isPast = weekdiff < 0,
|
||||||
|
titleThis = false, qTipTime = false,
|
||||||
|
result = (0 == week ? airdatetime.calendar() : '');
|
||||||
|
|
||||||
|
if (/\bOn\b/i.test(result)) {
|
||||||
|
var fuzzer = false, weekday = today.format('dd');
|
||||||
|
if (/we/i.test(weekday))
|
||||||
|
fuzzer = (5 <= day);
|
||||||
|
else if (/(?:th|fr)/i.test(weekday))
|
||||||
|
fuzzer = (4 <= day);
|
||||||
|
else
|
||||||
|
fuzzer = (6 == day);
|
||||||
|
if (fuzzer)
|
||||||
|
result = result.replace(/\bOn\b/i, 'Next');
|
||||||
|
|
||||||
|
} else if (! /\b((yester|to)day\b|tomo|last\b)/i.test(result)) {
|
||||||
|
if (14 > day)
|
||||||
|
result = airdate.from(today) + (dateWithTime ? dtGlue + airdatetime.format(timeToken) : '');
|
||||||
|
else if (4 > week) {
|
||||||
|
result = (isPast ? '' : 'in ') + (1 == week ? 'a' : week) + ' week' + (1 == week ? '' : 's') + (isPast ? ' ago' : '');
|
||||||
|
qTipTime = true;
|
||||||
|
} else {
|
||||||
|
result = airdate.from(today);
|
||||||
|
qTipTime = true;
|
||||||
|
var month = airdate.diff(today, 'month');
|
||||||
|
if (1 == parseInt(airdate.year() - today.year()))
|
||||||
|
result += '<br />(Next Year)';
|
||||||
|
}
|
||||||
|
titleThis = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
var n = false; // disable for prod
|
||||||
|
$(this).html(result);
|
||||||
|
if (dateWithTime && /(yester|to)day/i.test(result))
|
||||||
|
$(this).find('.fd').attr('title',(n?'1) ':'') + moment.duration(airdatetime.diff(moment(),'seconds'),'seconds').humanize(true)).each(addQTip);
|
||||||
|
else if (dateWithTime)
|
||||||
|
$(this).find('.fd').attr('title',(n?'2) ':'') + airdate.from(today)).each(addQTip);
|
||||||
|
else if (! /today/i.test(result))
|
||||||
|
$(this).find('.fd').attr('title',(n?'3) ':'') + airdate.from(today)).each(addQTip);
|
||||||
|
else
|
||||||
|
titleThis = false;
|
||||||
|
|
||||||
|
if (titleThis)
|
||||||
|
if (dateWithTime && qTipTime)
|
||||||
|
$(this).attr('title',(n?'4) ':'') + airdatetime.format(inputTokens)).each(addQTip);
|
||||||
|
else
|
||||||
|
$(this).attr('title',(n?'5) ':'') + airdate.format(dateToken)).each(addQTip);
|
||||||
|
else
|
||||||
|
if (dateWithTime && qTipTime)
|
||||||
|
$(this).find('.ft').attr('title',(n?'6) ':'') + airdatetime.format(inputTokens)).each(addQTip);
|
||||||
|
else
|
||||||
|
$(this).find('.ft').attr('title',(n?'7) ':'') + airdate.format(dateToken)).each(addQTip);
|
||||||
|
});
|
||||||
|
}
|
6
gui/slick/js/moment/moment.min.js
vendored
Normal file
39
gui/slick/js/sceneExceptionsTooltip.js
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
$(function () {
|
||||||
|
$('.title a').each(function () {
|
||||||
|
match = $(this).parent().attr("id").match(/^scene_exception_(\d+)$/);
|
||||||
|
$(this).qtip({
|
||||||
|
content: {
|
||||||
|
text: 'Loading...',
|
||||||
|
ajax: {
|
||||||
|
url: $("#sbRoot").val() + '/home/sceneExceptions',
|
||||||
|
type: 'GET',
|
||||||
|
data: {
|
||||||
|
show: match[1]
|
||||||
|
},
|
||||||
|
success: function (data, status) {
|
||||||
|
this.set('content.text', data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
show: {
|
||||||
|
solo: true
|
||||||
|
},
|
||||||
|
position: {
|
||||||
|
viewport: $(window),
|
||||||
|
my: 'top center',
|
||||||
|
at: 'bottom center',
|
||||||
|
adjust: {
|
||||||
|
y: 3,
|
||||||
|
x: 0
|
||||||
|
}
|
||||||
|
},
|
||||||
|
style: {
|
||||||
|
tip: {
|
||||||
|
corner: true,
|
||||||
|
method: 'polygon'
|
||||||
|
},
|
||||||
|
classes: 'qtip-rounded qtip-shadow ui-tooltip-sb'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
24
init.freebsd
|
@ -17,6 +17,8 @@
|
||||||
# Default: /usr/local/sickbeard
|
# Default: /usr/local/sickbeard
|
||||||
# sickbeard_chdir: Change to this directory before running SickRage.
|
# sickbeard_chdir: Change to this directory before running SickRage.
|
||||||
# Default is same as sickbeard_dir.
|
# Default is same as sickbeard_dir.
|
||||||
|
# sickbeard_datadir: Data directory for Sick Beard (DB, Logs, config)
|
||||||
|
# Default is same as sickbeard_chdir
|
||||||
# sickbeard_pid: The name of the pidfile to create.
|
# sickbeard_pid: The name of the pidfile to create.
|
||||||
# Default is sickbeard.pid in sickbeard_dir.
|
# Default is sickbeard.pid in sickbeard_dir.
|
||||||
# sickbeard_host: The hostname or IP SickRage is listening on
|
# sickbeard_host: The hostname or IP SickRage is listening on
|
||||||
|
@ -27,6 +29,8 @@
|
||||||
# Default is an empty string (no username)
|
# Default is an empty string (no username)
|
||||||
# sickbeard_web_password: Password to authenticate to the SickRage web interface
|
# sickbeard_web_password: Password to authenticate to the SickRage web interface
|
||||||
# Default is an empty string (no password)
|
# Default is an empty string (no password)
|
||||||
|
# sickbeard_webroot: Set to value of web_root in config (for proxies etc)
|
||||||
|
# Default is an empty string (if set must start with a "/")
|
||||||
PATH="/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin"
|
PATH="/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin"
|
||||||
|
|
||||||
. /etc/rc.subr
|
. /etc/rc.subr
|
||||||
|
@ -40,13 +44,13 @@ load_rc_config ${name}
|
||||||
: ${sickbeard_user:="_sabnzbd"}
|
: ${sickbeard_user:="_sabnzbd"}
|
||||||
: ${sickbeard_dir:="/usr/local/sickbeard"}
|
: ${sickbeard_dir:="/usr/local/sickbeard"}
|
||||||
: ${sickbeard_chdir:="${sickbeard_dir}"}
|
: ${sickbeard_chdir:="${sickbeard_dir}"}
|
||||||
|
: ${sickbeard_datadir:="${sickbeard_chdir}"}
|
||||||
: ${sickbeard_pid:="${sickbeard_dir}/sickbeard.pid"}
|
: ${sickbeard_pid:="${sickbeard_dir}/sickbeard.pid"}
|
||||||
: ${sickbeard_host:="127.0.0.1"}
|
: ${sickbeard_host:="127.0.0.1"}
|
||||||
: ${sickbeard_port:="8081"}
|
: ${sickbeard_port:="8081"}
|
||||||
: ${sickbeard_web_user:=""}
|
: ${sickbeard_web_user:=""}
|
||||||
: ${sickbeard_web_password:=""}
|
: ${sickbeard_web_password:=""}
|
||||||
|
: ${sickbeard_webroot:=""}
|
||||||
WGET="/usr/local/bin/wget" # You need wget for this script to safely shutdown SickRage.
|
|
||||||
|
|
||||||
status_cmd="${name}_status"
|
status_cmd="${name}_status"
|
||||||
stop_cmd="${name}_stop"
|
stop_cmd="${name}_stop"
|
||||||
|
@ -54,11 +58,9 @@ stop_cmd="${name}_stop"
|
||||||
command="/usr/sbin/daemon"
|
command="/usr/sbin/daemon"
|
||||||
command_args="-f -p ${sickbeard_pid} python ${sickbeard_dir}/SickBeard.py --quiet --nolaunch"
|
command_args="-f -p ${sickbeard_pid} python ${sickbeard_dir}/SickBeard.py --quiet --nolaunch"
|
||||||
|
|
||||||
# Check for wget and refuse to start without it.
|
# Add datadir to the command if set
|
||||||
if [ ! -x "${WGET}" ]; then
|
[ ! -z "${sickbeard_datadir}" ] && \
|
||||||
warn "Sickbeard not started: You need wget to safely shut down SickRage."
|
command_args="${command_args} --datadir ${sickbeard_datadir}"
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Ensure user is root when running this script.
|
# Ensure user is root when running this script.
|
||||||
if [ `id -u` != "0" ]; then
|
if [ `id -u` != "0" ]; then
|
||||||
|
@ -77,7 +79,12 @@ verify_sickbeard_pid() {
|
||||||
sickbeard_stop() {
|
sickbeard_stop() {
|
||||||
echo "Stopping $name"
|
echo "Stopping $name"
|
||||||
verify_sickbeard_pid
|
verify_sickbeard_pid
|
||||||
${WGET} -O - -q --user=${sickbeard_web_user} --password=${sickbeard_web_password} "http://${sickbeard_host}:${sickbeard_port}/home/shutdown/" >/dev/null
|
sickbeard_url="${sickbeard_host}:${sickbeard_port}"
|
||||||
|
[ ! -z "${sickbeard_web_user}" ] && \
|
||||||
|
sickbeard_url="${sickbeard_web_user}:${sickbeard_web_password}@${sickbeard_url}"
|
||||||
|
[ ! -z "${sickbeard_webroot}" ] && \
|
||||||
|
sickbeard_url="${sickbeard_url}${sickbeard_webroot}"
|
||||||
|
fetch -o - -q "http://${sickbeard_url}/home/shutdown/?pid=${pid}" >/dev/null
|
||||||
if [ -n "${pid}" ]; then
|
if [ -n "${pid}" ]; then
|
||||||
wait_for_pids ${pid}
|
wait_for_pids ${pid}
|
||||||
echo "Stopped"
|
echo "Stopped"
|
||||||
|
@ -89,4 +96,3 @@ sickbeard_status() {
|
||||||
}
|
}
|
||||||
|
|
||||||
run_rc_command "$1"
|
run_rc_command "$1"
|
||||||
|
|
||||||
|
|
796
lib/adba/__init__.py
Normal file
|
@ -0,0 +1,796 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
#
|
||||||
|
# This file is part of aDBa.
|
||||||
|
#
|
||||||
|
# aDBa is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# aDBa is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with aDBa. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
import threading
|
||||||
|
from time import time, sleep, strftime, localtime
|
||||||
|
from types import *
|
||||||
|
|
||||||
|
from aniDBlink import AniDBLink
|
||||||
|
from aniDBcommands import *
|
||||||
|
from aniDBerrors import *
|
||||||
|
from aniDBAbstracter import Anime, Episode
|
||||||
|
|
||||||
|
version = 100
|
||||||
|
|
||||||
|
class Connection(threading.Thread):
|
||||||
|
def __init__(self, clientname='adba', server='api.anidb.info', port=9000, myport=9876, user=None, password=None, session=None, log=False, logPrivate=False, keepAlive=False):
|
||||||
|
super(Connection, self).__init__()
|
||||||
|
# setting the log function
|
||||||
|
self.logPrivate = logPrivate
|
||||||
|
if type(log) in (FunctionType, MethodType):# if we get a function or a method use that.
|
||||||
|
self.log = log
|
||||||
|
self.logPrivate = True # true means sensitive data will not be NOT be logged ... yeah i know oO
|
||||||
|
elif log:# if it something else (like True) use the own print_log
|
||||||
|
self.log = self.print_log
|
||||||
|
else:# dont log at all
|
||||||
|
self.log = self.print_log_dummy
|
||||||
|
|
||||||
|
|
||||||
|
self.link = AniDBLink(server, port, myport, self.log, logPrivate=self.logPrivate)
|
||||||
|
self.link.session = session
|
||||||
|
|
||||||
|
self.clientname = clientname
|
||||||
|
self.clientver = version
|
||||||
|
|
||||||
|
# from original lib
|
||||||
|
self.mode = 1 #mode: 0=queue,1=unlock,2=callback
|
||||||
|
|
||||||
|
# to lock other threads out
|
||||||
|
self.lock = threading.RLock()
|
||||||
|
|
||||||
|
# thread keep alive stuff
|
||||||
|
self.keepAlive = keepAlive
|
||||||
|
self.setDaemon(True)
|
||||||
|
self.lastKeepAliveCheck = 0
|
||||||
|
self.lastAuth = 0
|
||||||
|
self._username = password
|
||||||
|
self._password = user
|
||||||
|
|
||||||
|
self._iamALIVE = False
|
||||||
|
|
||||||
|
self.counter = 0
|
||||||
|
self.counterAge = 0
|
||||||
|
|
||||||
|
def print_log(self, data):
|
||||||
|
print(strftime("%Y-%m-%d %H:%M:%S", localtime(time())) + ": " + str(data))
|
||||||
|
|
||||||
|
def print_log_dummy(self, data):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
self.logout(cutConnection=True)
|
||||||
|
|
||||||
|
|
||||||
|
def cut(self):
|
||||||
|
self.link.stop()
|
||||||
|
|
||||||
|
def handle_response(self, response):
|
||||||
|
if response.rescode in ('501', '506') and response.req.command != 'AUTH':
|
||||||
|
self.log("seams like the last command got a not authed error back tring to reconnect now")
|
||||||
|
if self._reAuthenticate():
|
||||||
|
response.req.resp = None
|
||||||
|
response = self.handle(response.req, response.req.callback)
|
||||||
|
|
||||||
|
|
||||||
|
def handle(self, command, callback):
|
||||||
|
|
||||||
|
self.lock.acquire()
|
||||||
|
if self.counterAge < (time() - 120): # the last request was older then 2 min reset delay and counter
|
||||||
|
self.counter = 0
|
||||||
|
self.link.delay = 2
|
||||||
|
else: # something happend in the last 120 seconds
|
||||||
|
if self.counter < 5:
|
||||||
|
self.link.delay = 2 # short term "A Client MUST NOT send more than 0.5 packets per second (that's one packet every two seconds, not two packets a second!)"
|
||||||
|
elif self.counter >= 5:
|
||||||
|
self.link.delay = 6 # long term "A Client MUST NOT send more than one packet every four seconds over an extended amount of time."
|
||||||
|
|
||||||
|
if command.command not in ('AUTH', 'PING', 'ENCRYPT'):
|
||||||
|
self.counterAge = time()
|
||||||
|
self.counter += 1
|
||||||
|
if self.keepAlive:
|
||||||
|
self.authed()
|
||||||
|
|
||||||
|
def callback_wrapper(resp):
|
||||||
|
self.handle_response(resp)
|
||||||
|
if callback:
|
||||||
|
callback(resp)
|
||||||
|
|
||||||
|
self.log("handling(" + str(self.counter) + "-" + str(self.link.delay) + ") command " + str(command.command))
|
||||||
|
|
||||||
|
#make live request
|
||||||
|
command.authorize(self.mode, self.link.new_tag(), self.link.session, callback_wrapper)
|
||||||
|
self.link.request(command)
|
||||||
|
|
||||||
|
#handle mode 1 (wait for response)
|
||||||
|
if self.mode == 1:
|
||||||
|
command.wait_response()
|
||||||
|
try:
|
||||||
|
command.resp
|
||||||
|
except:
|
||||||
|
self.lock.release()
|
||||||
|
if self.link.banned:
|
||||||
|
raise AniDBBannedError("User is banned")
|
||||||
|
else:
|
||||||
|
raise AniDBCommandTimeoutError("Command has timed out")
|
||||||
|
|
||||||
|
self.handle_response(command.resp)
|
||||||
|
self.lock.release()
|
||||||
|
return command.resp
|
||||||
|
else:
|
||||||
|
self.lock.release()
|
||||||
|
|
||||||
|
def authed(self, reAuthenticate=False):
|
||||||
|
self.lock.acquire()
|
||||||
|
authed = (self.link.session != None)
|
||||||
|
if not authed and (reAuthenticate or self.keepAlive):
|
||||||
|
self._reAuthenticate()
|
||||||
|
authed = (self.link.session != None)
|
||||||
|
self.lock.release()
|
||||||
|
return authed
|
||||||
|
|
||||||
|
def _reAuthenticate(self):
|
||||||
|
if self._username and self._password:
|
||||||
|
self.log("auto re authenticating !")
|
||||||
|
resp = self.auth(self._username, self._password)
|
||||||
|
if resp.rescode not in ('500'):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _keep_alive(self):
|
||||||
|
self.lastKeepAliveCheck = time()
|
||||||
|
self.log("auto check !")
|
||||||
|
# check every 30 minutes if the session is still valid
|
||||||
|
# if not reauthenticate
|
||||||
|
if self.lastAuth and time() - self.lastAuth > 1800:
|
||||||
|
self.log("auto uptime !")
|
||||||
|
self.uptime() # this will update the self.link.session and will refresh the session if it is still alive
|
||||||
|
|
||||||
|
if self.authed(): # if we are authed we set the time
|
||||||
|
self.lastAuth = time()
|
||||||
|
else: # if we aren't authed and we have the user and pw then reauthenticate
|
||||||
|
self._reAuthenticate()
|
||||||
|
|
||||||
|
# issue a ping every 20 minutes after the last package
|
||||||
|
# this ensures the connection will be kept alive
|
||||||
|
if self.link.lastpacket and time() - self.link.lastpacket > 1200:
|
||||||
|
self.log("auto ping !")
|
||||||
|
self.ping()
|
||||||
|
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
while self.keepAlive:
|
||||||
|
self._keep_alive()
|
||||||
|
sleep(120)
|
||||||
|
|
||||||
|
|
||||||
|
def auth(self, username, password, nat=None, mtu=None, callback=None):
|
||||||
|
"""
|
||||||
|
Login to AniDB UDP API
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
username - your anidb username
|
||||||
|
password - your anidb password
|
||||||
|
nat - if this is 1, response will have "address" in attributes with your "ip:port" (default:0)
|
||||||
|
mtu - maximum transmission unit (max packet size) (default: 1400)
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.log("ok1")
|
||||||
|
if self.keepAlive:
|
||||||
|
self.log("ok2")
|
||||||
|
self._username = username
|
||||||
|
self._password = password
|
||||||
|
if self.is_alive() == False:
|
||||||
|
self.log("You wanted to keep this thing alive!")
|
||||||
|
if self._iamALIVE == False:
|
||||||
|
self.log("Starting thread now...")
|
||||||
|
self.start()
|
||||||
|
self._iamALIVE = True
|
||||||
|
else:
|
||||||
|
self.log("not starting thread seams like it is already running. this must be a _reAuthenticate")
|
||||||
|
|
||||||
|
|
||||||
|
self.lastAuth = time()
|
||||||
|
return self.handle(AuthCommand(username, password, 3, self.clientname, self.clientver, nat, 1, 'utf8', mtu), callback)
|
||||||
|
|
||||||
|
def logout(self, cutConnection=False, callback=None):
|
||||||
|
"""
|
||||||
|
Log out from AniDB UDP API
|
||||||
|
|
||||||
|
"""
|
||||||
|
result = self.handle(LogoutCommand(), callback)
|
||||||
|
if(cutConnection):
|
||||||
|
self.cut()
|
||||||
|
return result
|
||||||
|
|
||||||
|
def push(self, notify, msg, buddy=None, callback=None):
|
||||||
|
"""
|
||||||
|
Subscribe/unsubscribe to/from notifications
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
notify - Notifications about files added?
|
||||||
|
msg - Notifications about message added?
|
||||||
|
buddy - Notifications about buddy events?
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
notify msg [buddy]
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(PushCommand(notify, msg, buddy), callback)
|
||||||
|
|
||||||
|
def pushack(self, nid, callback=None):
|
||||||
|
"""
|
||||||
|
Acknowledge notification (do this when you get 271-274)
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
nid - Notification packet id
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
nid
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(PushAckCommand(nid), callback)
|
||||||
|
|
||||||
|
def notifyadd(self, aid=None, gid=None, type=None, priority=None, callback=None):
|
||||||
|
"""
|
||||||
|
Add a notification
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
aid - Anime id
|
||||||
|
gid - Group id
|
||||||
|
type - Type of notification: type=> 0=all, 1=new, 2=group, 3=complete
|
||||||
|
priority - low = 0, medium = 1, high = 2 (unconfirmed)
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
[aid={int}|gid={int}]&type={int}&priority={int}
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self.handle(NotifyAddCommand(aid, gid, type, priority), callback)
|
||||||
|
|
||||||
|
|
||||||
|
def notify(self, buddy=None, callback=None):
|
||||||
|
"""
|
||||||
|
Get number of pending notifications and messages
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
buddy - Also display number of online buddies
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
[buddy]
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(NotifyCommand(buddy), callback)
|
||||||
|
|
||||||
|
def notifylist(self, callback=None):
|
||||||
|
"""
|
||||||
|
List all pending notifications/messages
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(NotifyListCommand(), callback)
|
||||||
|
|
||||||
|
def notifyget(self, type, id, callback=None):
|
||||||
|
"""
|
||||||
|
Get notification/message
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
type - (M=message, N=notification)
|
||||||
|
id - message/notification id
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
type id
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(NotifyGetCommand(type, id), callback)
|
||||||
|
|
||||||
|
def notifyack(self, type, id, callback=None):
|
||||||
|
"""
|
||||||
|
Mark message read or clear a notification
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
type - (M=message, N=notification)
|
||||||
|
id - message/notification id
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
type id
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(NotifyAckCommand(type, id), callback)
|
||||||
|
|
||||||
|
def buddyadd(self, uid=None, uname=None, callback=None):
|
||||||
|
"""
|
||||||
|
Add a user to your buddy list
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
uid - user id
|
||||||
|
uname - name of the user
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
(uid|uname)
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(BuddyAddCommand(uid, uname), callback)
|
||||||
|
|
||||||
|
def buddydel(self, uid, callback=None):
|
||||||
|
"""
|
||||||
|
Remove a user from your buddy list
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
uid - user id
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
uid
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(BuddyDelCommand(uid), callback)
|
||||||
|
|
||||||
|
def buddyaccept(self, uid, callback=None):
|
||||||
|
"""
|
||||||
|
Accept user as buddy
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
uid - user id
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
uid
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(BuddyAcceptCommand(uid), callback)
|
||||||
|
|
||||||
|
def buddydeny(self, uid, callback=None):
|
||||||
|
"""
|
||||||
|
Deny user as buddy
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
uid - user id
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
uid
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(BuddyDenyCommand(uid), callback)
|
||||||
|
|
||||||
|
def buddylist(self, startat, callback=None):
|
||||||
|
"""
|
||||||
|
Retrieve your buddy list
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
startat - number of buddy to start listing from
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
startat
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(BuddyListCommand(startat), callback)
|
||||||
|
|
||||||
|
def buddystate(self, startat, callback=None):
|
||||||
|
"""
|
||||||
|
Retrieve buddy states
|
||||||
|
|
||||||
|
parameters:
|
||||||
|
startat - number of buddy to start listing from
|
||||||
|
|
||||||
|
structure of parameters:
|
||||||
|
startat
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self.handle(BuddyStateCommand(startat), callback)
|
||||||
|
|
||||||
|
def anime(self, aid=None, aname=None, amask=-1, callback=None):
    """Get information about an anime.

    parameters:
    aid - anime id
    aname - name of the anime
    amask - bitfield selecting which anime fields to return

    structure of parameters:
    (aid|aname) [amask]
    """
    command = AnimeCommand(aid, aname, amask)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def episode(self, eid=None, aid=None, aname=None, epno=None, callback=None):
    """Get information about an episode.

    parameters:
    eid - episode id
    aid - anime id
    aname - name of the anime
    epno - number of the episode

    structure of parameters:
    eid
    (aid|aname) epno
    """
    command = EpisodeCommand(eid, aid, aname, epno)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def file(self, fid=None, size=None, ed2k=None, aid=None, aname=None, gid=None, gname=None, epno=None, fmask=-1, amask=0, callback=None):
    """Get information about a file.

    parameters:
    fid - file id
    size - size of the file
    ed2k - ed2k-hash of the file
    aid - anime id
    aname - name of the anime
    gid - group id
    gname - name of the group
    epno - number of the episode
    fmask - bitfield selecting file fields; bits (low to high) cover:
            aid, eid, gid, lid, state, size, ed2k, md5, sha1, crc32,
            dub/sub language, quality, source, audio codec/bitrate,
            video codec/bitrate, resolution, filetype, length,
            description, anidb file name
    amask - bitfield selecting anime fields; bits (low to high) cover:
            group name/short name, epno, ep english/romaji/kanji name,
            total episodes, last episode nr, year, type,
            romaji/kanji/english/other name, short names, synonyms,
            categories, related aids, producer names/ids

    structure of parameters:
    fid [fmask] [amask]
    size ed2k [fmask] [amask]
    (aid|aname) (gid|gname) epno [fmask] [amask]
    """
    command = FileCommand(fid, size, ed2k, aid, aname, gid, gname, epno, fmask, amask)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def group(self, gid=None, gname=None, callback=None):
    """Get information about a release group.

    parameters:
    gid - group id
    gname - name of the group

    structure of parameters:
    (gid|gname)
    """
    command = GroupCommand(gid, gname)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def groupstatus(self, aid=None, state=None, callback=None):
    """List group names and episode ranges released by each group for an anime.

    parameters:
    aid - anime id
    state - completion state filter; when omitted, groups in state
            'ongoing', 'finished' or 'complete' are returned

    state values:
    1 -> ongoing
    2 -> stalled
    3 -> complete
    4 -> dropped
    5 -> finished
    6 -> specials only
    """
    command = GroupstatusCommand(aid, state)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def producer(self, pid=None, pname=None, callback=None):
    """Get information about a producer.

    parameters:
    pid - producer id
    pname - name of the producer

    structure of parameters:
    (pid|pname)
    """
    command = ProducerCommand(pid, pname)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def mylist(self, lid=None, fid=None, size=None, ed2k=None, aid=None, aname=None, gid=None, gname=None, epno=None, callback=None):
    """Get information about an entry in your mylist.

    parameters:
    lid - mylist id
    fid - file id
    size - size of the file
    ed2k - ed2k-hash of the file
    aid - anime id
    aname - name of the anime
    gid - group id
    gname - name of the group
    epno - number of the episode

    structure of parameters:
    lid
    fid
    size ed2k
    (aid|aname) (gid|gname) epno
    """
    command = MyListCommand(lid, fid, size, ed2k, aid, aname, gid, gname, epno)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def mylistadd(self, lid=None, fid=None, size=None, ed2k=None, aid=None, aname=None, gid=None, gname=None, epno=None, edit=None, state=None, viewed=None, source=None, storage=None, other=None, callback=None):
    """Add an entry to your mylist, or edit an existing one.

    parameters:
    lid - mylist id
    fid - file id
    size - size of the file
    ed2k - ed2k-hash of the file
    aid - anime id
    aname - name of the anime
    gid - group id
    gname - name of the group
    epno - number of the episode
    edit - 0=add to mylist, 1=edit an existing entry
    state - the location of the file (see below)
    viewed - 0=unwatched, 1=watched
    source - where you got the file (bittorrent,dc++,ed2k,...)
    storage - for example the title of the cd you have this on
    other - other data regarding this file

    structure of parameters:
    lid edit=1 [state viewed source storage other]
    fid [state viewed source storage other] [edit]
    size ed2k [state viewed source storage other] [edit]
    (aid|aname) (gid|gname) epno [state viewed source storage other]
    (aid|aname) edit=1 [(gid|gname) epno] [state viewed source storage other]

    structure of state:
    0 unknown - state is unknown or the user doesn't want to provide this information
    1 on hdd - the file is stored on hdd
    2 on cd - the file is stored on cd
    3 deleted - the file has been deleted or is not available for other reasons (i.e. reencoded)

    structure of epno:
    x target episode x
    0 target all episodes
    -x target all episodes upto x
    """
    command = MyListAddCommand(lid, fid, size, ed2k, aid, aname, gid, gname, epno, edit, state, viewed, source, storage, other)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def mylistdel(self, lid=None, fid=None, aid=None, aname=None, gid=None, gname=None, epno=None, callback=None):
    """Delete information from your mylist.

    parameters:
    lid - mylist id
    fid - file id
    aid - anime id
    aname - name of the anime
    gid - group id
    gname - name of the group
    epno - number of the episode

    structure of parameters:
    lid
    fid
    (aid|aname) (gid|gname) epno
    """
    # BUG FIX: the original passed (lid, fid, aid, aname, gid, gname, epno)
    # positionally into MyListCommand(lid, fid, size, ed2k, aid, aname, gid,
    # gname, epno), shifting aid into the 'size' slot, aname into 'ed2k' and
    # so on. Pass by keyword so every value lands on the right parameter.
    # NOTE(review): this still issues a MYLIST lookup command rather than a
    # MYLISTDEL -- confirm against the command classes in this module.
    return self.handle(MyListCommand(lid=lid, fid=fid, aid=aid, aname=aname, gid=gid, gname=gname, epno=epno), callback)
|
||||||
|
|
||||||
|
def myliststats(self, callback=None):
    """Get summary statistics of your mylist."""
    command = MyListStatsCommand()
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def vote(self, type, id=None, name=None, value=None, epno=None, callback=None):
    """Rate an anime, an episode or a group.

    parameters:
    type - type of the vote (see below)
    id - anime/group id
    name - name of the anime/group
    value - the vote (see below)
    epno - number of the episode

    structure of parameters:
    type (id|name) [value] [epno]

    structure of type:
    1 rate an anime (episode if you also specify epno)
    2 rate an anime temporarily (you haven't watched it all)
    3 rate a group

    structure of value:
    -x revoke vote
    0 get old vote
    100-1000 give vote
    """
    command = VoteCommand(type, id, name, value, epno)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def randomanime(self, type, callback=None):
    """Get information about a random anime.

    parameters:
    type - pool to draw the random anime from

    structure of type:
    0 db
    1 watched
    2 unwatched
    3 mylist
    """
    command = RandomAnimeCommand(type)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def ping(self, callback=None):
    """Test connectivity to the AniDB UDP API."""
    command = PingCommand()
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def encrypt(self, user, apipassword, type=None, callback=None):
    """Enable encryption of all future traffic.

    parameters:
    user - your username
    apipassword - your api password
    type - type of encoding (1=128bit AES)

    structure of parameters:
    user [type]
    """
    command = EncryptCommand(user, apipassword, type)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def encoding(self, name, callback=None):
    """
    Change encoding used in messages

    parameters:
    name - name of the encoding

    structure of parameters:
    name

    comments:
    DO NOT USE THIS!
    utf8 is the only encoding which will support all the text in anidb responses
    the responses have japanese, russian, french and probably other alphabets as well
    even if you can't display utf-8 locally, don't change the server-client -connections encoding
    rather, make python convert the encoding when you DISPLAY the text
    it's better that way, let it go as utf8 to databases etc. because then you've the real data stored
    """
    # Deliberately disabled: always raises to keep users on utf8 (see the
    # docstring above for the rationale).
    raise AniDBStupidUserError, "pylibanidb sets the encoding to utf8 as default and it's stupid to use any other encoding. you WILL lose some data if you use other encodings, and now you've been warned. you will need to modify the code yourself if you want to do something as stupid as changing the encoding"
    # NOTE(review): unreachable -- kept from the original.
    return self.handle(EncodingCommand(name), callback)
|
||||||
|
|
||||||
|
def sendmsg(self, to, title, body, callback=None):
    """Send a message to another user.

    parameters:
    to - name of the user you want as the recipient
    title - title of the message
    body - the message

    structure of parameters:
    to title body
    """
    command = SendMsgCommand(to, title, body)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def user(self, user, callback=None):
    """Retrieve the user id for a username.

    parameters:
    user - username of the user

    structure of parameters:
    user
    """
    command = UserCommand(user)
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def uptime(self, callback=None):
    """Retrieve server uptime."""
    command = UptimeCommand()
    return self.handle(command, callback)
|
||||||
|
|
||||||
|
def version(self, callback=None):
    """Retrieve server version."""
    command = VersionCommand()
    return self.handle(command, callback)
|
293
lib/adba/aniDBAbstracter.py
Normal file
|
@ -0,0 +1,293 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
#
|
||||||
|
# This file is part of aDBa.
|
||||||
|
#
|
||||||
|
# aDBa is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# aDBa is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with aDBa. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from time import time, sleep
|
||||||
|
import aniDBfileInfo as fileInfo
|
||||||
|
import xml.etree.cElementTree as etree
|
||||||
|
import os, re, string
|
||||||
|
from aniDBmaper import AniDBMaper
|
||||||
|
from aniDBtvDBmaper import TvDBMap
|
||||||
|
from aniDBerrors import *
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class aniDBabstractObject(object):
    """Base class for objects backed by AniDB UDP API data.

    Subclasses override load_data() to fetch their data; _fill() copies a
    response dataline onto the instance. Missing attributes resolve to None
    instead of raising (see __getattr__).
    """

    def __init__(self, aniDB, load=False):
        # NOTE: attribute name "laoded" (sic) is kept as-is -- other classes
        # in this module read/write it under this spelling.
        self.laoded = False
        self.set_connection(aniDB)
        if load:
            self.load_data()

    def set_connection(self, aniDB):
        """Attach an aniDB connection; without one, fall back to a no-op logger."""
        self.aniDB = aniDB
        if self.aniDB:
            self.log = self.aniDB.log
        else:
            # BUG FIX: the original assigned self._fake_log() -- the CALL's
            # return value (None) -- so any later self.log(...) raised
            # TypeError. Assign the method itself to keep self.log callable.
            self.log = self._fake_log

    def _fake_log(self, x=None):
        # no-op logger used when no aniDB connection is available
        pass

    def _fill(self, dataline):
        """Copy one response dataline (dict of strings) onto self.

        Values containing "'" are split into lists; every value is converted
        to int where possible, otherwise decoded as utf-8 unicode.
        """
        for key in dataline:
            try:
                tmpList = dataline[key].split("'")
                if len(tmpList) > 1:
                    newList = []
                    for i in tmpList:
                        try:
                            newList.append(int(i))
                        except:
                            newList.append(unicode(i, "utf-8"))
                    self.__dict__[key] = newList
                    continue
            except:
                pass
            try:
                self.__dict__[key] = int(dataline[key])
            except:
                self.__dict__[key] = unicode(dataline[key], "utf-8")
            # NOTE(review): rebinding the loop variable below has no effect
            # on the instance; kept from the original for fidelity.
            key = property(lambda x: dataline[key])

    def __getattr__(self, name):
        # attributes that were never filled resolve to None instead of raising
        try:
            return object.__getattribute__(self, name)
        except:
            return None

    def _build_names(self):
        """Collect all known names (english/short/synonym/other) into self.allNames."""
        names = []
        names = self._easy_extend(names, self.english_name)
        names = self._easy_extend(names, self.short_name_list)
        names = self._easy_extend(names, self.synonym_list)
        names = self._easy_extend(names, self.other_name)

        self.allNames = names

    def _easy_extend(self, initialList, item):
        # extend with a list, append a single string, silently ignore None/empty
        if item:
            if isinstance(item, list):
                initialList.extend(item)
            elif isinstance(item, basestring):
                initialList.append(item)

        return initialList

    def load_data(self):
        """Override in subclasses; the base implementation loads nothing."""
        return False

    def add_notification(self):
        """
        type - Type of notification: type=> 0=all, 1=new, 2=group, 3=complete
        priority - low = 0, medium = 1, high = 2 (unconfirmed)

        """
        if(self.aid):
            self.aniDB.notifyadd(aid=self.aid, type=1, priority=1)
|
||||||
|
|
||||||
|
|
||||||
|
class Anime(aniDBabstractObject):
    """An anime known to AniDB, resolvable by name, aid or tvdb id."""

    def __init__(self, aniDB, name=None, aid=None, tvdbid=None, paramsA=None, autoCorrectName=False, load=False):

        self.maper = AniDBMaper()
        self.tvDBMap = TvDBMap()
        # lazily-parsed animetitles.xml element tree (see _read_animetitels_xml)
        self.allAnimeXML = None

        self.name = name
        self.aid = aid
        self.tvdb_id = tvdbid

        # resolve the anidb id from the tvdb id when only the latter is given
        if self.tvdb_id and not self.aid:
            self.aid = self.tvDBMap.get_anidb_for_tvdb(self.tvdb_id)

        if not (self.name or self.aid):
            raise AniDBIncorrectParameterError("No aid or name available")

        # fill in whichever of aid/name is still missing from the titles xml
        if not self.aid:
            self.aid = self._get_aid_from_xml(self.name)
        if not self.name or autoCorrectName:
            self.name = self._get_name_from_xml(self.aid)

        if not (self.name or self.aid):
            raise ValueError

        if not self.tvdb_id:
            self.tvdb_id = self.tvDBMap.get_tvdb_for_anidb(self.aid)

        if not paramsA:
            # default amask bit code requesting the commonly used anime fields
            self.bitCode = "b2f0e0fc000000"
            # NOTE(review): this branch sets self.params while the else
            # branch sets self.paramsA -- likely an inconsistency; confirm
            # which attribute consumers read.
            self.params = self.maper.getAnimeCodesA(self.bitCode)
        else:
            self.paramsA = paramsA
            self.bitCode = self.maper.getAnimeBitsA(self.paramsA)

        super(Anime, self).__init__(aniDB, load)

    def load_data(self):
        """load the data from anidb"""

        if not (self.name or self.aid):
            raise ValueError

        self.rawData = self.aniDB.anime(aid=self.aid, aname=self.name, amask=self.bitCode)
        if self.rawData.datalines:
            self._fill(self.rawData.datalines[0])
            self._builPreSequal()
            self.laoded = True

    def get_groups(self):
        """Return [{name, rating, range}, ...] for groups releasing this anime."""
        if not self.aid:
            return []
        self.rawData = self.aniDB.groupstatus(aid=self.aid)
        self.release_groups = []
        for line in self.rawData.datalines:
            self.release_groups.append({"name":unicode(line["name"], "utf-8"),
                                        "rating":line["rating"],
                                        "range":line["episode_range"]
                                        })
        return self.release_groups

    #TODO: refactor and use the new functions in anidbFileinfo
    def _get_aid_from_xml(self, name):
        """Look up the aid for a title by normalized-name match in animetitles.xml."""
        if not self.allAnimeXML:
            self.allAnimeXML = self._read_animetitels_xml()

        regex = re.compile('( \(\d{4}\))|[%s]' % re.escape(string.punctuation)) # remove any punctuation and e.g. ' (2011)'
        #regex = re.compile('[%s]' % re.escape(string.punctuation)) # remove any punctuation and e.g. ' (2011)'
        name = regex.sub('', name.lower())
        lastAid = 0
        for element in self.allAnimeXML.getiterator():
            # title elements follow their <anime aid=...> parent in document
            # order, so remember the most recent aid seen
            if element.get("aid", False):
                lastAid = int(element.get("aid"))
            if element.text:
                testname = regex.sub('', element.text.lower())

                if testname == name:
                    return lastAid
        return 0

    #TODO: refactor and use the new functions in anidbFileinfo
    def _get_name_from_xml(self, aid, onlyMain=True):
        """Return the main (or english, when onlyMain is False) title for an aid."""
        if not self.allAnimeXML:
            self.allAnimeXML = self._read_animetitels_xml()

        for anime in self.allAnimeXML.findall("anime"):
            if int(anime.get("aid", False)) == aid:
                for title in anime.getiterator():
                    currentLang = title.get("{http://www.w3.org/XML/1998/namespace}lang", False)
                    currentType = title.get("type", False)
                    if (currentLang == "en" and not onlyMain) or currentType == "main":
                        return title.text
        return ""

    def _read_animetitels_xml(self, path=None):
        """Parse animetitles.xml (next to this module by default) into an ElementTree."""
        if not path:
            path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "animetitles.xml")

        f = open(path, "r")
        allAnimeXML = etree.ElementTree(file=f)
        return allAnimeXML

    def _builPreSequal(self):
        """Derive self.prequal/self.sequal from the related-aid fields.

        related_aid_type 2 marks a prequel, 1 a sequel; the except branch
        handles the single-value (non-list) case.
        """
        if self.related_aid_list and self.related_aid_type:
            try:
                for i in range(len(self.related_aid_list)):
                    if self.related_aid_type[i] == 2:
                        self.__dict__["prequal"] = self.related_aid_list[i]
                    elif self.related_aid_type[i] == 1:
                        self.__dict__["sequal"] = self.related_aid_list[i]
            except:
                if self.related_aid_type == 2:
                    self.__dict__["prequal"] = self.related_aid_list
                # NOTE(review): str_related_aid_type looks like a typo for
                # related_aid_type -- via __getattr__ it resolves to None, so
                # this branch can never match; confirm and fix upstream.
                elif self.str_related_aid_type == 1:
                    self.__dict__["sequal"] = self.related_aid_list
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class Episode(aniDBabstractObject):
|
||||||
|
|
||||||
|
def __init__(self, aniDB, number=None, epid=None, filePath=None, fid=None, epno=None, paramsA=None, paramsF=None, load=False, calculate=False):
|
||||||
|
if not aniDB and not number and not epid and not file and not fid:
|
||||||
|
return None
|
||||||
|
|
||||||
|
self.maper = AniDBMaper()
|
||||||
|
self.epid = epid
|
||||||
|
self.filePath = filePath
|
||||||
|
self.fid = fid
|
||||||
|
self.epno = epno
|
||||||
|
if calculate:
|
||||||
|
(self.ed2k, self.size) = self._calculate_file_stuff(self.filePath)
|
||||||
|
|
||||||
|
|
||||||
|
if not paramsA:
|
||||||
|
self.bitCodeA = "C000F0C0"
|
||||||
|
self.paramsA = self.maper.getFileCodesA(self.bitCodeA)
|
||||||
|
else:
|
||||||
|
self.paramsA = paramsA
|
||||||
|
self.bitCodeA = self.maper.getFileBitsA(self.paramsA)
|
||||||
|
|
||||||
|
if not paramsF:
|
||||||
|
self.bitCodeF = "7FF8FEF8"
|
||||||
|
self.paramsF = self.maper.getFileCodesF(self.bitCodeF)
|
||||||
|
else:
|
||||||
|
self.paramsF = paramsF
|
||||||
|
self.bitCodeF = self.maper.getFileBitsF(self.paramsF)
|
||||||
|
|
||||||
|
super(Episode, self).__init__(aniDB, load)
|
||||||
|
|
||||||
|
def load_data(self):
|
||||||
|
"""load the data from anidb"""
|
||||||
|
if self.filePath and not (self.ed2k or self.size):
|
||||||
|
(self.ed2k, self.size) = self._calculate_file_stuff(self.filePath)
|
||||||
|
|
||||||
|
self.rawData = self.aniDB.file(fid=self.fid, size=self.size, ed2k=self.ed2k, aid=self.aid, aname=None, gid=None, gname=None, epno=self.epno, fmask=self.bitCodeF, amask=self.bitCodeA)
|
||||||
|
self._fill(self.rawData.datalines[0])
|
||||||
|
self._build_names()
|
||||||
|
self.laoded = True
|
||||||
|
|
||||||
|
def add_to_mylist(self, status=None):
|
||||||
|
"""
|
||||||
|
status:
|
||||||
|
0 unknown - state is unknown or the user doesn't want to provide this information (default)
|
||||||
|
1 on hdd - the file is stored on hdd
|
||||||
|
2 on cd - the file is stored on cd
|
||||||
|
3 deleted - the file has been deleted or is not available for other reasons (i.e. reencoded)
|
||||||
|
|
||||||
|
"""
|
||||||
|
if self.filePath and not (self.ed2k or self.size):
|
||||||
|
(self.ed2k, self.size) = self._calculate_file_stuff(self.filePath)
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.aniDB.mylistadd(size=self.size, ed2k=self.ed2k, state=status)
|
||||||
|
except Exception, e :
|
||||||
|
self.log(u"exception msg: " + str(e))
|
||||||
|
else:
|
||||||
|
# TODO: add the name or something
|
||||||
|
self.log(u"Added the episode to anidb")
|
||||||
|
|
||||||
|
|
||||||
|
def _calculate_file_stuff(self, filePath):
|
||||||
|
if not filePath:
|
||||||
|
return (None, None)
|
||||||
|
self.log("Calculating the ed2k. Please wait...")
|
||||||
|
ed2k = fileInfo.get_file_hash(filePath)
|
||||||
|
size = fileInfo.get_file_size(filePath)
|
||||||
|
return (ed2k, size)
|
||||||
|
|
447
lib/adba/aniDBcommands.py
Normal file
|
@ -0,0 +1,447 @@
|
||||||
|
# !/usr/bin/env python
|
||||||
|
#
|
||||||
|
# This file is part of aDBa.
|
||||||
|
#
|
||||||
|
# aDBa is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# aDBa is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with aDBa. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from threading import Lock
|
||||||
|
from aniDBresponses import *
|
||||||
|
from aniDBerrors import *
|
||||||
|
|
||||||
|
|
||||||
|
class Command:
    """One AniDB UDP API request.

    Holds the command name and its parameters, flattens them into the raw
    wire string, and carries the response/callback plumbing used by the
    dispatcher (authorize / handle / wait_response).
    """
    queue = {None: None}

    def __init__(self, command, **parameters):
        self.command = command
        self.parameters = parameters
        self.raw = self.flatten(command, parameters)

        self.mode = None
        self.callback = None
        # pre-acquired lock: handle() releases it, wait_response() blocks on it
        self.waiter = Lock()
        self.waiter.acquire()

    def __repr__(self):
        return "Command(%s,%s) %s\n%s\n" % (repr(self.tag), repr(self.command), repr(self.parameters), self.raw_data())

    def authorize(self, mode, tag, session, callback):
        """Attach dispatch mode, tag and session id before the command is sent."""
        self.mode = mode
        self.callback = callback
        self.tag = tag
        self.session = session

        self.parameters['tag'] = tag
        self.parameters['s'] = session

    def handle(self, resp):
        """Deliver the response: unblock the waiter (mode 1) or invoke the callback (mode 2)."""
        self.resp = resp
        if self.mode == 1:
            self.waiter.release()
        elif self.mode == 2:
            self.callback(resp)

    def wait_response(self):
        """Block until handle() releases the waiter lock."""
        self.waiter.acquire()

    def flatten(self, command, parameters):
        """Render 'COMMAND key=value&key=value', skipping None-valued parameters."""
        tmp = []
        # .items() instead of .iteritems(): identical behavior on python 2,
        # and also valid on python 3
        for key, value in parameters.items():
            if value is None:
                continue
            tmp.append("%s=%s" % (self.escape(key), self.escape(value)))
        return ' '.join([command, '&'.join(tmp)])

    def escape(self, data):
        # BUG FIX: the original replaced '&' with itself (a no-op). The AniDB
        # UDP API requires literal '&' in content to be escaped as '&amp;' so
        # it is not parsed as a parameter separator.
        return str(data).replace('&', '&amp;')

    def raw_data(self):
        """Re-flatten (parameters may have changed since __init__) and return the raw string."""
        self.raw = self.flatten(self.command, self.parameters)
        return self.raw

    def cached(self, interface, database):
        # no caching by default; subclasses may return a cached response
        return None

    def cache(self, interface, database):
        # no-op by default; subclasses may persist the response
        pass
|
||||||
|
|
||||||
|
|
||||||
|
#first run
|
||||||
|
class AuthCommand(Command):
    """AUTH -- opening command of a session; identifies user and client."""
    def __init__(self, username, password, protover, client, clientver, nat=None, comp=None, enc=None, mtu=None):
        parms = dict(user=username, protover=protover, client=client,
                     clientver=clientver, nat=nat, comp=comp, enc=enc, mtu=mtu)
        parms['pass'] = password  # 'pass' is a python keyword, set via item access
        Command.__init__(self, 'AUTH', **parms)
|
||||||
|
|
||||||
|
|
||||||
|
class LogoutCommand(Command):
    """LOGOUT -- close the current session."""
    def __init__(self):
        Command.__init__(self, 'LOGOUT')
|
||||||
|
|
||||||
|
|
||||||
|
#third run (at the same time as second)
|
||||||
|
class PushCommand(Command):
    """PUSH -- (un)subscribe to server push notifications."""
    def __init__(self, notify, msg, buddy=None):
        Command.__init__(self, 'PUSH', notify=notify, msg=msg, buddy=buddy)
|
||||||
|
|
||||||
|
|
||||||
|
class PushAckCommand(Command):
    """PUSHACK -- acknowledge a pushed notification by its nid."""
    def __init__(self, nid):
        Command.__init__(self, 'PUSHACK', nid=nid)
|
||||||
|
|
||||||
|
|
||||||
|
class NotifyAddCommand(Command):
    """NOTIFICATIONADD -- subscribe to notifications for an anime (aid) or group (gid)."""
    def __init__(self, aid=None, gid=None, type=None, priority=None):
        # exactly one of aid/gid must be supplied
        if not (aid or gid) or (aid and gid):
            raise AniDBIncorrectParameterError("You must provide aid OR gid for NOTIFICATIONADD command")
        Command.__init__(self, 'NOTIFICATIONADD', aid=aid, gid=gid, type=type, priority=priority)
|
||||||
|
|
||||||
|
|
||||||
|
class NotifyCommand(Command):
    """NOTIFY -- query notification/message counts."""
    def __init__(self, buddy=None):
        Command.__init__(self, 'NOTIFY', buddy=buddy)
|
||||||
|
|
||||||
|
|
||||||
|
class NotifyListCommand(Command):
    """NOTIFYLIST -- list pending notifications and messages."""
    def __init__(self):
        Command.__init__(self, 'NOTIFYLIST')
|
||||||
|
|
||||||
|
|
||||||
|
class NotifyGetCommand(Command):
    """NOTIFYGET -- fetch one notification/message by type and id."""
    def __init__(self, type, id):
        Command.__init__(self, 'NOTIFYGET', type=type, id=id)
|
||||||
|
|
||||||
|
|
||||||
|
class NotifyAckCommand(Command):
    """NOTIFYACK -- acknowledge one notification/message by type and id."""
    def __init__(self, type, id):
        Command.__init__(self, 'NOTIFYACK', type=type, id=id)
|
||||||
|
|
||||||
|
|
||||||
|
class BuddyAddCommand(Command):
    """BUDDYADD -- add a user to the buddy list by uid XOR uname."""
    def __init__(self, uid=None, uname=None):
        # exactly one of uid/uname must be supplied
        if not (uid or uname) or (uid and uname):
            raise AniDBIncorrectParameterError("You must provide <u(id|name)> for BUDDYADD command")
        # BUG FIX: the original called uname.lower() unconditionally, raising
        # AttributeError when only uid was given (uname is None) -- a case
        # the validation above explicitly allows.
        parameters = {'uid': uid, 'uname': uname.lower() if uname else uname}
        Command.__init__(self, 'BUDDYADD', **parameters)
|
||||||
|
|
||||||
|
|
||||||
|
class BuddyDelCommand(Command):
    """BUDDYDEL -- remove the user identified by uid from the buddy list."""
    def __init__(self, uid):
        Command.__init__(self, 'BUDDYDEL', uid=uid)
|
||||||
|
|
||||||
|
|
||||||
|
class BuddyAcceptCommand(Command):
    """BUDDYACCEPT -- accept the pending buddy request from uid."""
    def __init__(self, uid):
        Command.__init__(self, 'BUDDYACCEPT', uid=uid)
|
||||||
|
|
||||||
|
|
||||||
|
class BuddyDenyCommand(Command):
    """BUDDYDENY -- deny the pending buddy request from uid."""
    def __init__(self, uid):
        Command.__init__(self, 'BUDDYDENY', uid=uid)
|
||||||
|
|
||||||
|
|
||||||
|
class BuddyListCommand(Command):
    """BUDDYLIST -- list buddies starting at the given offset."""
    def __init__(self, startat):
        Command.__init__(self, 'BUDDYLIST', startat=startat)
|
||||||
|
|
||||||
|
|
||||||
|
class BuddyStateCommand(Command):
    """BUDDYSTATE -- list buddy online states starting at the given offset."""
    def __init__(self, startat):
        Command.__init__(self, 'BUDDYSTATE', startat=startat)
|
||||||
|
|
||||||
|
|
||||||
|
#first run
|
||||||
|
class AnimeCommand(Command):
    """ANIME -- fetch anime info by aid or aname, field selection via amask."""
    def __init__(self, aid=None, aname=None, amask=None):
        # at least one selector is required
        if not (aid or aname):
            raise AniDBIncorrectParameterError("You must provide <a(id|name)> for ANIME command")
        Command.__init__(self, 'ANIME', aid=aid, aname=aname, amask=amask)
|
||||||
|
|
||||||
|
|
||||||
|
class EpisodeCommand(Command):
    """EPISODE command: retrieve episode data.

    Selector forms (mutually exclusive): eid alone, or exactly one of
    aid/aname combined with an episode number (epno).
    """
    def __init__(self, eid=None, aid=None, aname=None, epno=None):
        # Reject when: neither selector form is complete, both aid and aname
        # are given, or eid is mixed with any anime/episode selector.
        if not (eid or ((aname or aid) and epno)) or (aname and aid) or (eid and (aname or aid or epno)):
            raise AniDBIncorrectParameterError, "You must provide <eid XOR a(id|name)+epno> for EPISODE command"
        parameters = {'eid': eid, 'aid': aid, 'aname': aname, 'epno': epno}
        Command.__init__(self, 'EPISODE', **parameters)
|
||||||
|
|
||||||
|
|
||||||
|
class FileCommand(Command):
    """FILE command: retrieve file data.

    Selector forms (mutually exclusive):
      * fid alone,
      * size + ed2k hash,
      * one of aid/aname + one of gid/gname + epno.
    fmask/amask select which fields the server returns.
    """
    def __init__(self, fid=None, size=None, ed2k=None, aid=None, aname=None, gid=None, gname=None, epno=None,
                 fmask=None, amask=None):
        # Validation enforces that exactly one complete selector form is
        # present and that id/name pairs are not both supplied.
        if not (fid or (size and ed2k) or ((aid or aname) and (gid or gname) and epno)) or (
                fid and (size or ed2k or aid or aname or gid or gname or epno)) or (
                (size and ed2k) and (fid or aid or aname or gid or gname or epno)) or (
                ((aid or aname) and (gid or gname) and epno) and (fid or size or ed2k)) or (aid and aname) or (
                gid and gname):
            raise AniDBIncorrectParameterError, "You must provide <fid XOR size+ed2k XOR a(id|name)+g(id|name)+epno> for FILE command"
        parameters = {'fid': fid, 'size': size, 'ed2k': ed2k, 'aid': aid, 'aname': aname, 'gid': gid, 'gname': gname,
                      'epno': epno, 'fmask': fmask, 'amask': amask}
        Command.__init__(self, 'FILE', **parameters)
|
||||||
|
|
||||||
|
|
||||||
|
class GroupCommand(Command):
    """GROUP command: retrieve group data by exactly one of gid / gname."""

    def __init__(self, gid=None, gname=None):
        # Exactly one identifier must be supplied: neither or both is an error.
        if bool(gid) == bool(gname):
            raise AniDBIncorrectParameterError("You must provide <g(id|name)> for GROUP command")
        Command.__init__(self, 'GROUP', gid=gid, gname=gname)
|
||||||
|
|
||||||
|
|
||||||
|
class GroupstatusCommand(Command):
    """GROUPSTATUS command: list group completion states for an anime."""

    def __init__(self, aid=None, status=None):
        # aid is mandatory; status is an optional completion-state filter.
        if not aid:
            raise AniDBIncorrectParameterError("You must provide aid for GROUPSTATUS command")
        Command.__init__(self, 'GROUPSTATUS', aid=aid, status=status)
|
||||||
|
|
||||||
|
|
||||||
|
class ProducerCommand(Command):
    """PRODUCER command: retrieve producer data, with local-DB caching.

    Selected by exactly one of pid / pname. cached()/cache() read/write the
    local table 'ptb' through the *db* object.
    """
    def __init__(self, pid=None, pname=None):
        # Exactly one identifier must be supplied.
        if not (pid or pname) or (pid and pname):
            raise AniDBIncorrectParameterError, "You must provide <p(id|name)> for PRODUCER command"
        parameters = {'pid': pid, 'pname': pname}
        Command.__init__(self, 'PRODUCER', **parameters)

    def cached(self, intr, db):
        """Return a parsed ProducerResponse built from the local cache, or
        None when the producer is not cached.

        Raises AniDBInternalError if the cache holds more than one row for
        the selector (should be impossible).
        """
        pid = self.parameters['pid']
        pname = self.parameters['pname']

        # Column names of the 'ptb' cache table.
        codes = ('pid', 'name', 'shortname', 'othername', 'type', 'pic', 'url')
        names = ','.join([code for code in codes if code != ''])
        # Lookup by pid when given, otherwise match the name against all
        # three name columns. (py2 `and/or` conditional idiom.)
        ruleholder = (pid and 'pid=%s' or '(name=%s OR shortname=%s OR othername=%s)')
        rulevalues = (pid and [pid] or [pname, pname, pname])

        # "status&8" restricts the query to rows flagged as cached.
        rows = db.select('ptb', names, ruleholder + " AND status&8", *rulevalues)

        if len(rows) > 1:
            raise AniDBInternalError, "It shouldn't be possible for database to return more than 1 line for PRODUCER cache"
        elif not len(rows):
            return None
        else:
            # Synthesize a response object ('245' = CACHED PRODUCER) from the row.
            resp = ProducerResponse(self, None, '245', 'CACHED PRODUCER', [list(rows[0])])
            resp.parse()
            return resp

    def cache(self, intr, db):
        """Store the server response in the local 'ptb' table.

        No-op unless the response was a real PRODUCER reply ('245') that is
        not already cached.
        """
        if self.resp.rescode != '245' or self.cached(intr, db):
            return

        codes = ('pid', 'name', 'shortname', 'othername', 'type', 'pic', 'url')
        if len(db.select('ptb', 'pid', 'pid=%s', self.resp.datalines[0]['pid'])):
            # Row exists: update all columns and set the low status bits.
            sets = 'status=status|15,' + ','.join([code + '=%s' for code in codes if code != ''])
            values = [self.resp.datalines[0][code] for code in codes if code != ''] + [self.resp.datalines[0]['pid']]

            db.update('ptb', sets, 'pid=%s', *values)
        else:
            # Row missing: insert it (status starts at 0 here).
            names = 'status,' + ','.join([code for code in codes if code != ''])
            valueholders = '0,' + ','.join(['%s' for code in codes if code != ''])
            values = [self.resp.datalines[0][code] for code in codes if code != '']

            db.insert('ptb', names, valueholders, *values)
|
||||||
|
|
||||||
|
|
||||||
|
class MyListCommand(Command):
    """MYLIST command: retrieve a mylist entry, with local-DB caching.

    Selector forms (mutually exclusive): lid, fid, size+ed2k, or an anime
    selector (one of aid/aname) optionally narrowed by group and epno.
    """
    def __init__(self, lid=None, fid=None, size=None, ed2k=None, aid=None, aname=None, gid=None, gname=None, epno=None):
        # Enforce that exactly one complete selector form is present and
        # that id/name pairs are not both supplied.
        if not (lid or fid or (size and ed2k) or (aid or aname)) or (
                lid and (fid or size or ed2k or aid or aname or gid or gname or epno)) or (
                fid and (lid or size or ed2k or aid or aname or gid or gname or epno)) or (
                (size and ed2k) and (lid or fid or aid or aname or gid or gname or epno)) or (
                (aid or aname) and (lid or fid or size or ed2k)) or (aid and aname) or (gid and gname):
            raise AniDBIncorrectParameterError, "You must provide <lid XOR fid XOR size+ed2k XOR a(id|name)+g(id|name)+epno> for MYLIST command"
        parameters = {'lid': lid, 'fid': fid, 'size': size, 'ed2k': ed2k, 'aid': aid, 'aname': aname, 'gid': gid,
                      'gname': gname, 'epno': epno}
        Command.__init__(self, 'MYLIST', **parameters)

    def cached(self, intr, db):
        """Try to answer this command from the local cache table 'ltb'.

        Returns a parsed response on a cache hit (or a NoSuch* response when
        an intermediate lookup fails), or None when the entry is not cached.
        May issue FILE/ANIME/GROUP/EPISODE requests through *intr* to turn
        the selector into concrete ids first.
        """
        lid = self.parameters['lid']
        fid = self.parameters['fid']
        size = self.parameters['size']
        ed2k = self.parameters['ed2k']
        aid = self.parameters['aid']
        aname = self.parameters['aname']
        gid = self.parameters['gid']
        gname = self.parameters['gname']
        epno = self.parameters['epno']

        # Column list comes from the response definition so cache layout and
        # wire layout stay in sync.
        names = ','.join([code for code in MylistResponse(None, None, None, None, []).codetail if code != ''])

        if lid:
            ruleholder = "lid=%s"
            rulevalues = [lid]
        elif fid or size or ed2k:
            # Resolve size/ed2k (or verify fid) into a concrete fid first.
            resp = intr.file(fid=fid, size=size, ed2k=ed2k)
            if resp.rescode != '220':
                resp = NoSuchMylistResponse(self, None, '321', 'NO SUCH ENTRY (FILE NOT FOUND)', [])
                resp.parse()
                return resp
            fid = resp.datalines[0]['fid']

            ruleholder = "fid=%s"
            rulevalues = [fid]
        else:
            # Anime selector: resolve anime, group and episode into ids.
            resp = intr.anime(aid=aid, aname=aname)
            if resp.rescode != '230':
                resp = NoSuchFileResponse(self, None, '321', 'NO SUCH ENTRY (ANIME NOT FOUND)', [])
                resp.parse()
                return resp
            aid = resp.datalines[0]['aid']

            resp = intr.group(gid=gid, gname=gname)
            if resp.rescode != '250':
                resp = NoSuchFileResponse(self, None, '321', 'NO SUCH ENTRY (GROUP NOT FOUND)', [])
                resp.parse()
                return resp
            gid = resp.datalines[0]['gid']

            resp = intr.episode(aid=aid, epno=epno)
            if resp.rescode != '240':
                resp = NoSuchFileResponse(self, None, '321', 'NO SUCH ENTRY (EPISODE NOT FOUND)', [])
                resp.parse()
                return resp
            eid = resp.datalines[0]['eid']

            ruleholder = "aid=%s AND eid=%s AND gid=%s"
            rulevalues = [aid, eid, gid]

        # "status&8" restricts the query to rows flagged as cached.
        rows = db.select('ltb', names, ruleholder + " AND status&8", *rulevalues)

        if len(rows) > 1:
            # Ambiguous cache hit: fall back to a live lookup.
            #resp=MultipleFilesFoundResponse(self,None,'322','CACHED MULTIPLE FILES FOUND',/*get fids from rows, not gonna do this as you haven't got a real cache out of these..*/)
            return None
        elif not len(rows):
            return None
        else:
            # Synthesize a response ('221' = CACHED MYLIST) from the row.
            resp = MylistResponse(self, None, '221', 'CACHED MYLIST', [list(rows[0])])
            resp.parse()
            return resp

    def cache(self, intr, db):
        """Store the server response in the local 'ltb' table (update the
        existing row for this lid, or insert a new one)."""
        if self.resp.rescode != '221' or self.cached(intr, db):
            return

        codes = MylistResponse(None, None, None, None, []).codetail
        if len(db.select('ltb', 'lid', 'lid=%s', self.resp.datalines[0]['lid'])):
            sets = 'status=status|15,' + ','.join([code + '=%s' for code in codes if code != ''])
            values = [self.resp.datalines[0][code] for code in codes if code != ''] + [self.resp.datalines[0]['lid']]

            db.update('ltb', sets, 'lid=%s', *values)
        else:
            names = 'status,' + ','.join([code for code in codes if code != ''])
            valueholders = '15,' + ','.join(['%s' for code in codes if code != ''])
            values = [self.resp.datalines[0][code] for code in codes if code != '']

            db.insert('ltb', names, valueholders, *values)
|
||||||
|
|
||||||
|
|
||||||
|
class MyListAddCommand(Command):
    """MYLISTADD command: add or edit a mylist entry.

    Selector forms (mutually exclusive): lid (edit only), fid, size+ed2k,
    or a(id|name)+g(id|name). The remaining keyword arguments carry the
    entry's state/viewed/source/storage/other fields.
    """
    def __init__(self, lid=None, fid=None, size=None, ed2k=None, aid=None, aname=None, gid=None, gname=None, epno=None,
                 edit=None, state=None, viewed=None, source=None, storage=None, other=None):
        # Enforce exactly one complete selector form; additionally a lid
        # selector is only valid together with edit.
        if not (lid or fid or (size and ed2k) or ((aid or aname) and (gid or gname))) or (
                lid and (fid or size or ed2k or aid or aname or gid or gname or epno)) or (
                fid and (lid or size or ed2k or aid or aname or gid or gname or epno)) or (
                (size and ed2k) and (lid or fid or aid or aname or gid or gname or epno)) or (
                ((aid or aname) and (gid or gname)) and (lid or fid or size or ed2k)) or (aid and aname) or (
                gid and gname) or (lid and not edit):
            raise AniDBIncorrectParameterError, "You must provide <lid XOR fid XOR size+ed2k XOR a(id|name)+g(id|name)+epno> for MYLISTADD command"
        parameters = {'lid': lid, 'fid': fid, 'size': size, 'ed2k': ed2k, 'aid': aid, 'aname': aname, 'gid': gid,
                      'gname': gname, 'epno': epno, 'edit': edit, 'state': state, 'viewed': viewed, 'source': source,
                      'storage': storage, 'other': other}
        Command.__init__(self, 'MYLISTADD', **parameters)
|
||||||
|
|
||||||
|
|
||||||
|
class MyListDelCommand(Command):
    """MYLISTDEL command: delete a mylist entry.

    Selector forms (mutually exclusive): lid, fid, or
    a(id|name)+g(id|name)+epno.
    """
    def __init__(self, lid=None, fid=None, aid=None, aname=None, gid=None, gname=None, epno=None):
        # Enforce exactly one complete selector form and no mixed id/name pairs.
        if not (lid or fid or ((aid or aname) and (gid or gname) and epno)) or (
                lid and (fid or aid or aname or gid or gname or epno)) or (
                fid and (lid or aid or aname or gid or gname or epno)) or (
                ((aid or aname) and (gid or gname) and epno) and (lid or fid)) or (aid and aname) or (gid and gname):
            raise AniDBIncorrectParameterError, "You must provide <lid+edit=1 XOR fid XOR a(id|name)+g(id|name)+epno> for MYLISTDEL command"
        parameters = {'lid': lid, 'fid': fid, 'aid': aid, 'aname': aname, 'gid': gid, 'gname': gname, 'epno': epno}
        Command.__init__(self, 'MYLISTDEL', **parameters)
|
||||||
|
|
||||||
|
|
||||||
|
class MyListStatsCommand(Command):
    # Parameterless command: requests aggregate mylist statistics.
    def __init__(self):
        """Build a MYLISTSTATS command (no arguments)."""
        Command.__init__(self, 'MYLISTSTATS')
|
||||||
|
|
||||||
|
|
||||||
|
class VoteCommand(Command):
    """VOTE command: vote on an entity selected by exactly one of id / name."""

    def __init__(self, type, id=None, name=None, value=None, epno=None):
        # Exactly one of id / name must be supplied: neither or both is an error.
        if bool(id) == bool(name):
            raise AniDBIncorrectParameterError("You must provide <(id|name)> for VOTE command")
        Command.__init__(self, 'VOTE', type=type, id=id, name=name, value=value, epno=epno)
|
||||||
|
|
||||||
|
|
||||||
|
class RandomAnimeCommand(Command):
    """RANDOMANIME command: ask the server for a random anime of the given type."""

    def __init__(self, type):
        Command.__init__(self, 'RANDOMANIME', type=type)
|
||||||
|
|
||||||
|
|
||||||
|
class PingCommand(Command):
    # Parameterless keep-alive / reachability command.
    def __init__(self):
        """Build a PING command (no arguments)."""
        Command.__init__(self, 'PING')
|
||||||
|
|
||||||
|
|
||||||
|
#second run
|
||||||
|
class EncryptCommand(Command):
    """ENCRYPT command: request an encrypted session for *user*."""

    def __init__(self, user, apipassword, type):
        # The API password is kept on the command object for later use; it is
        # deliberately NOT part of the packet parameters sent to the server.
        self.apipassword = apipassword
        Command.__init__(self, 'ENCRYPT', user=user.lower(), type=type)
|
||||||
|
|
||||||
|
|
||||||
|
class EncodingCommand(Command):
    """ENCODING command: set the text encoding used for this session."""

    def __init__(self, name):
        """
        Arguments:
            name -- the encoding name to request.
        """
        # Bug fix: the original built {'name': type}, which sent the builtin
        # ``type`` object instead of the requested encoding name.
        parameters = {'name': name}
        Command.__init__(self, 'ENCODING', **parameters)
|
||||||
|
|
||||||
|
|
||||||
|
class SendMsgCommand(Command):
    """SENDMSG command: send a private message to another AniDB user."""

    def __init__(self, to, title, body):
        # Fail fast locally on the API's hard length limits.
        if len(title) > 50 or len(body) > 900:
            raise AniDBIncorrectParameterError("Title must not be longer than 50 chars and body must not be longer than 900 chars for SENDMSG command")
        Command.__init__(self, 'SENDMSG', to=to.lower(), title=title, body=body)
|
||||||
|
|
||||||
|
|
||||||
|
class UserCommand(Command):
    """USER command: look up a user by name."""

    def __init__(self, user):
        Command.__init__(self, 'USER', user=user)
|
||||||
|
|
||||||
|
|
||||||
|
class UptimeCommand(Command):
    # Parameterless command: requests the server's uptime.
    def __init__(self):
        """Build an UPTIME command (no arguments)."""
        Command.__init__(self, 'UPTIME')
|
||||||
|
|
||||||
|
|
||||||
|
class VersionCommand(Command):
    # Parameterless command: requests the server's version string.
    def __init__(self):
        """Build a VERSION command (no arguments)."""
        Command.__init__(self, 'VERSION')
|
||||||
|
|
37
lib/adba/aniDBerrors.py
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
#
|
||||||
|
# This file is part of aDBa.
|
||||||
|
#
|
||||||
|
# aDBa is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# aDBa is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with aDBa. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
class AniDBError(Exception):
    """Base class for all aDBa (AniDB UDP API client) errors."""
    pass
|
||||||
|
|
||||||
|
class AniDBIncorrectParameterError(AniDBError):
    """Raised when a command is built with a missing or conflicting parameter set."""
    pass
|
||||||
|
|
||||||
|
class AniDBCommandTimeoutError(AniDBError):
    """Raised when a command does not receive a reply within the timeout."""
    pass
|
||||||
|
|
||||||
|
class AniDBMustAuthError(AniDBError):
    """Raised when a command requiring an authenticated session is sent without one."""
    pass
|
||||||
|
|
||||||
|
class AniDBPacketCorruptedError(AniDBError):
    """Raised when a received packet cannot be decrypted, decompressed or parsed."""
    pass
|
||||||
|
|
||||||
|
class AniDBBannedError(AniDBError):
    """Raised when the server reports that the user or client is banned."""
    pass
|
||||||
|
|
||||||
|
class AniDBInternalError(AniDBError):
    """Raised on an internal inconsistency (e.g. an impossible cache state)."""
    pass
|
75
lib/adba/aniDBfileInfo.py
Normal file
|
@ -0,0 +1,75 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
#
|
||||||
|
# This file is part of aDBa.
|
||||||
|
#
|
||||||
|
# aDBa is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# aDBa is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with aDBa. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import with_statement
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
import xml.etree.cElementTree as etree
|
||||||
|
|
||||||
|
|
||||||
|
# http://www.radicand.org/blog/orz/2010/2/21/edonkey2000-hash-in-python/
|
||||||
|
# http://www.radicand.org/blog/orz/2010/2/21/edonkey2000-hash-in-python/
def get_file_hash(filePath):
    """ Returns the ed2k hash of a given file."""
    if not filePath:
        return None
    # Bound method returning fresh MD4 objects. NOTE(review): MD4 support in
    # hashlib depends on the underlying OpenSSL build -- confirm availability.
    md4 = hashlib.new('md4').copy

    def gen(f):
        # Yield the file in 9728000-byte chunks (the ed2k chunk size).
        while True:
            x = f.read(9728000)
            if x: yield x
            else: return

    def md4_hash(data):
        # MD4 of a single chunk.
        m = md4()
        m.update(data)
        return m

    with open(filePath, 'rb') as f:
        a = gen(f)
        hashes = [md4_hash(data).digest() for data in a]
        if len(hashes) == 1:
            # Single-chunk files: the chunk hash IS the ed2k hash (py2 hex encoding).
            return hashes[0].encode("hex")
        # Multi-chunk files: ed2k hash is the MD4 of the concatenated chunk digests.
        else: return md4_hash(reduce(lambda a,d: a + d, hashes, "")).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
def get_file_size(path):
    """Return the size of the file at *path*, in bytes."""
    return os.path.getsize(path)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def read_anidb_xml(filePath):
    """Parse the AniDB title dump; default to the bundled animetitles.xml."""
    if not filePath:
        module_dir = os.path.dirname(os.path.abspath(__file__))
        filePath = os.path.join(module_dir, "animetitles.xml")
    return read_xml_into_etree(filePath)
|
||||||
|
|
||||||
|
|
||||||
|
def read_tvdb_map_xml(filePath):
    """Parse the AniDB-to-TVDB mapping; default to the bundled anime-list.xml."""
    if not filePath:
        module_dir = os.path.dirname(os.path.abspath(__file__))
        filePath = os.path.join(module_dir, "anime-list.xml")
    return read_xml_into_etree(filePath)
|
||||||
|
|
||||||
|
|
||||||
|
def read_xml_into_etree(filePath):
    """Parse the XML file at *filePath* into an ElementTree.

    Returns:
        An etree.ElementTree, or None when no path is given.
    """
    if not filePath:
        return None

    # Bug fix: the original opened the file and never closed it, leaking the
    # handle. ElementTree(file=...) parses eagerly, so a context manager is safe.
    with open(filePath, "r") as f:
        return etree.ElementTree(file=f)
|
||||||
|
|
218
lib/adba/aniDBlink.py
Normal file
|
@ -0,0 +1,218 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
#
|
||||||
|
# This file is part of aDBa.
|
||||||
|
#
|
||||||
|
# aDBa is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# aDBa is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with aDBa. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
import socket, sys, zlib
|
||||||
|
from time import time, sleep
|
||||||
|
import threading
|
||||||
|
from aniDBresponses import ResponseResolver
|
||||||
|
from aniDBerrors import *
|
||||||
|
|
||||||
|
|
||||||
|
class AniDBLink(threading.Thread):
    """UDP transport thread for the AniDB API.

    Owns the socket, enforces the anti-flood send delay, matches tagged
    responses back to queued commands, and tracks session/ban state.
    """
    def __init__(self, server, port, myport, logFunction, delay=2, timeout=20, logPrivate=False):
        """
        Arguments:
            server, port -- remote AniDB API endpoint
            myport       -- preferred local UDP port (falls back to 7654)
            logFunction  -- callable taking one string; used for all logging
            delay        -- seconds between outgoing packets (floored to 2.1 in _delay)
            timeout      -- socket/command timeout in seconds
            logPrivate   -- when True, do not log AUTH packet contents
        """
        super(AniDBLink, self).__init__()
        self.server = server
        self.port = port
        self.target = (server, port)
        self.timeout = timeout

        # connectSocket() sets self.myport to the actually-bound port.
        self.myport = 0
        self.bound = self.connectSocket(myport, self.timeout)

        # cmd_queue maps tag -> pending command; the None key handles untagged traffic.
        self.cmd_queue = {None:None}
        self.resp_tagged_queue = {}
        self.resp_untagged_queue = []
        self.tags = []
        self.lastpacket = time()
        self.delay = delay
        self.session = None
        self.banned = False
        self.crypt = None

        self.log = logFunction
        self.logPrivate = logPrivate

        self._stop = threading.Event()
        self._quiting = False
        # Daemon thread: does not keep the interpreter alive on shutdown.
        self.setDaemon(True)
        self.start()

    def connectSocket(self, myport, timeout):
        """Bind a UDP socket to the first available port of [myport, 7654].

        Returns True on success (self.myport is set), False otherwise.
        """
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.settimeout(timeout)
        portlist = [myport] + [7654]
        for port in portlist:
            try:
                self.sock.bind(('', port))
            except:
                continue
            else:
                self.myport = port
                return True
        else:
            # for/else: no port could be bound.
            return False;

    def disconnectSocket(self):
        """Close the UDP socket."""
        self.sock.close()

    def stop (self):
        """Shut down the link: close the socket and signal the thread to exit."""
        self.log("Releasing socket and stopping link thread")
        self._quiting = True
        self.disconnectSocket()
        self._stop.set()

    def stopped (self):
        """Return True once stop() has been called."""
        return self._stop.isSet()

    def print_log(self, data):
        # Simple stdout logger, usable as the logFunction argument.
        print data

    def print_log_dummy(self, data):
        # No-op logger, usable as the logFunction argument.
        pass

    def run(self):
        """Receive loop: read packets, resolve them into responses and
        dispatch them back to the issuing commands until stop() is called."""
        while not self._quiting:
            try:
                data = self.sock.recv(8192)
            except socket.timeout:
                # No traffic: expire commands that have waited too long.
                self._handle_timeouts()

                continue
            self.log("NetIO < %s" % repr(data))
            try:
                # Two attempts: if parsing fails once, session/crypt state is
                # reset and the packet is tried again.
                for i in range(2):
                    try:
                        tmp = data
                        resp = None
                        # A leading \x00\x00 marks a deflate-compressed packet.
                        if tmp[:2] == '\x00\x00':
                            tmp = zlib.decompressobj().decompress(tmp[2:])
                            self.log("UnZip | %s" % repr(tmp))
                        resp = ResponseResolver(tmp)
                    except:
                        sys.excepthook(*sys.exc_info())
                        self.crypt = None
                        self.session = None
                    else:
                        break
                if not resp:
                    raise AniDBPacketCorruptedError, "Either decrypting, decompressing or parsing the packet failed"
                cmd = self._cmd_dequeue(resp)
                resp = resp.resolve(cmd)
                resp.parse()
                # 200/201: login succeeded; remember the session key.
                if resp.rescode in ('200', '201'):
                    self.session = resp.attrs['sesskey']
                # 209: server wants encryption, which this client does not support.
                if resp.rescode in ('209',):
                    print "sorry encryption is not supported"
                    raise
                    #self.crypt=aes(md5(resp.req.apipassword+resp.attrs['salt']).digest())
                # Codes that invalidate the current session.
                if resp.rescode in ('203', '403', '500', '501', '503', '506'):
                    self.session = None
                    self.crypt = None
                # 504/555: user or client banned; refuse further sends.
                if resp.rescode in ('504', '555'):
                    self.banned = True
                    print "AniDB API informs that user or client is banned:", resp.resstr
                resp.handle()
                if not cmd or not cmd.mode:
                    self._resp_queue(resp)
                else:
                    self.tags.remove(resp.restag)
            except:
                # Any unexpected error: abort the link entirely rather than
                # risk flooding the API, releasing every waiting command.
                sys.excepthook(*sys.exc_info())
                print "Avoiding flood by paranoidly panicing: Aborting link thread, killing connection, releasing waiters and quiting"
                self.sock.close()
                try:cmd.waiter.release()
                except:pass
                for tag, cmd in self.cmd_queue.iteritems():
                    try:cmd.waiter.release()
                    except:pass
                sys.exit()

    def _handle_timeouts(self):
        """Release and drop every queued command older than the timeout."""
        willpop = []
        for tag, cmd in self.cmd_queue.iteritems():
            # Skip the sentinel None entry.
            if not tag:
                continue
            if time() - cmd.started > self.timeout:
                self.tags.remove(cmd.tag)
                willpop.append(cmd.tag)
                cmd.waiter.release()

        # Pop after iterating so the dict is not mutated mid-iteration.
        for tag in willpop:
            self.cmd_queue.pop(tag)

    def _resp_queue(self, response):
        """File a response under its tag (or on the untagged stack)."""
        if response.restag:
            self.resp_tagged_queue[response.restag] = response
        else:
            self.resp_untagged_queue.append(response)

    def getresponse(self, command):
        """Pop and return the queued response for *command* (or the newest
        untagged response when command is None)."""
        if command:
            resp = self.resp_tagged_queue.pop(command.tag)
        else:
            resp = self.resp_untagged_queue.pop()
        self.tags.remove(resp.restag)
        return resp

    def _cmd_queue(self, command):
        """Register a pending command under its tag."""
        self.cmd_queue[command.tag] = command
        self.tags.append(command.tag)

    def _cmd_dequeue(self, resp):
        """Pop and return the command a response belongs to (None if untagged)."""
        if not resp.restag:
            return None
        else:
            return self.cmd_queue.pop(resp.restag)

    def _delay(self):
        # Never send faster than one packet per 2.1 seconds.
        return (self.delay < 2.1 and 2.1 or self.delay)

    def _do_delay(self):
        """Sleep just long enough to honor the configured send delay."""
        age = time() - self.lastpacket
        delay = self._delay()
        if age <= delay:
            sleep(delay - age)

    def _send(self, command):
        """Serialize and send one command packet, enforcing ban and delay."""
        if self.banned:
            self.log("NetIO | BANNED")
            raise AniDBBannedError, "Not sending, banned"
        self._do_delay()
        self.lastpacket = time()
        command.started = time()
        data = command.raw_data()

        self.sock.sendto(data, self.target)
        # Never log AUTH payloads when logPrivate is set (they carry credentials).
        if command.command == 'AUTH' and self.logPrivate:
            self.log("NetIO > sensitive data is not logged!")
        else:
            self.log("NetIO > %s" % repr(data))

    def new_tag(self):
        """Return the next free command tag ('T001', 'T002', ...)."""
        if not len(self.tags):
            maxtag = "T000"
        else:
            maxtag = max(self.tags)
        newtag = "T%03d" % (int(maxtag[1:]) + 1)
        return newtag

    def request(self, command):
        """Queue and send *command*; requires a session except for
        AUTH/PING/ENCRYPT. Raises AniDBMustAuthError otherwise."""
        if not (self.session and command.session) and command.command not in ('AUTH', 'PING', 'ENCRYPT'):
            raise AniDBMustAuthError, "You must be authed to execute commands besides AUTH and PING"
        command.started = time()
        self._cmd_queue(command)
        self._send(command)
|
138
lib/adba/aniDBmaper.py
Normal file
|
@ -0,0 +1,138 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
#
|
||||||
|
# This file is part of aDBa.
|
||||||
|
#
|
||||||
|
# aDBa is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# aDBa is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with aDBa. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
from random import shuffle
|
||||||
|
|
||||||
|
class AniDBMaper:
|
||||||
|
|
||||||
|
blacklist = ('unused','retired','reserved')
|
||||||
|
|
||||||
|
def getAnimeBitsA(self,amask):
|
||||||
|
map = self.getAnimeMapA()
|
||||||
|
return self._getBitChain(map,amask)
|
||||||
|
|
||||||
|
def getAnimeCodesA(self,aBitChain):
|
||||||
|
amap = self.getAnimeMapA()
|
||||||
|
return self._getCodes(amap,aBitChain)
|
||||||
|
|
||||||
|
|
||||||
|
def getFileBitsF(self,fmask):
|
||||||
|
fmap = self.getFileMapF()
|
||||||
|
return self._getBitChain(fmap,fmask)
|
||||||
|
|
||||||
|
def getFileCodesF(self,bitChainF):
|
||||||
|
fmap = self.getFileMapF()
|
||||||
|
return self._getCodes(fmap,bitChainF)
|
||||||
|
|
||||||
|
|
||||||
|
def getFileBitsA(self,amask):
|
||||||
|
amap = self.getFileMapA()
|
||||||
|
return self._getBitChain(amap,amask)
|
||||||
|
|
||||||
|
def getFileCodesA(self,bitChainA):
|
||||||
|
amap = self.getFileMapA()
|
||||||
|
return self._getCodes(amap,bitChainA)
|
||||||
|
|
||||||
|
|
||||||
|
def _getBitChain(self,map,wanted):
|
||||||
|
"""Return an hex string with the correct bit set corresponding to the wanted fields in the map
|
||||||
|
"""
|
||||||
|
bit = 0
|
||||||
|
for index,field in enumerate(map):
|
||||||
|
if field in wanted and not field in self.blacklist:
|
||||||
|
bit = bit ^ (1<<len(map)-index-1)
|
||||||
|
|
||||||
|
bit = str(hex(bit)).lstrip("0x").rstrip("L")
|
||||||
|
bit = ''.join(["0" for unused in xrange(len(map)/4 - len(bit))])+bit
|
||||||
|
return bit
|
||||||
|
|
||||||
|
def _getCodes(self,map,bitChain):
|
||||||
|
"""Returns a list with the corresponding fields as set in the bitChain (hex string)
|
||||||
|
"""
|
||||||
|
codeList=[]
|
||||||
|
bitChain = int(bitChain,16)
|
||||||
|
mapLength = len(map)
|
||||||
|
for i in reversed(range(mapLength)):
|
||||||
|
if bitChain&(2**i):
|
||||||
|
codeList.append(map[mapLength-i-1])
|
||||||
|
return codeList
|
||||||
|
|
||||||
|
def getAnimeMapA(self):
|
||||||
|
# each line is one byte
|
||||||
|
# only chnage this if the api changes
|
||||||
|
map = ['aid','unused','year','type','related_aid_list','related_aid_type','category_list','category_weight_list',
|
||||||
|
'romaji_name','kanji_name','english_name','other_name','short_name_list','synonym_list','retired','retired',
|
||||||
|
'episodes','highest_episode_number','special_ep_count','air_date','end_date','url','picname','category_id_list',
|
||||||
|
'rating','vote_count','temp_rating','temp_vote_count','average_review_rating','review_count','award_list','is_18_restricted',
|
||||||
|
'anime_planet_id','ANN_id','allcinema_id','AnimeNfo_id','unused','unused','unused','date_record_updated',
|
||||||
|
'character_id_list','creator_id_list','main_creator_id_list','main_creator_name_list','unused','unused','unused','unused',
|
||||||
|
'specials_count','credits_count','other_count','trailer_count','parody_count','unused','unused','unused']
|
||||||
|
return map
|
||||||
|
|
||||||
|
def getFileMapF(self):
|
||||||
|
# each line is one byte
|
||||||
|
# only chnage this if the api changes
|
||||||
|
map = ['unused','aid','eid','gid','mylist_id','list_other_episodes','IsDeprecated','state',
|
||||||
|
'size','ed2k','md5','sha1','crc32','unused','unused','reserved',
|
||||||
|
'quality','source','audio_codec_list','audio_bitrate_list','video_codec','video_bitrate','video_resolution','file_type_extension',
|
||||||
|
'dub_language','sub_language','length_in_seconds','description','aired_date','unused','unused','anidb_file_name',
|
||||||
|
'mylist_state','mylist_filestate','mylist_viewed','mylist_viewdate','mylist_storage','mylist_source','mylist_other','unused']
|
||||||
|
return map
|
||||||
|
|
||||||
|
def getFileMapA(self):
|
||||||
|
# each line is one byte
|
||||||
|
# only chnage this if the api changes
|
||||||
|
map = ['anime_total_episodes','highest_episode_number','year','type','related_aid_list','related_aid_type','category_list','reserved',
|
||||||
|
'romaji_name','kanji_name','english_name','other_name','short_name_list','synonym_list','retired','retired',
|
||||||
|
'epno','ep_name','ep_romaji_name','ep_kanji_name','episode_rating','episode_vote_count','unused','unused',
|
||||||
|
'group_name','group_short_name','unused','unused','unused','unused','unused','date_aid_record_updated']
|
||||||
|
return map
|
||||||
|
|
||||||
|
def checkMapping(self,verbos=False):
|
||||||
|
|
||||||
|
print "------"
|
||||||
|
print "File F: "+ str(self.checkMapFileF(verbos))
|
||||||
|
print "------"
|
||||||
|
print "File A: "+ str(self.checkMapFileA(verbos))
|
||||||
|
|
||||||
|
|
||||||
|
def checkMapFileF(self,verbos=False):
|
||||||
|
getGeneralMap = lambda: self.getFileMapF()
|
||||||
|
getBits = lambda x: self.getFileBitsF(x)
|
||||||
|
getCodes = lambda x: self.getFileCodesF(x)
|
||||||
|
return self._checkMapGeneral(getGeneralMap,getBits,getCodes,verbos=verbos)
|
||||||
|
|
||||||
|
def checkMapFileA(self,verbos=False):
|
||||||
|
getGeneralMap = lambda: self.getFileMapA()
|
||||||
|
getBits = lambda x: self.getFileBitsA(x)
|
||||||
|
getCodes = lambda x: self.getFileCodesA(x)
|
||||||
|
return self._checkMapGeneral(getGeneralMap,getBits,getCodes,verbos=verbos)
|
||||||
|
|
||||||
|
def _checkMapGeneral(self,getGeneralMap,getBits,getCodes,verbos=False):
    # Round-trip self-test: pick a few random field names, encode them to a
    # bitmask, decode that mask back to names, then re-encode those names.
    #   getGeneralMap -- callable returning the full ordered field-name list
    #   getBits       -- callable mapping a list of field names -> bitmask
    #   getCodes      -- callable mapping a bitmask -> list of field names
    #   verbos        -- if True, print intermediate values for debugging
    # Returns True when both the bitmask and the (order-insensitive) set of
    # names survive the round trip unchanged.
    map = getGeneralMap()
    # randomise so repeated calls exercise different field combinations
    shuffle(map)
    # skip blacklisted fields and sample only five names per run
    mask = [elem for elem in map if elem not in self.blacklist][:5]
    bits = getBits(mask)
    mask_re = getCodes(bits)
    bits_re = getBits(mask_re)
    if verbos:
        print mask
        print mask_re
        print bits
        print bits_re
        print "bits are:"+ str((bits_re == bits))
        print "map is :"+ str((sorted(mask_re) == sorted(mask)))
    # sorted() because decoding is not required to preserve the sample order
    return (bits_re == bits) and sorted(mask_re) == sorted(mask)
|
1856
lib/adba/aniDBresponses.py
Normal file
65
lib/adba/aniDBtvDBmaper.py
Normal file
|
@ -0,0 +1,65 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
#
|
||||||
|
# This file is part of aDBa.
|
||||||
|
#
|
||||||
|
# aDBa is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# aDBa is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with aDBa. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import xml.etree.cElementTree as etree
|
||||||
|
import aniDBfileInfo as fileInfo
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class TvDBMap():
    """Translates AniDB ids to TheTVDB ids (and back) using the
    anime-list mapping XML shipped with the library."""

    def __init__(self,filePath=None):
        # filePath: optional path to the mapping XML; presumably the reader
        # falls back to a bundled default when None -- TODO confirm against
        # fileInfo.read_tvdb_map_xml
        self.xmlMap = fileInfo.read_tvdb_map_xml(filePath)

    def get_tvdb_for_anidb(self,anidb_id):
        """Return the tvdbid mapped to the given anidb id, or 0 if unknown."""
        return self._get_x_for_y(anidb_id,"anidbid","tvdbid")

    def get_anidb_for_tvdb(self,tvdb_id):
        """Return the anidb id mapped to the given tvdb id, or 0 if unknown."""
        return self._get_x_for_y(tvdb_id,"tvdbid","anidbid")

    def _get_x_for_y(self,xValue,x,y):
        """Scan <anime> elements for one whose attribute x equals xValue and
        return its attribute y as an int; 0 when no match is found."""
        #print("searching "+x+" with the value "+str(xValue)+" and want to give back "+y)
        # XML attribute values are strings, so normalise before comparing
        xValue = str(xValue)
        for anime in self.xmlMap.findall("anime"):
            try:
                if anime.get(x,False) == xValue:
                    return int(anime.get(y,0))
            except ValueError, e:
                # attribute y was present but not numeric; keep scanning
                continue
        return 0

    def get_season_episode_for_anidb_absoluteNumber(self,anidb_id,absoluteNumber):
        """Stub: always returns (0, 0) for now.

        The defaultSeason lookup below is computed but never used yet.
        """
        # NOTE: this cant be done without the length of each season from thetvdb
        #TODO: implement
        season = 0
        episode = 0

        for anime in self.xmlMap.findall("anime"):
            if int(anime.get("anidbid",False)) == anidb_id:
                defaultSeason = int(anime.get("defaulttvdbseason",1))

        return (season,episode)

    def get_season_episode_for_tvdb_absoluteNumber(self,anidb_id,absoluteNumber):
        """Stub: always returns (0, 0)."""
        #TODO: implement
        season = 0
        episode = 0
        return (season,episode)
|
5250
lib/adba/anime-list.xml
Normal file
49439
lib/adba/animetitles.xml
Normal file
78
lib/fuzzywuzzy/StringMatcher.py
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# encoding: utf-8
|
||||||
|
"""
|
||||||
|
StringMatcher.py
|
||||||
|
|
||||||
|
ported from python-Levenshtein
|
||||||
|
[https://github.com/miohtama/python-Levenshtein]
|
||||||
|
"""
|
||||||
|
|
||||||
|
from Levenshtein import *
|
||||||
|
from warnings import warn
|
||||||
|
|
||||||
|
class StringMatcher:
    """A SequenceMatcher-like class built on the top of Levenshtein.

    Results are cached per sequence pair; ``None`` is used as the
    "not computed yet" sentinel so that legitimate falsy results
    (ratio 0.0, distance 0, empty opcode lists) are cached instead of
    being recomputed on every call (the original ``if not self._x``
    checks recomputed those).
    """

    def _reset_cache(self):
        # None == not computed yet; any other value is a cached result.
        self._ratio = self._distance = None
        self._opcodes = self._editops = self._matching_blocks = None

    def __init__(self, isjunk=None, seq1='', seq2=''):
        if isjunk:
            # difflib-compatible parameter that Levenshtein cannot honour
            warn("isjunk not NOT implemented, it will be ignored")
        self._str1, self._str2 = seq1, seq2
        self._reset_cache()

    def set_seqs(self, seq1, seq2):
        """Set both sequences and invalidate all cached results."""
        self._str1, self._str2 = seq1, seq2
        self._reset_cache()

    def set_seq1(self, seq1):
        """Set the first sequence and invalidate all cached results."""
        self._str1 = seq1
        self._reset_cache()

    def set_seq2(self, seq2):
        """Set the second sequence and invalidate all cached results."""
        self._str2 = seq2
        self._reset_cache()

    def get_opcodes(self):
        """Return (cached) opcodes; derive them from editops when available."""
        if self._opcodes is None:
            if self._editops:
                self._opcodes = opcodes(self._editops, self._str1, self._str2)
            else:
                self._opcodes = opcodes(self._str1, self._str2)
        return self._opcodes

    def get_editops(self):
        """Return (cached) editops; derive them from opcodes when available."""
        if self._editops is None:
            if self._opcodes:
                self._editops = editops(self._opcodes, self._str1, self._str2)
            else:
                self._editops = editops(self._str1, self._str2)
        return self._editops

    def get_matching_blocks(self):
        """Return (cached) matching blocks computed from the opcodes."""
        if self._matching_blocks is None:
            self._matching_blocks = matching_blocks(self.get_opcodes(),
                                                    self._str1, self._str2)
        return self._matching_blocks

    def ratio(self):
        """Return (cached) similarity ratio in [0, 1]."""
        if self._ratio is None:
            self._ratio = ratio(self._str1, self._str2)
        return self._ratio

    def quick_ratio(self):
        """Alias of ratio() -- Levenshtein is fast enough."""
        # This is usually quick enough :o)
        if self._ratio is None:
            self._ratio = ratio(self._str1, self._str2)
        return self._ratio

    def real_quick_ratio(self):
        """Cheap upper bound on ratio() based on lengths alone."""
        len1, len2 = len(self._str1), len(self._str2)
        return 2.0 * min(len1, len2) / (len1 + len2)

    def distance(self):
        """Return (cached) Levenshtein distance."""
        if self._distance is None:
            self._distance = distance(self._str1, self._str2)
        return self._distance
|
0
lib/fuzzywuzzy/__init__.py
Normal file
263
lib/fuzzywuzzy/fuzz.py
Normal file
|
@ -0,0 +1,263 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# encoding: utf-8
|
||||||
|
"""
|
||||||
|
fuzz.py
|
||||||
|
|
||||||
|
Copyright (c) 2011 Adam Cohen
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining
|
||||||
|
a copy of this software and associated documentation files (the
|
||||||
|
"Software"), to deal in the Software without restriction, including
|
||||||
|
without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
distribute, sublicense, and/or sell copies of the Software, and to
|
||||||
|
permit persons to whom the Software is furnished to do so, subject to
|
||||||
|
the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||||
|
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||||
|
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||||
|
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||||
|
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
"""
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
try:
|
||||||
|
from StringMatcher import StringMatcher as SequenceMatcher
|
||||||
|
except:
|
||||||
|
from difflib import SequenceMatcher
|
||||||
|
|
||||||
|
from . import utils
|
||||||
|
|
||||||
|
|
||||||
|
###########################
|
||||||
|
# Basic Scoring Functions #
|
||||||
|
###########################
|
||||||
|
|
||||||
|
|
||||||
|
def ratio(s1, s2):
    """Return the plain similarity ratio of two strings as an int 0..100."""
    if s1 is None:
        raise TypeError("s1 is None")
    if s2 is None:
        raise TypeError("s2 is None")

    s1, s2 = utils.make_type_consistent(s1, s2)
    # an empty string matches nothing
    if not s1 or not s2:
        return 0

    matcher = SequenceMatcher(None, s1, s2)
    return utils.intr(100 * matcher.ratio())
|
||||||
|
|
||||||
|
|
||||||
|
# todo: skip duplicate indexes for a little more speed
|
||||||
|
def partial_ratio(s1, s2):
|
||||||
|
|
||||||
|
if s1 is None:
|
||||||
|
raise TypeError("s1 is None")
|
||||||
|
if s2 is None:
|
||||||
|
raise TypeError("s2 is None")
|
||||||
|
s1, s2 = utils.make_type_consistent(s1, s2)
|
||||||
|
if len(s1) == 0 or len(s2) == 0:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
if len(s1) <= len(s2):
|
||||||
|
shorter = s1
|
||||||
|
longer = s2
|
||||||
|
else:
|
||||||
|
shorter = s2
|
||||||
|
longer = s1
|
||||||
|
|
||||||
|
m = SequenceMatcher(None, shorter, longer)
|
||||||
|
blocks = m.get_matching_blocks()
|
||||||
|
|
||||||
|
# each block represents a sequence of matching characters in a string
|
||||||
|
# of the form (idx_1, idx_2, len)
|
||||||
|
# the best partial match will block align with at least one of those blocks
|
||||||
|
# e.g. shorter = "abcd", longer = XXXbcdeEEE
|
||||||
|
# block = (1,3,3)
|
||||||
|
# best score === ratio("abcd", "Xbcd")
|
||||||
|
scores = []
|
||||||
|
for block in blocks:
|
||||||
|
long_start = block[1] - block[0] if (block[1] - block[0]) > 0 else 0
|
||||||
|
long_end = long_start + len(shorter)
|
||||||
|
long_substr = longer[long_start:long_end]
|
||||||
|
|
||||||
|
m2 = SequenceMatcher(None, shorter, long_substr)
|
||||||
|
r = m2.ratio()
|
||||||
|
if r > .995:
|
||||||
|
return 100
|
||||||
|
else:
|
||||||
|
scores.append(r)
|
||||||
|
|
||||||
|
return int(100 * max(scores))
|
||||||
|
|
||||||
|
|
||||||
|
##############################
|
||||||
|
# Advanced Scoring Functions #
|
||||||
|
##############################
|
||||||
|
|
||||||
|
# Sorted Token
|
||||||
|
# find all alphanumeric tokens in the string
|
||||||
|
# sort those tokens and take ratio of resulting joined strings
|
||||||
|
# controls for unordered string elements
|
||||||
|
def _token_sort(s1, s2, partial=True, force_ascii=True):
    """Tokenise both strings, sort the tokens, and score the rejoined
    results -- this controls for unordered string elements."""
    if s1 is None:
        raise TypeError("s1 is None")
    if s2 is None:
        raise TypeError("s2 is None")

    def _sorted_tokens(s):
        # normalise, split on whitespace, rebuild in sorted order
        tokens = utils.full_process(s, force_ascii=force_ascii).split()
        return " ".join(sorted(tokens)).strip()

    sorted1 = _sorted_tokens(s1)
    sorted2 = _sorted_tokens(s2)

    scorer = partial_ratio if partial else ratio
    return scorer(sorted1, sorted2)
|
||||||
|
|
||||||
|
|
||||||
|
def token_sort_ratio(s1, s2, force_ascii=True):
    """Full (non-partial) ratio on alphabetically sorted tokens."""
    return _token_sort(s1, s2, force_ascii=force_ascii, partial=False)
|
||||||
|
|
||||||
|
|
||||||
|
def partial_token_sort_ratio(s1, s2, force_ascii=True):
    """Partial ratio on alphabetically sorted tokens."""
    return _token_sort(s1, s2, force_ascii=force_ascii, partial=True)
|
||||||
|
|
||||||
|
|
||||||
|
# Token Set
|
||||||
|
# find all alphanumeric tokens in each string...treat them as a set
|
||||||
|
# construct two strings of the form
|
||||||
|
# <sorted_intersection><sorted_remainder>
|
||||||
|
# take ratios of those two strings
|
||||||
|
# controls for unordered partial matches
|
||||||
|
def _token_set(s1, s2, partial=True, force_ascii=True):
    """Score two strings by comparing their token *sets*.

    Builds <sorted intersection> plus each side's leftover tokens and
    returns the best pairwise ratio -- this controls for unordered
    partial matches.

    NOTE(review): the `partial` parameter is accepted but not used in
    this body; upstream fuzzywuzzy switches to partial_ratio when
    partial=True -- confirm whether that branch was lost.
    """
    if s1 is None:
        raise TypeError("s1 is None")
    if s2 is None:
        raise TypeError("s2 is None")

    p1 = utils.full_process(s1, force_ascii=force_ascii)
    p2 = utils.full_process(s2, force_ascii=force_ascii)

    if not utils.validate_string(p1):
        return 0
    if not utils.validate_string(p2):
        return 0

    # pull tokens
    tokens1 = set(utils.full_process(p1).split())
    tokens2 = set(utils.full_process(p2).split())

    intersection = tokens1.intersection(tokens2)
    diff1to2 = tokens1.difference(tokens2)
    diff2to1 = tokens2.difference(tokens1)

    sorted_sect = " ".join(sorted(intersection))
    sorted_1to2 = " ".join(sorted(diff1to2))
    sorted_2to1 = " ".join(sorted(diff2to1))

    combined_1to2 = sorted_sect + " " + sorted_1to2
    combined_2to1 = sorted_sect + " " + sorted_2to1

    # strip
    sorted_sect = sorted_sect.strip()
    combined_1to2 = combined_1to2.strip()
    combined_2to1 = combined_2to1.strip()

    # best of: intersection vs each combined string, and the two combined
    # strings against each other
    pairwise = [
        ratio(sorted_sect, combined_1to2),
        ratio(sorted_sect, combined_2to1),
        ratio(combined_1to2, combined_2to1)
    ]
    return max(pairwise)
|
||||||
|
|
||||||
|
|
||||||
|
def token_set_ratio(s1, s2, force_ascii=True):
    """Full (non-partial) ratio on token sets."""
    return _token_set(s1, s2, force_ascii=force_ascii, partial=False)
|
||||||
|
|
||||||
|
|
||||||
|
def partial_token_set_ratio(s1, s2, force_ascii=True):
    """Partial ratio on token sets."""
    return _token_set(s1, s2, force_ascii=force_ascii, partial=True)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: numerics
|
||||||
|
|
||||||
|
###################
|
||||||
|
# Combination API #
|
||||||
|
###################
|
||||||
|
|
||||||
|
# q is for quick
|
||||||
|
def QRatio(s1, s2, force_ascii=True):
|
||||||
|
|
||||||
|
p1 = utils.full_process(s1, force_ascii=force_ascii)
|
||||||
|
p2 = utils.full_process(s2, force_ascii=force_ascii)
|
||||||
|
|
||||||
|
if not utils.validate_string(p1):
|
||||||
|
return 0
|
||||||
|
if not utils.validate_string(p2):
|
||||||
|
return 0
|
||||||
|
|
||||||
|
return ratio(p1, p2)
|
||||||
|
|
||||||
|
|
||||||
|
def UQRatio(s1, s2):
    """Unicode-preserving QRatio (no ASCII folding)."""
    return QRatio(s1, s2, force_ascii=False)
|
||||||
|
|
||||||
|
|
||||||
|
# w is for weighted
|
||||||
|
def WRatio(s1, s2, force_ascii=True):
|
||||||
|
|
||||||
|
p1 = utils.full_process(s1, force_ascii=force_ascii)
|
||||||
|
p2 = utils.full_process(s2, force_ascii=force_ascii)
|
||||||
|
|
||||||
|
if not utils.validate_string(p1):
|
||||||
|
return 0
|
||||||
|
if not utils.validate_string(p2):
|
||||||
|
return 0
|
||||||
|
|
||||||
|
# should we look at partials?
|
||||||
|
try_partial = True
|
||||||
|
unbase_scale = .95
|
||||||
|
partial_scale = .90
|
||||||
|
|
||||||
|
base = ratio(p1, p2)
|
||||||
|
len_ratio = float(max(len(p1), len(p2))) / min(len(p1), len(p2))
|
||||||
|
|
||||||
|
# if strings are similar length, don't use partials
|
||||||
|
if len_ratio < 1.5:
|
||||||
|
try_partial = False
|
||||||
|
|
||||||
|
# if one string is much much shorter than the other
|
||||||
|
if len_ratio > 8:
|
||||||
|
partial_scale = .6
|
||||||
|
|
||||||
|
if try_partial:
|
||||||
|
partial = partial_ratio(p1, p2) * partial_scale
|
||||||
|
ptsor = partial_token_sort_ratio(p1, p2, force_ascii=force_ascii) \
|
||||||
|
* unbase_scale * partial_scale
|
||||||
|
ptser = partial_token_set_ratio(p1, p2, force_ascii=force_ascii) \
|
||||||
|
* unbase_scale * partial_scale
|
||||||
|
|
||||||
|
return int(max(base, partial, ptsor, ptser))
|
||||||
|
else:
|
||||||
|
tsor = token_sort_ratio(p1, p2, force_ascii=force_ascii) * unbase_scale
|
||||||
|
tser = token_set_ratio(p1, p2, force_ascii=force_ascii) * unbase_scale
|
||||||
|
|
||||||
|
return int(max(base, tsor, tser))
|
||||||
|
|
||||||
|
|
||||||
|
def UWRatio(s1, s2):
    """Unicode-preserving WRatio (no ASCII folding)."""
    return WRatio(s1, s2, force_ascii=False)
|
119
lib/fuzzywuzzy/process.py
Normal file
|
@ -0,0 +1,119 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# encoding: utf-8
|
||||||
|
"""
|
||||||
|
process.py
|
||||||
|
|
||||||
|
Copyright (c) 2011 Adam Cohen
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining
|
||||||
|
a copy of this software and associated documentation files (the
|
||||||
|
"Software"), to deal in the Software without restriction, including
|
||||||
|
without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
distribute, sublicense, and/or sell copies of the Software, and to
|
||||||
|
permit persons to whom the Software is furnished to do so, subject to
|
||||||
|
the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||||
|
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||||
|
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||||
|
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||||
|
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
"""
|
||||||
|
import itertools
|
||||||
|
|
||||||
|
from . import fuzz
|
||||||
|
from . import utils
|
||||||
|
|
||||||
|
|
||||||
|
def extract(query, choices, processor=None, scorer=None, limit=5):
    """Find best matches in a list of choices, return a list of tuples
    containing the match and it's score.

    Arguments:
        query     -- an object representing the thing we want to find
        choices   -- a list of objects we are attempting to extract
                     values from
        scorer    -- f(QUERY, OBJ) --> INT. We will return the objects
                     with the highest score; by default we use
                     fuzz.WRatio() and both OBJ and QUERY should be
                     strings
        processor -- f(OBJ_A) --> OBJ_B, where the output is an input
                     to scorer; for example, "processor = lambda x:
                     x[0]" would return the first element in a
                     collection x (of, say, strings), and this would
                     then be used in scoring. By default we use
                     utils.full_process()
        limit     -- maximum number of (choice, score) tuples returned
    """
    if choices is None or len(choices) == 0:
        return []

    # default, turn whatever the choice is into a workable string
    if processor is None:
        processor = utils.full_process

    # default: weighted ratio
    if scorer is None:
        scorer = fuzz.WRatio

    # score every choice, keeping the original (unprocessed) choice in
    # the result tuples  (renamed from `tuple`, which shadowed the builtin)
    scored = [(choice, scorer(query, processor(choice))) for choice in choices]

    # highest score first; sort is stable, so ties keep input order
    scored.sort(key=lambda pair: pair[1], reverse=True)
    return scored[:limit]
|
||||||
|
|
||||||
|
|
||||||
|
def extractBests(query, choices, processor=None, scorer=None, score_cutoff=0, limit=5):
    """Find best matches above a score in a list of choices, return a
    list of tuples containing the match and it's score.

    Convenience wrapper around extract() -- see it for the full argument
    list. Entries scoring less than or equal to score_cutoff are dropped.
    """
    candidates = extract(query, choices, processor, scorer, limit)
    if not candidates:
        return []
    # extract() returns results sorted best-first, so we can stop at the
    # first entry at or below the cutoff
    return list(itertools.takewhile(lambda pair: pair[1] > score_cutoff, candidates))
|
||||||
|
|
||||||
|
|
||||||
|
def extractOne(query, choices, processor=None, scorer=None, score_cutoff=0):
    """Find the best match above a score in a list of choices; return a
    (match, score) tuple when its score exceeds score_cutoff, else None.

    Convenience wrapper around extract() with limit=1 -- see it for the
    full argument list.
    """
    best_list = extract(query, choices, processor, scorer, limit=1)
    if not best_list:
        return None
    best = best_list[0]
    return best if best[1] > score_cutoff else None
|
41
lib/fuzzywuzzy/string_processing.py
Normal file
|
@ -0,0 +1,41 @@
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
class StringProcessor(object):
    """
    This class defines method to process strings in the most
    efficient way. Ideally all the methods below use unicode strings
    for both input and output.
    """

    # Compiled once at class-definition time instead of on every call.
    # (?ui): unicode-aware, case-insensitive; \W matches one non-word char.
    _non_word_re = re.compile(r"(?ui)\W")

    @classmethod
    def replace_non_letters_non_numbers_with_whitespace(cls, a_string):
        """
        Replace every character that is not a letter, digit or
        underscore with a white space (one space per character).
        """
        return cls._non_word_re.sub(" ", a_string)

    @classmethod
    def strip(cls, a_string):
        """
        This function strips leading and trailing white space.
        """
        return a_string.strip()

    @classmethod
    def to_lower_case(cls, a_string):
        """
        This function returns the lower-cased version of the string given.
        """
        return a_string.lower()

    @classmethod
    def to_upper_case(cls, a_string):
        """
        This function returns the upper-cased version of the string given.
        """
        return a_string.upper()
|
76
lib/fuzzywuzzy/utils.py
Normal file
|
@ -0,0 +1,76 @@
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from fuzzywuzzy.string_processing import StringProcessor
|
||||||
|
|
||||||
|
|
||||||
|
PY3 = sys.version_info[0] == 3
|
||||||
|
|
||||||
|
|
||||||
|
def validate_string(s):
    """Return True when s has a length greater than zero; False for empty
    values and for anything len() rejects (None, numbers, ...)."""
    try:
        return len(s) > 0
    except:
        # len() failed, so this is not a usable string-like value
        return False
|
||||||
|
|
||||||
|
bad_chars = str('')  # ascii dammit!
# Collect every high-bit (non-ASCII) code point 128..255; asciionly()
# deletes these characters from its input.
for i in range(128, 256):
    bad_chars += chr(i)
if PY3:
    # On Python 3, str.translate needs a mapping of ordinals to None
    # (delete); precompute it once at import time.
    translation_table = dict((ord(c), None) for c in bad_chars)
|
||||||
|
|
||||||
|
|
||||||
|
def asciionly(s):
    """Delete all non-ASCII characters from the byte/str string s."""
    if PY3:
        # mapping-based translate built at module import time
        return s.translate(translation_table)
    else:
        # Python 2 two-argument form: translate(table, deletechars)
        return s.translate(None, bad_chars)
|
||||||
|
|
||||||
|
|
||||||
|
def asciidammit(s):
    """Coerce any object to an ASCII-only string.

    Python 2 only: relies on the builtin `unicode`, which does not exist
    on Python 3.
    """
    if type(s) is str:
        return asciionly(s)
    elif type(s) is unicode:
        # encode drops non-ASCII; asciionly then cleans the result
        return asciionly(s.encode('ascii', 'ignore'))
    else:
        # not a string at all: stringify first, then recurse
        return asciidammit(unicode(s))
|
||||||
|
|
||||||
|
|
||||||
|
def make_type_consistent(s1, s2):
    """Return (s1, s2) coerced to one common string type.

    Left untouched when both are str or both are unicode; otherwise both
    are converted to unicode. Python 2 only: `unicode` is not defined on
    Python 3.
    """
    if isinstance(s1, str) and isinstance(s2, str):
        return s1, s2

    elif isinstance(s1, unicode) and isinstance(s2, unicode):
        return s1, s2

    else:
        return unicode(s1), unicode(s2)
|
||||||
|
|
||||||
|
|
||||||
|
def full_process(s, force_ascii=False):
    """Process string by
    -- removing all but letters and numbers
    -- trim whitespace
    -- force to lower case
    if force_ascii == True, force convert to ascii.
    Returns "" when s is None."""

    if s is None:
        return ""

    if force_ascii:
        s = asciidammit(s)
    # Keep only Letters and Numbers (see Unicode docs).
    string_out = StringProcessor.replace_non_letters_non_numbers_with_whitespace(s)
    # Force into lowercase.
    string_out = StringProcessor.to_lower_case(string_out)
    # Remove leading and trailing whitespaces.
    string_out = StringProcessor.strip(string_out)
    return string_out
|
||||||
|
|
||||||
|
|
||||||
|
def intr(n):
    """Return n rounded to the nearest integer, as an int."""
    rounded = round(n)
    return int(rounded)
|
|
@ -6,7 +6,7 @@ a person from the IMDb database.
|
||||||
It can fetch data through different media (e.g.: the IMDb web pages,
|
It can fetch data through different media (e.g.: the IMDb web pages,
|
||||||
a SQL database, etc.)
|
a SQL database, etc.)
|
||||||
|
|
||||||
Copyright 2004-2012 Davide Alberani <da@erlug.linux.it>
|
Copyright 2004-2014 Davide Alberani <da@erlug.linux.it>
|
||||||
|
|
||||||
This program is free software; you can redistribute it and/or modify
|
This program is free software; you can redistribute it and/or modify
|
||||||
it under the terms of the GNU General Public License as published by
|
it under the terms of the GNU General Public License as published by
|
||||||
|
@ -25,7 +25,7 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||||
|
|
||||||
__all__ = ['IMDb', 'IMDbError', 'Movie', 'Person', 'Character', 'Company',
|
__all__ = ['IMDb', 'IMDbError', 'Movie', 'Person', 'Character', 'Company',
|
||||||
'available_access_systems']
|
'available_access_systems']
|
||||||
__version__ = VERSION = '4.9'
|
__version__ = VERSION = '5.0'
|
||||||
|
|
||||||
# Import compatibility module (importing it is enough).
|
# Import compatibility module (importing it is enough).
|
||||||
import _compat
|
import _compat
|
||||||
|
@ -160,6 +160,7 @@ def IMDb(accessSystem=None, *arguments, **keywords):
|
||||||
kwds.update(keywords)
|
kwds.update(keywords)
|
||||||
keywords = kwds
|
keywords = kwds
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
|
import logging
|
||||||
logging.getLogger('imdbpy').warn('Unable to read configuration' \
|
logging.getLogger('imdbpy').warn('Unable to read configuration' \
|
||||||
' file; complete error: %s' % e)
|
' file; complete error: %s' % e)
|
||||||
# It just LOOKS LIKE a bad habit: we tried to read config
|
# It just LOOKS LIKE a bad habit: we tried to read config
|
||||||
|
@ -303,7 +304,7 @@ class IMDbBase:
|
||||||
# http://akas.imdb.com/keyword/%s/
|
# http://akas.imdb.com/keyword/%s/
|
||||||
imdbURL_keyword_main=imdbURL_base + 'keyword/%s/'
|
imdbURL_keyword_main=imdbURL_base + 'keyword/%s/'
|
||||||
# http://akas.imdb.com/chart/top
|
# http://akas.imdb.com/chart/top
|
||||||
imdbURL_top250=imdbURL_base + 'chart/top',
|
imdbURL_top250=imdbURL_base + 'chart/top'
|
||||||
# http://akas.imdb.com/chart/bottom
|
# http://akas.imdb.com/chart/bottom
|
||||||
imdbURL_bottom100=imdbURL_base + 'chart/bottom'
|
imdbURL_bottom100=imdbURL_base + 'chart/bottom'
|
||||||
# http://akas.imdb.com/find?%s
|
# http://akas.imdb.com/find?%s
|
||||||
|
@ -824,22 +825,23 @@ class IMDbBase:
|
||||||
# subclass, somewhere under the imdb.parser package.
|
# subclass, somewhere under the imdb.parser package.
|
||||||
raise NotImplementedError('override this method')
|
raise NotImplementedError('override this method')
|
||||||
|
|
||||||
def _searchIMDb(self, kind, ton):
|
def _searchIMDb(self, kind, ton, title_kind=None):
|
||||||
"""Search the IMDb akas server for the given title or name."""
|
"""Search the IMDb akas server for the given title or name."""
|
||||||
# The Exact Primary search system has gone AWOL, so we resort
|
# The Exact Primary search system has gone AWOL, so we resort
|
||||||
# to the mobile search. :-/
|
# to the mobile search. :-/
|
||||||
if not ton:
|
if not ton:
|
||||||
return None
|
return None
|
||||||
|
ton = ton.strip('"')
|
||||||
aSystem = IMDb('mobile')
|
aSystem = IMDb('mobile')
|
||||||
if kind == 'tt':
|
if kind == 'tt':
|
||||||
searchFunct = aSystem.search_movie
|
searchFunct = aSystem.search_movie
|
||||||
check = 'long imdb canonical title'
|
check = 'long imdb title'
|
||||||
elif kind == 'nm':
|
elif kind == 'nm':
|
||||||
searchFunct = aSystem.search_person
|
searchFunct = aSystem.search_person
|
||||||
check = 'long imdb canonical name'
|
check = 'long imdb name'
|
||||||
elif kind == 'char':
|
elif kind == 'char':
|
||||||
searchFunct = aSystem.search_character
|
searchFunct = aSystem.search_character
|
||||||
check = 'long imdb canonical name'
|
check = 'long imdb name'
|
||||||
elif kind == 'co':
|
elif kind == 'co':
|
||||||
# XXX: are [COUNTRY] codes included in the results?
|
# XXX: are [COUNTRY] codes included in the results?
|
||||||
searchFunct = aSystem.search_company
|
searchFunct = aSystem.search_company
|
||||||
|
@ -852,24 +854,42 @@ class IMDbBase:
|
||||||
# exact match.
|
# exact match.
|
||||||
if len(searchRes) == 1:
|
if len(searchRes) == 1:
|
||||||
return searchRes[0].getID()
|
return searchRes[0].getID()
|
||||||
|
title_only_matches = []
|
||||||
for item in searchRes:
|
for item in searchRes:
|
||||||
# Return the first perfect match.
|
# Return the first perfect match.
|
||||||
if item[check] == ton:
|
if item[check].strip('"') == ton:
|
||||||
|
# For titles do additional check for kind
|
||||||
|
if kind != 'tt' or title_kind == item['kind']:
|
||||||
return item.getID()
|
return item.getID()
|
||||||
|
elif kind == 'tt':
|
||||||
|
title_only_matches.append(item.getID())
|
||||||
|
# imdbpy2sql.py could detected wrong type, so if no title and kind
|
||||||
|
# matches found - collect all results with title only match
|
||||||
|
# Return list of IDs if multiple matches (can happen when searching
|
||||||
|
# titles with no title_kind specified)
|
||||||
|
# Example: DB: Band of Brothers "tv series" vs "tv mini-series"
|
||||||
|
if title_only_matches:
|
||||||
|
if len(title_only_matches) == 1:
|
||||||
|
return title_only_matches[0]
|
||||||
|
else:
|
||||||
|
return title_only_matches
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def title2imdbID(self, title):
|
def title2imdbID(self, title, kind=None):
|
||||||
"""Translate a movie title (in the plain text data files format)
|
"""Translate a movie title (in the plain text data files format)
|
||||||
to an imdbID.
|
to an imdbID.
|
||||||
Try an Exact Primary Title search on IMDb;
|
Try an Exact Primary Title search on IMDb;
|
||||||
return None if it's unable to get the imdbID."""
|
return None if it's unable to get the imdbID;
|
||||||
return self._searchIMDb('tt', title)
|
Always specify kind: movie, tv series, video game etc. or search can
|
||||||
|
return list of IDs if multiple matches found
|
||||||
|
"""
|
||||||
|
return self._searchIMDb('tt', title, kind)
|
||||||
|
|
||||||
def name2imdbID(self, name):
|
def name2imdbID(self, name):
|
||||||
"""Translate a person name in an imdbID.
|
"""Translate a person name in an imdbID.
|
||||||
Try an Exact Primary Name search on IMDb;
|
Try an Exact Primary Name search on IMDb;
|
||||||
return None if it's unable to get the imdbID."""
|
return None if it's unable to get the imdbID."""
|
||||||
return self._searchIMDb('tt', name)
|
return self._searchIMDb('nm', name)
|
||||||
|
|
||||||
def character2imdbID(self, name):
|
def character2imdbID(self, name):
|
||||||
"""Translate a character name in an imdbID.
|
"""Translate a character name in an imdbID.
|
||||||
|
@ -896,7 +916,8 @@ class IMDbBase:
|
||||||
imdbID = aSystem.get_imdbMovieID(mop.movieID)
|
imdbID = aSystem.get_imdbMovieID(mop.movieID)
|
||||||
else:
|
else:
|
||||||
imdbID = aSystem.title2imdbID(build_title(mop, canonical=0,
|
imdbID = aSystem.title2imdbID(build_title(mop, canonical=0,
|
||||||
ptdf=1))
|
ptdf=0, appendKind=False),
|
||||||
|
mop['kind'])
|
||||||
elif isinstance(mop, Person.Person):
|
elif isinstance(mop, Person.Person):
|
||||||
if mop.personID is not None:
|
if mop.personID is not None:
|
||||||
imdbID = aSystem.get_imdbPersonID(mop.personID)
|
imdbID = aSystem.get_imdbPersonID(mop.personID)
|
||||||
|
|
|
@ -29,7 +29,7 @@
|
||||||
|
|
||||||
[imdbpy]
|
[imdbpy]
|
||||||
## Default.
|
## Default.
|
||||||
accessSystem = mobile
|
accessSystem = http
|
||||||
|
|
||||||
## Optional (options common to every data access system):
|
## Optional (options common to every data access system):
|
||||||
# Activate adult searches (on, by default).
|
# Activate adult searches (on, by default).
|
||||||
|
@ -37,7 +37,7 @@ accessSystem = mobile
|
||||||
# Number of results for searches (20 by default).
|
# Number of results for searches (20 by default).
|
||||||
#results = 20
|
#results = 20
|
||||||
# Re-raise all caught exceptions (off, by default).
|
# Re-raise all caught exceptions (off, by default).
|
||||||
reraiseExceptions = on
|
#reraiseExceptions = off
|
||||||
|
|
||||||
## Optional (options common to http and mobile data access systems):
|
## Optional (options common to http and mobile data access systems):
|
||||||
# Proxy used to access the network. If it requires authentication,
|
# Proxy used to access the network. If it requires authentication,
|
||||||
|
@ -69,7 +69,7 @@ reraiseExceptions = on
|
||||||
## Set the threshold for logging messages.
|
## Set the threshold for logging messages.
|
||||||
# Can be one of "debug", "info", "warning", "error", "critical" (default:
|
# Can be one of "debug", "info", "warning", "error", "critical" (default:
|
||||||
# "warning").
|
# "warning").
|
||||||
loggingLevel = info
|
#loggingLevel = debug
|
||||||
|
|
||||||
## Path to a configuration file for the logging facility;
|
## Path to a configuration file for the logging facility;
|
||||||
# see: http://docs.python.org/library/logging.html#configuring-logging
|
# see: http://docs.python.org/library/logging.html#configuring-logging
|
||||||
|
|
|
@ -64,8 +64,10 @@ LANG_ARTICLES = {
|
||||||
'English': ('the', 'a', 'an'),
|
'English': ('the', 'a', 'an'),
|
||||||
'Italian': ('la', 'le', "l'", 'il', 'i', 'un', 'una', 'gli', 'lo', "un'",
|
'Italian': ('la', 'le', "l'", 'il', 'i', 'un', 'una', 'gli', 'lo', "un'",
|
||||||
'uno'),
|
'uno'),
|
||||||
'Spanish': ('la', 'le', 'el', 'les', 'un', 'los', 'una', 'uno', 'unos',
|
'Spanish': ('la', 'lo', 'el', 'las', 'un', 'los', 'una', 'al', 'del',
|
||||||
'unas'),
|
'unos', 'unas', 'uno'),
|
||||||
|
'French': ('le', "l'", 'la', 'les', 'un', 'une', 'des', 'au', 'du', '\xc3\xa0 la',
|
||||||
|
'de la', 'aux'),
|
||||||
'Portuguese': ('a', 'as', 'o', 'os', 'um', 'uns', 'uma', 'umas'),
|
'Portuguese': ('a', 'as', 'o', 'os', 'um', 'uns', 'uma', 'umas'),
|
||||||
'Turkish': (), # Some languages doesn't have articles.
|
'Turkish': (), # Some languages doesn't have articles.
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#!/usr/bin/env python2
|
#!/usr/bin/env python
|
||||||
"""
|
"""
|
||||||
generatepot.py script.
|
generatepot.py script.
|
||||||
|
|
||||||
|
|
1303
lib/imdb/locale/imdbpy-ar.po
Normal file
1303
lib/imdb/locale/imdbpy-bg.po
Normal file
1303
lib/imdb/locale/imdbpy-de.po
Normal file
1304
lib/imdb/locale/imdbpy-es.po
Normal file
1304
lib/imdb/locale/imdbpy-fr.po
Normal file
|
@ -1,4 +1,4 @@
|
||||||
#!/usr/bin/env python2
|
#!/usr/bin/env python
|
||||||
# -*- coding: iso-8859-1 -*-
|
# -*- coding: iso-8859-1 -*-
|
||||||
"""Generate binary message catalog from textual translation description.
|
"""Generate binary message catalog from textual translation description.
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#!/usr/bin/env python2
|
#!/usr/bin/env python
|
||||||
"""
|
"""
|
||||||
rebuildmo.py script.
|
rebuildmo.py script.
|
||||||
|
|
||||||
|
|
|
@ -104,15 +104,24 @@ PY_VERSION = sys.version_info[:2]
|
||||||
# The cookies for the "adult" search.
|
# The cookies for the "adult" search.
|
||||||
# Please don't mess with these account.
|
# Please don't mess with these account.
|
||||||
# Old 'IMDbPY' account.
|
# Old 'IMDbPY' account.
|
||||||
_old_cookie_id = 'boM2bYxz9MCsOnH9gZ0S9QHs12NWrNdApxsls1Vb5/NGrNdjcHx3dUas10UASoAjVEvhAbGagERgOpNkAPvxdbfKwaV2ikEj9SzXY1WPxABmDKQwdqzwRbM+12NSeJFGUEx3F8as10WwidLzVshDtxaPIbP13NdjVS9UZTYqgTVGrNcT9vyXU1'
|
_IMDbPY_cookie_id = 'boM2bYxz9MCsOnH9gZ0S9QHs12NWrNdApxsls1Vb5/NGrNdjcHx3dUas10UASoAjVEvhAbGagERgOpNkAPvxdbfKwaV2ikEj9SzXY1WPxABmDKQwdqzwRbM+12NSeJFGUEx3F8as10WwidLzVshDtxaPIbP13NdjVS9UZTYqgTVGrNcT9vyXU1'
|
||||||
_old_cookie_uu = '3M3AXsquTU5Gur/Svik+ewflPm5Rk2ieY3BIPlLjyK3C0Dp9F8UoPgbTyKiGtZp4x1X+uAUGKD7BM2g+dVd8eqEzDErCoYvdcvGLvVLAen1y08hNQtALjVKAe+1hM8g9QbNonlG1/t4S82ieUsBbrSIQbq1yhV6tZ6ArvSbA7rgHc8n5AdReyAmDaJ5Wm/ee3VDoCnGj/LlBs2ieUZNorhHDKK5Q=='
|
_IMDbPY_cookie_uu = '3M3AXsquTU5Gur/Svik+ewflPm5Rk2ieY3BIPlLjyK3C0Dp9F8UoPgbTyKiGtZp4x1X+uAUGKD7BM2g+dVd8eqEzDErCoYvdcvGLvVLAen1y08hNQtALjVKAe+1hM8g9QbNonlG1/t4S82ieUsBbrSIQbq1yhV6tZ6ArvSbA7rgHc8n5AdReyAmDaJ5Wm/ee3VDoCnGj/LlBs2ieUZNorhHDKK5Q=='
|
||||||
# New 'IMDbPYweb' account.
|
# 'imdbpy2010' account.
|
||||||
_cookie_id = 'rH1jNAkjTlNXvHolvBVBsgaPICNZbNdjVjzFwzas9JRmusdjVoqBs/Hs12NR+1WFxEoR9bGKEDUg6sNlADqXwkas12N131Rwdb+UQNGKN8PWrNdjcdqBQVLq8mbGDHP3hqzxhbD692NQi9D0JjpBtRaPIbP1zNdjUOqENQYv1ADWrNcT9vyXU1'
|
_imdbpy2010_cookie_id = 'QrCdxVi+L+WgqOLrQJJgBgRRXGInphxiBPU/YXSFDyExMFzCp6YcYgSVXyEUhS/xMID8wqemHGID4DlntwZ49vemP5UXsAxiJ4D6goSmHGIgNT9hMXBaRSF2vMS3phxB0bVfQiQlP1RxdrzhB6YcRHFASyIhQVowwXCKtDSlD2YhgRvxBsCKtGemHBKH9mxSI='
|
||||||
_cookie_uu = 'su4/m8cho4c6HP+W1qgq6wchOmhnF0w+lIWvHjRUPJ6nRA9sccEafjGADJ6hQGrMd4GKqLcz2X4z5+w+M4OIKnRn7FpENH7dxDQu3bQEHyx0ZEyeRFTPHfQEX03XF+yeN1dsPpcXaqjUZAw+lGRfXRQEfz3RIX9IgVEffdBAHw2wQXyf9xdMPrQELw0QNB8dsffsqcdQemjPB0w+moLcPh0JrKrHJ9hjBzdMPpcXTH7XRwwOk='
|
_imdbpy2010_cookie_uu = 'oiEo2yoJFCA2Zbn/o7Z1LAPIwotAu6QdALv3foDb1x5F/tdrFY63XkSfty4kntS8Y8jkHSDLt3406+d+JThEilPI0mtTaOQdA/t2/iErp22jaLdeVU5ya4PIREpj7HFdpzhEHadcIAngSER50IoHDpD6Bz4Qy3b+UIhE/hBbhz5Q63ceA2hEvhPo5B0FnrL9Q8jkWjDIbA0Au3d+AOtnXoCIRL4Q28c+UOtnXpP4RL4T6OQdA+6ijUCI5B0AW2d+UOtnXpPYRL4T6OQdA8jkTUOYlC0A=='
|
||||||
|
# old 'IMDbPYweb' account.
|
||||||
|
_old_IMDbPYweb_cookie_id = 'rH1jNAkjTlNXvHolvBVBsgaPICNZbNdjVjzFwzas9JRmusdjVoqBs/Hs12NR+1WFxEoR9bGKEDUg6sNlADqXwkas12N131Rwdb+UQNGKN8PWrNdjcdqBQVLq8mbGDHP3hqzxhbD692NQi9D0JjpBtRaPIbP1zNdjUOqENQYv1ADWrNcT9vyXU1'
|
||||||
|
_old_IMDbPYweb_cookie_uu = 'su4/m8cho4c6HP+W1qgq6wchOmhnF0w+lIWvHjRUPJ6nRA9sccEafjGADJ6hQGrMd4GKqLcz2X4z5+w+M4OIKnRn7FpENH7dxDQu3bQEHyx0ZEyeRFTPHfQEX03XF+yeN1dsPpcXaqjUZAw+lGRfXRQEfz3RIX9IgVEffdBAHw2wQXyf9xdMPrQELw0QNB8dsffsqcdQemjPB0w+moLcPh0JrKrHJ9hjBzdMPpcXTH7XRwwOk='
|
||||||
|
# old 'IMDbPYweb' account values (as of 2012-12-30)
|
||||||
|
_IMDbPYweb_cookie_id = 'BCYjtpb46Go0cMHAMewWZEauhwqPL7ASCPpPVNutu6BuayHZd0U6Dk3UAqVlEM8DHLDsSr02RGQn5ff3245-R4A130NAWJ_5yqXx7X-zJey8vQM8JKdv3rTUSEJznJQlojUW1Bije-Q0FXAixs4I0sePWhd_tA41i-9AF2q3lPmaksram6ilMhN9i3IPESW1PMbk'
|
||||||
|
_IMDbPYweb_cookie_uu = 'BCYttQjEMc-NyUdFUGxThidAnBo7wwalEzj4un9uzf2XoEjtqDhNfrH7bOSuwlRkMEQ11SNyTajl-b9Q-21m4HwYu0e3jXZrjYLXLYzFkrEroCDyUREqaTwPJPSjGtFmvlaVBZEZmsWpaxe18DT5KiygKyGPZKH78Xu4im6ba-Sd31WvbXHzP8KGXPpGjhhVuv7Dcv314HCWkE832Srf9ya-Uv0FdGAmYyLbIAXuxnvpYQd6oZ8-CYkSGLIqcKWdrf5S'
|
||||||
|
# 'IMDbPY2013' account
|
||||||
|
_IMDbPY2013_cookie_id = 'BCYmoyqSm2WglmOzG-SrFWSvVpxsTZOB0qEOOqmAwCBxCbaNgKOxd0DTKzUvt7t04Pya5gV2tUrpDmYxrc1Dr54DQj2UXI7QI35__M5-HI2KrbOI3PjDz6M-_U3HG8topMfN64R24tmBixoZhMYXVaEc556lf0Z4gQNJVYRANXvwytP5v1lpfeToRlu9aVJwN4kT'
|
||||||
|
_IMDbPY2013_cookie_uu = 'BCYquDS8Y2i8R1pJxS4nB77YrhjHHXeOea2Xl9KtZvE6RZKVfMvzTGU4Vl5-yxfPbgRSiFJasyf-hhPuVvXyaHlfeBjNlbFT8hz2HzFFkQ_SxKxq05J51gi7Fv4SaAws1M-i7zmQ1TRunfJqCVIYqPwIs2NO7s4_YDH2ZoISVGLgca8OY2K58HychOZB1oRWHVeAJNhLJMrCWJBuGRLCNnQK5X9tA0dPPntr2Ussy0ouul-N1GQz-8y5vda3JJ_C6xkwmHcA6JrOdOFO_HqMWjVSXuxGEdrXC919JM9H0vooVvKeVgAEJnTh2GiVlUJUoH3c'
|
||||||
|
|
||||||
# imdbpy2010 account.
|
# Currently used account.
|
||||||
#_cookie_id = 'QrCdxVi+L+WgqOLrQJJgBgRRXGInphxiBPU/YXSFDyExMFzCp6YcYgSVXyEUhS/xMID8wqemHGID4DlntwZ49vemP5UXsAxiJ4D6goSmHGIgNT9hMXBaRSF2vMS3phxB0bVfQiQlP1RxdrzhB6YcRHFASyIhQVowwXCKtDSlD2YhgRvxBsCKtGemHBKH9mxSI='
|
_cookie_id = _IMDbPY2013_cookie_id
|
||||||
#_cookie_uu = 'oiEo2yoJFCA2Zbn/o7Z1LAPIwotAu6QdALv3foDb1x5F/tdrFY63XkSfty4kntS8Y8jkHSDLt3406+d+JThEilPI0mtTaOQdA/t2/iErp22jaLdeVU5ya4PIREpj7HFdpzhEHadcIAngSER50IoHDpD6Bz4Qy3b+UIhE/hBbhz5Q63ceA2hEvhPo5B0FnrL9Q8jkWjDIbA0Au3d+AOtnXoCIRL4Q28c+UOtnXpP4RL4T6OQdA+6ijUCI5B0AW2d+UOtnXpPYRL4T6OQdA8jkTUOYlC0A=='
|
_cookie_uu = _IMDbPY2013_cookie_uu
|
||||||
|
|
||||||
|
|
||||||
class _FakeURLOpener(object):
|
class _FakeURLOpener(object):
|
||||||
|
@ -141,9 +150,10 @@ class IMDbURLopener(FancyURLopener):
|
||||||
for header in ('User-Agent', 'User-agent', 'user-agent'):
|
for header in ('User-Agent', 'User-agent', 'user-agent'):
|
||||||
self.del_header(header)
|
self.del_header(header)
|
||||||
self.set_header('User-Agent', 'Mozilla/5.0')
|
self.set_header('User-Agent', 'Mozilla/5.0')
|
||||||
|
self.set_header('Accept-Language', 'en-us,en;q=0.5')
|
||||||
# XXX: This class is used also to perform "Exact Primary
|
# XXX: This class is used also to perform "Exact Primary
|
||||||
# [Title|Name]" searches, and so by default the cookie is set.
|
# [Title|Name]" searches, and so by default the cookie is set.
|
||||||
c_header = 'id=%s; uu=%s' % (_cookie_id, _cookie_uu)
|
c_header = 'uu=%s; id=%s' % (_cookie_uu, _cookie_id)
|
||||||
self.set_header('Cookie', c_header)
|
self.set_header('Cookie', c_header)
|
||||||
|
|
||||||
def get_proxy(self):
|
def get_proxy(self):
|
||||||
|
@ -199,12 +209,11 @@ class IMDbURLopener(FancyURLopener):
|
||||||
server_encode = uopener.info().getparam('charset')
|
server_encode = uopener.info().getparam('charset')
|
||||||
# Otherwise, look at the content-type HTML meta tag.
|
# Otherwise, look at the content-type HTML meta tag.
|
||||||
if server_encode is None and content:
|
if server_encode is None and content:
|
||||||
first_bytes = content[:512]
|
begin_h = content.find('text/html; charset=')
|
||||||
begin_h = first_bytes.find('text/html; charset=')
|
|
||||||
if begin_h != -1:
|
if begin_h != -1:
|
||||||
end_h = first_bytes[19+begin_h:].find('"')
|
end_h = content[19+begin_h:].find('"')
|
||||||
if end_h != -1:
|
if end_h != -1:
|
||||||
server_encode = first_bytes[19+begin_h:19+begin_h+end_h]
|
server_encode = content[19+begin_h:19+begin_h+end_h]
|
||||||
if server_encode:
|
if server_encode:
|
||||||
try:
|
try:
|
||||||
if lookup(server_encode):
|
if lookup(server_encode):
|
||||||
|
@ -455,16 +464,16 @@ class IMDbHTTPAccessSystem(IMDbBase):
|
||||||
results is the maximum number of results to be retrieved."""
|
results is the maximum number of results to be retrieved."""
|
||||||
if isinstance(ton, unicode):
|
if isinstance(ton, unicode):
|
||||||
try:
|
try:
|
||||||
ton = ton.encode('iso8859-1')
|
ton = ton.encode('utf-8')
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
try:
|
try:
|
||||||
ton = ton.encode('utf-8')
|
ton = ton.encode('iso8859-1')
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
pass
|
pass
|
||||||
##params = 'q=%s&%s=on&mx=%s' % (quote_plus(ton), kind, str(results))
|
##params = 'q=%s&%s=on&mx=%s' % (quote_plus(ton), kind, str(results))
|
||||||
params = 'q=%s;s=%s;mx=%s' % (quote_plus(ton), kind, str(results))
|
params = 'q=%s&s=%s&mx=%s' % (quote_plus(ton), kind, str(results))
|
||||||
if kind == 'ep':
|
if kind == 'ep':
|
||||||
params = params.replace('s=ep;', 's=tt;ttype=ep;', 1)
|
params = params.replace('s=ep&', 's=tt&ttype=ep&', 1)
|
||||||
cont = self._retrieve(self.urls['find'] % params)
|
cont = self._retrieve(self.urls['find'] % params)
|
||||||
#print 'URL:', imdbURL_find % params
|
#print 'URL:', imdbURL_find % params
|
||||||
if cont.find('Your search returned more than') == -1 or \
|
if cont.find('Your search returned more than') == -1 or \
|
||||||
|
@ -472,7 +481,7 @@ class IMDbHTTPAccessSystem(IMDbBase):
|
||||||
return cont
|
return cont
|
||||||
# The retrieved page contains no results, because too many
|
# The retrieved page contains no results, because too many
|
||||||
# titles or names contain the string we're looking for.
|
# titles or names contain the string we're looking for.
|
||||||
params = 'q=%s;ls=%s;lm=0' % (quote_plus(ton), kind)
|
params = 'q=%s&ls=%s&lm=0' % (quote_plus(ton), kind)
|
||||||
size = 131072 + results * 512
|
size = 131072 + results * 512
|
||||||
return self._retrieve(self.urls['find'] % params, size=size)
|
return self._retrieve(self.urls['find'] % params, size=size)
|
||||||
|
|
||||||
|
@ -587,6 +596,10 @@ class IMDbHTTPAccessSystem(IMDbBase):
|
||||||
cont = self._retrieve(self.urls['movie_main'] % movieID + 'recommendations')
|
cont = self._retrieve(self.urls['movie_main'] % movieID + 'recommendations')
|
||||||
return self.mProxy.rec_parser.parse(cont)
|
return self.mProxy.rec_parser.parse(cont)
|
||||||
|
|
||||||
|
def get_movie_critic_reviews(self, movieID):
|
||||||
|
cont = self._retrieve(self.urls['movie_main'] % movieID + 'criticreviews')
|
||||||
|
return self.mProxy.criticrev_parser.parse(cont)
|
||||||
|
|
||||||
def get_movie_external_reviews(self, movieID):
|
def get_movie_external_reviews(self, movieID):
|
||||||
cont = self._retrieve(self.urls['movie_main'] % movieID + 'externalreviews')
|
cont = self._retrieve(self.urls['movie_main'] % movieID + 'externalreviews')
|
||||||
return self.mProxy.externalrev_parser.parse(cont)
|
return self.mProxy.externalrev_parser.parse(cont)
|
||||||
|
@ -754,7 +767,7 @@ class IMDbHTTPAccessSystem(IMDbBase):
|
||||||
return self.pProxy.person_keywords_parser.parse(cont)
|
return self.pProxy.person_keywords_parser.parse(cont)
|
||||||
|
|
||||||
def _search_character(self, name, results):
|
def _search_character(self, name, results):
|
||||||
cont = self._get_search_content('char', name, results)
|
cont = self._get_search_content('ch', name, results)
|
||||||
return self.scProxy.search_character_parser.parse(cont, results=results)['data']
|
return self.scProxy.search_character_parser.parse(cont, results=results)['data']
|
||||||
|
|
||||||
def get_character_main(self, characterID):
|
def get_character_main(self, characterID):
|
||||||
|
|
|
@ -9,7 +9,7 @@ pages would be:
|
||||||
plot summary: http://akas.imdb.com/title/tt0094226/plotsummary
|
plot summary: http://akas.imdb.com/title/tt0094226/plotsummary
|
||||||
...and so on...
|
...and so on...
|
||||||
|
|
||||||
Copyright 2004-2012 Davide Alberani <da@erlug.linux.it>
|
Copyright 2004-2013 Davide Alberani <da@erlug.linux.it>
|
||||||
2008 H. Turgut Uyar <uyar@tekir.org>
|
2008 H. Turgut Uyar <uyar@tekir.org>
|
||||||
|
|
||||||
This program is free software; you can redistribute it and/or modify
|
This program is free software; you can redistribute it and/or modify
|
||||||
|
@ -531,9 +531,6 @@ class DOMHTMLMovieParser(DOMParserBase):
|
||||||
def _process_plotsummary(x):
|
def _process_plotsummary(x):
|
||||||
"""Process a plot (contributed by Rdian06)."""
|
"""Process a plot (contributed by Rdian06)."""
|
||||||
xauthor = x.get('author')
|
xauthor = x.get('author')
|
||||||
if xauthor:
|
|
||||||
xauthor = xauthor.replace('{', '<').replace('}', '>').replace('(',
|
|
||||||
'<').replace(')', '>').strip()
|
|
||||||
xplot = x.get('plot', u'').strip()
|
xplot = x.get('plot', u'').strip()
|
||||||
if xauthor:
|
if xauthor:
|
||||||
xplot += u'::%s' % xauthor
|
xplot += u'::%s' % xauthor
|
||||||
|
@ -555,17 +552,20 @@ class DOMHTMLPlotParser(DOMParserBase):
|
||||||
# Notice that recently IMDb started to put the email of the
|
# Notice that recently IMDb started to put the email of the
|
||||||
# author only in the link, that we're not collecting, here.
|
# author only in the link, that we're not collecting, here.
|
||||||
extractors = [Extractor(label='plot',
|
extractors = [Extractor(label='plot',
|
||||||
path="//p[@class='plotpar']",
|
path="//ul[@class='zebraList']//p",
|
||||||
attrs=Attribute(key='plot',
|
attrs=Attribute(key='plot',
|
||||||
multi=True,
|
multi=True,
|
||||||
path={'plot': './text()',
|
path={'plot': './text()[1]',
|
||||||
'author': './i/a/text()'},
|
'author': './span/em/a/text()'},
|
||||||
postprocess=_process_plotsummary))]
|
postprocess=_process_plotsummary))]
|
||||||
|
|
||||||
|
|
||||||
def _process_award(x):
|
def _process_award(x):
|
||||||
award = {}
|
award = {}
|
||||||
award['award'] = x.get('award').strip()
|
_award = x.get('award')
|
||||||
|
if _award is not None:
|
||||||
|
_award = _award.strip()
|
||||||
|
award['award'] = _award
|
||||||
if not award['award']:
|
if not award['award']:
|
||||||
return {}
|
return {}
|
||||||
award['year'] = x.get('year').strip()
|
award['year'] = x.get('year').strip()
|
||||||
|
@ -709,10 +709,16 @@ class DOMHTMLTaglinesParser(DOMParserBase):
|
||||||
result = tparser.parse(taglines_html_string)
|
result = tparser.parse(taglines_html_string)
|
||||||
"""
|
"""
|
||||||
extractors = [Extractor(label='taglines',
|
extractors = [Extractor(label='taglines',
|
||||||
path="//div[@id='tn15content']/p",
|
path='//*[contains(concat(" ", normalize-space(@class), " "), " soda ")]',
|
||||||
attrs=Attribute(key='taglines', multi=True,
|
attrs=Attribute(key='taglines',
|
||||||
|
multi=True,
|
||||||
path="./text()"))]
|
path="./text()"))]
|
||||||
|
|
||||||
|
def postprocess_data(self, data):
|
||||||
|
if 'taglines' in data:
|
||||||
|
data['taglines'] = [tagline.strip() for tagline in data['taglines']]
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
class DOMHTMLKeywordsParser(DOMParserBase):
|
class DOMHTMLKeywordsParser(DOMParserBase):
|
||||||
"""Parser for the "keywords" page of a given movie.
|
"""Parser for the "keywords" page of a given movie.
|
||||||
|
@ -785,9 +791,9 @@ class DOMHTMLSoundtrackParser(DOMHTMLAlternateVersionsParser):
|
||||||
]
|
]
|
||||||
|
|
||||||
def postprocess_data(self, data):
|
def postprocess_data(self, data):
|
||||||
if 'soundtrack' in data:
|
if 'alternate versions' in data:
|
||||||
nd = []
|
nd = []
|
||||||
for x in data['soundtrack']:
|
for x in data['alternate versions']:
|
||||||
ds = x.split('\n')
|
ds = x.split('\n')
|
||||||
title = ds[0]
|
title = ds[0]
|
||||||
if title[0] == '"' and title[-1] == '"':
|
if title[0] == '"' and title[-1] == '"':
|
||||||
|
@ -846,6 +852,13 @@ class DOMHTMLCrazyCreditsParser(DOMParserBase):
|
||||||
x.replace('\n', ' ').replace(' ', ' ')))]
|
x.replace('\n', ' ').replace(' ', ' ')))]
|
||||||
|
|
||||||
|
|
||||||
|
def _process_goof(x):
|
||||||
|
if x['spoiler_category']:
|
||||||
|
return x['spoiler_category'].strip() + ': SPOILER: ' + x['text'].strip()
|
||||||
|
else:
|
||||||
|
return x['category'].strip() + ': ' + x['text'].strip()
|
||||||
|
|
||||||
|
|
||||||
class DOMHTMLGoofsParser(DOMParserBase):
|
class DOMHTMLGoofsParser(DOMParserBase):
|
||||||
"""Parser for the "goofs" page of a given movie.
|
"""Parser for the "goofs" page of a given movie.
|
||||||
The page should be provided as a string, as taken from
|
The page should be provided as a string, as taken from
|
||||||
|
@ -858,9 +871,14 @@ class DOMHTMLGoofsParser(DOMParserBase):
|
||||||
"""
|
"""
|
||||||
_defGetRefs = True
|
_defGetRefs = True
|
||||||
|
|
||||||
extractors = [Extractor(label='goofs', path="//ul[@class='trivia']/li",
|
extractors = [Extractor(label='goofs', path="//div[@class='soda odd']",
|
||||||
attrs=Attribute(key='goofs', multi=True, path=".//text()",
|
attrs=Attribute(key='goofs', multi=True,
|
||||||
postprocess=lambda x: (x or u'').strip()))]
|
path={
|
||||||
|
'text':"./text()",
|
||||||
|
'category':'./preceding-sibling::h4[1]/text()',
|
||||||
|
'spoiler_category': './h4/text()'
|
||||||
|
},
|
||||||
|
postprocess=_process_goof))]
|
||||||
|
|
||||||
|
|
||||||
class DOMHTMLQuotesParser(DOMParserBase):
|
class DOMHTMLQuotesParser(DOMParserBase):
|
||||||
|
@ -876,9 +894,16 @@ class DOMHTMLQuotesParser(DOMParserBase):
|
||||||
_defGetRefs = True
|
_defGetRefs = True
|
||||||
|
|
||||||
extractors = [
|
extractors = [
|
||||||
Extractor(label='quotes',
|
Extractor(label='quotes_odd',
|
||||||
path="//div[@class='_imdbpy']",
|
path="//div[@class='quote soda odd']",
|
||||||
attrs=Attribute(key='quotes',
|
attrs=Attribute(key='quotes_odd',
|
||||||
|
multi=True,
|
||||||
|
path=".//text()",
|
||||||
|
postprocess=lambda x: x.strip().replace(' \n',
|
||||||
|
'::').replace('::\n', '::').replace('\n', ' '))),
|
||||||
|
Extractor(label='quotes_even',
|
||||||
|
path="//div[@class='quote soda even']",
|
||||||
|
attrs=Attribute(key='quotes_even',
|
||||||
multi=True,
|
multi=True,
|
||||||
path=".//text()",
|
path=".//text()",
|
||||||
postprocess=lambda x: x.strip().replace(' \n',
|
postprocess=lambda x: x.strip().replace(' \n',
|
||||||
|
@ -886,27 +911,23 @@ class DOMHTMLQuotesParser(DOMParserBase):
|
||||||
]
|
]
|
||||||
|
|
||||||
preprocessors = [
|
preprocessors = [
|
||||||
(re.compile('(<a name="?qt[0-9]{7}"?></a>)', re.I),
|
(re.compile('<a href="#" class="hidesoda hidden">Hide options</a><br>', re.I), '')
|
||||||
r'\1<div class="_imdbpy">'),
|
|
||||||
(re.compile('<hr width="30%">', re.I), '</div>'),
|
|
||||||
(re.compile('<hr/>', re.I), '</div>'),
|
|
||||||
(re.compile('<script.*?</script>', re.I|re.S), ''),
|
|
||||||
# For BeautifulSoup.
|
|
||||||
(re.compile('<!-- sid: t-channel : MIDDLE_CENTER -->', re.I), '</div>')
|
|
||||||
]
|
]
|
||||||
|
|
||||||
def preprocess_dom(self, dom):
|
def preprocess_dom(self, dom):
|
||||||
# Remove "link this quote" links.
|
# Remove "link this quote" links.
|
||||||
for qLink in self.xpath(dom, "//p[@class='linksoda']"):
|
for qLink in self.xpath(dom, "//span[@class='linksoda']"):
|
||||||
|
qLink.drop_tree()
|
||||||
|
for qLink in self.xpath(dom, "//div[@class='sharesoda_pre']"):
|
||||||
qLink.drop_tree()
|
qLink.drop_tree()
|
||||||
return dom
|
return dom
|
||||||
|
|
||||||
def postprocess_data(self, data):
|
def postprocess_data(self, data):
|
||||||
if 'quotes' not in data:
|
quotes = data.get('quotes_odd', []) + data.get('quotes_even', [])
|
||||||
|
if not quotes:
|
||||||
return {}
|
return {}
|
||||||
for idx, quote in enumerate(data['quotes']):
|
quotes = [q.split('::') for q in quotes]
|
||||||
data['quotes'][idx] = quote.split('::')
|
return {'quotes': quotes}
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
class DOMHTMLReleaseinfoParser(DOMParserBase):
|
class DOMHTMLReleaseinfoParser(DOMParserBase):
|
||||||
|
@ -920,13 +941,13 @@ class DOMHTMLReleaseinfoParser(DOMParserBase):
|
||||||
result = rdparser.parse(releaseinfo_html_string)
|
result = rdparser.parse(releaseinfo_html_string)
|
||||||
"""
|
"""
|
||||||
extractors = [Extractor(label='release dates',
|
extractors = [Extractor(label='release dates',
|
||||||
path="//th[@class='xxxx']/../../tr",
|
path="//table[@id='release_dates']//tr",
|
||||||
attrs=Attribute(key='release dates', multi=True,
|
attrs=Attribute(key='release dates', multi=True,
|
||||||
path={'country': ".//td[1]//text()",
|
path={'country': ".//td[1]//text()",
|
||||||
'date': ".//td[2]//text()",
|
'date': ".//td[2]//text()",
|
||||||
'notes': ".//td[3]//text()"})),
|
'notes': ".//td[3]//text()"})),
|
||||||
Extractor(label='akas',
|
Extractor(label='akas',
|
||||||
path="//div[@class='_imdbpy_akas']/table/tr",
|
path="//table[@id='akas']//tr",
|
||||||
attrs=Attribute(key='akas', multi=True,
|
attrs=Attribute(key='akas', multi=True,
|
||||||
path={'title': "./td[1]/text()",
|
path={'title': "./td[1]/text()",
|
||||||
'countries': "./td[2]/text()"}))]
|
'countries': "./td[2]/text()"}))]
|
||||||
|
@ -961,7 +982,7 @@ class DOMHTMLReleaseinfoParser(DOMParserBase):
|
||||||
title = (aka.get('title') or '').strip()
|
title = (aka.get('title') or '').strip()
|
||||||
if not title:
|
if not title:
|
||||||
continue
|
continue
|
||||||
countries = (aka.get('countries') or '').split('/')
|
countries = (aka.get('countries') or '').split(',')
|
||||||
if not countries:
|
if not countries:
|
||||||
nakas.append(title)
|
nakas.append(title)
|
||||||
else:
|
else:
|
||||||
|
@ -1135,6 +1156,27 @@ def _normalize_href(href):
|
||||||
href = '%s%s' % (imdbURL_base, href)
|
href = '%s%s' % (imdbURL_base, href)
|
||||||
return href
|
return href
|
||||||
|
|
||||||
|
class DOMHTMLCriticReviewsParser(DOMParserBase):
|
||||||
|
"""Parser for the "critic reviews" pages of a given movie.
|
||||||
|
The page should be provided as a string, as taken from
|
||||||
|
the akas.imdb.com server. The final result will be a
|
||||||
|
dictionary, with a key for every relevant section.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
osparser = DOMHTMLCriticReviewsParser()
|
||||||
|
result = osparser.parse(officialsites_html_string)
|
||||||
|
"""
|
||||||
|
kind = 'critic reviews'
|
||||||
|
|
||||||
|
extractors = [
|
||||||
|
Extractor(label='metascore',
|
||||||
|
path="//div[@class='metascore_wrap']/div/span",
|
||||||
|
attrs=Attribute(key='metascore',
|
||||||
|
path=".//text()")),
|
||||||
|
Extractor(label='metacritic url',
|
||||||
|
path="//div[@class='article']/div[@class='see-more']/a",
|
||||||
|
attrs=Attribute(key='metacritic url',
|
||||||
|
path="./@href")) ]
|
||||||
|
|
||||||
class DOMHTMLOfficialsitesParser(DOMParserBase):
|
class DOMHTMLOfficialsitesParser(DOMParserBase):
|
||||||
"""Parser for the "official sites", "external reviews", "newsgroup
|
"""Parser for the "official sites", "external reviews", "newsgroup
|
||||||
|
@ -1471,6 +1513,14 @@ class DOMHTMLSeasonEpisodesParser(DOMParserBase):
|
||||||
try: selected_season = int(selected_season)
|
try: selected_season = int(selected_season)
|
||||||
except: pass
|
except: pass
|
||||||
nd = {selected_season: {}}
|
nd = {selected_season: {}}
|
||||||
|
if 'episode -1' in data:
|
||||||
|
counter = 1
|
||||||
|
for episode in data['episode -1']:
|
||||||
|
while 'episode %d' % counter in data:
|
||||||
|
counter += 1
|
||||||
|
k = 'episode %d' % counter
|
||||||
|
data[k] = [episode]
|
||||||
|
del data['episode -1']
|
||||||
for episode_nr, episode in data.iteritems():
|
for episode_nr, episode in data.iteritems():
|
||||||
if not (episode and episode[0] and
|
if not (episode and episode[0] and
|
||||||
episode_nr.startswith('episode ')):
|
episode_nr.startswith('episode ')):
|
||||||
|
@ -1860,6 +1910,8 @@ _OBJECTS = {
|
||||||
'releasedates_parser': ((DOMHTMLReleaseinfoParser,), None),
|
'releasedates_parser': ((DOMHTMLReleaseinfoParser,), None),
|
||||||
'ratings_parser': ((DOMHTMLRatingsParser,), None),
|
'ratings_parser': ((DOMHTMLRatingsParser,), None),
|
||||||
'officialsites_parser': ((DOMHTMLOfficialsitesParser,), None),
|
'officialsites_parser': ((DOMHTMLOfficialsitesParser,), None),
|
||||||
|
'criticrev_parser': ((DOMHTMLCriticReviewsParser,),
|
||||||
|
{'kind': 'critic reviews'}),
|
||||||
'externalrev_parser': ((DOMHTMLOfficialsitesParser,),
|
'externalrev_parser': ((DOMHTMLOfficialsitesParser,),
|
||||||
{'kind': 'external reviews'}),
|
{'kind': 'external reviews'}),
|
||||||
'newsgrouprev_parser': ((DOMHTMLOfficialsitesParser,),
|
'newsgrouprev_parser': ((DOMHTMLOfficialsitesParser,),
|
||||||
|
|
|
@ -8,7 +8,7 @@ E.g., for "Mel Gibson" the referred pages would be:
|
||||||
biography: http://akas.imdb.com/name/nm0000154/bio
|
biography: http://akas.imdb.com/name/nm0000154/bio
|
||||||
...and so on...
|
...and so on...
|
||||||
|
|
||||||
Copyright 2004-20101 Davide Alberani <da@erlug.linux.it>
|
Copyright 2004-2013 Davide Alberani <da@erlug.linux.it>
|
||||||
2008 H. Turgut Uyar <uyar@tekir.org>
|
2008 H. Turgut Uyar <uyar@tekir.org>
|
||||||
|
|
||||||
This program is free software; you can redistribute it and/or modify
|
This program is free software; you can redistribute it and/or modify
|
||||||
|
@ -60,6 +60,7 @@ class DOMHTMLMaindetailsParser(DOMParserBase):
|
||||||
result = cparser.parse(categorized_html_string)
|
result = cparser.parse(categorized_html_string)
|
||||||
"""
|
"""
|
||||||
_containsObjects = True
|
_containsObjects = True
|
||||||
|
_name_imdb_index = re.compile(r'\([IVXLCDM]+\)')
|
||||||
|
|
||||||
_birth_attrs = [Attribute(key='birth date',
|
_birth_attrs = [Attribute(key='birth date',
|
||||||
path='.//time[@itemprop="birthDate"]/@datetime'),
|
path='.//time[@itemprop="birthDate"]/@datetime'),
|
||||||
|
@ -100,6 +101,10 @@ class DOMHTMLMaindetailsParser(DOMParserBase):
|
||||||
path=".//text()",
|
path=".//text()",
|
||||||
postprocess=lambda x: analyze_name(x,
|
postprocess=lambda x: analyze_name(x,
|
||||||
canonical=1))),
|
canonical=1))),
|
||||||
|
Extractor(label='name_index',
|
||||||
|
path="//h1[@class='header']/span[1]",
|
||||||
|
attrs=Attribute(key='name_index',
|
||||||
|
path="./text()")),
|
||||||
|
|
||||||
Extractor(label='birth info',
|
Extractor(label='birth info',
|
||||||
path="//div[h4='Born:']",
|
path="//div[h4='Born:']",
|
||||||
|
@ -110,7 +115,7 @@ class DOMHTMLMaindetailsParser(DOMParserBase):
|
||||||
attrs=_death_attrs),
|
attrs=_death_attrs),
|
||||||
|
|
||||||
Extractor(label='headshot',
|
Extractor(label='headshot',
|
||||||
path="//td[@id='img_primary']/a",
|
path="//td[@id='img_primary']/div[@class='image']/a",
|
||||||
attrs=Attribute(key='headshot',
|
attrs=Attribute(key='headshot',
|
||||||
path="./img/@src")),
|
path="./img/@src")),
|
||||||
|
|
||||||
|
@ -152,6 +157,11 @@ class DOMHTMLMaindetailsParser(DOMParserBase):
|
||||||
for what in 'birth date', 'death date':
|
for what in 'birth date', 'death date':
|
||||||
if what in data and not data[what]:
|
if what in data and not data[what]:
|
||||||
del data[what]
|
del data[what]
|
||||||
|
name_index = (data.get('name_index') or '').strip()
|
||||||
|
if name_index:
|
||||||
|
if self._name_imdb_index.match(name_index):
|
||||||
|
data['imdbIndex'] = name_index[1:-1]
|
||||||
|
del data['name_index']
|
||||||
# XXX: the code below is for backwards compatibility
|
# XXX: the code below is for backwards compatibility
|
||||||
# probably could be removed
|
# probably could be removed
|
||||||
for key in data.keys():
|
for key in data.keys():
|
||||||
|
@ -220,13 +230,13 @@ class DOMHTMLBioParser(DOMParserBase):
|
||||||
attrs=Attribute(key='headshot',
|
attrs=Attribute(key='headshot',
|
||||||
path="./img/@src")),
|
path="./img/@src")),
|
||||||
Extractor(label='birth info',
|
Extractor(label='birth info',
|
||||||
path="//div[h5='Date of Birth']",
|
path="//table[@id='overviewTable']//td[text()='Date of Birth']/following-sibling::td[1]",
|
||||||
attrs=_birth_attrs),
|
attrs=_birth_attrs),
|
||||||
Extractor(label='death info',
|
Extractor(label='death info',
|
||||||
path="//div[h5='Date of Death']",
|
path="//table[@id='overviewTable']//td[text()='Date of Death']/following-sibling::td[1]",
|
||||||
attrs=_death_attrs),
|
attrs=_death_attrs),
|
||||||
Extractor(label='nick names',
|
Extractor(label='nick names',
|
||||||
path="//div[h5='Nickname']",
|
path="//table[@id='overviewTable']//td[text()='Nickenames']/following-sibling::td[1]",
|
||||||
attrs=Attribute(key='nick names',
|
attrs=Attribute(key='nick names',
|
||||||
path="./text()",
|
path="./text()",
|
||||||
joiner='|',
|
joiner='|',
|
||||||
|
@ -234,25 +244,25 @@ class DOMHTMLBioParser(DOMParserBase):
|
||||||
'::(', 1) for n in x.split('|')
|
'::(', 1) for n in x.split('|')
|
||||||
if n.strip()])),
|
if n.strip()])),
|
||||||
Extractor(label='birth name',
|
Extractor(label='birth name',
|
||||||
path="//div[h5='Birth Name']",
|
path="//table[@id='overviewTable']//td[text()='Birth Name']/following-sibling::td[1]",
|
||||||
attrs=Attribute(key='birth name',
|
attrs=Attribute(key='birth name',
|
||||||
path="./text()",
|
path="./text()",
|
||||||
postprocess=lambda x: canonicalName(x.strip()))),
|
postprocess=lambda x: canonicalName(x.strip()))),
|
||||||
Extractor(label='height',
|
Extractor(label='height',
|
||||||
path="//div[h5='Height']",
|
path="//table[@id='overviewTable']//td[text()='Height']/following-sibling::td[1]",
|
||||||
attrs=Attribute(key='height',
|
attrs=Attribute(key='height',
|
||||||
path="./text()",
|
path="./text()",
|
||||||
postprocess=lambda x: x.strip())),
|
postprocess=lambda x: x.strip())),
|
||||||
Extractor(label='mini biography',
|
Extractor(label='mini biography',
|
||||||
path="//div[h5='Mini Biography']",
|
path="//a[@name='mini_bio']/following-sibling::div[1 = count(preceding-sibling::a[1] | ../a[@name='mini_bio'])]",
|
||||||
attrs=Attribute(key='mini biography',
|
attrs=Attribute(key='mini biography',
|
||||||
multi=True,
|
multi=True,
|
||||||
path={
|
path={
|
||||||
'bio': "./p//text()",
|
'bio': ".//text()",
|
||||||
'by': "./b/following-sibling::a/text()"
|
'by': ".//a[@name='ba']//text()"
|
||||||
},
|
},
|
||||||
postprocess=lambda x: "%s::%s" % \
|
postprocess=lambda x: "%s::%s" % \
|
||||||
(x.get('bio').strip(),
|
((x.get('bio') or u'').split('- IMDb Mini Biography By:')[0].strip(),
|
||||||
(x.get('by') or u'').strip() or u'Anonymous'))),
|
(x.get('by') or u'').strip() or u'Anonymous'))),
|
||||||
Extractor(label='spouse',
|
Extractor(label='spouse',
|
||||||
path="//div[h5='Spouse']/table/tr",
|
path="//div[h5='Spouse']/table/tr",
|
||||||
|
|
|
@ -5,9 +5,9 @@ This module provides the HTMLSearchCharacterParser class (and the
|
||||||
search_character_parser instance), used to parse the results of a search
|
search_character_parser instance), used to parse the results of a search
|
||||||
for a given character.
|
for a given character.
|
||||||
E.g., when searching for the name "Jesse James", the parsed page would be:
|
E.g., when searching for the name "Jesse James", the parsed page would be:
|
||||||
http://akas.imdb.com/find?s=Characters;mx=20;q=Jesse+James
|
http://akas.imdb.com/find?s=ch;mx=20;q=Jesse+James
|
||||||
|
|
||||||
Copyright 2007-2009 Davide Alberani <da@erlug.linux.it>
|
Copyright 2007-2012 Davide Alberani <da@erlug.linux.it>
|
||||||
2008 H. Turgut Uyar <uyar@tekir.org>
|
2008 H. Turgut Uyar <uyar@tekir.org>
|
||||||
|
|
||||||
This program is free software; you can redistribute it and/or modify
|
This program is free software; you can redistribute it and/or modify
|
||||||
|
@ -42,7 +42,7 @@ class DOMBasicCharacterParser(DOMBasicMovieParser):
|
||||||
|
|
||||||
class DOMHTMLSearchCharacterParser(DOMHTMLSearchMovieParser):
|
class DOMHTMLSearchCharacterParser(DOMHTMLSearchMovieParser):
|
||||||
_BaseParser = DOMBasicCharacterParser
|
_BaseParser = DOMBasicCharacterParser
|
||||||
_notDirectHitTitle = '<title>imdb search'
|
_notDirectHitTitle = '<title>find - imdb'
|
||||||
_titleBuilder = lambda self, x: build_name(x, canonical=False)
|
_titleBuilder = lambda self, x: build_name(x, canonical=False)
|
||||||
_linkPrefix = '/character/ch'
|
_linkPrefix = '/character/ch'
|
||||||
|
|
||||||
|
@ -57,7 +57,7 @@ class DOMHTMLSearchCharacterParser(DOMHTMLSearchMovieParser):
|
||||||
{'name': x.get('name')}
|
{'name': x.get('name')}
|
||||||
))]
|
))]
|
||||||
extractors = [Extractor(label='search',
|
extractors = [Extractor(label='search',
|
||||||
path="//td[3]/a[starts-with(@href, " \
|
path="//td[@class='result_text']/a[starts-with(@href, " \
|
||||||
"'/character/ch')]/..",
|
"'/character/ch')]/..",
|
||||||
attrs=_attrs)]
|
attrs=_attrs)]
|
||||||
|
|
||||||
|
|
|
@ -7,7 +7,7 @@ for a given company.
|
||||||
E.g., when searching for the name "Columbia Pictures", the parsed page would be:
|
E.g., when searching for the name "Columbia Pictures", the parsed page would be:
|
||||||
http://akas.imdb.com/find?s=co;mx=20;q=Columbia+Pictures
|
http://akas.imdb.com/find?s=co;mx=20;q=Columbia+Pictures
|
||||||
|
|
||||||
Copyright 2008-2009 Davide Alberani <da@erlug.linux.it>
|
Copyright 2008-2012 Davide Alberani <da@erlug.linux.it>
|
||||||
2008 H. Turgut Uyar <uyar@tekir.org>
|
2008 H. Turgut Uyar <uyar@tekir.org>
|
||||||
|
|
||||||
This program is free software; you can redistribute it and/or modify
|
This program is free software; you can redistribute it and/or modify
|
||||||
|
@ -42,7 +42,7 @@ class DOMBasicCompanyParser(DOMBasicMovieParser):
|
||||||
|
|
||||||
class DOMHTMLSearchCompanyParser(DOMHTMLSearchMovieParser):
|
class DOMHTMLSearchCompanyParser(DOMHTMLSearchMovieParser):
|
||||||
_BaseParser = DOMBasicCompanyParser
|
_BaseParser = DOMBasicCompanyParser
|
||||||
_notDirectHitTitle = '<title>imdb company'
|
_notDirectHitTitle = '<title>find - imdb'
|
||||||
_titleBuilder = lambda self, x: build_company_name(x)
|
_titleBuilder = lambda self, x: build_company_name(x)
|
||||||
_linkPrefix = '/company/co'
|
_linkPrefix = '/company/co'
|
||||||
|
|
||||||
|
@ -59,7 +59,7 @@ class DOMHTMLSearchCompanyParser(DOMHTMLSearchMovieParser):
|
||||||
or u''), stripNotes=True)
|
or u''), stripNotes=True)
|
||||||
))]
|
))]
|
||||||
extractors = [Extractor(label='search',
|
extractors = [Extractor(label='search',
|
||||||
path="//td[3]/a[starts-with(@href, " \
|
path="//td[@class='result_text']/a[starts-with(@href, " \
|
||||||
"'/company/co')]/..",
|
"'/company/co')]/..",
|
||||||
attrs=_attrs)]
|
attrs=_attrs)]
|
||||||
|
|
||||||
|
|
|
@ -8,7 +8,7 @@ E.g., for when searching for the title "the passion", the parsed
|
||||||
page would be:
|
page would be:
|
||||||
http://akas.imdb.com/find?q=the+passion&tt=on&mx=20
|
http://akas.imdb.com/find?q=the+passion&tt=on&mx=20
|
||||||
|
|
||||||
Copyright 2004-2010 Davide Alberani <da@erlug.linux.it>
|
Copyright 2004-2013 Davide Alberani <da@erlug.linux.it>
|
||||||
2008 H. Turgut Uyar <uyar@tekir.org>
|
2008 H. Turgut Uyar <uyar@tekir.org>
|
||||||
|
|
||||||
This program is free software; you can redistribute it and/or modify
|
This program is free software; you can redistribute it and/or modify
|
||||||
|
@ -77,7 +77,7 @@ class DOMBasicMovieParser(DOMParserBase):
|
||||||
def custom_analyze_title(title):
|
def custom_analyze_title(title):
|
||||||
"""Remove garbage notes after the (year), (year/imdbIndex) or (year) (TV)"""
|
"""Remove garbage notes after the (year), (year/imdbIndex) or (year) (TV)"""
|
||||||
# XXX: very crappy. :-(
|
# XXX: very crappy. :-(
|
||||||
nt = title.split(' ')[0]
|
nt = title.split(' aka ')[0]
|
||||||
if nt:
|
if nt:
|
||||||
title = nt
|
title = nt
|
||||||
if not title:
|
if not title:
|
||||||
|
@ -92,7 +92,7 @@ class DOMHTMLSearchMovieParser(DOMParserBase):
|
||||||
"new search system" is used, for movies."""
|
"new search system" is used, for movies."""
|
||||||
|
|
||||||
_BaseParser = DOMBasicMovieParser
|
_BaseParser = DOMBasicMovieParser
|
||||||
_notDirectHitTitle = '<title>imdb title'
|
_notDirectHitTitle = '<title>find - imdb</title>'
|
||||||
_titleBuilder = lambda self, x: build_title(x)
|
_titleBuilder = lambda self, x: build_title(x)
|
||||||
_linkPrefix = '/title/tt'
|
_linkPrefix = '/title/tt'
|
||||||
|
|
||||||
|
@ -101,8 +101,7 @@ class DOMHTMLSearchMovieParser(DOMParserBase):
|
||||||
path={
|
path={
|
||||||
'link': "./a[1]/@href",
|
'link': "./a[1]/@href",
|
||||||
'info': ".//text()",
|
'info': ".//text()",
|
||||||
#'akas': ".//div[@class='_imdbpyAKA']//text()"
|
'akas': "./i//text()"
|
||||||
'akas': ".//p[@class='find-aka']//text()"
|
|
||||||
},
|
},
|
||||||
postprocess=lambda x: (
|
postprocess=lambda x: (
|
||||||
analyze_imdbid(x.get('link') or u''),
|
analyze_imdbid(x.get('link') or u''),
|
||||||
|
@ -110,7 +109,7 @@ class DOMHTMLSearchMovieParser(DOMParserBase):
|
||||||
x.get('akas')
|
x.get('akas')
|
||||||
))]
|
))]
|
||||||
extractors = [Extractor(label='search',
|
extractors = [Extractor(label='search',
|
||||||
path="//td[3]/a[starts-with(@href, '/title/tt')]/..",
|
path="//td[@class='result_text']",
|
||||||
attrs=_attrs)]
|
attrs=_attrs)]
|
||||||
def _init(self):
|
def _init(self):
|
||||||
self.url = u''
|
self.url = u''
|
||||||
|
@ -119,14 +118,11 @@ class DOMHTMLSearchMovieParser(DOMParserBase):
|
||||||
self.url = u''
|
self.url = u''
|
||||||
|
|
||||||
def preprocess_string(self, html_string):
|
def preprocess_string(self, html_string):
|
||||||
if self._notDirectHitTitle in html_string[:1024].lower():
|
if self._notDirectHitTitle in html_string[:10240].lower():
|
||||||
if self._linkPrefix == '/title/tt':
|
if self._linkPrefix == '/title/tt':
|
||||||
# Only for movies.
|
# Only for movies.
|
||||||
|
# XXX (HTU): does this still apply?
|
||||||
html_string = html_string.replace('(TV mini-series)', '(mini)')
|
html_string = html_string.replace('(TV mini-series)', '(mini)')
|
||||||
html_string = html_string.replace('<p class="find-aka">',
|
|
||||||
'<p class="find-aka">::')
|
|
||||||
#html_string = _reAKAStitles.sub(
|
|
||||||
# r'<div class="_imdbpyAKA">\1::</div>\2', html_string)
|
|
||||||
return html_string
|
return html_string
|
||||||
# Direct hit!
|
# Direct hit!
|
||||||
dbme = self._BaseParser(useModule=self._useModule)
|
dbme = self._BaseParser(useModule=self._useModule)
|
||||||
|
@ -141,7 +137,7 @@ class DOMHTMLSearchMovieParser(DOMParserBase):
|
||||||
title = self._titleBuilder(res[0][1])
|
title = self._titleBuilder(res[0][1])
|
||||||
if not (link and title): return u''
|
if not (link and title): return u''
|
||||||
link = link.replace('http://pro.imdb.com', '')
|
link = link.replace('http://pro.imdb.com', '')
|
||||||
new_html = '<td></td><td></td><td><a href="%s">%s</a></td>' % (link,
|
new_html = '<td class="result_text"><a href="%s">%s</a></td>' % (link,
|
||||||
title)
|
title)
|
||||||
return new_html
|
return new_html
|
||||||
|
|
||||||
|
@ -161,11 +157,14 @@ class DOMHTMLSearchMovieParser(DOMParserBase):
|
||||||
if not datum[0] and datum[1]:
|
if not datum[0] and datum[1]:
|
||||||
continue
|
continue
|
||||||
if datum[2] is not None:
|
if datum[2] is not None:
|
||||||
akas = filter(None, datum[2].split('::'))
|
#akas = filter(None, datum[2].split('::'))
|
||||||
if self._linkPrefix == '/title/tt':
|
if self._linkPrefix == '/title/tt':
|
||||||
akas = [a.replace('" - ', '::').rstrip() for a in akas]
|
# XXX (HTU): couldn't find a result with multiple akas
|
||||||
akas = [a.replace('aka "', '', 1).replace('aka "',
|
aka = datum[2]
|
||||||
'', 1).lstrip() for a in akas]
|
akas = [aka[1:-1]] # remove the quotes
|
||||||
|
#akas = [a.replace('" - ', '::').rstrip() for a in akas]
|
||||||
|
#akas = [a.replace('aka "', '', 1).replace('aka "',
|
||||||
|
#'', 1).lstrip() for a in akas]
|
||||||
datum[1]['akas'] = akas
|
datum[1]['akas'] = akas
|
||||||
data['data'][idx] = (datum[0], datum[1])
|
data['data'][idx] = (datum[0], datum[1])
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -7,7 +7,7 @@ for a given person.
|
||||||
E.g., when searching for the name "Mel Gibson", the parsed page would be:
|
E.g., when searching for the name "Mel Gibson", the parsed page would be:
|
||||||
http://akas.imdb.com/find?q=Mel+Gibson&nm=on&mx=20
|
http://akas.imdb.com/find?q=Mel+Gibson&nm=on&mx=20
|
||||||
|
|
||||||
Copyright 2004-2010 Davide Alberani <da@erlug.linux.it>
|
Copyright 2004-2013 Davide Alberani <da@erlug.linux.it>
|
||||||
2008 H. Turgut Uyar <uyar@tekir.org>
|
2008 H. Turgut Uyar <uyar@tekir.org>
|
||||||
|
|
||||||
This program is free software; you can redistribute it and/or modify
|
This program is free software; you can redistribute it and/or modify
|
||||||
|
@ -55,7 +55,7 @@ class DOMHTMLSearchPersonParser(DOMHTMLSearchMovieParser):
|
||||||
"""Parse the html page that the IMDb web server shows when the
|
"""Parse the html page that the IMDb web server shows when the
|
||||||
"new search system" is used, for persons."""
|
"new search system" is used, for persons."""
|
||||||
_BaseParser = DOMBasicPersonParser
|
_BaseParser = DOMBasicPersonParser
|
||||||
_notDirectHitTitle = '<title>imdb name'
|
_notDirectHitTitle = '<title>find - imdb'
|
||||||
_titleBuilder = lambda self, x: build_name(x, canonical=True)
|
_titleBuilder = lambda self, x: build_name(x, canonical=True)
|
||||||
_linkPrefix = '/name/nm'
|
_linkPrefix = '/name/nm'
|
||||||
|
|
||||||
|
@ -74,11 +74,11 @@ class DOMHTMLSearchPersonParser(DOMHTMLSearchMovieParser):
|
||||||
canonical=1), x.get('akas')
|
canonical=1), x.get('akas')
|
||||||
))]
|
))]
|
||||||
extractors = [Extractor(label='search',
|
extractors = [Extractor(label='search',
|
||||||
path="//td[3]/a[starts-with(@href, '/name/nm')]/..",
|
path="//td[@class='result_text']/a[starts-with(@href, '/name/nm')]/..",
|
||||||
attrs=_attrs)]
|
attrs=_attrs)]
|
||||||
|
|
||||||
def preprocess_string(self, html_string):
|
def preprocess_string(self, html_string):
|
||||||
if self._notDirectHitTitle in html_string[:1024].lower():
|
if self._notDirectHitTitle in html_string[:10240].lower():
|
||||||
html_string = _reAKASp.sub(
|
html_string = _reAKASp.sub(
|
||||||
r'\1<div class="_imdbpyAKA">\2::</div>\3',
|
r'\1<div class="_imdbpyAKA">\2::</div>\3',
|
||||||
html_string)
|
html_string)
|
||||||
|
|
|
@ -340,7 +340,7 @@ def build_movie(txt, movieID=None, roleID=None, status=None,
|
||||||
title = title[:nidx].rstrip()
|
title = title[:nidx].rstrip()
|
||||||
if year:
|
if year:
|
||||||
year = year.strip()
|
year = year.strip()
|
||||||
if title[-1] == ')':
|
if title[-1:] == ')':
|
||||||
fpIdx = title.rfind('(')
|
fpIdx = title.rfind('(')
|
||||||
if fpIdx != -1:
|
if fpIdx != -1:
|
||||||
if notes: notes = '%s %s' % (title[fpIdx:], notes)
|
if notes: notes = '%s %s' % (title[fpIdx:], notes)
|
||||||
|
|
|
@ -6,7 +6,7 @@ IMDb's data for mobile systems.
|
||||||
the imdb.IMDb function will return an instance of this class when
|
the imdb.IMDb function will return an instance of this class when
|
||||||
called with the 'accessSystem' argument set to "mobile".
|
called with the 'accessSystem' argument set to "mobile".
|
||||||
|
|
||||||
Copyright 2005-2011 Davide Alberani <da@erlug.linux.it>
|
Copyright 2005-2012 Davide Alberani <da@erlug.linux.it>
|
||||||
|
|
||||||
This program is free software; you can redistribute it and/or modify
|
This program is free software; you can redistribute it and/or modify
|
||||||
it under the terms of the GNU General Public License as published by
|
it under the terms of the GNU General Public License as published by
|
||||||
|
@ -193,7 +193,7 @@ class IMDbMobileAccessSystem(IMDbHTTPAccessSystem):
|
||||||
title)
|
title)
|
||||||
return res
|
return res
|
||||||
tl = title[0].lower()
|
tl = title[0].lower()
|
||||||
if not tl.startswith('imdb title'):
|
if not tl.startswith('find - imdb'):
|
||||||
# a direct hit!
|
# a direct hit!
|
||||||
title = _unHtml(title[0])
|
title = _unHtml(title[0])
|
||||||
mid = None
|
mid = None
|
||||||
|
@ -211,7 +211,7 @@ class IMDbMobileAccessSystem(IMDbHTTPAccessSystem):
|
||||||
# XXX: this results*3 prevents some recursion errors, but...
|
# XXX: this results*3 prevents some recursion errors, but...
|
||||||
# it's not exactly understandable (i.e.: why 'results' is
|
# it's not exactly understandable (i.e.: why 'results' is
|
||||||
# not enough to get all the results?)
|
# not enough to get all the results?)
|
||||||
lis = _findBetween(cont, 'td valign="top">', '</td>',
|
lis = _findBetween(cont, 'td class="result_text">', '</td>',
|
||||||
maxRes=results*3)
|
maxRes=results*3)
|
||||||
for li in lis:
|
for li in lis:
|
||||||
akas = re_makas.findall(li)
|
akas = re_makas.findall(li)
|
||||||
|
@ -492,7 +492,7 @@ class IMDbMobileAccessSystem(IMDbHTTPAccessSystem):
|
||||||
self._mobile_logger.warn('no title tag searching for name %s', name)
|
self._mobile_logger.warn('no title tag searching for name %s', name)
|
||||||
return res
|
return res
|
||||||
nl = name[0].lower()
|
nl = name[0].lower()
|
||||||
if not nl.startswith('imdb name'):
|
if not nl.startswith('find - imdb'):
|
||||||
# a direct hit!
|
# a direct hit!
|
||||||
name = _unHtml(name[0])
|
name = _unHtml(name[0])
|
||||||
name = name.replace('- Filmography by type' , '').strip()
|
name = name.replace('- Filmography by type' , '').strip()
|
||||||
|
@ -506,7 +506,7 @@ class IMDbMobileAccessSystem(IMDbHTTPAccessSystem):
|
||||||
return res
|
return res
|
||||||
res[:] = [(str(pid[0]), analyze_name(name, canonical=1))]
|
res[:] = [(str(pid[0]), analyze_name(name, canonical=1))]
|
||||||
else:
|
else:
|
||||||
lis = _findBetween(cont, 'td valign="top">', '</td>',
|
lis = _findBetween(cont, 'td class="result_text">', '</td>',
|
||||||
maxRes=results*3)
|
maxRes=results*3)
|
||||||
for li in lis:
|
for li in lis:
|
||||||
akas = _findBetween(li, '<em>"', '"</em>')
|
akas = _findBetween(li, '<em>"', '"</em>')
|
||||||
|
@ -771,7 +771,7 @@ class IMDbMobileAccessSystem(IMDbHTTPAccessSystem):
|
||||||
return {'data': d}
|
return {'data': d}
|
||||||
|
|
||||||
def _search_character(self, name, results):
|
def _search_character(self, name, results):
|
||||||
cont = subXMLRefs(self._get_search_content('char', name, results))
|
cont = subXMLRefs(self._get_search_content('ch', name, results))
|
||||||
name = _findBetween(cont, '<title>', '</title>', maxRes=1)
|
name = _findBetween(cont, '<title>', '</title>', maxRes=1)
|
||||||
res = []
|
res = []
|
||||||
if not name:
|
if not name:
|
||||||
|
@ -779,8 +779,7 @@ class IMDbMobileAccessSystem(IMDbHTTPAccessSystem):
|
||||||
name)
|
name)
|
||||||
return res
|
return res
|
||||||
nl = name[0].lower()
|
nl = name[0].lower()
|
||||||
if not (nl.startswith('imdb search') or nl.startswith('imdb search') \
|
if not nl.startswith('find - imdb'):
|
||||||
or nl.startswith('imdb character')):
|
|
||||||
# a direct hit!
|
# a direct hit!
|
||||||
name = _unHtml(name[0]).replace('(Character)', '').strip()
|
name = _unHtml(name[0]).replace('(Character)', '').strip()
|
||||||
pid = None
|
pid = None
|
||||||
|
@ -793,12 +792,7 @@ class IMDbMobileAccessSystem(IMDbHTTPAccessSystem):
|
||||||
return res
|
return res
|
||||||
res[:] = [(str(pid[0]), analyze_name(name))]
|
res[:] = [(str(pid[0]), analyze_name(name))]
|
||||||
else:
|
else:
|
||||||
sects = _findBetween(cont, '<b>Popular Characters</b>', '</table>',
|
lis = _findBetween(cont, '<td class="result_text"',
|
||||||
maxRes=results*3)
|
|
||||||
sects += _findBetween(cont, '<b>Characters', '</table>',
|
|
||||||
maxRes=results*3)
|
|
||||||
for sect in sects:
|
|
||||||
lis = _findBetween(sect, '<a href="/character/',
|
|
||||||
['<small', '</td>', '<br'])
|
['<small', '</td>', '<br'])
|
||||||
for li in lis:
|
for li in lis:
|
||||||
li = '<%s' % li
|
li = '<%s' % li
|
||||||
|
|
|
@ -7,7 +7,7 @@ the SQLObject _AND_ SQLAlchemy Object Relational Managers is available.
|
||||||
the imdb.IMDb function will return an instance of this class when
|
the imdb.IMDb function will return an instance of this class when
|
||||||
called with the 'accessSystem' argument set to "sql", "database" or "db".
|
called with the 'accessSystem' argument set to "sql", "database" or "db".
|
||||||
|
|
||||||
Copyright 2005-2010 Davide Alberani <da@erlug.linux.it>
|
Copyright 2005-2012 Davide Alberani <da@erlug.linux.it>
|
||||||
|
|
||||||
This program is free software; you can redistribute it and/or modify
|
This program is free software; you can redistribute it and/or modify
|
||||||
it under the terms of the GNU General Public License as published by
|
it under the terms of the GNU General Public License as published by
|
||||||
|
@ -452,7 +452,12 @@ def get_movie_data(movieID, kindDict, fromAka=0, _table=None):
|
||||||
else:
|
else:
|
||||||
if not fromAka: Table = Title
|
if not fromAka: Table = Title
|
||||||
else: Table = AkaTitle
|
else: Table = AkaTitle
|
||||||
|
try:
|
||||||
m = Table.get(movieID)
|
m = Table.get(movieID)
|
||||||
|
except Exception, e:
|
||||||
|
_aux_logger.warn('Unable to fetch information for movieID %s: %s', movieID, e)
|
||||||
|
mdict = {}
|
||||||
|
return mdict
|
||||||
mdict = {'title': m.title, 'kind': kindDict[m.kindID],
|
mdict = {'title': m.title, 'kind': kindDict[m.kindID],
|
||||||
'year': m.productionYear, 'imdbIndex': m.imdbIndex,
|
'year': m.productionYear, 'imdbIndex': m.imdbIndex,
|
||||||
'season': m.seasonNr, 'episode': m.episodeNr}
|
'season': m.seasonNr, 'episode': m.episodeNr}
|
||||||
|
@ -825,14 +830,14 @@ class IMDbSqlAccessSystem(IMDbBase):
|
||||||
imdbID = movie.imdbID
|
imdbID = movie.imdbID
|
||||||
if imdbID is not None: return '%07d' % imdbID
|
if imdbID is not None: return '%07d' % imdbID
|
||||||
m_dict = get_movie_data(movie.id, self._kind)
|
m_dict = get_movie_data(movie.id, self._kind)
|
||||||
titline = build_title(m_dict, ptdf=1)
|
titline = build_title(m_dict, ptdf=0)
|
||||||
imdbID = self.title2imdbID(titline)
|
imdbID = self.title2imdbID(titline, m_dict['kind'])
|
||||||
# If the imdbID was retrieved from the web and was not in the
|
# If the imdbID was retrieved from the web and was not in the
|
||||||
# database, update the database (ignoring errors, because it's
|
# database, update the database (ignoring errors, because it's
|
||||||
# possibile that the current user has not update privileges).
|
# possibile that the current user has not update privileges).
|
||||||
# There're times when I think I'm a genius; this one of
|
# There're times when I think I'm a genius; this one of
|
||||||
# those times... <g>
|
# those times... <g>
|
||||||
if imdbID is not None:
|
if imdbID is not None and not isinstance(imdbID, list):
|
||||||
try: movie.imdbID = int(imdbID)
|
try: movie.imdbID = int(imdbID)
|
||||||
except: pass
|
except: pass
|
||||||
return imdbID
|
return imdbID
|
||||||
|
@ -847,9 +852,9 @@ class IMDbSqlAccessSystem(IMDbBase):
|
||||||
imdbID = person.imdbID
|
imdbID = person.imdbID
|
||||||
if imdbID is not None: return '%07d' % imdbID
|
if imdbID is not None: return '%07d' % imdbID
|
||||||
n_dict = {'name': person.name, 'imdbIndex': person.imdbIndex}
|
n_dict = {'name': person.name, 'imdbIndex': person.imdbIndex}
|
||||||
namline = build_name(n_dict, canonical=1)
|
namline = build_name(n_dict, canonical=False)
|
||||||
imdbID = self.name2imdbID(namline)
|
imdbID = self.name2imdbID(namline)
|
||||||
if imdbID is not None:
|
if imdbID is not None and not isinstance(imdbID, list):
|
||||||
try: person.imdbID = int(imdbID)
|
try: person.imdbID = int(imdbID)
|
||||||
except: pass
|
except: pass
|
||||||
return imdbID
|
return imdbID
|
||||||
|
@ -864,9 +869,9 @@ class IMDbSqlAccessSystem(IMDbBase):
|
||||||
imdbID = character.imdbID
|
imdbID = character.imdbID
|
||||||
if imdbID is not None: return '%07d' % imdbID
|
if imdbID is not None: return '%07d' % imdbID
|
||||||
n_dict = {'name': character.name, 'imdbIndex': character.imdbIndex}
|
n_dict = {'name': character.name, 'imdbIndex': character.imdbIndex}
|
||||||
namline = build_name(n_dict, canonical=1)
|
namline = build_name(n_dict, canonical=False)
|
||||||
imdbID = self.character2imdbID(namline)
|
imdbID = self.character2imdbID(namline)
|
||||||
if imdbID is not None:
|
if imdbID is not None and not isinstance(imdbID, list):
|
||||||
try: character.imdbID = int(imdbID)
|
try: character.imdbID = int(imdbID)
|
||||||
except: pass
|
except: pass
|
||||||
return imdbID
|
return imdbID
|
||||||
|
@ -883,7 +888,7 @@ class IMDbSqlAccessSystem(IMDbBase):
|
||||||
n_dict = {'name': company.name, 'country': company.countryCode}
|
n_dict = {'name': company.name, 'country': company.countryCode}
|
||||||
namline = build_company_name(n_dict)
|
namline = build_company_name(n_dict)
|
||||||
imdbID = self.company2imdbID(namline)
|
imdbID = self.company2imdbID(namline)
|
||||||
if imdbID is not None:
|
if imdbID is not None and not isinstance(imdbID, list):
|
||||||
try: company.imdbID = int(imdbID)
|
try: company.imdbID = int(imdbID)
|
||||||
except: pass
|
except: pass
|
||||||
return imdbID
|
return imdbID
|
||||||
|
@ -1116,6 +1121,7 @@ class IMDbSqlAccessSystem(IMDbBase):
|
||||||
if mlinks:
|
if mlinks:
|
||||||
for ml in mlinks:
|
for ml in mlinks:
|
||||||
lmovieData = get_movie_data(ml[0], self._kind)
|
lmovieData = get_movie_data(ml[0], self._kind)
|
||||||
|
if lmovieData:
|
||||||
m = Movie(movieID=ml[0], data=lmovieData, accessSystem='sql')
|
m = Movie(movieID=ml[0], data=lmovieData, accessSystem='sql')
|
||||||
ml[0] = m
|
ml[0] = m
|
||||||
res['connections'] = {}
|
res['connections'] = {}
|
||||||
|
|
|
@ -466,6 +466,7 @@ class _AlchemyConnection(object):
|
||||||
|
|
||||||
def setConnection(uri, tables, encoding='utf8', debug=False):
|
def setConnection(uri, tables, encoding='utf8', debug=False):
|
||||||
"""Set connection for every table."""
|
"""Set connection for every table."""
|
||||||
|
params = {'encoding': encoding}
|
||||||
# FIXME: why on earth MySQL requires an additional parameter,
|
# FIXME: why on earth MySQL requires an additional parameter,
|
||||||
# is well beyond my understanding...
|
# is well beyond my understanding...
|
||||||
if uri.startswith('mysql'):
|
if uri.startswith('mysql'):
|
||||||
|
@ -474,7 +475,11 @@ def setConnection(uri, tables, encoding='utf8', debug=False):
|
||||||
else:
|
else:
|
||||||
uri += '?'
|
uri += '?'
|
||||||
uri += 'charset=%s' % encoding
|
uri += 'charset=%s' % encoding
|
||||||
params = {'encoding': encoding}
|
|
||||||
|
# On some server configurations, we will need to explictly enable
|
||||||
|
# loading data from local files
|
||||||
|
params['local_infile'] = 1
|
||||||
|
|
||||||
if debug:
|
if debug:
|
||||||
params['echo'] = True
|
params['echo'] = True
|
||||||
if uri.startswith('ibm_db'):
|
if uri.startswith('ibm_db'):
|
||||||
|
|
|
@ -182,6 +182,10 @@ def setConnection(uri, tables, encoding='utf8', debug=False):
|
||||||
kw['use_unicode'] = 1
|
kw['use_unicode'] = 1
|
||||||
#kw['sqlobject_encoding'] = encoding
|
#kw['sqlobject_encoding'] = encoding
|
||||||
kw['charset'] = encoding
|
kw['charset'] = encoding
|
||||||
|
|
||||||
|
# On some server configurations, we will need to explictly enable
|
||||||
|
# loading data from local files
|
||||||
|
kw['local_infile'] = 1
|
||||||
conn = connectionForURI(uri, **kw)
|
conn = connectionForURI(uri, **kw)
|
||||||
conn.debug = debug
|
conn.debug = debug
|
||||||
# XXX: doesn't work and a work-around was put in imdbpy2sql.py;
|
# XXX: doesn't work and a work-around was put in imdbpy2sql.py;
|
||||||
|
|
|
@ -3,7 +3,7 @@ utils module (imdb package).
|
||||||
|
|
||||||
This module provides basic utilities for the imdb package.
|
This module provides basic utilities for the imdb package.
|
||||||
|
|
||||||
Copyright 2004-2012 Davide Alberani <da@erlug.linux.it>
|
Copyright 2004-2013 Davide Alberani <da@erlug.linux.it>
|
||||||
2009 H. Turgut Uyar <uyar@tekir.org>
|
2009 H. Turgut Uyar <uyar@tekir.org>
|
||||||
|
|
||||||
This program is free software; you can redistribute it and/or modify
|
This program is free software; you can redistribute it and/or modify
|
||||||
|
@ -189,10 +189,9 @@ _unicodeArticles = linguistics.toUnicode(_articles)
|
||||||
articlesDicts = linguistics.articlesDictsForLang(None)
|
articlesDicts = linguistics.articlesDictsForLang(None)
|
||||||
spArticles = linguistics.spArticlesForLang(None)
|
spArticles = linguistics.spArticlesForLang(None)
|
||||||
|
|
||||||
def canonicalTitle(title, lang=None):
|
def canonicalTitle(title, lang=None, imdbIndex=None):
|
||||||
"""Return the title in the canonic format 'Movie Title, The';
|
"""Return the title in the canonic format 'Movie Title, The';
|
||||||
beware that it doesn't handle long imdb titles, but only the
|
beware that it doesn't handle long imdb titles.
|
||||||
title portion, without year[/imdbIndex] or special markup.
|
|
||||||
The 'lang' argument can be used to specify the language of the title.
|
The 'lang' argument can be used to specify the language of the title.
|
||||||
"""
|
"""
|
||||||
isUnicode = isinstance(title, unicode)
|
isUnicode = isinstance(title, unicode)
|
||||||
|
@ -203,15 +202,19 @@ def canonicalTitle(title, lang=None):
|
||||||
except IndexError:
|
except IndexError:
|
||||||
pass
|
pass
|
||||||
if isUnicode:
|
if isUnicode:
|
||||||
_format = u'%s, %s'
|
_format = u'%s%s, %s'
|
||||||
else:
|
else:
|
||||||
_format = '%s, %s'
|
_format = '%s%s, %s'
|
||||||
ltitle = title.lower()
|
ltitle = title.lower()
|
||||||
|
if imdbIndex:
|
||||||
|
imdbIndex = ' (%s)' % imdbIndex
|
||||||
|
else:
|
||||||
|
imdbIndex = ''
|
||||||
spArticles = linguistics.spArticlesForLang(lang)
|
spArticles = linguistics.spArticlesForLang(lang)
|
||||||
for article in spArticles[isUnicode]:
|
for article in spArticles[isUnicode]:
|
||||||
if ltitle.startswith(article):
|
if ltitle.startswith(article):
|
||||||
lart = len(article)
|
lart = len(article)
|
||||||
title = _format % (title[lart:], title[:lart])
|
title = _format % (title[lart:], imdbIndex, title[:lart])
|
||||||
if article[-1] == ' ':
|
if article[-1] == ' ':
|
||||||
title = title[:-1]
|
title = title[:-1]
|
||||||
break
|
break
|
||||||
|
@ -383,18 +386,42 @@ def analyze_title(title, canonical=None, canonicalSeries=None,
|
||||||
if title.endswith('(TV)'):
|
if title.endswith('(TV)'):
|
||||||
kind = u'tv movie'
|
kind = u'tv movie'
|
||||||
title = title[:-4].rstrip()
|
title = title[:-4].rstrip()
|
||||||
|
elif title.endswith('(TV Movie)'):
|
||||||
|
kind = u'tv movie'
|
||||||
|
title = title[:-10].rstrip()
|
||||||
elif title.endswith('(V)'):
|
elif title.endswith('(V)'):
|
||||||
kind = u'video movie'
|
kind = u'video movie'
|
||||||
title = title[:-3].rstrip()
|
title = title[:-3].rstrip()
|
||||||
elif title.endswith('(video)'):
|
elif title.lower().endswith('(video)'):
|
||||||
kind = u'video movie'
|
kind = u'video movie'
|
||||||
title = title[:-7].rstrip()
|
title = title[:-7].rstrip()
|
||||||
|
elif title.endswith('(TV Short)'):
|
||||||
|
kind = u'tv short'
|
||||||
|
title = title[:-10].rstrip()
|
||||||
|
elif title.endswith('(TV Mini-Series)'):
|
||||||
|
kind = u'tv mini series'
|
||||||
|
title = title[:-16].rstrip()
|
||||||
elif title.endswith('(mini)'):
|
elif title.endswith('(mini)'):
|
||||||
kind = u'tv mini series'
|
kind = u'tv mini series'
|
||||||
title = title[:-6].rstrip()
|
title = title[:-6].rstrip()
|
||||||
elif title.endswith('(VG)'):
|
elif title.endswith('(VG)'):
|
||||||
kind = u'video game'
|
kind = u'video game'
|
||||||
title = title[:-4].rstrip()
|
title = title[:-4].rstrip()
|
||||||
|
elif title.endswith('(Video Game)'):
|
||||||
|
kind = u'video game'
|
||||||
|
title = title[:-12].rstrip()
|
||||||
|
elif title.endswith('(TV Series)'):
|
||||||
|
epindex = title.find('(TV Episode) - ')
|
||||||
|
if epindex >= 0:
|
||||||
|
# It's an episode of a series.
|
||||||
|
kind = u'episode'
|
||||||
|
series_info = analyze_title(title[epindex + 15:])
|
||||||
|
result['episode of'] = series_info.get('title')
|
||||||
|
result['series year'] = series_info.get('year')
|
||||||
|
title = title[:epindex]
|
||||||
|
else:
|
||||||
|
kind = u'tv series'
|
||||||
|
title = title[:-11].rstrip()
|
||||||
# Search for the year and the optional imdbIndex (a roman number).
|
# Search for the year and the optional imdbIndex (a roman number).
|
||||||
yi = re_year_index.findall(title)
|
yi = re_year_index.findall(title)
|
||||||
if not yi:
|
if not yi:
|
||||||
|
@ -430,9 +457,6 @@ def analyze_title(title, canonical=None, canonicalSeries=None,
|
||||||
if not kind:
|
if not kind:
|
||||||
kind = u'tv series'
|
kind = u'tv series'
|
||||||
title = title[1:-1].strip()
|
title = title[1:-1].strip()
|
||||||
elif title.endswith('(TV series)'):
|
|
||||||
kind = u'tv series'
|
|
||||||
title = title[:-11].rstrip()
|
|
||||||
if not title:
|
if not title:
|
||||||
raise IMDbParserError('invalid title: "%s"' % original_t)
|
raise IMDbParserError('invalid title: "%s"' % original_t)
|
||||||
if canonical is not None:
|
if canonical is not None:
|
||||||
|
@ -489,7 +513,7 @@ def _convertTime(title, fromPTDFtoWEB=1, _emptyString=u''):
|
||||||
|
|
||||||
def build_title(title_dict, canonical=None, canonicalSeries=None,
|
def build_title(title_dict, canonical=None, canonicalSeries=None,
|
||||||
canonicalEpisode=None, ptdf=0, lang=None, _doYear=1,
|
canonicalEpisode=None, ptdf=0, lang=None, _doYear=1,
|
||||||
_emptyString=u''):
|
_emptyString=u'', appendKind=True):
|
||||||
"""Given a dictionary that represents a "long" IMDb title,
|
"""Given a dictionary that represents a "long" IMDb title,
|
||||||
return a string.
|
return a string.
|
||||||
|
|
||||||
|
@ -511,6 +535,11 @@ def build_title(title_dict, canonical=None, canonicalSeries=None,
|
||||||
doYear = 0
|
doYear = 0
|
||||||
if ptdf:
|
if ptdf:
|
||||||
doYear = 1
|
doYear = 1
|
||||||
|
# XXX: for results coming from the new search page.
|
||||||
|
if not isinstance(episode_of, (dict, _Container)):
|
||||||
|
episode_of = {'title': episode_of, 'kind': 'tv series'}
|
||||||
|
if 'series year' in title_dict:
|
||||||
|
episode_of['year'] = title_dict['series year']
|
||||||
pre_title = build_title(episode_of, canonical=canonicalSeries,
|
pre_title = build_title(episode_of, canonical=canonicalSeries,
|
||||||
ptdf=0, _doYear=doYear,
|
ptdf=0, _doYear=doYear,
|
||||||
_emptyString=_emptyString)
|
_emptyString=_emptyString)
|
||||||
|
@ -545,12 +574,14 @@ def build_title(title_dict, canonical=None, canonicalSeries=None,
|
||||||
episode_title += '.%s' % episode
|
episode_title += '.%s' % episode
|
||||||
episode_title += ')'
|
episode_title += ')'
|
||||||
episode_title = '{%s}' % episode_title
|
episode_title = '{%s}' % episode_title
|
||||||
return '%s %s' % (pre_title, episode_title)
|
return _emptyString + '%s %s' % (_emptyString + pre_title,
|
||||||
|
_emptyString + episode_title)
|
||||||
title = title_dict.get('title', '')
|
title = title_dict.get('title', '')
|
||||||
|
imdbIndex = title_dict.get('imdbIndex', '')
|
||||||
if not title: return _emptyString
|
if not title: return _emptyString
|
||||||
if canonical is not None:
|
if canonical is not None:
|
||||||
if canonical:
|
if canonical:
|
||||||
title = canonicalTitle(title, lang=lang)
|
title = canonicalTitle(title, lang=lang, imdbIndex=imdbIndex)
|
||||||
else:
|
else:
|
||||||
title = normalizeTitle(title, lang=lang)
|
title = normalizeTitle(title, lang=lang)
|
||||||
if pre_title:
|
if pre_title:
|
||||||
|
@ -558,15 +589,20 @@ def build_title(title_dict, canonical=None, canonicalSeries=None,
|
||||||
if kind in (u'tv series', u'tv mini series'):
|
if kind in (u'tv series', u'tv mini series'):
|
||||||
title = '"%s"' % title
|
title = '"%s"' % title
|
||||||
if _doYear:
|
if _doYear:
|
||||||
imdbIndex = title_dict.get('imdbIndex')
|
year = title_dict.get('year') or '????'
|
||||||
year = title_dict.get('year') or u'????'
|
|
||||||
if isinstance(_emptyString, str):
|
if isinstance(_emptyString, str):
|
||||||
year = str(year)
|
year = str(year)
|
||||||
|
imdbIndex = title_dict.get('imdbIndex')
|
||||||
|
if not ptdf:
|
||||||
|
if imdbIndex and (canonical is None or canonical):
|
||||||
|
title += ' (%s)' % imdbIndex
|
||||||
|
title += ' (%s)' % year
|
||||||
|
else:
|
||||||
title += ' (%s' % year
|
title += ' (%s' % year
|
||||||
if imdbIndex:
|
if imdbIndex and (canonical is None or canonical):
|
||||||
title += '/%s' % imdbIndex
|
title += '/%s' % imdbIndex
|
||||||
title += ')'
|
title += ')'
|
||||||
if kind:
|
if appendKind and kind:
|
||||||
if kind == 'tv movie':
|
if kind == 'tv movie':
|
||||||
title += ' (TV)'
|
title += ' (TV)'
|
||||||
elif kind == 'video movie':
|
elif kind == 'video movie':
|
||||||
|
|
|
@ -216,7 +216,7 @@ class RTorrent:
|
||||||
while i < MAX_RETRIES:
|
while i < MAX_RETRIES:
|
||||||
for torrent in self.get_torrents():
|
for torrent in self.get_torrents():
|
||||||
if torrent.info_hash != info_hash:
|
if torrent.info_hash != info_hash:
|
||||||
break
|
continue
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
i += 1
|
i += 1
|
||||||
|
|
||||||
|
|
|
@ -30,5 +30,5 @@ except ImportError:
|
||||||
|
|
||||||
|
|
||||||
__all__ = ['SERVICES', 'LANGUAGE_INDEX', 'SERVICE_INDEX', 'SERVICE_CONFIDENCE',
|
__all__ = ['SERVICES', 'LANGUAGE_INDEX', 'SERVICE_INDEX', 'SERVICE_CONFIDENCE',
|
||||||
'MATCHING_CONFIDENCE', 'list_subtitles', 'download_subtitles', 'Pool']
|
'MATCHING_CONFIDENCE', 'list_subtitles', 'download_subtitles', 'Pool', 'language']
|
||||||
logging.getLogger("subliminal").addHandler(NullHandler())
|
logging.getLogger("subliminal").addHandler(NullHandler())
|
||||||
|
|
|
@ -11,6 +11,7 @@ __author__ = "dbr/Ben"
|
||||||
__version__ = "1.9"
|
__version__ = "1.9"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
import time
|
import time
|
||||||
import getpass
|
import getpass
|
||||||
import StringIO
|
import StringIO
|
||||||
|
@ -18,8 +19,10 @@ import tempfile
|
||||||
import warnings
|
import warnings
|
||||||
import logging
|
import logging
|
||||||
import zipfile
|
import zipfile
|
||||||
|
import datetime as dt
|
||||||
import requests
|
import requests
|
||||||
import cachecontrol
|
import cachecontrol
|
||||||
|
import xmltodict
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import xml.etree.cElementTree as ElementTree
|
import xml.etree.cElementTree as ElementTree
|
||||||
|
@ -31,15 +34,18 @@ try:
|
||||||
except ImportError:
|
except ImportError:
|
||||||
gzip = None
|
gzip = None
|
||||||
|
|
||||||
|
from lib.dateutil.parser import parse
|
||||||
from cachecontrol import caches
|
from cachecontrol import caches
|
||||||
|
|
||||||
from tvdb_ui import BaseUI, ConsoleUI
|
from tvdb_ui import BaseUI, ConsoleUI
|
||||||
from tvdb_exceptions import (tvdb_error, tvdb_userabort, tvdb_shownotfound,
|
from tvdb_exceptions import (tvdb_error, tvdb_userabort, tvdb_shownotfound,
|
||||||
tvdb_seasonnotfound, tvdb_episodenotfound, tvdb_attributenotfound)
|
tvdb_seasonnotfound, tvdb_episodenotfound, tvdb_attributenotfound)
|
||||||
|
|
||||||
|
|
||||||
def log():
|
def log():
|
||||||
return logging.getLogger("tvdb_api")
|
return logging.getLogger("tvdb_api")
|
||||||
|
|
||||||
|
|
||||||
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
|
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
|
||||||
"""Retry calling the decorated function using an exponential backoff.
|
"""Retry calling the decorated function using an exponential backoff.
|
||||||
|
|
||||||
|
@ -83,6 +89,7 @@ def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
|
||||||
|
|
||||||
return deco_retry
|
return deco_retry
|
||||||
|
|
||||||
|
|
||||||
class ShowContainer(dict):
|
class ShowContainer(dict):
|
||||||
"""Simple dict that holds a series of Show instances
|
"""Simple dict that holds a series of Show instances
|
||||||
"""
|
"""
|
||||||
|
@ -560,35 +567,51 @@ class Tvdb:
|
||||||
except requests.Timeout, e:
|
except requests.Timeout, e:
|
||||||
raise tvdb_error("Connection timed out " + str(e.message) + " while loading URL " + str(url))
|
raise tvdb_error("Connection timed out " + str(e.message) + " while loading URL " + str(url))
|
||||||
|
|
||||||
if 'application/zip' in resp.headers.get("Content-Type", '') and resp.ok:
|
def process(path, key, value):
|
||||||
|
key = key.lower()
|
||||||
|
|
||||||
|
# clean up value and do type changes
|
||||||
|
if value:
|
||||||
|
try:
|
||||||
|
if key == 'firstaired' and value in "0000-00-00":
|
||||||
|
new_value = str(dt.date.fromordinal(1))
|
||||||
|
new_value = re.sub("([-]0{2}){1,}", "", new_value)
|
||||||
|
fixDate = parse(new_value, fuzzy=True).date()
|
||||||
|
value = fixDate.strftime("%Y-%m-%d")
|
||||||
|
elif key == 'firstaired':
|
||||||
|
value = parse(value, fuzzy=True).date()
|
||||||
|
value = value.strftime("%Y-%m-%d")
|
||||||
|
|
||||||
|
#if key == 'airs_time':
|
||||||
|
# value = parse(value).time()
|
||||||
|
# value = value.strftime("%I:%M %p")
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return (key, value)
|
||||||
|
|
||||||
|
if resp.ok:
|
||||||
|
if 'application/zip' in resp.headers.get("Content-Type", ''):
|
||||||
try:
|
try:
|
||||||
# TODO: The zip contains actors.xml and banners.xml, which are currently ignored [GH-20]
|
# TODO: The zip contains actors.xml and banners.xml, which are currently ignored [GH-20]
|
||||||
log().debug("We recived a zip file unpacking now ...")
|
log().debug("We recived a zip file unpacking now ...")
|
||||||
zipdata = StringIO.StringIO()
|
zipdata = StringIO.StringIO()
|
||||||
zipdata.write(resp.content)
|
zipdata.write(resp.content)
|
||||||
myzipfile = zipfile.ZipFile(zipdata)
|
myzipfile = zipfile.ZipFile(zipdata)
|
||||||
return myzipfile.read('%s.xml' % language)
|
return xmltodict.parse(myzipfile.read('%s.xml' % language).strip(), postprocessor=process)
|
||||||
except zipfile.BadZipfile:
|
except zipfile.BadZipfile:
|
||||||
raise tvdb_error("Bad zip file received from thetvdb.com, could not read it")
|
raise tvdb_error("Bad zip file received from thetvdb.com, could not read it")
|
||||||
|
else:
|
||||||
return resp.content if resp.ok else None
|
return xmltodict.parse(resp.content.strip(), postprocessor=process)
|
||||||
|
|
||||||
def _getetsrc(self, url, params=None, language=None):
|
def _getetsrc(self, url, params=None, language=None):
|
||||||
"""Loads a URL using caching, returns an ElementTree of the source
|
"""Loads a URL using caching, returns an ElementTree of the source
|
||||||
"""
|
"""
|
||||||
src = self._loadUrl(url, params=params, language=language)
|
|
||||||
try:
|
try:
|
||||||
# TVDB doesn't sanitize \r (CR) from user input in some fields,
|
|
||||||
# remove it to avoid errors. Change from SickBeard, from will14m
|
|
||||||
return ElementTree.fromstring(src.rstrip("\r")) if src else None
|
|
||||||
except SyntaxError:
|
|
||||||
src = self._loadUrl(url, params=params, language=language)
|
src = self._loadUrl(url, params=params, language=language)
|
||||||
try:
|
src = [src[item] for item in src][0]
|
||||||
return ElementTree.fromstring(src.rstrip("\r")) if src else None
|
except:
|
||||||
except SyntaxError, exceptionmsg:
|
errormsg = "There was an error with the XML retrieved from thetvdb.com:"
|
||||||
errormsg = "There was an error with the XML retrieved from thetvdb.com:\n%s" % (
|
|
||||||
exceptionmsg
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.config['cache_enabled']:
|
if self.config['cache_enabled']:
|
||||||
errormsg += "\nFirst try emptying the cache folder at..\n%s" % (
|
errormsg += "\nFirst try emptying the cache folder at..\n%s" % (
|
||||||
|
@ -599,6 +622,8 @@ class Tvdb:
|
||||||
errormsg += "\nhttp://dbr.lighthouseapp.com/projects/13342-tvdb_api/overview\n"
|
errormsg += "\nhttp://dbr.lighthouseapp.com/projects/13342-tvdb_api/overview\n"
|
||||||
raise tvdb_error(errormsg)
|
raise tvdb_error(errormsg)
|
||||||
|
|
||||||
|
return src
|
||||||
|
|
||||||
def _setItem(self, sid, seas, ep, attrib, value):
|
def _setItem(self, sid, seas, ep, attrib, value):
|
||||||
"""Creates a new episode, creating Show(), Season() and
|
"""Creates a new episode, creating Show(), Season() and
|
||||||
Episode()s as required. Called by _getShowData to populate show
|
Episode()s as required. Called by _getShowData to populate show
|
||||||
|
@ -636,7 +661,6 @@ class Tvdb:
|
||||||
- Replaces & with &
|
- Replaces & with &
|
||||||
- Trailing whitespace
|
- Trailing whitespace
|
||||||
"""
|
"""
|
||||||
if isinstance(data, str):
|
|
||||||
data = data.replace(u"&", u"&")
|
data = data.replace(u"&", u"&")
|
||||||
data = data.strip()
|
data = data.strip()
|
||||||
return data
|
return data
|
||||||
|
@ -649,9 +673,8 @@ class Tvdb:
|
||||||
log().debug("Searching for show %s" % series)
|
log().debug("Searching for show %s" % series)
|
||||||
self.config['params_getSeries']['seriesname'] = series
|
self.config['params_getSeries']['seriesname'] = series
|
||||||
seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
|
seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
|
||||||
allSeries = list(dict((s.tag.lower(), s.text) for s in x.getchildren()) for x in seriesEt)
|
|
||||||
|
|
||||||
return allSeries
|
return [seriesEt[item] for item in seriesEt][0]
|
||||||
|
|
||||||
def _getSeries(self, series):
|
def _getSeries(self, series):
|
||||||
"""This searches TheTVDB.com for the series name,
|
"""This searches TheTVDB.com for the series name,
|
||||||
|
@ -660,6 +683,8 @@ class Tvdb:
|
||||||
BaseUI is used to select the first result.
|
BaseUI is used to select the first result.
|
||||||
"""
|
"""
|
||||||
allSeries = self.search(series)
|
allSeries = self.search(series)
|
||||||
|
if not isinstance(allSeries, list):
|
||||||
|
allSeries = [allSeries]
|
||||||
|
|
||||||
if len(allSeries) == 0:
|
if len(allSeries) == 0:
|
||||||
log().debug('Series result returned zero')
|
log().debug('Series result returned zero')
|
||||||
|
@ -699,13 +724,12 @@ class Tvdb:
|
||||||
log().debug('Getting season banners for %s' % (sid))
|
log().debug('Getting season banners for %s' % (sid))
|
||||||
bannersEt = self._getetsrc(self.config['url_seriesBanner'] % (sid))
|
bannersEt = self._getetsrc(self.config['url_seriesBanner'] % (sid))
|
||||||
banners = {}
|
banners = {}
|
||||||
for cur_banner in bannersEt.findall('Banner'):
|
for cur_banner in bannersEt['banner']:
|
||||||
bid = cur_banner.find('id').text
|
bid = cur_banner['id']
|
||||||
btype = cur_banner.find('BannerType')
|
btype = cur_banner['bannertype']
|
||||||
btype2 = cur_banner.find('BannerType2')
|
btype2 = cur_banner['bannertype2']
|
||||||
if btype is None or btype2 is None:
|
if btype is None or btype2 is None:
|
||||||
continue
|
continue
|
||||||
btype, btype2 = btype.text, btype2.text
|
|
||||||
if not btype in banners:
|
if not btype in banners:
|
||||||
banners[btype] = {}
|
banners[btype] = {}
|
||||||
if not btype2 in banners[btype]:
|
if not btype2 in banners[btype]:
|
||||||
|
@ -713,13 +737,12 @@ class Tvdb:
|
||||||
if not bid in banners[btype][btype2]:
|
if not bid in banners[btype][btype2]:
|
||||||
banners[btype][btype2][bid] = {}
|
banners[btype][btype2][bid] = {}
|
||||||
|
|
||||||
for cur_element in cur_banner.getchildren():
|
for k, v in cur_banner.items():
|
||||||
tag = cur_element.tag.lower()
|
if k is None or v is None:
|
||||||
value = cur_element.text
|
|
||||||
if tag is None or value is None:
|
|
||||||
continue
|
continue
|
||||||
tag, value = tag.lower(), value.lower()
|
|
||||||
banners[btype][btype2][bid][tag] = value
|
k, v = k.lower(), v.lower()
|
||||||
|
banners[btype][btype2][bid][k] = v
|
||||||
|
|
||||||
for k, v in banners[btype][btype2][bid].items():
|
for k, v in banners[btype][btype2][bid].items():
|
||||||
if k.endswith("path"):
|
if k.endswith("path"):
|
||||||
|
@ -758,17 +781,17 @@ class Tvdb:
|
||||||
actorsEt = self._getetsrc(self.config['url_actorsInfo'] % (sid))
|
actorsEt = self._getetsrc(self.config['url_actorsInfo'] % (sid))
|
||||||
|
|
||||||
cur_actors = Actors()
|
cur_actors = Actors()
|
||||||
for curActorItem in actorsEt.findall("Actor"):
|
if actorsEt:
|
||||||
|
for curActorItem in actorsEt["actor"]:
|
||||||
curActor = Actor()
|
curActor = Actor()
|
||||||
for curInfo in curActorItem:
|
for k, v in curActorItem.items():
|
||||||
tag = curInfo.tag.lower()
|
k = k.lower()
|
||||||
value = curInfo.text
|
if v is not None:
|
||||||
if value is not None:
|
if k == "image":
|
||||||
if tag == "image":
|
v = self.config['url_artworkPrefix'] % (v)
|
||||||
value = self.config['url_artworkPrefix'] % (value)
|
|
||||||
else:
|
else:
|
||||||
value = self._cleanData(value)
|
v = self._cleanData(v)
|
||||||
curActor[tag] = value
|
curActor[k] = v
|
||||||
cur_actors.append(curActor)
|
cur_actors.append(curActor)
|
||||||
self._setShowData(sid, '_actors', cur_actors)
|
self._setShowData(sid, '_actors', cur_actors)
|
||||||
|
|
||||||
|
@ -798,24 +821,19 @@ class Tvdb:
|
||||||
self.config['url_seriesInfo'] % (sid, getShowInLanguage)
|
self.config['url_seriesInfo'] % (sid, getShowInLanguage)
|
||||||
)
|
)
|
||||||
|
|
||||||
if seriesInfoEt is None: return False
|
# check and make sure we have data to process and that it contains a series name
|
||||||
for curInfo in seriesInfoEt.findall("Series")[0]:
|
if seriesInfoEt is None or 'seriesname' not in seriesInfoEt['series']:
|
||||||
tag = curInfo.tag.lower()
|
|
||||||
value = curInfo.text
|
|
||||||
|
|
||||||
if tag == 'seriesname' and value is None:
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if value is not None:
|
for k, v in seriesInfoEt['series'].items():
|
||||||
if tag == 'id':
|
if v is not None:
|
||||||
value = int(value)
|
if k in ['banner', 'fanart', 'poster']:
|
||||||
|
v = self.config['url_artworkPrefix'] % (v)
|
||||||
if tag in ['banner', 'fanart', 'poster']:
|
|
||||||
value = self.config['url_artworkPrefix'] % (value)
|
|
||||||
else:
|
else:
|
||||||
value = self._cleanData(value)
|
v = self._cleanData(v)
|
||||||
|
|
||||||
|
self._setShowData(sid, k, v)
|
||||||
|
|
||||||
self._setShowData(sid, tag, value)
|
|
||||||
if seriesSearch:
|
if seriesSearch:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
@ -837,63 +855,37 @@ class Tvdb:
|
||||||
|
|
||||||
epsEt = self._getetsrc(url, language=language)
|
epsEt = self._getetsrc(url, language=language)
|
||||||
|
|
||||||
for cur_ep in epsEt.findall("Episode"):
|
for cur_ep in epsEt["episode"]:
|
||||||
|
|
||||||
if self.config['dvdorder']:
|
if self.config['dvdorder']:
|
||||||
log().debug('Using DVD ordering.')
|
log().debug('Using DVD ordering.')
|
||||||
use_dvd = cur_ep.find('DVD_season').text != None and cur_ep.find('DVD_episodenumber').text != None
|
use_dvd = cur_ep['dvd_season'] != None and cur_ep['dvd_episodenumber'] != None
|
||||||
else:
|
else:
|
||||||
use_dvd = False
|
use_dvd = False
|
||||||
|
|
||||||
if use_dvd:
|
if use_dvd:
|
||||||
elem_seasnum, elem_epno = cur_ep.find('DVD_season'), cur_ep.find('DVD_episodenumber')
|
seasnum, epno = cur_ep['dvd_season'], cur_ep['dvd_episodenumber']
|
||||||
else:
|
else:
|
||||||
elem_seasnum, elem_epno = cur_ep.find('SeasonNumber'), cur_ep.find('EpisodeNumber')
|
seasnum, epno = cur_ep['seasonnumber'], cur_ep['episodenumber']
|
||||||
|
|
||||||
if elem_seasnum is None or elem_epno is None:
|
|
||||||
|
|
||||||
|
if seasnum is None or epno is None:
|
||||||
log().warning("An episode has incomplete season/episode number (season: %r, episode: %r)" % (
|
log().warning("An episode has incomplete season/episode number (season: %r, episode: %r)" % (
|
||||||
elem_seasnum, elem_epno))
|
seasnum, epno))
|
||||||
log().debug(
|
|
||||||
" ".join(
|
|
||||||
"%r is %r" % (child.tag, child.text) for child in cur_ep.getchildren()))
|
|
||||||
# TODO: Should this happen?
|
|
||||||
continue # Skip to next episode
|
continue # Skip to next episode
|
||||||
|
|
||||||
|
|
||||||
# float() is because https://github.com/dbr/tvnamer/issues/95 - should probably be fixed in TVDB data
|
# float() is because https://github.com/dbr/tvnamer/issues/95 - should probably be fixed in TVDB data
|
||||||
seas_no = int(float(elem_seasnum.text))
|
seas_no = int(float(seasnum))
|
||||||
ep_no = int(float(elem_epno.text))
|
ep_no = int(float(epno))
|
||||||
|
|
||||||
useDVD = False
|
for k, v in cur_ep.items():
|
||||||
|
k = k.lower()
|
||||||
|
|
||||||
if (self.config['dvdorder']):
|
if v is not None:
|
||||||
log().debug('DVD Order? Yes')
|
if k == 'filename':
|
||||||
useDVD = (cur_ep.find('DVD_season').text != None and cur_ep.find('DVD_episodenumber').text != None)
|
v = self.config['url_artworkPrefix'] % (v)
|
||||||
else:
|
else:
|
||||||
log().debug('DVD Order? No')
|
v = self._cleanData(v)
|
||||||
|
|
||||||
if (useDVD):
|
self._setItem(sid, seas_no, ep_no, k, v)
|
||||||
log().debug('Use DVD Order? Yes')
|
|
||||||
seas_no = int(cur_ep.find('DVD_season').text)
|
|
||||||
ep_no = int(float(cur_ep.find('DVD_episodenumber').text))
|
|
||||||
else:
|
|
||||||
log().debug('Use DVD Order? No')
|
|
||||||
seas_no = int(cur_ep.find('SeasonNumber').text)
|
|
||||||
ep_no = int(cur_ep.find('EpisodeNumber').text)
|
|
||||||
|
|
||||||
for cur_item in cur_ep.getchildren():
|
|
||||||
tag = cur_item.tag.lower()
|
|
||||||
value = cur_item.text
|
|
||||||
if value is not None:
|
|
||||||
if tag == 'id':
|
|
||||||
value = int(value)
|
|
||||||
|
|
||||||
if tag == 'filename':
|
|
||||||
value = self.config['url_artworkPrefix'] % (value)
|
|
||||||
else:
|
|
||||||
value = self._cleanData(value)
|
|
||||||
self._setItem(sid, seas_no, ep_no, tag, value)
|
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
@ -910,7 +902,8 @@ class Tvdb:
|
||||||
selected_series = self._getSeries(name)
|
selected_series = self._getSeries(name)
|
||||||
if isinstance(selected_series, dict):
|
if isinstance(selected_series, dict):
|
||||||
selected_series = [selected_series]
|
selected_series = [selected_series]
|
||||||
sids = list(int(x['id']) for x in selected_series if self._getShowData(int(x['id']), self.config['language'], seriesSearch=True))
|
sids = list(int(x['id']) for x in selected_series if
|
||||||
|
self._getShowData(int(x['id']), self.config['language'], seriesSearch=True))
|
||||||
self.corrections.update(dict((x['seriesname'], int(x['id'])) for x in selected_series))
|
self.corrections.update(dict((x['seriesname'], int(x['id'])) for x in selected_series))
|
||||||
return sids
|
return sids
|
||||||
|
|
||||||
|
|
|
@ -24,6 +24,7 @@ import logging
|
||||||
import datetime as dt
|
import datetime as dt
|
||||||
import requests
|
import requests
|
||||||
import cachecontrol
|
import cachecontrol
|
||||||
|
import xmltodict
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import xml.etree.cElementTree as ElementTree
|
import xml.etree.cElementTree as ElementTree
|
||||||
|
@ -37,9 +38,11 @@ from tvrage_ui import BaseUI
|
||||||
from tvrage_exceptions import (tvrage_error, tvrage_userabort, tvrage_shownotfound,
|
from tvrage_exceptions import (tvrage_error, tvrage_userabort, tvrage_shownotfound,
|
||||||
tvrage_seasonnotfound, tvrage_episodenotfound, tvrage_attributenotfound)
|
tvrage_seasonnotfound, tvrage_episodenotfound, tvrage_attributenotfound)
|
||||||
|
|
||||||
|
|
||||||
def log():
|
def log():
|
||||||
return logging.getLogger("tvrage_api")
|
return logging.getLogger("tvrage_api")
|
||||||
|
|
||||||
|
|
||||||
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
|
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
|
||||||
"""Retry calling the decorated function using an exponential backoff.
|
"""Retry calling the decorated function using an exponential backoff.
|
||||||
|
|
||||||
|
@ -83,6 +86,7 @@ def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
|
||||||
|
|
||||||
return deco_retry
|
return deco_retry
|
||||||
|
|
||||||
|
|
||||||
class ShowContainer(dict):
|
class ShowContainer(dict):
|
||||||
"""Simple dict that holds a series of Show instances
|
"""Simple dict that holds a series of Show instances
|
||||||
"""
|
"""
|
||||||
|
@ -112,6 +116,7 @@ class ShowContainer(dict):
|
||||||
class Show(dict):
|
class Show(dict):
|
||||||
"""Holds a dict of seasons, and show data.
|
"""Holds a dict of seasons, and show data.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
dict.__init__(self)
|
dict.__init__(self)
|
||||||
self.data = {}
|
self.data = {}
|
||||||
|
@ -261,8 +266,10 @@ class Episode(dict):
|
||||||
if cur_value.find(unicode(term).lower()) > -1:
|
if cur_value.find(unicode(term).lower()) > -1:
|
||||||
return self
|
return self
|
||||||
|
|
||||||
|
|
||||||
class TVRage:
|
class TVRage:
|
||||||
"""Create easy-to-use interface to name of season/episode name"""
|
"""Create easy-to-use interface to name of season/episode name"""
|
||||||
|
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
interactive=False,
|
interactive=False,
|
||||||
select_first=False,
|
select_first=False,
|
||||||
|
@ -390,9 +397,9 @@ class TVRage:
|
||||||
|
|
||||||
# get response from TVRage
|
# get response from TVRage
|
||||||
if self.config['cache_enabled']:
|
if self.config['cache_enabled']:
|
||||||
resp = self.sess.get(url, cache_auto=True, params=params)
|
resp = self.sess.get(url.strip(), cache_auto=True, params=params)
|
||||||
else:
|
else:
|
||||||
resp = requests.get(url, params=params)
|
resp = requests.get(url.strip(), params=params)
|
||||||
|
|
||||||
except requests.HTTPError, e:
|
except requests.HTTPError, e:
|
||||||
raise tvrage_error("HTTP error " + str(e.errno) + " while loading URL " + str(url))
|
raise tvrage_error("HTTP error " + str(e.errno) + " while loading URL " + str(url))
|
||||||
|
@ -403,12 +410,8 @@ class TVRage:
|
||||||
except requests.Timeout, e:
|
except requests.Timeout, e:
|
||||||
raise tvrage_error("Connection timed out " + str(e.message) + " while loading URL " + str(url))
|
raise tvrage_error("Connection timed out " + str(e.message) + " while loading URL " + str(url))
|
||||||
|
|
||||||
return resp.content if resp.ok else None
|
def remap_keys(path, key, value):
|
||||||
|
name_map = {
|
||||||
def _getetsrc(self, url, params=None):
|
|
||||||
"""Loads a URL using caching, returns an ElementTree of the source
|
|
||||||
"""
|
|
||||||
reDict = {
|
|
||||||
'showid': 'id',
|
'showid': 'id',
|
||||||
'showname': 'seriesname',
|
'showname': 'seriesname',
|
||||||
'name': 'seriesname',
|
'name': 'seriesname',
|
||||||
|
@ -418,58 +421,59 @@ class TVRage:
|
||||||
'airtime': 'airs_time',
|
'airtime': 'airs_time',
|
||||||
'airday': 'airs_dayofweek',
|
'airday': 'airs_dayofweek',
|
||||||
'image': 'fanart',
|
'image': 'fanart',
|
||||||
'epnum': 'id',
|
'epnum': 'absolute_number',
|
||||||
'title': 'episodename',
|
'title': 'episodename',
|
||||||
'airdate': 'firstaired',
|
'airdate': 'firstaired',
|
||||||
'screencap': 'filename',
|
'screencap': 'filename',
|
||||||
'seasonnum': 'episodenumber',
|
'seasonnum': 'episodenumber'
|
||||||
}
|
}
|
||||||
|
|
||||||
robj = re.compile('|'.join(reDict.keys()))
|
|
||||||
src = self._loadUrl(url, params)
|
|
||||||
try:
|
try:
|
||||||
# TVRAGE doesn't sanitize \r (CR) from user input in some fields,
|
key = name_map[key.lower()]
|
||||||
# remove it to avoid errors. Change from SickBeard, from will14m
|
except (ValueError, TypeError, KeyError):
|
||||||
xml = ElementTree.fromstring(src.rstrip("\r"))
|
key = key.lower()
|
||||||
tree = ElementTree.ElementTree(xml)
|
|
||||||
for elm in tree.findall('.//*'):
|
# clean up value and do type changes
|
||||||
elm.tag = robj.sub(lambda m: reDict[m.group(0)], elm.tag)
|
if value:
|
||||||
|
if isinstance(value, dict):
|
||||||
|
if key == 'network':
|
||||||
|
value = value['#text']
|
||||||
|
if key == 'genre':
|
||||||
|
value = value['genre']
|
||||||
|
if not isinstance(value, list):
|
||||||
|
value = [value]
|
||||||
|
value = '|' + '|'.join(value) + '|'
|
||||||
|
|
||||||
if elm.tag in 'firstaired':
|
|
||||||
try:
|
try:
|
||||||
if elm.text in "0000-00-00":
|
if key == 'firstaired' and value in "0000-00-00":
|
||||||
elm.text = str(dt.date.fromordinal(1))
|
new_value = str(dt.date.fromordinal(1))
|
||||||
elm.text = re.sub("([-]0{2}){1,}", "", elm.text)
|
new_value = re.sub("([-]0{2}){1,}", "", new_value)
|
||||||
fixDate = parse(elm.text, fuzzy=True).date()
|
fixDate = parse(new_value, fuzzy=True).date()
|
||||||
elm.text = fixDate.strftime("%Y-%m-%d")
|
value = fixDate.strftime("%Y-%m-%d")
|
||||||
|
elif key == 'firstaired':
|
||||||
|
value = parse(value, fuzzy=True).date()
|
||||||
|
value = value.strftime("%Y-%m-%d")
|
||||||
|
|
||||||
|
#if key == 'airs_time':
|
||||||
|
# value = parse(value).time()
|
||||||
|
# value = value.strftime("%I:%M %p")
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
return ElementTree.fromstring(ElementTree.tostring(xml))
|
|
||||||
except SyntaxError:
|
return (key, value)
|
||||||
|
|
||||||
|
if resp.ok:
|
||||||
|
return xmltodict.parse(resp.content.strip(), postprocessor=remap_keys)
|
||||||
|
|
||||||
|
def _getetsrc(self, url, params=None):
|
||||||
|
"""Loads a URL using caching, returns an ElementTree of the source
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
src = self._loadUrl(url, params)
|
src = self._loadUrl(url, params)
|
||||||
try:
|
src = [src[item] for item in src][0]
|
||||||
xml = ElementTree.fromstring(src.rstrip("\r"))
|
|
||||||
tree = ElementTree.ElementTree(xml)
|
|
||||||
for elm in tree.findall('.//*'):
|
|
||||||
elm.tag = robj.sub(lambda m: reDict[m.group(0)], elm.tag)
|
|
||||||
|
|
||||||
if elm.tag in 'firstaired' and elm.text:
|
|
||||||
if elm.text == "0000-00-00":
|
|
||||||
elm.text = str(dt.date.fromordinal(1))
|
|
||||||
try:
|
|
||||||
#month = strptime(match.group('air_month')[:3],'%b').tm_mon
|
|
||||||
#day = re.sub("(st|nd|rd|th)", "", match.group('air_day'))
|
|
||||||
#dtStr = '%s/%s/%s' % (year, month, day)
|
|
||||||
|
|
||||||
fixDate = parse(elm.text, fuzzy=True)
|
|
||||||
elm.text = fixDate.strftime("%Y-%m-%d")
|
|
||||||
except:
|
except:
|
||||||
pass
|
errormsg = "There was an error with the XML retrieved from tvrage.com"
|
||||||
return ElementTree.fromstring(ElementTree.tostring(xml))
|
|
||||||
except SyntaxError, exceptionmsg:
|
|
||||||
errormsg = "There was an error with the XML retrieved from tvrage.com:\n%s" % (
|
|
||||||
exceptionmsg
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.config['cache_enabled']:
|
if self.config['cache_enabled']:
|
||||||
errormsg += "\nFirst try emptying the cache folder at..\n%s" % (
|
errormsg += "\nFirst try emptying the cache folder at..\n%s" % (
|
||||||
|
@ -479,6 +483,8 @@ class TVRage:
|
||||||
errormsg += "\nIf this does not resolve the issue, please try again later. If the error persists, report a bug on\n"
|
errormsg += "\nIf this does not resolve the issue, please try again later. If the error persists, report a bug on\n"
|
||||||
raise tvrage_error(errormsg)
|
raise tvrage_error(errormsg)
|
||||||
|
|
||||||
|
return src
|
||||||
|
|
||||||
def _setItem(self, sid, seas, ep, attrib, value):
|
def _setItem(self, sid, seas, ep, attrib, value):
|
||||||
"""Creates a new episode, creating Show(), Season() and
|
"""Creates a new episode, creating Show(), Season() and
|
||||||
Episode()s as required. Called by _getShowData to populate show
|
Episode()s as required. Called by _getShowData to populate show
|
||||||
|
@ -516,7 +522,6 @@ class TVRage:
|
||||||
- Replaces & with &
|
- Replaces & with &
|
||||||
- Trailing whitespace
|
- Trailing whitespace
|
||||||
"""
|
"""
|
||||||
if isinstance(data, str):
|
|
||||||
data = data.replace(u"&", u"&")
|
data = data.replace(u"&", u"&")
|
||||||
data = data.strip()
|
data = data.strip()
|
||||||
return data
|
return data
|
||||||
|
@ -529,9 +534,8 @@ class TVRage:
|
||||||
log().debug("Searching for show %s" % series)
|
log().debug("Searching for show %s" % series)
|
||||||
self.config['params_getSeries']['show'] = series
|
self.config['params_getSeries']['show'] = series
|
||||||
seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
|
seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
|
||||||
allSeries = list(dict((s.tag.lower(),s.text) for s in x.getchildren()) for x in seriesEt)
|
|
||||||
|
|
||||||
return allSeries
|
return [seriesEt[item] for item in seriesEt][0]
|
||||||
|
|
||||||
def _getSeries(self, series):
|
def _getSeries(self, series):
|
||||||
"""This searches tvrage.com for the series name,
|
"""This searches tvrage.com for the series name,
|
||||||
|
@ -540,6 +544,8 @@ class TVRage:
|
||||||
BaseUI is used to select the first result.
|
BaseUI is used to select the first result.
|
||||||
"""
|
"""
|
||||||
allSeries = self.search(series)
|
allSeries = self.search(series)
|
||||||
|
if not isinstance(allSeries, list):
|
||||||
|
allSeries = [allSeries]
|
||||||
|
|
||||||
if len(allSeries) == 0:
|
if len(allSeries) == 0:
|
||||||
log().debug('Series result returned zero')
|
log().debug('Series result returned zero')
|
||||||
|
@ -568,60 +574,46 @@ class TVRage:
|
||||||
self.config['params_seriesInfo']
|
self.config['params_seriesInfo']
|
||||||
)
|
)
|
||||||
|
|
||||||
if seriesInfoEt is None: return False
|
# check and make sure we have data to process and that it contains a series name
|
||||||
for curInfo in seriesInfoEt:
|
if seriesInfoEt is None or 'seriesname' not in seriesInfoEt:
|
||||||
tag = curInfo.tag.lower()
|
|
||||||
value = curInfo.text
|
|
||||||
|
|
||||||
if tag == 'seriesname' and value is None:
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if tag == 'id':
|
for k, v in seriesInfoEt.items():
|
||||||
value = int(value)
|
if v is not None:
|
||||||
|
v = self._cleanData(v)
|
||||||
|
|
||||||
if value is not None:
|
self._setShowData(sid, k, v)
|
||||||
value = self._cleanData(value)
|
|
||||||
|
|
||||||
self._setShowData(sid, tag, value)
|
# series search ends here
|
||||||
if seriesSearch: return True
|
if seriesSearch:
|
||||||
|
return True
|
||||||
try:
|
|
||||||
# Parse genre data
|
|
||||||
log().debug('Getting genres of %s' % (sid))
|
|
||||||
for genre in seriesInfoEt.find('genres'):
|
|
||||||
tag = genre.tag.lower()
|
|
||||||
|
|
||||||
value = genre.text
|
|
||||||
if value is not None:
|
|
||||||
value = self._cleanData(value)
|
|
||||||
|
|
||||||
self._setShowData(sid, tag, value)
|
|
||||||
except Exception:
|
|
||||||
log().debug('No genres for %s' % (sid))
|
|
||||||
|
|
||||||
# Parse episode data
|
# Parse episode data
|
||||||
log().debug('Getting all episodes of %s' % (sid))
|
log().debug('Getting all episodes of %s' % (sid))
|
||||||
|
|
||||||
self.config['params_epInfo']['sid'] = sid
|
self.config['params_epInfo']['sid'] = sid
|
||||||
epsEt = self._getetsrc(self.config['url_epInfo'], self.config['params_epInfo'])
|
epsEt = self._getetsrc(self.config['url_epInfo'], self.config['params_epInfo'])
|
||||||
for cur_list in epsEt.findall("Episodelist"):
|
|
||||||
for cur_seas in cur_list:
|
for season in epsEt['Episodelist']['Season']:
|
||||||
try:
|
episodes = season['episode']
|
||||||
seas_no = int(cur_seas.attrib['no'])
|
if not isinstance(episodes, list):
|
||||||
for cur_ep in cur_seas:
|
episodes = [episodes]
|
||||||
ep_no = int(cur_ep.find('episodenumber').text)
|
|
||||||
|
for episode in episodes:
|
||||||
|
seas_no = int(season['@no'])
|
||||||
|
ep_no = int(episode['episodenumber'])
|
||||||
self._setItem(sid, seas_no, ep_no, 'seasonnumber', seas_no)
|
self._setItem(sid, seas_no, ep_no, 'seasonnumber', seas_no)
|
||||||
for cur_item in cur_ep:
|
|
||||||
tag = cur_item.tag.lower()
|
|
||||||
|
|
||||||
value = cur_item.text
|
for k,v in episode.items():
|
||||||
if value is not None:
|
try:
|
||||||
if tag == 'id':
|
k = k.lower()
|
||||||
value = int(value)
|
if v is not None:
|
||||||
|
if k == 'link':
|
||||||
|
v = v.rsplit('/', 1)[1]
|
||||||
|
k = 'id'
|
||||||
|
v = self._cleanData(v)
|
||||||
|
|
||||||
value = self._cleanData(value)
|
self._setItem(sid, seas_no, ep_no, k, v)
|
||||||
|
|
||||||
self._setItem(sid, seas_no, ep_no, tag, value)
|
|
||||||
except:
|
except:
|
||||||
continue
|
continue
|
||||||
return True
|
return True
|
||||||
|
@ -673,11 +665,13 @@ def main():
|
||||||
grabs an episode name interactively.
|
grabs an episode name interactively.
|
||||||
"""
|
"""
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
logging.basicConfig(level=logging.DEBUG)
|
logging.basicConfig(level=logging.DEBUG)
|
||||||
|
|
||||||
tvrage_instance = TVRage(cache=False)
|
tvrage_instance = TVRage(cache=False)
|
||||||
print tvrage_instance['Lost']['seriesname']
|
print tvrage_instance['Lost']['seriesname']
|
||||||
print tvrage_instance['Lost'][1][4]['episodename']
|
print tvrage_instance['Lost'][1][4]['episodename']
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|
359
lib/xmltodict.py
Normal file
|
@ -0,0 +1,359 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
"Makes working with XML feel like you are working with JSON"
|
||||||
|
|
||||||
|
from xml.parsers import expat
|
||||||
|
from xml.sax.saxutils import XMLGenerator
|
||||||
|
from xml.sax.xmlreader import AttributesImpl
|
||||||
|
try: # pragma no cover
|
||||||
|
from cStringIO import StringIO
|
||||||
|
except ImportError: # pragma no cover
|
||||||
|
try:
|
||||||
|
from StringIO import StringIO
|
||||||
|
except ImportError:
|
||||||
|
from io import StringIO
|
||||||
|
try: # pragma no cover
|
||||||
|
from collections import OrderedDict
|
||||||
|
except ImportError: # pragma no cover
|
||||||
|
try:
|
||||||
|
from ordereddict import OrderedDict
|
||||||
|
except ImportError:
|
||||||
|
OrderedDict = dict
|
||||||
|
|
||||||
|
try: # pragma no cover
|
||||||
|
_basestring = basestring
|
||||||
|
except NameError: # pragma no cover
|
||||||
|
_basestring = str
|
||||||
|
try: # pragma no cover
|
||||||
|
_unicode = unicode
|
||||||
|
except NameError: # pragma no cover
|
||||||
|
_unicode = str
|
||||||
|
|
||||||
|
__author__ = 'Martin Blech'
|
||||||
|
__version__ = '0.9.0'
|
||||||
|
__license__ = 'MIT'
|
||||||
|
|
||||||
|
|
||||||
|
class ParsingInterrupted(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class _DictSAXHandler(object):
|
||||||
|
def __init__(self,
|
||||||
|
item_depth=0,
|
||||||
|
item_callback=lambda *args: True,
|
||||||
|
xml_attribs=True,
|
||||||
|
attr_prefix='@',
|
||||||
|
cdata_key='#text',
|
||||||
|
force_cdata=False,
|
||||||
|
cdata_separator='',
|
||||||
|
postprocessor=None,
|
||||||
|
dict_constructor=OrderedDict,
|
||||||
|
strip_whitespace=True,
|
||||||
|
namespace_separator=':',
|
||||||
|
namespaces=None):
|
||||||
|
self.path = []
|
||||||
|
self.stack = []
|
||||||
|
self.data = None
|
||||||
|
self.item = None
|
||||||
|
self.item_depth = item_depth
|
||||||
|
self.xml_attribs = xml_attribs
|
||||||
|
self.item_callback = item_callback
|
||||||
|
self.attr_prefix = attr_prefix
|
||||||
|
self.cdata_key = cdata_key
|
||||||
|
self.force_cdata = force_cdata
|
||||||
|
self.cdata_separator = cdata_separator
|
||||||
|
self.postprocessor = postprocessor
|
||||||
|
self.dict_constructor = dict_constructor
|
||||||
|
self.strip_whitespace = strip_whitespace
|
||||||
|
self.namespace_separator = namespace_separator
|
||||||
|
self.namespaces = namespaces
|
||||||
|
|
||||||
|
def _build_name(self, full_name):
|
||||||
|
if not self.namespaces:
|
||||||
|
return full_name
|
||||||
|
i = full_name.rfind(self.namespace_separator)
|
||||||
|
if i == -1:
|
||||||
|
return full_name
|
||||||
|
namespace, name = full_name[:i], full_name[i+1:]
|
||||||
|
short_namespace = self.namespaces.get(namespace, namespace)
|
||||||
|
if not short_namespace:
|
||||||
|
return name
|
||||||
|
else:
|
||||||
|
return self.namespace_separator.join((short_namespace, name))
|
||||||
|
|
||||||
|
def _attrs_to_dict(self, attrs):
|
||||||
|
if isinstance(attrs, dict):
|
||||||
|
return attrs
|
||||||
|
return self.dict_constructor(zip(attrs[0::2], attrs[1::2]))
|
||||||
|
|
||||||
|
def startElement(self, full_name, attrs):
|
||||||
|
name = self._build_name(full_name)
|
||||||
|
attrs = self._attrs_to_dict(attrs)
|
||||||
|
self.path.append((name, attrs or None))
|
||||||
|
if len(self.path) > self.item_depth:
|
||||||
|
self.stack.append((self.item, self.data))
|
||||||
|
if self.xml_attribs:
|
||||||
|
attrs = self.dict_constructor(
|
||||||
|
(self.attr_prefix+key, value)
|
||||||
|
for (key, value) in attrs.items())
|
||||||
|
else:
|
||||||
|
attrs = None
|
||||||
|
self.item = attrs or None
|
||||||
|
self.data = None
|
||||||
|
|
||||||
|
def endElement(self, full_name):
|
||||||
|
name = self._build_name(full_name)
|
||||||
|
if len(self.path) == self.item_depth:
|
||||||
|
item = self.item
|
||||||
|
if item is None:
|
||||||
|
item = self.data
|
||||||
|
should_continue = self.item_callback(self.path, item)
|
||||||
|
if not should_continue:
|
||||||
|
raise ParsingInterrupted()
|
||||||
|
if len(self.stack):
|
||||||
|
item, data = self.item, self.data
|
||||||
|
self.item, self.data = self.stack.pop()
|
||||||
|
if self.strip_whitespace and data is not None:
|
||||||
|
data = data.strip() or None
|
||||||
|
if data and self.force_cdata and item is None:
|
||||||
|
item = self.dict_constructor()
|
||||||
|
if item is not None:
|
||||||
|
if data:
|
||||||
|
self.push_data(item, self.cdata_key, data)
|
||||||
|
self.item = self.push_data(self.item, name, item)
|
||||||
|
else:
|
||||||
|
self.item = self.push_data(self.item, name, data)
|
||||||
|
else:
|
||||||
|
self.item = self.data = None
|
||||||
|
self.path.pop()
|
||||||
|
|
||||||
|
def characters(self, data):
|
||||||
|
if not self.data:
|
||||||
|
self.data = data
|
||||||
|
else:
|
||||||
|
self.data += self.cdata_separator + data
|
||||||
|
|
||||||
|
def push_data(self, item, key, data):
|
||||||
|
if self.postprocessor is not None:
|
||||||
|
result = self.postprocessor(self.path, key, data)
|
||||||
|
if result is None:
|
||||||
|
return item
|
||||||
|
key, data = result
|
||||||
|
if item is None:
|
||||||
|
item = self.dict_constructor()
|
||||||
|
try:
|
||||||
|
value = item[key]
|
||||||
|
if isinstance(value, list):
|
||||||
|
value.append(data)
|
||||||
|
else:
|
||||||
|
item[key] = [value, data]
|
||||||
|
except KeyError:
|
||||||
|
item[key] = data
|
||||||
|
return item
|
||||||
|
|
||||||
|
|
||||||
|
def parse(xml_input, encoding=None, expat=expat, process_namespaces=False,
|
||||||
|
namespace_separator=':', **kwargs):
|
||||||
|
"""Parse the given XML input and convert it into a dictionary.
|
||||||
|
|
||||||
|
`xml_input` can either be a `string` or a file-like object.
|
||||||
|
|
||||||
|
If `xml_attribs` is `True`, element attributes are put in the dictionary
|
||||||
|
among regular child elements, using `@` as a prefix to avoid collisions. If
|
||||||
|
set to `False`, they are just ignored.
|
||||||
|
|
||||||
|
Simple example::
|
||||||
|
|
||||||
|
>>> import xmltodict
|
||||||
|
>>> doc = xmltodict.parse(\"\"\"
|
||||||
|
... <a prop="x">
|
||||||
|
... <b>1</b>
|
||||||
|
... <b>2</b>
|
||||||
|
... </a>
|
||||||
|
... \"\"\")
|
||||||
|
>>> doc['a']['@prop']
|
||||||
|
u'x'
|
||||||
|
>>> doc['a']['b']
|
||||||
|
[u'1', u'2']
|
||||||
|
|
||||||
|
If `item_depth` is `0`, the function returns a dictionary for the root
|
||||||
|
element (default behavior). Otherwise, it calls `item_callback` every time
|
||||||
|
an item at the specified depth is found and returns `None` in the end
|
||||||
|
(streaming mode).
|
||||||
|
|
||||||
|
The callback function receives two parameters: the `path` from the document
|
||||||
|
root to the item (name-attribs pairs), and the `item` (dict). If the
|
||||||
|
callback's return value is false-ish, parsing will be stopped with the
|
||||||
|
:class:`ParsingInterrupted` exception.
|
||||||
|
|
||||||
|
Streaming example::
|
||||||
|
|
||||||
|
>>> def handle(path, item):
|
||||||
|
... print 'path:%s item:%s' % (path, item)
|
||||||
|
... return True
|
||||||
|
...
|
||||||
|
>>> xmltodict.parse(\"\"\"
|
||||||
|
... <a prop="x">
|
||||||
|
... <b>1</b>
|
||||||
|
... <b>2</b>
|
||||||
|
... </a>\"\"\", item_depth=2, item_callback=handle)
|
||||||
|
path:[(u'a', {u'prop': u'x'}), (u'b', None)] item:1
|
||||||
|
path:[(u'a', {u'prop': u'x'}), (u'b', None)] item:2
|
||||||
|
|
||||||
|
The optional argument `postprocessor` is a function that takes `path`,
|
||||||
|
`key` and `value` as positional arguments and returns a new `(key, value)`
|
||||||
|
pair where both `key` and `value` may have changed. Usage example::
|
||||||
|
|
||||||
|
>>> def postprocessor(path, key, value):
|
||||||
|
... try:
|
||||||
|
... return key + ':int', int(value)
|
||||||
|
... except (ValueError, TypeError):
|
||||||
|
... return key, value
|
||||||
|
>>> xmltodict.parse('<a><b>1</b><b>2</b><b>x</b></a>',
|
||||||
|
... postprocessor=postprocessor)
|
||||||
|
OrderedDict([(u'a', OrderedDict([(u'b:int', [1, 2]), (u'b', u'x')]))])
|
||||||
|
|
||||||
|
You can pass an alternate version of `expat` (such as `defusedexpat`) by
|
||||||
|
using the `expat` parameter. E.g:
|
||||||
|
|
||||||
|
>>> import defusedexpat
|
||||||
|
>>> xmltodict.parse('<a>hello</a>', expat=defusedexpat.pyexpat)
|
||||||
|
OrderedDict([(u'a', u'hello')])
|
||||||
|
|
||||||
|
"""
|
||||||
|
handler = _DictSAXHandler(namespace_separator=namespace_separator,
|
||||||
|
**kwargs)
|
||||||
|
if isinstance(xml_input, _unicode):
|
||||||
|
if not encoding:
|
||||||
|
encoding = 'utf-8'
|
||||||
|
xml_input = xml_input.encode(encoding)
|
||||||
|
if not process_namespaces:
|
||||||
|
namespace_separator = None
|
||||||
|
parser = expat.ParserCreate(
|
||||||
|
encoding,
|
||||||
|
namespace_separator
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
parser.ordered_attributes = True
|
||||||
|
except AttributeError:
|
||||||
|
# Jython's expat does not support ordered_attributes
|
||||||
|
pass
|
||||||
|
parser.StartElementHandler = handler.startElement
|
||||||
|
parser.EndElementHandler = handler.endElement
|
||||||
|
parser.CharacterDataHandler = handler.characters
|
||||||
|
parser.buffer_text = True
|
||||||
|
try:
|
||||||
|
parser.ParseFile(xml_input)
|
||||||
|
except (TypeError, AttributeError):
|
||||||
|
parser.Parse(xml_input, True)
|
||||||
|
return handler.item
|
||||||
|
|
||||||
|
|
||||||
|
def _emit(key, value, content_handler,
|
||||||
|
attr_prefix='@',
|
||||||
|
cdata_key='#text',
|
||||||
|
depth=0,
|
||||||
|
preprocessor=None,
|
||||||
|
pretty=False,
|
||||||
|
newl='\n',
|
||||||
|
indent='\t'):
|
||||||
|
if preprocessor is not None:
|
||||||
|
result = preprocessor(key, value)
|
||||||
|
if result is None:
|
||||||
|
return
|
||||||
|
key, value = result
|
||||||
|
if not isinstance(value, (list, tuple)):
|
||||||
|
value = [value]
|
||||||
|
if depth == 0 and len(value) > 1:
|
||||||
|
raise ValueError('document with multiple roots')
|
||||||
|
for v in value:
|
||||||
|
if v is None:
|
||||||
|
v = OrderedDict()
|
||||||
|
elif not isinstance(v, dict):
|
||||||
|
v = _unicode(v)
|
||||||
|
if isinstance(v, _basestring):
|
||||||
|
v = OrderedDict(((cdata_key, v),))
|
||||||
|
cdata = None
|
||||||
|
attrs = OrderedDict()
|
||||||
|
children = []
|
||||||
|
for ik, iv in v.items():
|
||||||
|
if ik == cdata_key:
|
||||||
|
cdata = iv
|
||||||
|
continue
|
||||||
|
if ik.startswith(attr_prefix):
|
||||||
|
attrs[ik[len(attr_prefix):]] = iv
|
||||||
|
continue
|
||||||
|
children.append((ik, iv))
|
||||||
|
if pretty:
|
||||||
|
content_handler.ignorableWhitespace(depth * indent)
|
||||||
|
content_handler.startElement(key, AttributesImpl(attrs))
|
||||||
|
if pretty and children:
|
||||||
|
content_handler.ignorableWhitespace(newl)
|
||||||
|
for child_key, child_value in children:
|
||||||
|
_emit(child_key, child_value, content_handler,
|
||||||
|
attr_prefix, cdata_key, depth+1, preprocessor,
|
||||||
|
pretty, newl, indent)
|
||||||
|
if cdata is not None:
|
||||||
|
content_handler.characters(cdata)
|
||||||
|
if pretty and children:
|
||||||
|
content_handler.ignorableWhitespace(depth * indent)
|
||||||
|
content_handler.endElement(key)
|
||||||
|
if pretty and depth:
|
||||||
|
content_handler.ignorableWhitespace(newl)
|
||||||
|
|
||||||
|
|
||||||
|
def unparse(input_dict, output=None, encoding='utf-8', full_document=True,
|
||||||
|
**kwargs):
|
||||||
|
"""Emit an XML document for the given `input_dict` (reverse of `parse`).
|
||||||
|
|
||||||
|
The resulting XML document is returned as a string, but if `output` (a
|
||||||
|
file-like object) is specified, it is written there instead.
|
||||||
|
|
||||||
|
Dictionary keys prefixed with `attr_prefix` (default=`'@'`) are interpreted
|
||||||
|
as XML node attributes, whereas keys equal to `cdata_key`
|
||||||
|
(default=`'#text'`) are treated as character data.
|
||||||
|
|
||||||
|
The `pretty` parameter (default=`False`) enables pretty-printing. In this
|
||||||
|
mode, lines are terminated with `'\n'` and indented with `'\t'`, but this
|
||||||
|
can be customized with the `newl` and `indent` parameters.
|
||||||
|
|
||||||
|
"""
|
||||||
|
((key, value),) = input_dict.items()
|
||||||
|
must_return = False
|
||||||
|
if output is None:
|
||||||
|
output = StringIO()
|
||||||
|
must_return = True
|
||||||
|
content_handler = XMLGenerator(output, encoding)
|
||||||
|
if full_document:
|
||||||
|
content_handler.startDocument()
|
||||||
|
_emit(key, value, content_handler, **kwargs)
|
||||||
|
if full_document:
|
||||||
|
content_handler.endDocument()
|
||||||
|
if must_return:
|
||||||
|
value = output.getvalue()
|
||||||
|
try: # pragma no cover
|
||||||
|
value = value.decode(encoding)
|
||||||
|
except AttributeError: # pragma no cover
|
||||||
|
pass
|
||||||
|
return value
|
||||||
|
|
||||||
|
if __name__ == '__main__': # pragma: no cover
|
||||||
|
import sys
|
||||||
|
import marshal
|
||||||
|
|
||||||
|
(item_depth,) = sys.argv[1:]
|
||||||
|
item_depth = int(item_depth)
|
||||||
|
|
||||||
|
def handle_item(path, item):
|
||||||
|
marshal.dump((path, item), sys.stdout)
|
||||||
|
return True
|
||||||
|
|
||||||
|
try:
|
||||||
|
root = parse(sys.stdin,
|
||||||
|
item_depth=item_depth,
|
||||||
|
item_callback=handle_item,
|
||||||
|
dict_constructor=dict)
|
||||||
|
if item_depth == 0:
|
||||||
|
handle_item([], root)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
pass
|
|
@ -1,34 +0,0 @@
|
||||||
SickRage - Failed Downloads
|
|
||||||
=====
|
|
||||||
|
|
||||||
*SickRage Failed Downloads is a fork from SickRage - ThePirateBay.*
|
|
||||||
|
|
||||||
Features added:
|
|
||||||
|
|
||||||
* Failed Download Handling (Tolstyak)
|
|
||||||
* User Interface Toggle for Failed Download Handling
|
|
||||||
* Treat directories where no video files were found as if they were failed downloads
|
|
||||||
|
|
||||||
SickRage - Failed Downloads is almost a direct copy of the following project:
|
|
||||||
|
|
||||||
* [SickRage - Tolstyak's branch][tolstyak]
|
|
||||||
|
|
||||||
These features can be enabled in the Post Processing Configuration menu. It must be configured as specified below.
|
|
||||||
|
|
||||||
## The original readme from Tolstyak is:
|
|
||||||
|
|
||||||
The primary new feature of my branch is the handling of failed downloads.
|
|
||||||
If you're using SABnzbd, set the following options under Switches:
|
|
||||||
|
|
||||||
* Disable Abort jobs that cannot be completed
|
|
||||||
* Disable Post-Process Only Verified Jobs
|
|
||||||
|
|
||||||
And under Special:
|
|
||||||
|
|
||||||
* Enable empty_postproc
|
|
||||||
|
|
||||||
## Bugs
|
|
||||||
|
|
||||||
Report bugs in github
|
|
||||||
|
|
||||||
[tolstyak]: https://github.com/tolstyak/sick-beard
|
|
29
readme.md
|
@ -1,39 +1,32 @@
|
||||||
SickRage
|
SickRage
|
||||||
=====
|
=====
|
||||||
|
|
||||||
|
SickRage is a Video File Manager for TV Shows, It watches for new episodes of your favorite shows and when they are posted it does its magic.
|
||||||
|
|
||||||
*SickRage is currently in beta release stage. There may be severe bugs in it and at any given time it may not work at all.*
|
*SickRage is currently in beta release stage. There may be severe bugs in it and at any given time it may not work at all.*
|
||||||
|
|
||||||
There are currently a lot of changes that we're working on, which affect the very core of how SickRage works. We're doing this to lay the groundwork
|
There are currently a lot of changes that we're working on, which affect the very core of how SickRage works. We're doing this to lay the groundwork
|
||||||
for making SickRage seriously more awesome, scalable and resource-friendly than it already is. We know it will be a bumpy ride, but we also know
|
for making SickRage seriously more awesome, scalable and resource-friendly than it already is.
|
||||||
that SickRage deserves to grow from a USENET grabber into a media independant grabber of anything TV show related. And beyond.
|
|
||||||
|
|
||||||
While we're doing this, please expect SickRage do strange things, or maybe even not work at all. In any case, we need your help. If you see SickRage behaving weird, check if someone has reported it, and if not, open a new issue. There is little to no use to report "software should be stable". We will focus on that later, not now.
|
While we're doing this, please expect SickRage do strange things, or maybe even not work at all. In any case, we need your help. If you see SickRage behaving weird, check if someone has reported it, and if not, open a new issue. There is little to no use to report "software should be stable". We will focus on that later, not now.
|
||||||
|
|
||||||
SickRage is a PVR for torrent and newsgroup users. It watches for new episodes of your favorite shows and when they are posted it downloads them, sorts and renames them, and optionally generates metadata for them. It retrieves show information from theTVDB.com and TVRage.com.
|
|
||||||
|
|
||||||
!!! Please before using this with your existing database (sickbeard.db) please make a backup copy of it and delete any other database files such as cache.db and failed.db if present, we HIGHLY recommend starting out with no database files at all to make this a fresh start but the choice is at your own risk !!!
|
!!! Please before using this with your existing database (sickbeard.db) please make a backup copy of it and delete any other database files such as cache.db and failed.db if present, we HIGHLY recommend starting out with no database files at all to make this a fresh start but the choice is at your own risk !!!
|
||||||
|
|
||||||
FEATURES:
|
FEATURES:
|
||||||
- automatically retrieves new episode torrent or nzb files
|
|
||||||
- can scan your existing library and then download any old seasons or episodes you're missing
|
|
||||||
- can watch for better versions and upgrade your existing episodes (to from TV DVD/BluRay for example)
|
|
||||||
- XBMC library updates, poster/fanart downloads, and NFO/TBN generation
|
- XBMC library updates, poster/fanart downloads, and NFO/TBN generation
|
||||||
- configurable episode renaming
|
- configurable episode renaming
|
||||||
- sends NZBs directly to SABnzbd, prioritizes and categorizes them properly
|
|
||||||
- available for any platform, uses simple HTTP interface
|
- available for any platform, uses simple HTTP interface
|
||||||
- can notify XBMC, Growl, or Twitter when new episodes are downloaded
|
- can notify XBMC, Growl, or Twitter when new episodes are available
|
||||||
- specials and double episode support
|
- specials and double episode support
|
||||||
- Automatic XEM Scene Numbering/Naming for seasons/episodes
|
- Automatic XEM Scene Numbering/Naming for seasons/episodes
|
||||||
- Failed handling now attempts to snatch a different release and excludes failed releases from future snatch attempts.
|
- Episode Status Manager now allows for mass failing seasons/episodes to force retrying.
|
||||||
- Episode Status Manager now allows for mass failing seasons/episodes to force retrying to download new releases.
|
|
||||||
- DVD Order numbering for returning the results in DVD order instead of Air-By-Date order.
|
- DVD Order numbering for returning the results in DVD order instead of Air-By-Date order.
|
||||||
- Improved Failed handling code for both NZB and Torrent downloads.
|
- Improved Failed handling code for shows.
|
||||||
- DupeKey/DupeScore for NZBGet 12+
|
- DupeKey/DupeScore for NZBGet 12+
|
||||||
- Searches both TheTVDB.com and TVRage.com for shows, seasons, episodes
|
- Searches both TheTVDB.com, TVRage.com and AniDB.net for shows, seasons, episodes
|
||||||
- Importing of existing video files now allows you to choose which indexer you wish to have SickBeard download its show info from.
|
- Importing of existing video files now allows you to choose which indexer you wish to have SickBeard search its show info from.
|
||||||
- Your tvshow.nfo files are now tagged with a indexer key so that SickBeard can easily tell if the shows info comes from TheTVDB or TVRage.
|
- Your tvshow.nfo files are now tagged with a indexer key so that SickBeard can easily tell if the shows info comes from TheTVDB or TVRage.
|
||||||
- Failed download handling has been improved now for both NZB and Torrents.
|
- Sports shows are now able to be searched for..
|
||||||
- Sports shows are now able to be searched for and downloaded by both NZB and Torrent providers.
|
|
||||||
|
|
||||||
## Dependencies
|
## Dependencies
|
||||||
|
|
||||||
|
@ -41,8 +34,8 @@ To run SickRage from source you will need Python 2.6+ and Cheetah 2.1.0+.
|
||||||
|
|
||||||
## Forums
|
## Forums
|
||||||
|
|
||||||
Any questions or setup info your looking for can be found at out forums http://www.sickrage.tv
|
Any questions or setup info your looking for can be found at out forums https://www.sickrage.tv
|
||||||
<br>
|
<br>
|
||||||
If you find a bug please report at our forums http://sickrage.tv/forums/forum/help-support/bug-issue-reports
|
If you find a bug please report at our forums https://sickrage.tv/forums/forum/help-support/bug-issue-reports
|
||||||
<br>
|
<br>
|
||||||
Be sure to provide a sickrage log in debug mode where is the error evidence or it'll never get fixed.
|
Be sure to provide a sickrage log in debug mode where is the error evidence or it'll never get fixed.
|
||||||
|
|
|
@ -20,7 +20,7 @@ from __future__ import with_statement
|
||||||
|
|
||||||
import cherrypy
|
import cherrypy
|
||||||
import webbrowser
|
import webbrowser
|
||||||
import sqlite3
|
import time
|
||||||
import datetime
|
import datetime
|
||||||
import socket
|
import socket
|
||||||
import os, sys, subprocess, re
|
import os, sys, subprocess, re
|
||||||
|
@ -32,7 +32,7 @@ from threading import Lock
|
||||||
from sickbeard import providers, metadata, config
|
from sickbeard import providers, metadata, config
|
||||||
from sickbeard.providers.generic import GenericProvider
|
from sickbeard.providers.generic import GenericProvider
|
||||||
from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, publichd, iptorrents, \
|
from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, publichd, iptorrents, \
|
||||||
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd
|
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub
|
||||||
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
|
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
|
||||||
naming_ep_type
|
naming_ep_type
|
||||||
from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
|
from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
|
||||||
|
@ -41,6 +41,7 @@ from sickbeard import helpers, db, exceptions, show_queue, search_queue, schedul
|
||||||
from sickbeard import logger
|
from sickbeard import logger
|
||||||
from sickbeard import naming
|
from sickbeard import naming
|
||||||
from sickbeard import dailysearcher
|
from sickbeard import dailysearcher
|
||||||
|
from sickbeard import maintenance
|
||||||
from sickbeard import scene_numbering, scene_exceptions, name_cache
|
from sickbeard import scene_numbering, scene_exceptions, name_cache
|
||||||
from indexers.indexer_api import indexerApi
|
from indexers.indexer_api import indexerApi
|
||||||
from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, indexer_episodenotfound, \
|
from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, indexer_episodenotfound, \
|
||||||
|
@ -77,6 +78,7 @@ PIDFILE = ''
|
||||||
|
|
||||||
DAEMON = None
|
DAEMON = None
|
||||||
|
|
||||||
|
maintenanceScheduler = None
|
||||||
dailySearchScheduler = None
|
dailySearchScheduler = None
|
||||||
backlogSearchScheduler = None
|
backlogSearchScheduler = None
|
||||||
showUpdateScheduler = None
|
showUpdateScheduler = None
|
||||||
|
@ -159,6 +161,8 @@ FLATTEN_FOLDERS_DEFAULT = None
|
||||||
SUBTITLES_DEFAULT = None
|
SUBTITLES_DEFAULT = None
|
||||||
INDEXER_DEFAULT = None
|
INDEXER_DEFAULT = None
|
||||||
INDEXER_TIMEOUT = None
|
INDEXER_TIMEOUT = None
|
||||||
|
SCENE_DEFAULT = None
|
||||||
|
ANIME_DEFAULT = None
|
||||||
PROVIDER_ORDER = []
|
PROVIDER_ORDER = []
|
||||||
|
|
||||||
NAMING_MULTI_EP = None
|
NAMING_MULTI_EP = None
|
||||||
|
@ -169,6 +173,7 @@ NAMING_SPORTS_PATTERN = None
|
||||||
NAMING_CUSTOM_SPORTS = None
|
NAMING_CUSTOM_SPORTS = None
|
||||||
NAMING_FORCE_FOLDERS = False
|
NAMING_FORCE_FOLDERS = False
|
||||||
NAMING_STRIP_YEAR = None
|
NAMING_STRIP_YEAR = None
|
||||||
|
NAMING_ANIME = None
|
||||||
|
|
||||||
USE_NZBS = None
|
USE_NZBS = None
|
||||||
USE_TORRENTS = None
|
USE_TORRENTS = None
|
||||||
|
@ -320,6 +325,14 @@ NMJ_HOST = None
|
||||||
NMJ_DATABASE = None
|
NMJ_DATABASE = None
|
||||||
NMJ_MOUNT = None
|
NMJ_MOUNT = None
|
||||||
|
|
||||||
|
ANIMESUPPORT = False
|
||||||
|
USE_ANIDB = False
|
||||||
|
ANIDB_USERNAME = None
|
||||||
|
ANIDB_PASSWORD = None
|
||||||
|
ANIDB_USE_MYLIST = 0
|
||||||
|
ADBA_CONNECTION = None
|
||||||
|
ANIME_SPLIT_HOME = False
|
||||||
|
|
||||||
USE_SYNOINDEX = False
|
USE_SYNOINDEX = False
|
||||||
|
|
||||||
USE_NMJv2 = False
|
USE_NMJv2 = False
|
||||||
|
@ -390,6 +403,8 @@ COMING_EPS_LAYOUT = None
|
||||||
COMING_EPS_DISPLAY_PAUSED = None
|
COMING_EPS_DISPLAY_PAUSED = None
|
||||||
COMING_EPS_SORT = None
|
COMING_EPS_SORT = None
|
||||||
COMING_EPS_MISSED_RANGE = None
|
COMING_EPS_MISSED_RANGE = None
|
||||||
|
FUZZY_DATING = False
|
||||||
|
TRIM_ZERO = False
|
||||||
DATE_PRESET = None
|
DATE_PRESET = None
|
||||||
TIME_PRESET = None
|
TIME_PRESET = None
|
||||||
TIME_PRESET_W_SECONDS = None
|
TIME_PRESET_W_SECONDS = None
|
||||||
|
@ -456,11 +471,13 @@ def initialize(consoleLogging=True):
|
||||||
USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \
|
USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \
|
||||||
USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \
|
USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \
|
||||||
NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, CLEAR_CACHE, dailySearchScheduler, \
|
NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, CLEAR_CACHE, dailySearchScheduler, \
|
||||||
GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \
|
GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, FUZZY_DATING, TRIM_ZERO, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \
|
||||||
METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \
|
METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \
|
||||||
ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \
|
ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \
|
||||||
USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, PROXY_SETTING, \
|
USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, PROXY_SETTING, \
|
||||||
AUTOPOSTPROCESSER_FREQUENCY, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY
|
AUTOPOSTPROCESSER_FREQUENCY, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY, \
|
||||||
|
ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \
|
||||||
|
ANIME_SPLIT_HOME, maintenanceScheduler, SCENE_DEFAULT
|
||||||
|
|
||||||
if __INITIALIZED__:
|
if __INITIALIZED__:
|
||||||
return False
|
return False
|
||||||
|
@ -572,6 +589,8 @@ def initialize(consoleLogging=True):
|
||||||
FLATTEN_FOLDERS_DEFAULT = bool(check_setting_int(CFG, 'General', 'flatten_folders_default', 0))
|
FLATTEN_FOLDERS_DEFAULT = bool(check_setting_int(CFG, 'General', 'flatten_folders_default', 0))
|
||||||
INDEXER_DEFAULT = check_setting_int(CFG, 'General', 'indexer_default', 0)
|
INDEXER_DEFAULT = check_setting_int(CFG, 'General', 'indexer_default', 0)
|
||||||
INDEXER_TIMEOUT = check_setting_int(CFG, 'General', 'indexer_timeout', 10)
|
INDEXER_TIMEOUT = check_setting_int(CFG, 'General', 'indexer_timeout', 10)
|
||||||
|
ANIME_DEFAULT = bool(check_setting_int(CFG, 'General', 'anime_default', 0))
|
||||||
|
SCENE_DEFAULT = bool(check_setting_int(CFG, 'General', 'scene_default', 0))
|
||||||
|
|
||||||
PROVIDER_ORDER = check_setting_str(CFG, 'General', 'provider_order', '').split()
|
PROVIDER_ORDER = check_setting_str(CFG, 'General', 'provider_order', '').split()
|
||||||
|
|
||||||
|
@ -584,6 +603,7 @@ def initialize(consoleLogging=True):
|
||||||
NAMING_MULTI_EP = check_setting_int(CFG, 'General', 'naming_multi_ep', 1)
|
NAMING_MULTI_EP = check_setting_int(CFG, 'General', 'naming_multi_ep', 1)
|
||||||
NAMING_FORCE_FOLDERS = naming.check_force_season_folders()
|
NAMING_FORCE_FOLDERS = naming.check_force_season_folders()
|
||||||
NAMING_STRIP_YEAR = bool(check_setting_int(CFG, 'General', 'naming_strip_year', 0))
|
NAMING_STRIP_YEAR = bool(check_setting_int(CFG, 'General', 'naming_strip_year', 0))
|
||||||
|
NAMING_ANIME = check_setting_int(CFG, 'General', 'naming_anime', 3)
|
||||||
|
|
||||||
USE_NZBS = bool(check_setting_int(CFG, 'General', 'use_nzbs', 0))
|
USE_NZBS = bool(check_setting_int(CFG, 'General', 'use_nzbs', 0))
|
||||||
USE_TORRENTS = bool(check_setting_int(CFG, 'General', 'use_torrents', 1))
|
USE_TORRENTS = bool(check_setting_int(CFG, 'General', 'use_torrents', 1))
|
||||||
|
@ -842,6 +862,13 @@ def initialize(consoleLogging=True):
|
||||||
|
|
||||||
USE_LISTVIEW = bool(check_setting_int(CFG, 'General', 'use_listview', 0))
|
USE_LISTVIEW = bool(check_setting_int(CFG, 'General', 'use_listview', 0))
|
||||||
|
|
||||||
|
ANIMESUPPORT = False
|
||||||
|
USE_ANIDB = check_setting_str(CFG, 'ANIDB', 'use_anidb', '')
|
||||||
|
ANIDB_USERNAME = check_setting_str(CFG, 'ANIDB', 'anidb_username', '')
|
||||||
|
ANIDB_PASSWORD = check_setting_str(CFG, 'ANIDB', 'anidb_password', '')
|
||||||
|
ANIDB_USE_MYLIST = check_setting_str(CFG, 'ANIDB', 'anidb_use_mylist', '')
|
||||||
|
ANIME_SPLIT_HOME = bool(check_setting_int(CFG, 'ANIME', 'anime_split_home', 0))
|
||||||
|
|
||||||
METADATA_XBMC = check_setting_str(CFG, 'General', 'metadata_xbmc', '0|0|0|0|0|0|0|0|0|0')
|
METADATA_XBMC = check_setting_str(CFG, 'General', 'metadata_xbmc', '0|0|0|0|0|0|0|0|0|0')
|
||||||
METADATA_XBMC_12PLUS = check_setting_str(CFG, 'General', 'metadata_xbmc_12plus', '0|0|0|0|0|0|0|0|0|0')
|
METADATA_XBMC_12PLUS = check_setting_str(CFG, 'General', 'metadata_xbmc_12plus', '0|0|0|0|0|0|0|0|0|0')
|
||||||
METADATA_MEDIABROWSER = check_setting_str(CFG, 'General', 'metadata_mediabrowser', '0|0|0|0|0|0|0|0|0|0')
|
METADATA_MEDIABROWSER = check_setting_str(CFG, 'General', 'metadata_mediabrowser', '0|0|0|0|0|0|0|0|0|0')
|
||||||
|
@ -859,6 +886,8 @@ def initialize(consoleLogging=True):
|
||||||
COMING_EPS_DISPLAY_PAUSED = bool(check_setting_int(CFG, 'GUI', 'coming_eps_display_paused', 0))
|
COMING_EPS_DISPLAY_PAUSED = bool(check_setting_int(CFG, 'GUI', 'coming_eps_display_paused', 0))
|
||||||
COMING_EPS_SORT = check_setting_str(CFG, 'GUI', 'coming_eps_sort', 'date')
|
COMING_EPS_SORT = check_setting_str(CFG, 'GUI', 'coming_eps_sort', 'date')
|
||||||
COMING_EPS_MISSED_RANGE = check_setting_int(CFG, 'GUI', 'coming_eps_missed_range', 7)
|
COMING_EPS_MISSED_RANGE = check_setting_int(CFG, 'GUI', 'coming_eps_missed_range', 7)
|
||||||
|
FUZZY_DATING = bool(check_setting_int(CFG, 'GUI', 'fuzzy_dating', 0))
|
||||||
|
TRIM_ZERO = bool(check_setting_int(CFG, 'GUI', 'trim_zero', 0))
|
||||||
DATE_PRESET = check_setting_str(CFG, 'GUI', 'date_preset', '%x')
|
DATE_PRESET = check_setting_str(CFG, 'GUI', 'date_preset', '%x')
|
||||||
TIME_PRESET_W_SECONDS = check_setting_str(CFG, 'GUI', 'time_preset', '%I:%M:%S %p')
|
TIME_PRESET_W_SECONDS = check_setting_str(CFG, 'GUI', 'time_preset', '%I:%M:%S %p')
|
||||||
TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S", u"")
|
TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S", u"")
|
||||||
|
@ -883,7 +912,7 @@ def initialize(consoleLogging=True):
|
||||||
# initialize the cache database
|
# initialize the cache database
|
||||||
db.upgradeDatabase(db.DBConnection("cache.db"), cache_db.InitialSchema)
|
db.upgradeDatabase(db.DBConnection("cache.db"), cache_db.InitialSchema)
|
||||||
|
|
||||||
# initalize the failed downloads database
|
# initialize the failed downloads database
|
||||||
db.upgradeDatabase(db.DBConnection("failed.db"), failed_db.InitialSchema)
|
db.upgradeDatabase(db.DBConnection("failed.db"), failed_db.InitialSchema)
|
||||||
|
|
||||||
# fix up any db problems
|
# fix up any db problems
|
||||||
|
@ -912,10 +941,20 @@ def initialize(consoleLogging=True):
|
||||||
newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
|
newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
|
||||||
providerList = providers.makeProviderList()
|
providerList = providers.makeProviderList()
|
||||||
|
|
||||||
# the interval for this is stored inside the ShowUpdater class
|
maintenanceScheduler = scheduler.Scheduler(maintenance.Maintenance(),
|
||||||
showUpdaterInstance = showUpdater.ShowUpdater()
|
cycleTime=datetime.timedelta(hours=1),
|
||||||
showUpdateScheduler = scheduler.Scheduler(showUpdaterInstance,
|
threadName="MAINTENANCE",
|
||||||
cycleTime=showUpdaterInstance.updateInterval,
|
silent=True,
|
||||||
|
runImmediately=True)
|
||||||
|
|
||||||
|
dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(),
|
||||||
|
cycleTime=datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY),
|
||||||
|
threadName="DAILYSEARCHER",
|
||||||
|
silent=True,
|
||||||
|
runImmediately=DAILYSEARCH_STARTUP)
|
||||||
|
|
||||||
|
showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(),
|
||||||
|
cycleTime=showUpdater.ShowUpdater().updateInterval,
|
||||||
threadName="SHOWUPDATER",
|
threadName="SHOWUPDATER",
|
||||||
runImmediately=False)
|
runImmediately=False)
|
||||||
|
|
||||||
|
@ -935,29 +974,30 @@ def initialize(consoleLogging=True):
|
||||||
threadName="SEARCHQUEUE",
|
threadName="SEARCHQUEUE",
|
||||||
silent=True)
|
silent=True)
|
||||||
|
|
||||||
properFinderInstance = properFinder.ProperFinder()
|
properFinderScheduler = scheduler.Scheduler(properFinder.ProperFinder(),
|
||||||
properFinderScheduler = scheduler.Scheduler(properFinderInstance,
|
cycleTime=properFinder.ProperFinder().updateInterval,
|
||||||
cycleTime=properFinderInstance.updateInterval,
|
|
||||||
threadName="FINDPROPERS",
|
threadName="FINDPROPERS",
|
||||||
|
silent=False if DOWNLOAD_PROPERS else True,
|
||||||
runImmediately=True)
|
runImmediately=True)
|
||||||
if not DOWNLOAD_PROPERS:
|
|
||||||
properFinderScheduler.silent = True
|
|
||||||
|
|
||||||
autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
|
autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
|
||||||
cycleTime=datetime.timedelta(
|
cycleTime=datetime.timedelta(
|
||||||
minutes=AUTOPOSTPROCESSER_FREQUENCY),
|
minutes=AUTOPOSTPROCESSER_FREQUENCY),
|
||||||
threadName="POSTPROCESSER",
|
threadName="POSTPROCESSER",
|
||||||
|
silent=False if PROCESS_AUTOMATICALLY else True,
|
||||||
runImmediately=True)
|
runImmediately=True)
|
||||||
if not PROCESS_AUTOMATICALLY:
|
|
||||||
autoPostProcesserScheduler.silent = True
|
|
||||||
|
|
||||||
traktWatchListCheckerSchedular = scheduler.Scheduler(traktWatchListChecker.TraktChecker(),
|
traktWatchListCheckerSchedular = scheduler.Scheduler(traktWatchListChecker.TraktChecker(),
|
||||||
cycleTime=datetime.timedelta(hours=1),
|
cycleTime=datetime.timedelta(hours=1),
|
||||||
threadName="TRAKTWATCHLIST",
|
threadName="TRAKTWATCHLIST",
|
||||||
|
silent=False if USE_TRAKT else True,
|
||||||
runImmediately=True)
|
runImmediately=True)
|
||||||
|
|
||||||
if not USE_TRAKT:
|
subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
|
||||||
traktWatchListCheckerSchedular.silent = True
|
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
|
||||||
|
threadName="FINDSUBTITLES",
|
||||||
|
silent=False if USE_SUBTITLES else True,
|
||||||
|
runImmediately=True)
|
||||||
|
|
||||||
backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
|
backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
|
||||||
cycleTime=datetime.timedelta(
|
cycleTime=datetime.timedelta(
|
||||||
|
@ -966,23 +1006,6 @@ def initialize(consoleLogging=True):
|
||||||
silent=True,
|
silent=True,
|
||||||
runImmediately=BACKLOG_STARTUP)
|
runImmediately=BACKLOG_STARTUP)
|
||||||
|
|
||||||
dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(),
|
|
||||||
cycleTime=datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY),
|
|
||||||
threadName="DAILYSEARCHER",
|
|
||||||
silent=True,
|
|
||||||
runImmediately=DAILYSEARCH_STARTUP)
|
|
||||||
|
|
||||||
subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
|
|
||||||
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
|
|
||||||
threadName="FINDSUBTITLES",
|
|
||||||
runImmediately=True)
|
|
||||||
|
|
||||||
if not USE_SUBTITLES:
|
|
||||||
subtitlesFinderScheduler.silent = True
|
|
||||||
|
|
||||||
showList = []
|
|
||||||
loadingShowList = {}
|
|
||||||
|
|
||||||
# dynamically load provider settings
|
# dynamically load provider settings
|
||||||
for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if
|
for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if
|
||||||
curProvider.providerType == GenericProvider.TORRENT]:
|
curProvider.providerType == GenericProvider.TORRENT]:
|
||||||
|
@ -1078,23 +1101,29 @@ def initialize(consoleLogging=True):
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
showList = []
|
||||||
|
loadingShowList = {}
|
||||||
|
|
||||||
__INITIALIZED__ = True
|
__INITIALIZED__ = True
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def start():
|
def start():
|
||||||
global __INITIALIZED__, backlogSearchScheduler, \
|
global __INITIALIZED__, maintenanceScheduler, backlogSearchScheduler, \
|
||||||
showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
|
showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
|
||||||
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
|
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
|
||||||
subtitlesFinderScheduler, started, USE_SUBTITLES, \
|
subtitlesFinderScheduler, USE_SUBTITLES,traktWatchListCheckerSchedular, \
|
||||||
traktWatchListCheckerSchedular, dailySearchScheduler, started
|
dailySearchScheduler, started
|
||||||
|
|
||||||
with INIT_LOCK:
|
with INIT_LOCK:
|
||||||
|
|
||||||
if __INITIALIZED__:
|
if __INITIALIZED__:
|
||||||
|
|
||||||
# start the version checker
|
# start the maintenance scheduler
|
||||||
versionCheckScheduler.thread.start()
|
maintenanceScheduler.thread.start()
|
||||||
|
logger.log(u"Performing initial maintenance tasks, please wait ...")
|
||||||
|
while maintenanceScheduler.action.amActive:
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
# start the daily search scheduler
|
# start the daily search scheduler
|
||||||
dailySearchScheduler.thread.start()
|
dailySearchScheduler.thread.start()
|
||||||
|
@ -1102,18 +1131,21 @@ def start():
|
||||||
# start the backlog scheduler
|
# start the backlog scheduler
|
||||||
backlogSearchScheduler.thread.start()
|
backlogSearchScheduler.thread.start()
|
||||||
|
|
||||||
|
# start the show updater
|
||||||
|
showUpdateScheduler.thread.start()
|
||||||
|
|
||||||
|
# start the version checker
|
||||||
|
versionCheckScheduler.thread.start()
|
||||||
|
|
||||||
|
# start the queue checker
|
||||||
|
showQueueScheduler.thread.start()
|
||||||
|
|
||||||
# start the search queue checker
|
# start the search queue checker
|
||||||
searchQueueScheduler.thread.start()
|
searchQueueScheduler.thread.start()
|
||||||
|
|
||||||
# start the queue checker
|
# start the queue checker
|
||||||
properFinderScheduler.thread.start()
|
properFinderScheduler.thread.start()
|
||||||
|
|
||||||
# start the queue checker
|
|
||||||
showQueueScheduler.thread.start()
|
|
||||||
|
|
||||||
# start the show updater
|
|
||||||
showUpdateScheduler.thread.start()
|
|
||||||
|
|
||||||
# start the proper finder
|
# start the proper finder
|
||||||
autoPostProcesserScheduler.thread.start()
|
autoPostProcesserScheduler.thread.start()
|
||||||
|
|
||||||
|
@ -1128,10 +1160,11 @@ def start():
|
||||||
|
|
||||||
|
|
||||||
def halt():
|
def halt():
|
||||||
global __INITIALIZED__, backlogSearchScheduler, showUpdateScheduler, \
|
global __INITIALIZED__, maintenanceScheduler, backlogSearchScheduler, \
|
||||||
showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
|
showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
|
||||||
subtitlesFinderScheduler, dailySearchScheduler, started, \
|
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
|
||||||
traktWatchListCheckerSchedular
|
subtitlesFinderScheduler, traktWatchListCheckerSchedular, \
|
||||||
|
dailySearchScheduler, started
|
||||||
|
|
||||||
with INIT_LOCK:
|
with INIT_LOCK:
|
||||||
|
|
||||||
|
@ -1141,10 +1174,10 @@ def halt():
|
||||||
|
|
||||||
# abort all the threads
|
# abort all the threads
|
||||||
|
|
||||||
backlogSearchScheduler.abort = True
|
maintenanceScheduler.abort = True
|
||||||
logger.log(u"Waiting for the BACKLOG thread to exit")
|
logger.log(u"Waiting for the MAINTENANCE scheduler thread to exit")
|
||||||
try:
|
try:
|
||||||
backlogSearchScheduler.thread.join(10)
|
maintenanceScheduler.thread.join(10)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -1155,6 +1188,13 @@ def halt():
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
backlogSearchScheduler.abort = True
|
||||||
|
logger.log(u"Waiting for the BACKLOG thread to exit")
|
||||||
|
try:
|
||||||
|
backlogSearchScheduler.thread.join(10)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
showUpdateScheduler.abort = True
|
showUpdateScheduler.abort = True
|
||||||
logger.log(u"Waiting for the SHOWUPDATER thread to exit")
|
logger.log(u"Waiting for the SHOWUPDATER thread to exit")
|
||||||
try:
|
try:
|
||||||
|
@ -1211,6 +1251,15 @@ def halt():
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
if ADBA_CONNECTION:
|
||||||
|
ADBA_CONNECTION.logout()
|
||||||
|
# ADBA_CONNECTION.stop()
|
||||||
|
logger.log(u"Waiting for the ANIDB CONNECTION thread to exit")
|
||||||
|
try:
|
||||||
|
ADBA_CONNECTION.join(5)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
__INITIALIZED__ = False
|
__INITIALIZED__ = False
|
||||||
|
|
||||||
|
|
||||||
|
@ -1246,7 +1295,6 @@ def saveAll():
|
||||||
|
|
||||||
def saveAndShutdown(restart=False):
|
def saveAndShutdown(restart=False):
|
||||||
halt()
|
halt()
|
||||||
|
|
||||||
saveAll()
|
saveAll()
|
||||||
|
|
||||||
logger.log(u"Killing cherrypy")
|
logger.log(u"Killing cherrypy")
|
||||||
|
@ -1276,13 +1324,14 @@ def saveAndShutdown(restart=False):
|
||||||
popen_list += MY_ARGS
|
popen_list += MY_ARGS
|
||||||
if '--nolaunch' not in popen_list:
|
if '--nolaunch' not in popen_list:
|
||||||
popen_list += ['--nolaunch']
|
popen_list += ['--nolaunch']
|
||||||
|
|
||||||
logger.log(u"Restarting SickRage with " + str(popen_list))
|
logger.log(u"Restarting SickRage with " + str(popen_list))
|
||||||
logger.close()
|
logger.close()
|
||||||
|
|
||||||
subprocess.Popen(popen_list, cwd=os.getcwd())
|
subprocess.Popen(popen_list, cwd=os.getcwd())
|
||||||
|
|
||||||
os._exit(0)
|
os._exit(0)
|
||||||
|
|
||||||
|
|
||||||
def invoke_command(to_call, *args, **kwargs):
|
def invoke_command(to_call, *args, **kwargs):
|
||||||
global invoked_command
|
global invoked_command
|
||||||
|
|
||||||
|
@ -1362,6 +1411,8 @@ def save_config():
|
||||||
new_config['General']['flatten_folders_default'] = int(FLATTEN_FOLDERS_DEFAULT)
|
new_config['General']['flatten_folders_default'] = int(FLATTEN_FOLDERS_DEFAULT)
|
||||||
new_config['General']['indexer_default'] = int(INDEXER_DEFAULT)
|
new_config['General']['indexer_default'] = int(INDEXER_DEFAULT)
|
||||||
new_config['General']['indexer_timeout'] = int(INDEXER_TIMEOUT)
|
new_config['General']['indexer_timeout'] = int(INDEXER_TIMEOUT)
|
||||||
|
new_config['General']['anime_default'] = int(ANIME_DEFAULT)
|
||||||
|
new_config['General']['scene_default'] = int(SCENE_DEFAULT)
|
||||||
new_config['General']['provider_order'] = ' '.join(PROVIDER_ORDER)
|
new_config['General']['provider_order'] = ' '.join(PROVIDER_ORDER)
|
||||||
new_config['General']['version_notify'] = int(VERSION_NOTIFY)
|
new_config['General']['version_notify'] = int(VERSION_NOTIFY)
|
||||||
new_config['General']['auto_update'] = int(AUTO_UPDATE)
|
new_config['General']['auto_update'] = int(AUTO_UPDATE)
|
||||||
|
@ -1372,6 +1423,7 @@ def save_config():
|
||||||
new_config['General']['naming_custom_sports'] = int(NAMING_CUSTOM_SPORTS)
|
new_config['General']['naming_custom_sports'] = int(NAMING_CUSTOM_SPORTS)
|
||||||
new_config['General']['naming_sports_pattern'] = NAMING_SPORTS_PATTERN
|
new_config['General']['naming_sports_pattern'] = NAMING_SPORTS_PATTERN
|
||||||
new_config['General']['naming_multi_ep'] = int(NAMING_MULTI_EP)
|
new_config['General']['naming_multi_ep'] = int(NAMING_MULTI_EP)
|
||||||
|
new_config['General']['naming_anime'] = int(NAMING_ANIME)
|
||||||
new_config['General']['launch_browser'] = int(LAUNCH_BROWSER)
|
new_config['General']['launch_browser'] = int(LAUNCH_BROWSER)
|
||||||
new_config['General']['update_shows_on_start'] = int(UPDATE_SHOWS_ON_START)
|
new_config['General']['update_shows_on_start'] = int(UPDATE_SHOWS_ON_START)
|
||||||
new_config['General']['sort_article'] = int(SORT_ARTICLE)
|
new_config['General']['sort_article'] = int(SORT_ARTICLE)
|
||||||
|
@ -1693,6 +1745,8 @@ def save_config():
|
||||||
new_config['GUI']['coming_eps_display_paused'] = int(COMING_EPS_DISPLAY_PAUSED)
|
new_config['GUI']['coming_eps_display_paused'] = int(COMING_EPS_DISPLAY_PAUSED)
|
||||||
new_config['GUI']['coming_eps_sort'] = COMING_EPS_SORT
|
new_config['GUI']['coming_eps_sort'] = COMING_EPS_SORT
|
||||||
new_config['GUI']['coming_eps_missed_range'] = int(COMING_EPS_MISSED_RANGE)
|
new_config['GUI']['coming_eps_missed_range'] = int(COMING_EPS_MISSED_RANGE)
|
||||||
|
new_config['GUI']['fuzzy_dating'] = int(FUZZY_DATING)
|
||||||
|
new_config['GUI']['trim_zero'] = int(TRIM_ZERO)
|
||||||
new_config['GUI']['date_preset'] = DATE_PRESET
|
new_config['GUI']['date_preset'] = DATE_PRESET
|
||||||
new_config['GUI']['time_preset'] = TIME_PRESET_W_SECONDS
|
new_config['GUI']['time_preset'] = TIME_PRESET_W_SECONDS
|
||||||
new_config['GUI']['timezone_display'] = TIMEZONE_DISPLAY
|
new_config['GUI']['timezone_display'] = TIMEZONE_DISPLAY
|
||||||
|
@ -1711,6 +1765,15 @@ def save_config():
|
||||||
new_config['FailedDownloads']['use_failed_downloads'] = int(USE_FAILED_DOWNLOADS)
|
new_config['FailedDownloads']['use_failed_downloads'] = int(USE_FAILED_DOWNLOADS)
|
||||||
new_config['FailedDownloads']['delete_failed'] = int(DELETE_FAILED)
|
new_config['FailedDownloads']['delete_failed'] = int(DELETE_FAILED)
|
||||||
|
|
||||||
|
new_config['ANIDB'] = {}
|
||||||
|
new_config['ANIDB']['use_anidb'] = USE_ANIDB
|
||||||
|
new_config['ANIDB']['anidb_username'] = ANIDB_USERNAME
|
||||||
|
new_config['ANIDB']['anidb_password'] = helpers.encrypt(ANIDB_PASSWORD, ENCRYPTION_VERSION)
|
||||||
|
new_config['ANIDB']['anidb_use_mylist'] = ANIDB_USE_MYLIST
|
||||||
|
|
||||||
|
new_config['ANIME'] = {}
|
||||||
|
new_config['ANIME']['anime_split_home'] = int(ANIME_SPLIT_HOME)
|
||||||
|
|
||||||
new_config.write()
|
new_config.write()
|
||||||
|
|
||||||
|
|
||||||
|
|
210
sickbeard/blackandwhitelist.py
Normal file
|
@ -0,0 +1,210 @@
|
||||||
|
# Author: Dennis Lutter <lad1337@gmail.com>
|
||||||
|
# URL: http://code.google.com/p/sickbeard/
|
||||||
|
#
|
||||||
|
# This file is part of Sick Beard.
|
||||||
|
#
|
||||||
|
# Sick Beard is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# Sick Beard is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from sickbeard import db, logger
|
||||||
|
|
||||||
|
class BlackAndWhiteList(object):
|
||||||
|
_tableBlack = "blacklist"
|
||||||
|
_tableWhite = "whitelist"
|
||||||
|
blackList = []
|
||||||
|
whiteList = []
|
||||||
|
blackDict = {}
|
||||||
|
whiteDict = {}
|
||||||
|
|
||||||
|
last_black_valid_result = None
|
||||||
|
last_white_valid_result = None
|
||||||
|
|
||||||
|
def __init__(self, show_id):
|
||||||
|
if not show_id:
|
||||||
|
raise BlackWhitelistNoShowIDException()
|
||||||
|
self.show_id = show_id
|
||||||
|
|
||||||
|
self.myDB = db.DBConnection()
|
||||||
|
self.refresh()
|
||||||
|
|
||||||
|
def refresh(self):
|
||||||
|
logger.log(u"Building black and white list for " + str(self.show_id), logger.DEBUG)
|
||||||
|
|
||||||
|
(self.blackList, self.blackDict) = self.load_blacklist()
|
||||||
|
(self.whiteList, self.whiteDict) = self.load_whitelist()
|
||||||
|
|
||||||
|
def load_blacklist(self):
|
||||||
|
return self._load_list(self._tableBlack)
|
||||||
|
|
||||||
|
def load_whitelist(self):
|
||||||
|
return self._load_list(self._tableWhite)
|
||||||
|
|
||||||
|
def get_black_keywords_for(self, range):
|
||||||
|
if range in self.blackDict:
|
||||||
|
return self.blackDict[range]
|
||||||
|
else:
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_white_keywords_for(self, range):
|
||||||
|
if range in self.whiteDict:
|
||||||
|
return self.whiteDict[range]
|
||||||
|
else:
|
||||||
|
return []
|
||||||
|
|
||||||
|
def set_black_keywords(self, range, values):
|
||||||
|
self._del_all_black_keywords()
|
||||||
|
self._add_keywords(self._tableBlack, range, values)
|
||||||
|
|
||||||
|
def set_white_keywords(self, range, values):
|
||||||
|
self._del_all_white_keywords()
|
||||||
|
self._add_keywords(self._tableWhite, range, values)
|
||||||
|
|
||||||
|
def set_black_keywords_for(self, range, values):
|
||||||
|
self._del_all_black_keywords_for(range)
|
||||||
|
self._add_keywords(self._tableBlack, range, values)
|
||||||
|
|
||||||
|
def set_white_keywords_for(self, range, values):
|
||||||
|
self._del_all_white_keywords_for(range)
|
||||||
|
self._add_keywords(self._tableWhite, range, values)
|
||||||
|
|
||||||
|
def add_black_keyword(self, range, value):
|
||||||
|
self._add_keywords(self._tableBlack, range, [value])
|
||||||
|
|
||||||
|
def add_white_keyword(self, range, value):
|
||||||
|
self._add_keywords(self._tableWhite, range, [value])
|
||||||
|
|
||||||
|
def get_last_result_msg(self):
|
||||||
|
blackResult = whiteResult = "Untested"
|
||||||
|
if self.last_black_valid_result == True:
|
||||||
|
blackResult = "Valid"
|
||||||
|
elif self.last_black_valid_result == False:
|
||||||
|
blackResult = "Invalid"
|
||||||
|
|
||||||
|
if self.last_white_valid_result == True:
|
||||||
|
whiteResult = "Valid"
|
||||||
|
elif self.last_white_valid_result == False:
|
||||||
|
whiteResult = "Invalid"
|
||||||
|
|
||||||
|
return "Blacklist: " + blackResult + ", Whitelist: " + whiteResult
|
||||||
|
|
||||||
|
def _add_keywords(self, table, range, values):
|
||||||
|
for value in values:
|
||||||
|
self.myDB.action("INSERT INTO " + table + " (show_id, range , keyword) VALUES (?,?,?)", [self.show_id, range, value])
|
||||||
|
self.refresh()
|
||||||
|
|
||||||
|
def _del_all_black_keywords(self):
|
||||||
|
self._del_all_keywords(self._tableBlack)
|
||||||
|
|
||||||
|
def _del_all_white_keywords(self):
|
||||||
|
self._del_all_keywords(self._tableWhite)
|
||||||
|
|
||||||
|
def _del_all_black_keywords_for(self, range):
|
||||||
|
self._del_all_keywords_for(self._tableBlack, range)
|
||||||
|
|
||||||
|
def _del_all_white_keywords_for(self, range):
|
||||||
|
self._del_all_keywords_for(self._tableWhite, range)
|
||||||
|
|
||||||
|
def _del_all_keywords(self, table):
|
||||||
|
logger.log(u"Deleting all " + table + " keywords for " + str(self.show_id), logger.DEBUG)
|
||||||
|
self.myDB.action("DELETE FROM " + table + " WHERE show_id = ?", [self.show_id])
|
||||||
|
self.refresh()
|
||||||
|
|
||||||
|
def _del_all_keywords_for(self, table, range):
|
||||||
|
logger.log(u"Deleting all " + range + " " + table + " keywords for " + str(self.show_id), logger.DEBUG)
|
||||||
|
self.myDB.action("DELETE FROM " + table + " WHERE show_id = ? and range = ?", [self.show_id, range])
|
||||||
|
self.refresh()
|
||||||
|
|
||||||
|
def _load_list(self, table):
|
||||||
|
sqlResults = self.myDB.select("SELECT range,keyword FROM " + table + " WHERE show_id = ? ", [self.show_id])
|
||||||
|
if not sqlResults or not len(sqlResults):
|
||||||
|
return ([], {})
|
||||||
|
|
||||||
|
list, dict = self._build_keyword_dict(sqlResults)
|
||||||
|
logger.log("BWL: " + str(self.show_id) + " loaded keywords from " + table + ": " + str(dict), logger.DEBUG)
|
||||||
|
return list, dict
|
||||||
|
|
||||||
|
def _build_keyword_dict(self, sql_result):
|
||||||
|
list = []
|
||||||
|
dict = {}
|
||||||
|
for row in sql_result:
|
||||||
|
list.append(row["keyword"])
|
||||||
|
if row["range"] in dict:
|
||||||
|
dict[row["range"]].append(row["keyword"])
|
||||||
|
else:
|
||||||
|
dict[row["range"]] = [row["keyword"]]
|
||||||
|
|
||||||
|
return (list, dict)
|
||||||
|
|
||||||
|
def is_valid_for_black(self, haystack):
|
||||||
|
logger.log(u"BWL: " + str(self.show_id) + " is valid black", logger.DEBUG)
|
||||||
|
result = self._is_valid_for(self.blackDict, False, haystack)
|
||||||
|
self.last_black_valid_result = result
|
||||||
|
return result
|
||||||
|
|
||||||
|
def is_valid_for_white(self, haystack):
|
||||||
|
logger.log(u"BWL: " + str(self.show_id) + " is valid white", logger.DEBUG)
|
||||||
|
result = self._is_valid_for(self.whiteDict, True, haystack)
|
||||||
|
self.last_white_valid_result = result
|
||||||
|
return result
|
||||||
|
|
||||||
|
def is_valid(self, haystack):
|
||||||
|
return self.is_valid_for_black(haystack) and self.is_valid_for_white(haystack)
|
||||||
|
|
||||||
|
def _is_valid_for(self, list, mood, haystack):
|
||||||
|
if not len(list):
|
||||||
|
return True
|
||||||
|
|
||||||
|
results = []
|
||||||
|
for range in list:
|
||||||
|
for keyword in list[range]:
|
||||||
|
string = None
|
||||||
|
if range == "global":
|
||||||
|
string = haystack.name
|
||||||
|
elif range in haystack.__dict__:
|
||||||
|
string = haystack.__dict__[range]
|
||||||
|
elif not range in haystack.__dict__:
|
||||||
|
results.append((not mood))
|
||||||
|
else:
|
||||||
|
results.append(False)
|
||||||
|
|
||||||
|
if string:
|
||||||
|
results.append(self._is_keyword_in_string(string, keyword) == mood)
|
||||||
|
|
||||||
|
# black: mood = False
|
||||||
|
# white: mood = True
|
||||||
|
if mood in results:
|
||||||
|
return mood
|
||||||
|
else:
|
||||||
|
return (not mood)
|
||||||
|
|
||||||
|
def _is_keyword_in_string(self, fromPost, fromBWList):
|
||||||
|
"""
|
||||||
|
will return true if fromBWList is found in fromPost
|
||||||
|
for now a basic find is used
|
||||||
|
"""
|
||||||
|
fromPost = fromPost.lower()
|
||||||
|
fromBWList = fromBWList.lower()
|
||||||
|
logger.log(u"BWL: " + str(self.show_id) + " comparing fromPost: " + fromPost + " vs fromBWlist: " + fromBWList, logger.DEBUG)
|
||||||
|
return (fromPost.find(fromBWList) >= 0)
|
||||||
|
|
||||||
|
class BlackWhiteKeyword(object):
|
||||||
|
range = ""
|
||||||
|
value = []
|
||||||
|
|
||||||
|
def __init__(self, range, values):
|
||||||
|
self.range = range # "global" or a parser group
|
||||||
|
self.value = values # a list of values may contain only one item (still a list)
|
||||||
|
|
||||||
|
|
||||||
|
class BlackWhitelistNoShowIDException(Exception):
|
||||||
|
"No show_id was given"
|
|
@ -151,6 +151,8 @@ class AllShowsListUI:
|
||||||
|
|
||||||
def selectSeries(self, allSeries):
|
def selectSeries(self, allSeries):
|
||||||
searchResults = []
|
searchResults = []
|
||||||
|
seriesnames = []
|
||||||
|
|
||||||
# get all available shows
|
# get all available shows
|
||||||
if allSeries:
|
if allSeries:
|
||||||
if 'searchterm' in self.config:
|
if 'searchterm' in self.config:
|
||||||
|
@ -159,14 +161,22 @@ class AllShowsListUI:
|
||||||
for curShow in allSeries:
|
for curShow in allSeries:
|
||||||
if curShow in searchResults:
|
if curShow in searchResults:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if 'seriesname' in curShow:
|
if 'seriesname' in curShow:
|
||||||
if searchterm.lower() in curShow['seriesname'].lower():
|
seriesnames.append(str(curShow['seriesname']))
|
||||||
|
if 'aliasnames' in curShow:
|
||||||
|
seriesnames.extend(str(curShow['aliasnames']).split('|'))
|
||||||
|
|
||||||
|
for name in seriesnames:
|
||||||
|
if searchterm.lower() in name.lower():
|
||||||
if 'firstaired' not in curShow:
|
if 'firstaired' not in curShow:
|
||||||
curShow['firstaired'] = str(datetime.date.fromordinal(1))
|
curShow['firstaired'] = str(datetime.date.fromordinal(1))
|
||||||
curShow['firstaired'] = re.sub("([-]0{2}){1,}", "", curShow['firstaired'])
|
curShow['firstaired'] = re.sub("([-]0{2}){1,}", "", curShow['firstaired'])
|
||||||
fixDate = parser.parse(curShow['firstaired'], fuzzy=True).date()
|
fixDate = parser.parse(curShow['firstaired'], fuzzy=True).date()
|
||||||
curShow['firstaired'] = fixDate.strftime("%Y-%m-%d")
|
curShow['firstaired'] = fixDate.strftime("%Y-%m-%d")
|
||||||
searchResults.append(curShow)
|
|
||||||
|
if curShow not in searchResults:
|
||||||
|
searchResults += [curShow]
|
||||||
|
|
||||||
return searchResults
|
return searchResults
|
||||||
|
|
||||||
|
|
|
@ -150,9 +150,9 @@ class Quality:
|
||||||
return (sorted(anyQualities), sorted(bestQualities))
|
return (sorted(anyQualities), sorted(bestQualities))
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def nameQuality(name):
|
def nameQuality(name, anime=False):
|
||||||
"""
|
"""
|
||||||
Return The quality from an episode File renamed by Sickbeard
|
Return The quality from an episode File renamed by SickRage
|
||||||
If no quality is achieved it will try sceneQuality regex
|
If no quality is achieved it will try sceneQuality regex
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
@ -162,8 +162,9 @@ class Quality:
|
||||||
for x in sorted(Quality.qualityStrings.keys(), reverse=True):
|
for x in sorted(Quality.qualityStrings.keys(), reverse=True):
|
||||||
if x == Quality.UNKNOWN:
|
if x == Quality.UNKNOWN:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if x == Quality.NONE: #Last chance
|
if x == Quality.NONE: #Last chance
|
||||||
return Quality.sceneQuality(name)
|
return Quality.sceneQuality(name, anime)
|
||||||
|
|
||||||
regex = '\W' + Quality.qualityStrings[x].replace(' ', '\W') + '\W'
|
regex = '\W' + Quality.qualityStrings[x].replace(' ', '\W') + '\W'
|
||||||
regex_match = re.search(regex, name, re.I)
|
regex_match = re.search(regex, name, re.I)
|
||||||
|
@ -171,7 +172,7 @@ class Quality:
|
||||||
return x
|
return x
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def sceneQuality(name):
|
def sceneQuality(name, anime=False):
|
||||||
"""
|
"""
|
||||||
Return The quality from the scene episode File
|
Return The quality from the scene episode File
|
||||||
"""
|
"""
|
||||||
|
@ -180,6 +181,26 @@ class Quality:
|
||||||
|
|
||||||
checkName = lambda list, func: func([re.search(x, name, re.I) for x in list])
|
checkName = lambda list, func: func([re.search(x, name, re.I) for x in list])
|
||||||
|
|
||||||
|
if anime:
|
||||||
|
blueRayOptions = checkName(["bluray", "blu-ray"], any)
|
||||||
|
hdOptions = checkName(["720p", "1280x720", "960x720"], any)
|
||||||
|
fullHD = checkName(["1080p", "1920x1080"], any)
|
||||||
|
|
||||||
|
if checkName(["360p", "XviD"], any):
|
||||||
|
return Quality.SDTV
|
||||||
|
elif checkName(["dvd", "480p", "848x480"], any):
|
||||||
|
return Quality.SDDVD
|
||||||
|
elif hdOptions and not blueRayOptions and not fullHD:
|
||||||
|
return Quality.HDTV
|
||||||
|
elif hdOptions and not blueRayOptions and not fullHD:
|
||||||
|
return Quality.HDWEBDL
|
||||||
|
elif blueRayOptions and hdOptions and not fullHD:
|
||||||
|
return Quality.HDBLURAY
|
||||||
|
elif fullHD:
|
||||||
|
return Quality.FULLHDBLURAY
|
||||||
|
else:
|
||||||
|
return Quality.UNKNOWN
|
||||||
|
|
||||||
if checkName(["(pdtv|hdtv|dsr|tvrip).(xvid|x264|h.?264)"], all) and not checkName(["(720|1080)[pi]"], all):
|
if checkName(["(pdtv|hdtv|dsr|tvrip).(xvid|x264|h.?264)"], all) and not checkName(["(720|1080)[pi]"], all):
|
||||||
return Quality.SDTV
|
return Quality.SDTV
|
||||||
elif checkName(["web.dl|webrip", "xvid|x264|h.?264"], all) and not checkName(["(720|1080)[pi]"], all):
|
elif checkName(["web.dl|webrip", "xvid|x264|h.?264"], all) and not checkName(["(720|1080)[pi]"], all):
|
||||||
|
|
|
@ -586,7 +586,7 @@ class ConfigMigrator():
|
||||||
logger.ERROR)
|
logger.ERROR)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if name == 'SickRage Index':
|
if name == 'Sick Beard Index':
|
||||||
key = '0'
|
key = '0'
|
||||||
|
|
||||||
if name == 'NZBs.org':
|
if name == 'NZBs.org':
|
||||||
|
|
|
@ -17,10 +17,9 @@
|
||||||
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
|
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
from __future__ import with_statement
|
from __future__ import with_statement
|
||||||
import time
|
|
||||||
import datetime
|
import datetime
|
||||||
import threading
|
import threading
|
||||||
import traceback
|
|
||||||
|
|
||||||
import sickbeard
|
import sickbeard
|
||||||
from sickbeard import logger
|
from sickbeard import logger
|
||||||
|
@ -28,9 +27,6 @@ from sickbeard import db
|
||||||
from sickbeard import common
|
from sickbeard import common
|
||||||
from sickbeard import helpers
|
from sickbeard import helpers
|
||||||
from sickbeard import exceptions
|
from sickbeard import exceptions
|
||||||
from sickbeard.exceptions import ex
|
|
||||||
from sickbeard.search import pickBestResult, snatchEpisode
|
|
||||||
from sickbeard import generic_queue
|
|
||||||
|
|
||||||
class DailySearcher():
|
class DailySearcher():
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
@ -42,9 +38,6 @@ class DailySearcher():
|
||||||
|
|
||||||
self.amActive = True
|
self.amActive = True
|
||||||
|
|
||||||
# remove names from cache that link back to active shows that we watch
|
|
||||||
sickbeard.name_cache.syncNameCache()
|
|
||||||
|
|
||||||
logger.log(u"Searching for coming episodes and 1 weeks worth of previously WANTED episodes ...")
|
logger.log(u"Searching for coming episodes and 1 weeks worth of previously WANTED episodes ...")
|
||||||
|
|
||||||
fromDate = datetime.date.today() - datetime.timedelta(weeks=1)
|
fromDate = datetime.date.today() - datetime.timedelta(weeks=1)
|
||||||
|
@ -54,6 +47,7 @@ class DailySearcher():
|
||||||
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status in (?,?) AND airdate >= ? AND airdate <= ?",
|
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status in (?,?) AND airdate >= ? AND airdate <= ?",
|
||||||
[common.UNAIRED, common.WANTED, fromDate.toordinal(), curDate.toordinal()])
|
[common.UNAIRED, common.WANTED, fromDate.toordinal(), curDate.toordinal()])
|
||||||
|
|
||||||
|
sql_l = []
|
||||||
todaysEps = {}
|
todaysEps = {}
|
||||||
for sqlEp in sqlResults:
|
for sqlEp in sqlResults:
|
||||||
|
|
||||||
|
@ -61,12 +55,12 @@ class DailySearcher():
|
||||||
show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
|
show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
|
||||||
except exceptions.MultipleShowObjectsException:
|
except exceptions.MultipleShowObjectsException:
|
||||||
logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
|
logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
|
||||||
return None
|
break
|
||||||
|
|
||||||
if show == None:
|
if not show:
|
||||||
logger.log(u"Unable to find the show with ID " + str(
|
logger.log(u"Unable to find the show with ID " + str(
|
||||||
sqlEp["showid"]) + " in your show list! DB value was " + str(sqlEp), logger.ERROR)
|
sqlEp["showid"]) + " in your show list! DB value was " + str(sqlEp), logger.ERROR)
|
||||||
return None
|
break
|
||||||
|
|
||||||
ep = show.getEpisode(sqlEp["season"], sqlEp["episode"])
|
ep = show.getEpisode(sqlEp["season"], sqlEp["episode"])
|
||||||
with ep.lock:
|
with ep.lock:
|
||||||
|
@ -77,18 +71,28 @@ class DailySearcher():
|
||||||
logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
|
logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
|
||||||
ep.status = common.WANTED
|
ep.status = common.WANTED
|
||||||
|
|
||||||
ep.saveToDB()
|
|
||||||
|
|
||||||
if ep.status == common.WANTED:
|
if ep.status == common.WANTED:
|
||||||
if show not in todaysEps:
|
if show not in todaysEps:
|
||||||
todaysEps[show] = [ep]
|
todaysEps[show] = [ep]
|
||||||
else:
|
else:
|
||||||
todaysEps[show].append(ep)
|
todaysEps[show].append(ep)
|
||||||
|
|
||||||
|
sql_l.append(ep.get_sql())
|
||||||
|
|
||||||
|
if sql_l:
|
||||||
|
myDB = db.DBConnection()
|
||||||
|
myDB.mass_action(sql_l)
|
||||||
|
|
||||||
if len(todaysEps):
|
if len(todaysEps):
|
||||||
for show in todaysEps:
|
for show in todaysEps:
|
||||||
segment = todaysEps[show]
|
segment = todaysEps[show]
|
||||||
|
|
||||||
|
# remove show from name cache if marked invalid
|
||||||
|
sickbeard.name_cache.clearCache(show)
|
||||||
|
|
||||||
dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem(show, segment)
|
dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem(show, segment)
|
||||||
sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item) #@UndefinedVariable
|
sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)
|
||||||
else:
|
else:
|
||||||
logger.log(u"Could not find any needed episodes to search for ...")
|
logger.log(u"Could not find any needed episodes to search for ...")
|
||||||
|
|
||||||
|
self.amActive = False
|
|
@ -44,15 +44,14 @@ class AddSceneExceptions(InitialSchema):
|
||||||
|
|
||||||
def execute(self):
|
def execute(self):
|
||||||
self.connection.action(
|
self.connection.action(
|
||||||
"CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, tvdb_id INTEGER KEY, show_name TEXT)")
|
"CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT)")
|
||||||
|
|
||||||
|
|
||||||
class AddSceneNameCache(AddSceneExceptions):
|
class AddSceneNameCache(AddSceneExceptions):
|
||||||
def test(self):
|
def test(self):
|
||||||
return self.hasTable("scene_names")
|
return self.hasTable("scene_names")
|
||||||
|
|
||||||
def execute(self):
|
def execute(self):
|
||||||
self.connection.action("CREATE TABLE scene_names (tvdb_id INTEGER, name TEXT)")
|
self.connection.action("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT)")
|
||||||
|
|
||||||
|
|
||||||
class AddNetworkTimezones(AddSceneNameCache):
|
class AddNetworkTimezones(AddSceneNameCache):
|
||||||
|
@ -62,69 +61,31 @@ class AddNetworkTimezones(AddSceneNameCache):
|
||||||
def execute(self):
|
def execute(self):
|
||||||
self.connection.action("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)")
|
self.connection.action("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)")
|
||||||
|
|
||||||
|
class AddLastSearch(AddNetworkTimezones):
|
||||||
class AddXemNumbering(AddNetworkTimezones):
|
|
||||||
def test(self):
|
|
||||||
return self.hasTable("xem_numbering")
|
|
||||||
|
|
||||||
def execute(self):
|
|
||||||
self.connection.action(
|
|
||||||
"CREATE TABLE xem_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER)")
|
|
||||||
|
|
||||||
class AddXemRefresh(AddXemNumbering):
|
|
||||||
def test(self):
|
|
||||||
return self.hasTable("xem_refresh")
|
|
||||||
|
|
||||||
def execute(self):
|
|
||||||
self.connection.action(
|
|
||||||
"CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)")
|
|
||||||
|
|
||||||
class ConvertSceneExceptionsToIndexerID(AddXemRefresh):
|
|
||||||
def test(self):
|
|
||||||
return self.hasColumn("scene_exceptions", "indexer_id")
|
|
||||||
|
|
||||||
def execute(self):
|
|
||||||
if self.hasTable("tmp_scene_exceptions"):
|
|
||||||
self.connection.action("DROP TABLE tmp_scene_exceptions")
|
|
||||||
|
|
||||||
self.connection.action("ALTER TABLE scene_exceptions RENAME TO tmp_scene_exceptions")
|
|
||||||
self.connection.action(
|
|
||||||
"CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT)")
|
|
||||||
self.connection.action(
|
|
||||||
"INSERT INTO scene_exceptions(exception_id, indexer_id, show_name) SELECT exception_id, tvdb_id, show_name FROM tmp_scene_exceptions")
|
|
||||||
self.connection.action("DROP TABLE tmp_scene_exceptions")
|
|
||||||
|
|
||||||
|
|
||||||
class ConvertSceneNamesToIndexerID(ConvertSceneExceptionsToIndexerID):
|
|
||||||
def test(self):
|
|
||||||
return self.hasColumn("scene_names", "indexer_id")
|
|
||||||
|
|
||||||
def execute(self):
|
|
||||||
if self.hasTable("tmp_scene_names"):
|
|
||||||
self.connection.action("DROP TABLE tmp_scene_names")
|
|
||||||
|
|
||||||
self.connection.action("ALTER TABLE scene_names RENAME TO tmp_scene_names")
|
|
||||||
self.connection.action("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT)")
|
|
||||||
self.connection.action("INSERT INTO scene_names(indexer_id, name) SELECT tvdb_id, name FROM tmp_scene_names")
|
|
||||||
self.connection.action("DROP TABLE tmp_scene_names")
|
|
||||||
|
|
||||||
class ConvertIndexerToInteger(ConvertSceneNamesToIndexerID):
|
|
||||||
def execute(self):
|
|
||||||
ql = []
|
|
||||||
ql.append(["UPDATE xem_numbering SET indexer = ? WHERE LOWER(indexer) = ?", ["1", "tvdb"]])
|
|
||||||
ql.append(["UPDATE xem_numbering SET indexer = ? WHERE LOWER(indexer) = ?", ["2", "tvrage"]])
|
|
||||||
ql.append(["UPDATE xem_refresh SET indexer = ? WHERE LOWER(indexer) = ?", ["1", "tvdb"]])
|
|
||||||
ql.append(["UPDATE xem_refresh SET indexer = ? WHERE LOWER(indexer) = ?", ["2", "tvrage"]])
|
|
||||||
self.connection.mass_action(ql)
|
|
||||||
|
|
||||||
class RemoveKeysFromXemNumbering(ConvertIndexerToInteger):
|
|
||||||
def execute(self):
|
|
||||||
self.connection.action("ALTER TABLE xem_numbering DROP UNIQUE (indexer, indexer_id, season, episode)")
|
|
||||||
self.connection.action("ALTER TABLE xem_numbering DROP PRIMARY KEY")
|
|
||||||
|
|
||||||
class AddLastSearch(RemoveKeysFromXemNumbering):
|
|
||||||
def test(self):
|
def test(self):
|
||||||
return self.hasTable("lastSearch")
|
return self.hasTable("lastSearch")
|
||||||
|
|
||||||
def execute(self):
|
def execute(self):
|
||||||
self.connection.action("CREATE TABLE lastSearch (provider TEXT, time NUMERIC)")
|
self.connection.action("CREATE TABLE lastSearch (provider TEXT, time NUMERIC)")
|
||||||
|
|
||||||
|
class AddSceneExceptionsSeasons(AddSceneNameCache):
|
||||||
|
def test(self):
|
||||||
|
return self.hasColumn("scene_exceptions", "season")
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
self.addColumn("scene_exceptions", "season", "NUMERIC", -1)
|
||||||
|
|
||||||
|
class AddSceneExceptionsCustom(AddSceneExceptionsSeasons):
|
||||||
|
def test(self):
|
||||||
|
return self.hasColumn("scene_exceptions", "custom")
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
self.addColumn("scene_exceptions", "custom", "NUMERIC", 0)
|
||||||
|
|
||||||
|
class AddSceneExceptionsRefresh(AddSceneExceptionsCustom):
|
||||||
|
def test(self):
|
||||||
|
return self.hasTable("scene_exceptions_refresh")
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
self.connection.action(
|
||||||
|
"CREATE TABLE scene_exceptions_refresh (list TEXT, last_refreshed INTEGER)")
|
|
@ -27,8 +27,7 @@ from sickbeard import encodingKludge as ek
|
||||||
from sickbeard.name_parser.parser import NameParser, InvalidNameException
|
from sickbeard.name_parser.parser import NameParser, InvalidNameException
|
||||||
|
|
||||||
MIN_DB_VERSION = 9 # oldest db version we support migrating from
|
MIN_DB_VERSION = 9 # oldest db version we support migrating from
|
||||||
MAX_DB_VERSION = 31
|
MAX_DB_VERSION = 38
|
||||||
|
|
||||||
|
|
||||||
class MainSanityCheck(db.DBSanityCheck):
|
class MainSanityCheck(db.DBSanityCheck):
|
||||||
def check(self):
|
def check(self):
|
||||||
|
@ -714,7 +713,7 @@ class AddSceneNumbering(AddArchiveFirstMatchOption):
|
||||||
self.connection.action("DROP TABLE scene_numbering")
|
self.connection.action("DROP TABLE scene_numbering")
|
||||||
|
|
||||||
self.connection.action(
|
self.connection.action(
|
||||||
"CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
|
"CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode, scene_season, scene_episode))")
|
||||||
|
|
||||||
self.incDBVersion()
|
self.incDBVersion()
|
||||||
|
|
||||||
|
@ -794,10 +793,97 @@ class AddSceneNumberingToTvEpisodes(AddSportsOption):
|
||||||
backupDatabase(31)
|
backupDatabase(31)
|
||||||
|
|
||||||
logger.log(u"Adding column scene_season and scene_episode to tvepisodes")
|
logger.log(u"Adding column scene_season and scene_episode to tvepisodes")
|
||||||
if not self.hasColumn("tv_episodes", "scene_season"):
|
self.addColumn("tv_episodes", "scene_season", "NUMERIC", "NULL")
|
||||||
self.addColumn("tv_episodes", "scene_season", "NUMERIC", -1)
|
self.addColumn("tv_episodes", "scene_episode", "NUMERIC", "NULL")
|
||||||
|
|
||||||
if not self.hasColumn("tv_episodes", "scene_episode"):
|
|
||||||
self.addColumn("tv_episodes", "scene_episode", "NUMERIC", -1)
|
|
||||||
|
|
||||||
self.incDBVersion()
|
self.incDBVersion()
|
||||||
|
|
||||||
|
class AddAnimeTVShow(AddSceneNumberingToTvEpisodes):
|
||||||
|
def test(self):
|
||||||
|
return self.checkDBVersion() >= 32
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
backupDatabase(32)
|
||||||
|
|
||||||
|
logger.log(u"Adding column anime to tv_episodes")
|
||||||
|
self.addColumn("tv_shows", "anime", "NUMERIC", "0")
|
||||||
|
|
||||||
|
self.incDBVersion()
|
||||||
|
|
||||||
|
class AddAbsoluteNumbering(AddAnimeTVShow):
|
||||||
|
def test(self):
|
||||||
|
return self.checkDBVersion() >= 33
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
backupDatabase(33)
|
||||||
|
|
||||||
|
logger.log(u"Adding column absolute_number to tv_episodes")
|
||||||
|
self.addColumn("tv_episodes", "absolute_number", "NUMERIC", "0")
|
||||||
|
|
||||||
|
self.incDBVersion()
|
||||||
|
|
||||||
|
class AddSceneAbsoluteNumbering(AddAbsoluteNumbering):
|
||||||
|
def test(self):
|
||||||
|
return self.checkDBVersion() >= 34
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
backupDatabase(34)
|
||||||
|
|
||||||
|
logger.log(u"Adding column absolute_number and scene_absolute_number to scene_numbering")
|
||||||
|
self.addColumn("scene_numbering", "absolute_number", "NUMERIC", "0")
|
||||||
|
self.addColumn("scene_numbering", "scene_absolute_number", "NUMERIC", "0")
|
||||||
|
|
||||||
|
self.incDBVersion()
|
||||||
|
|
||||||
|
class AddAnimeBlacklistWhitelist(AddSceneAbsoluteNumbering):
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
return self.checkDBVersion() >= 35
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
backupDatabase(35)
|
||||||
|
|
||||||
|
ql = []
|
||||||
|
ql.append(["CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT)"])
|
||||||
|
ql.append(["CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT)"])
|
||||||
|
self.connection.mass_action(ql)
|
||||||
|
|
||||||
|
self.incDBVersion()
|
||||||
|
|
||||||
|
class AddSceneAbsoluteNumbering(AddAnimeBlacklistWhitelist):
|
||||||
|
def test(self):
|
||||||
|
return self.checkDBVersion() >= 36
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
backupDatabase(36)
|
||||||
|
|
||||||
|
logger.log(u"Adding column scene_absolute_number to tv_episodes")
|
||||||
|
self.addColumn("tv_episodes", "scene_absolute_number", "NUMERIC", "0")
|
||||||
|
|
||||||
|
self.incDBVersion()
|
||||||
|
|
||||||
|
class AddXemRefresh(AddSceneAbsoluteNumbering):
|
||||||
|
def test(self):
|
||||||
|
return self.checkDBVersion() >= 37
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
backupDatabase(37)
|
||||||
|
|
||||||
|
logger.log(u"Creating table xem_refresh")
|
||||||
|
self.connection.action(
|
||||||
|
"CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)")
|
||||||
|
|
||||||
|
self.incDBVersion()
|
||||||
|
|
||||||
|
class AddSceneToTvShows(AddXemRefresh):
|
||||||
|
def test(self):
|
||||||
|
return self.checkDBVersion() >= 38
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
backupDatabase(38)
|
||||||
|
|
||||||
|
logger.log(u"Adding column scene to tv_shows")
|
||||||
|
self.addColumn("tv_shows", "scene", "NUMERIC", "0")
|
||||||
|
|
||||||
|
self.incDBVersion()
|
||||||
|
|
||||||
|
|
|
@ -30,6 +30,7 @@ from sickbeard import encodingKludge as ek
|
||||||
from sickbeard import logger
|
from sickbeard import logger
|
||||||
from sickbeard.exceptions import ex
|
from sickbeard.exceptions import ex
|
||||||
from sickbeard.common import cpu_presets
|
from sickbeard.common import cpu_presets
|
||||||
|
from itertools import ifilter
|
||||||
|
|
||||||
db_lock = threading.Lock()
|
db_lock = threading.Lock()
|
||||||
|
|
||||||
|
@ -113,6 +114,8 @@ class DBConnection:
|
||||||
def mass_action(self, querylist, logTransaction=False):
|
def mass_action(self, querylist, logTransaction=False):
|
||||||
|
|
||||||
with db_lock:
|
with db_lock:
|
||||||
|
# remove None types
|
||||||
|
querylist = [i for i in querylist if i != None]
|
||||||
|
|
||||||
if querylist == None:
|
if querylist == None:
|
||||||
return
|
return
|
||||||
|
@ -120,8 +123,13 @@ class DBConnection:
|
||||||
sqlResult = []
|
sqlResult = []
|
||||||
attempt = 0
|
attempt = 0
|
||||||
|
|
||||||
|
# Transaction
|
||||||
|
self.connection.isolation_level = None
|
||||||
|
self.connection.execute('BEGIN')
|
||||||
|
|
||||||
while attempt < 5:
|
while attempt < 5:
|
||||||
try:
|
try:
|
||||||
|
|
||||||
for qu in querylist:
|
for qu in querylist:
|
||||||
if len(qu) == 1:
|
if len(qu) == 1:
|
||||||
if logTransaction:
|
if logTransaction:
|
||||||
|
@ -131,7 +139,9 @@ class DBConnection:
|
||||||
if logTransaction:
|
if logTransaction:
|
||||||
logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG)
|
logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG)
|
||||||
sqlResult.append(self.connection.execute(qu[0], qu[1]))
|
sqlResult.append(self.connection.execute(qu[0], qu[1]))
|
||||||
self.connection.commit()
|
|
||||||
|
self.connection.execute('COMMIT')
|
||||||
|
|
||||||
logger.log(u"Transaction with " + str(len(querylist)) + u" queries executed", logger.DEBUG)
|
logger.log(u"Transaction with " + str(len(querylist)) + u" queries executed", logger.DEBUG)
|
||||||
return sqlResult
|
return sqlResult
|
||||||
except sqlite3.OperationalError, e:
|
except sqlite3.OperationalError, e:
|
||||||
|
@ -230,6 +240,9 @@ class DBConnection:
|
||||||
d[col[0]] = row[idx]
|
d[col[0]] = row[idx]
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
def hasTable(self, tableName):
|
||||||
|
return len(self.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName, )).fetchall()) > 0
|
||||||
|
|
||||||
|
|
||||||
def sanityCheckDatabase(connection, sanity_check):
|
def sanityCheckDatabase(connection, sanity_check):
|
||||||
sanity_check(connection).check()
|
sanity_check(connection).check()
|
||||||
|
|
|
@ -18,6 +18,7 @@
|
||||||
|
|
||||||
from sickbeard.encodingKludge import fixStupidEncodings
|
from sickbeard.encodingKludge import fixStupidEncodings
|
||||||
|
|
||||||
|
|
||||||
def ex(e):
|
def ex(e):
|
||||||
"""
|
"""
|
||||||
Returns a unicode string from the exception text if it exists.
|
Returns a unicode string from the exception text if it exists.
|
||||||
|
@ -137,3 +138,7 @@ class FailedHistoryMultiSnatchException(SickBeardException):
|
||||||
|
|
||||||
class FailedHistoryNotFoundException(SickBeardException):
|
class FailedHistoryNotFoundException(SickBeardException):
|
||||||
"The release was not found in the failed download history tracker"
|
"The release was not found in the failed download history tracker"
|
||||||
|
|
||||||
|
|
||||||
|
class EpisodeNotFoundByAbsoluteNumberException(SickBeardException):
|
||||||
|
"The show wasn't found in the DB while looking at Absolute Numbers"
|
||||||
|
|
|
@ -52,9 +52,9 @@ class FailedProcessor(object):
|
||||||
self._log(u"Warning: unable to find a valid release name.", logger.WARNING)
|
self._log(u"Warning: unable to find a valid release name.", logger.WARNING)
|
||||||
raise exceptions.FailedProcessingFailed()
|
raise exceptions.FailedProcessingFailed()
|
||||||
|
|
||||||
parser = NameParser(False)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
parser = NameParser(False, convert=True)
|
||||||
parsed = parser.parse(releaseName)
|
parsed = parser.parse(releaseName)
|
||||||
except InvalidNameException:
|
except InvalidNameException:
|
||||||
self._log(u"Error: release name is invalid: " + releaseName, logger.WARNING)
|
self._log(u"Error: release name is invalid: " + releaseName, logger.WARNING)
|
||||||
|
@ -69,22 +69,19 @@ class FailedProcessor(object):
|
||||||
logger.log(u" - " + str(parsed.air_date), logger.DEBUG)
|
logger.log(u" - " + str(parsed.air_date), logger.DEBUG)
|
||||||
logger.log(u" - " + str(parsed.sports_event_date), logger.DEBUG)
|
logger.log(u" - " + str(parsed.sports_event_date), logger.DEBUG)
|
||||||
|
|
||||||
self._show_obj = sickbeard.helpers.get_show_by_name(parsed.series_name)
|
if parsed.show is None:
|
||||||
if self._show_obj is None:
|
|
||||||
self._log(
|
self._log(
|
||||||
u"Could not create show object. Either the show hasn't been added to SickRage, or it's still loading (if SB was restarted recently)",
|
u"Could not create show object. Either the show hasn't been added to SickRage, or it's still loading (if SB was restarted recently)",
|
||||||
logger.WARNING)
|
logger.WARNING)
|
||||||
raise exceptions.FailedProcessingFailed()
|
raise exceptions.FailedProcessingFailed()
|
||||||
|
|
||||||
# scene -> indexer numbering
|
|
||||||
parsed = parsed.convert(self._show_obj)
|
|
||||||
|
|
||||||
segment = {parsed.season_number:[]}
|
segment = {parsed.season_number:[]}
|
||||||
|
|
||||||
for episode in parsed.episode_numbers:
|
for episode in parsed.episode_numbers:
|
||||||
epObj = self._show_obj.getEpisode(parsed.season_number, episode)
|
epObj = parsed.show.getEpisode(parsed.season_number, episode)
|
||||||
segment[parsed.season_number].append(epObj)
|
segment[parsed.season_number].append(epObj)
|
||||||
|
|
||||||
cur_failed_queue_item = search_queue.FailedQueueItem(self._show_obj, segment)
|
cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, segment)
|
||||||
sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)
|
sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
|
@ -129,7 +129,6 @@ def revertEpisode(epObj):
|
||||||
logger.log(u"WARNING: Episode not found in history. Setting it back to WANTED",
|
logger.log(u"WARNING: Episode not found in history. Setting it back to WANTED",
|
||||||
logger.WARNING)
|
logger.WARNING)
|
||||||
epObj.status = WANTED
|
epObj.status = WANTED
|
||||||
|
|
||||||
epObj.saveToDB()
|
epObj.saveToDB()
|
||||||
|
|
||||||
except EpisodeNotFoundException, e:
|
except EpisodeNotFoundException, e:
|
||||||
|
|
|
@ -41,6 +41,8 @@ class GenericQueue(object):
|
||||||
|
|
||||||
self.currentItem = None
|
self.currentItem = None
|
||||||
|
|
||||||
|
self.lock = threading.Lock()
|
||||||
|
|
||||||
def pause(self):
|
def pause(self):
|
||||||
logger.log(u"Pausing queue")
|
logger.log(u"Pausing queue")
|
||||||
self.min_priority = 999999999999
|
self.min_priority = 999999999999
|
||||||
|
@ -83,6 +85,7 @@ class GenericQueue(object):
|
||||||
else:
|
else:
|
||||||
return y.priority-x.priority
|
return y.priority-x.priority
|
||||||
|
|
||||||
|
with self.lock:
|
||||||
self.queue.sort(cmp=sorter)
|
self.queue.sort(cmp=sorter)
|
||||||
|
|
||||||
queueItem = self.queue[0]
|
queueItem = self.queue[0]
|
||||||
|
|
|
@ -50,13 +50,14 @@ except ImportError:
|
||||||
from xml.dom.minidom import Node
|
from xml.dom.minidom import Node
|
||||||
|
|
||||||
import sickbeard
|
import sickbeard
|
||||||
from sickbeard.exceptions import MultipleShowObjectsException, ex
|
from sickbeard.exceptions import MultipleShowObjectsException, EpisodeNotFoundByAbsoluteNumberException, ex
|
||||||
from sickbeard import logger, classes
|
from sickbeard import logger, classes
|
||||||
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XML_NSMAP
|
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XML_NSMAP
|
||||||
from sickbeard import db
|
from sickbeard import db
|
||||||
from sickbeard import encodingKludge as ek
|
from sickbeard import encodingKludge as ek
|
||||||
from sickbeard import notifiers
|
from sickbeard import notifiers
|
||||||
from lib import subliminal
|
from lib import subliminal
|
||||||
|
from lib import adba
|
||||||
|
|
||||||
urllib._urlopener = classes.SickBeardURLopener()
|
urllib._urlopener = classes.SickBeardURLopener()
|
||||||
|
|
||||||
|
@ -187,7 +188,6 @@ def getURL(url, post_data=None, headers=None, params=None, timeout=30, json=Fals
|
||||||
|
|
||||||
it = iter(req_headers)
|
it = iter(req_headers)
|
||||||
|
|
||||||
|
|
||||||
if use_proxy and sickbeard.PROXY_SETTING:
|
if use_proxy and sickbeard.PROXY_SETTING:
|
||||||
logger.log("Using proxy for url: " + url, logger.DEBUG)
|
logger.log("Using proxy for url: " + url, logger.DEBUG)
|
||||||
proxies = {
|
proxies = {
|
||||||
|
@ -317,15 +317,17 @@ def searchDBForShow(regShowName, log=False):
|
||||||
continue
|
continue
|
||||||
elif len(sqlResults) > 1:
|
elif len(sqlResults) > 1:
|
||||||
if log:
|
if log:
|
||||||
logger.log(u"Multiple results for " + showName + " in the DB, unable to match show name", logger.DEBUG)
|
logger.log(u"Multiple results for " + showName + " in the DB, unable to match show name",
|
||||||
|
logger.DEBUG)
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
return (int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
|
return (int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
|
def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
|
||||||
showNames = list(set([re.sub('[. -]', ' ', regShowName), regShowName]))
|
showNames = [re.sub('[. -]', ' ', regShowName)]
|
||||||
|
|
||||||
# Query Indexers for each search term and build the list of results
|
# Query Indexers for each search term and build the list of results
|
||||||
for i in sickbeard.indexerApi().indexers if not indexer else int(indexer or []):
|
for i in sickbeard.indexerApi().indexers if not indexer else int(indexer or []):
|
||||||
|
@ -337,11 +339,16 @@ def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
|
||||||
for name in showNames:
|
for name in showNames:
|
||||||
logger.log(u"Trying to find " + name + " on " + sickbeard.indexerApi(i).name, logger.DEBUG)
|
logger.log(u"Trying to find " + name + " on " + sickbeard.indexerApi(i).name, logger.DEBUG)
|
||||||
|
|
||||||
|
try:
|
||||||
search = t[indexer_id] if indexer_id else t[name]
|
search = t[indexer_id] if indexer_id else t[name]
|
||||||
|
except:
|
||||||
|
continue
|
||||||
|
|
||||||
try:
|
try:
|
||||||
seriesname = search.seriesname
|
seriesname = search.seriesname
|
||||||
except:
|
except:
|
||||||
seriesname = None
|
seriesname = None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
series_id = search.id
|
series_id = search.id
|
||||||
except:
|
except:
|
||||||
|
@ -360,6 +367,7 @@ def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
|
||||||
|
|
||||||
return (None, None, None)
|
return (None, None, None)
|
||||||
|
|
||||||
|
|
||||||
def sizeof_fmt(num):
|
def sizeof_fmt(num):
|
||||||
'''
|
'''
|
||||||
>>> sizeof_fmt(2)
|
>>> sizeof_fmt(2)
|
||||||
|
@ -662,6 +670,57 @@ def fixSetGroupID(childPath):
|
||||||
childPath, parentGID), logger.ERROR)
|
childPath, parentGID), logger.ERROR)
|
||||||
|
|
||||||
|
|
||||||
|
def is_anime_in_show_list():
|
||||||
|
for show in sickbeard.showList:
|
||||||
|
if show.is_anime:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def update_anime_support():
|
||||||
|
sickbeard.ANIMESUPPORT = is_anime_in_show_list()
|
||||||
|
|
||||||
|
def get_absolute_number_from_season_and_episode(show, season, episode):
|
||||||
|
myDB = db.DBConnection()
|
||||||
|
sql = "SELECT * FROM tv_episodes WHERE showid = ? and season = ? and episode = ?"
|
||||||
|
sqlResults = myDB.select(sql, [show.indexerid, season, episode])
|
||||||
|
|
||||||
|
if len(sqlResults) == 1:
|
||||||
|
absolute_number = int(sqlResults[0]["absolute_number"])
|
||||||
|
logger.log(
|
||||||
|
"Found absolute_number:" + str(absolute_number) + " by " + str(season) + "x" + str(episode), logger.DEBUG)
|
||||||
|
|
||||||
|
return absolute_number
|
||||||
|
else:
|
||||||
|
logger.log(
|
||||||
|
"No entries for absolute number in show: " + show.name + " found using " + str(season) + "x" + str(episode),logger.DEBUG)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_all_episodes_from_absolute_number(show, indexer_id, absolute_numbers):
|
||||||
|
if len(absolute_numbers) == 0:
|
||||||
|
raise EpisodeNotFoundByAbsoluteNumberException()
|
||||||
|
|
||||||
|
episodes = []
|
||||||
|
season = None
|
||||||
|
|
||||||
|
if not show and not indexer_id:
|
||||||
|
return (season, episodes)
|
||||||
|
|
||||||
|
if not show and indexer_id:
|
||||||
|
show = findCertainShow(sickbeard.showList, indexer_id)
|
||||||
|
|
||||||
|
for absolute_number in absolute_numbers:
|
||||||
|
ep = show.getEpisode(None, None, absolute_number=absolute_number)
|
||||||
|
if ep:
|
||||||
|
episodes.append(ep.episode)
|
||||||
|
else:
|
||||||
|
raise EpisodeNotFoundByAbsoluteNumberException()
|
||||||
|
season = ep.season # this will always take the last found seson so eps that cross the season border are not handeled well
|
||||||
|
|
||||||
|
return (season, episodes)
|
||||||
|
|
||||||
|
|
||||||
def sanitizeSceneName(name, ezrss=False):
|
def sanitizeSceneName(name, ezrss=False):
|
||||||
"""
|
"""
|
||||||
Takes a show name and returns the "scenified" version of it.
|
Takes a show name and returns the "scenified" version of it.
|
||||||
|
@ -947,11 +1006,13 @@ def full_sanitizeSceneName(name):
|
||||||
return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()
|
return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()
|
||||||
|
|
||||||
|
|
||||||
def _check_against_names(name, show):
|
def _check_against_names(nameInQuestion, show, season=-1):
|
||||||
nameInQuestion = full_sanitizeSceneName(name)
|
|
||||||
|
|
||||||
|
showNames = []
|
||||||
|
if season in [-1, 1]:
|
||||||
showNames = [show.name]
|
showNames = [show.name]
|
||||||
showNames.extend(sickbeard.scene_exceptions.get_scene_exceptions(show.indexerid))
|
|
||||||
|
showNames.extend(sickbeard.scene_exceptions.get_scene_exceptions(show.indexerid, season=season))
|
||||||
|
|
||||||
for showName in showNames:
|
for showName in showNames:
|
||||||
nameFromList = full_sanitizeSceneName(showName)
|
nameFromList = full_sanitizeSceneName(showName)
|
||||||
|
@ -961,16 +1022,28 @@ def _check_against_names(name, show):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def get_show_by_name(name):
|
def get_show_by_name(name, useIndexer=False):
|
||||||
|
name = full_sanitizeSceneName(name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# check cache for show
|
||||||
showObj = sickbeard.name_cache.retrieveShowFromCache(name)
|
showObj = sickbeard.name_cache.retrieveShowFromCache(name)
|
||||||
if not showObj:
|
|
||||||
showNames = list(set(sickbeard.show_name_helpers.sceneToNormalShowNames(name)))
|
|
||||||
for showName in showNames if sickbeard.showList else []:
|
|
||||||
sceneResults = [x for x in sickbeard.showList if _check_against_names(showName, x)]
|
|
||||||
showObj = sceneResults[0] if len(sceneResults) else None
|
|
||||||
if showObj:
|
if showObj:
|
||||||
break
|
return showObj
|
||||||
|
if not showObj and sickbeard.showList:
|
||||||
|
if name in sickbeard.scene_exceptions.exceptionIndexerCache:
|
||||||
|
showObj = findCertainShow(sickbeard.showList, int(sickbeard.scene_exceptions.exceptionIndexerCache[name]))
|
||||||
|
|
||||||
|
if useIndexer and not showObj:
|
||||||
|
(sn, idx, id) = searchIndexerForShowID(name, ui=classes.ShowListUI)
|
||||||
|
if id:
|
||||||
|
showObj = findCertainShow(sickbeard.showList, int(id))
|
||||||
|
|
||||||
|
# add show to cache
|
||||||
|
if showObj:
|
||||||
|
sickbeard.name_cache.addNameToCache(name, showObj.indexerid)
|
||||||
|
except:
|
||||||
|
showObj = None
|
||||||
|
|
||||||
return showObj
|
return showObj
|
||||||
|
|
||||||
|
@ -986,6 +1059,7 @@ def is_hidden_folder(folder):
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def real_path(path):
|
def real_path(path):
|
||||||
"""
|
"""
|
||||||
Returns: the canonicalized absolute pathname. The resulting path will have no symbolic link, '/./' or '/../' components.
|
Returns: the canonicalized absolute pathname. The resulting path will have no symbolic link, '/./' or '/../' components.
|
||||||
|
@ -1009,3 +1083,28 @@ def validateShow(show, season=None, episode=None):
|
||||||
return t[show.indexerid][season][episode]
|
return t[show.indexerid][season][episode]
|
||||||
except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
|
except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def set_up_anidb_connection():
|
||||||
|
if not sickbeard.USE_ANIDB:
|
||||||
|
logger.log(u"Usage of anidb disabled. Skiping", logger.DEBUG)
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not sickbeard.ANIDB_USERNAME and not sickbeard.ANIDB_PASSWORD:
|
||||||
|
logger.log(u"anidb username and/or password are not set. Aborting anidb lookup.", logger.DEBUG)
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not sickbeard.ADBA_CONNECTION:
|
||||||
|
anidb_logger = lambda x: logger.log("ANIDB: " + str(x), logger.DEBUG)
|
||||||
|
sickbeard.ADBA_CONNECTION = adba.Connection(keepAlive=True, log=anidb_logger)
|
||||||
|
|
||||||
|
if not sickbeard.ADBA_CONNECTION.authed():
|
||||||
|
try:
|
||||||
|
sickbeard.ADBA_CONNECTION.auth(sickbeard.ANIDB_USERNAME, sickbeard.ANIDB_PASSWORD)
|
||||||
|
except Exception, e:
|
||||||
|
logger.log(u"exception msg: " + str(e))
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return sickbeard.ADBA_CONNECTION.authed()
|
|
@ -21,7 +21,7 @@ indexerConfig[INDEXER_TVDB] = {
|
||||||
'id': INDEXER_TVDB,
|
'id': INDEXER_TVDB,
|
||||||
'name': 'theTVDB',
|
'name': 'theTVDB',
|
||||||
'module': Tvdb,
|
'module': Tvdb,
|
||||||
'api_params': {'apikey': '9DAF49C96CBF8DAC',
|
'api_params': {'apikey': 'F9C450E78D99172E',
|
||||||
'language': 'en',
|
'language': 'en',
|
||||||
'useZip': True
|
'useZip': True
|
||||||
},
|
},
|
||||||
|
|
47
sickbeard/maintenance.py
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
# Author: Nic Wolfe <nic@wolfeden.ca>
|
||||||
|
# URL: http://code.google.com/p/sickbeard/
|
||||||
|
#
|
||||||
|
# This file is part of SickRage.
|
||||||
|
#
|
||||||
|
# SickRage is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# SickRage is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import with_statement
|
||||||
|
import threading
|
||||||
|
import sickbeard
|
||||||
|
|
||||||
|
from sickbeard import scene_exceptions
|
||||||
|
from sickbeard import failed_history
|
||||||
|
from sickbeard import network_timezones
|
||||||
|
|
||||||
|
|
||||||
|
class Maintenance():
|
||||||
|
def __init__(self):
|
||||||
|
self.lock = threading.Lock()
|
||||||
|
|
||||||
|
self.amActive = False
|
||||||
|
|
||||||
|
def run(self, force=False):
|
||||||
|
self.amActive = True
|
||||||
|
|
||||||
|
# refresh scene exceptions too
|
||||||
|
scene_exceptions.retrieve_exceptions()
|
||||||
|
|
||||||
|
# refresh network timezones
|
||||||
|
network_timezones.update_network_dict()
|
||||||
|
|
||||||
|
# sure, why not?
|
||||||
|
if sickbeard.USE_FAILED_DOWNLOADS:
|
||||||
|
failed_history.trimHistory()
|
||||||
|
|
||||||
|
self.amActive = False
|
|
@ -782,14 +782,10 @@ class GenericMetadata():
|
||||||
|
|
||||||
# Try and get posters and fanart from TMDB
|
# Try and get posters and fanart from TMDB
|
||||||
if image_url is None:
|
if image_url is None:
|
||||||
for show_name in set(allPossibleShowNames(show_obj)):
|
|
||||||
if image_type in ('poster', 'poster_thumb'):
|
if image_type in ('poster', 'poster_thumb'):
|
||||||
image_url = self._retrieve_show_images_from_tmdb(show_name, poster=True)
|
image_url = self._retrieve_show_images_from_tmdb(show_obj, poster=True)
|
||||||
elif image_type == 'fanart':
|
elif image_type == 'fanart':
|
||||||
image_url = self._retrieve_show_images_from_tmdb(show_name, backdrop=True)
|
image_url = self._retrieve_show_images_from_tmdb(show_obj, backdrop=True)
|
||||||
|
|
||||||
if image_url:
|
|
||||||
break
|
|
||||||
|
|
||||||
if image_url:
|
if image_url:
|
||||||
image_data = metadata_helpers.getShowImage(image_url, which)
|
image_data = metadata_helpers.getShowImage(image_url, which)
|
||||||
|
@ -964,11 +960,9 @@ class GenericMetadata():
|
||||||
|
|
||||||
return (indexer_id, name, indexer)
|
return (indexer_id, name, indexer)
|
||||||
|
|
||||||
def _retrieve_show_images_from_tmdb(self, name, id=None, backdrop=False, poster=False):
|
def _retrieve_show_images_from_tmdb(self, show, backdrop=False, poster=False):
|
||||||
tmdb = TMDB(sickbeard.TMDB_API_KEY)
|
|
||||||
result = None
|
|
||||||
|
|
||||||
# get TMDB configuration info
|
# get TMDB configuration info
|
||||||
|
tmdb = TMDB(sickbeard.TMDB_API_KEY)
|
||||||
config = tmdb.Configuration()
|
config = tmdb.Configuration()
|
||||||
response = config.info()
|
response = config.info()
|
||||||
base_url = response['images']['base_url']
|
base_url = response['images']['base_url']
|
||||||
|
@ -980,38 +974,15 @@ class GenericMetadata():
|
||||||
max_size = max(sizes, key=size_str_to_int)
|
max_size = max(sizes, key=size_str_to_int)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if id is None:
|
|
||||||
search = tmdb.Search()
|
search = tmdb.Search()
|
||||||
response = search.collection({'query': name})
|
for show_name in set(allPossibleShowNames(show)):
|
||||||
id = response['results'][0]['id']
|
for result in search.collection({'query': show_name})['results'] + search.tv({'query': show_name})['results']:
|
||||||
|
if backdrop and result['backdrop_path']:
|
||||||
|
return "{0}{1}{2}".format(base_url, max_size, result['backdrop_path'])
|
||||||
|
elif poster and result['poster_path']:
|
||||||
|
return "{0}{1}{2}".format(base_url, max_size, result['poster_path'])
|
||||||
|
|
||||||
result = tmdb.Collections(id)
|
except Exception, e:
|
||||||
except:
|
|
||||||
try:
|
|
||||||
if id is None:
|
|
||||||
search = tmdb.Search()
|
|
||||||
response = search.tv({'query': name})
|
|
||||||
id = response['results'][0]['id']
|
|
||||||
|
|
||||||
result = tmdb.TV(id)
|
|
||||||
except:
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if result is None:
|
logger.log(u"Could not find any posters or background for " + show.name, logger.DEBUG)
|
||||||
return None
|
|
||||||
|
|
||||||
images = result.images()
|
|
||||||
if len(images) > 0:
|
|
||||||
# get backdrop urls
|
|
||||||
if backdrop:
|
|
||||||
rel_path = images['backdrops'][0]['file_path']
|
|
||||||
url = "{0}{1}{2}".format(base_url, max_size, rel_path)
|
|
||||||
return url
|
|
||||||
|
|
||||||
# get poster urls
|
|
||||||
if poster:
|
|
||||||
rel_path = images['posters'][0]['file_path']
|
|
||||||
url = "{0}{1}{2}".format(base_url, max_size, rel_path)
|
|
||||||
return url
|
|
||||||
|
|
||||||
return None
|
|
|
@ -479,9 +479,7 @@ class MediaBrowserMetadata(generic.GenericMetadata):
|
||||||
if not ep_obj.relatedEps:
|
if not ep_obj.relatedEps:
|
||||||
Rating = etree.SubElement(episode, "Rating")
|
Rating = etree.SubElement(episode, "Rating")
|
||||||
if getattr(myEp, 'rating', None) is not None:
|
if getattr(myEp, 'rating', None) is not None:
|
||||||
rating_text = myEp['rating']
|
Rating.text = myEp['rating']
|
||||||
if rating_text != None:
|
|
||||||
Rating.text = rating_text
|
|
||||||
|
|
||||||
IMDB_ID = etree.SubElement(episode, "IMDB_ID")
|
IMDB_ID = etree.SubElement(episode, "IMDB_ID")
|
||||||
IMDB = etree.SubElement(episode, "IMDB")
|
IMDB = etree.SubElement(episode, "IMDB")
|
||||||
|
|
|
@ -45,10 +45,13 @@ def retrieveNameFromCache(name):
|
||||||
Returns: the TVDB and TVRAGE id that resulted from the cache lookup or None if the show wasn't found in the cache
|
Returns: the TVDB and TVRAGE id that resulted from the cache lookup or None if the show wasn't found in the cache
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
cache_results = None
|
||||||
|
|
||||||
# standardize the name we're using to account for small differences in providers
|
# standardize the name we're using to account for small differences in providers
|
||||||
name = sanitizeSceneName(name)
|
name = sanitizeSceneName(name)
|
||||||
|
|
||||||
cacheDB = db.DBConnection('cache.db')
|
cacheDB = db.DBConnection('cache.db')
|
||||||
|
if cacheDB.hasTable('scene_names'):
|
||||||
cache_results = cacheDB.select("SELECT * FROM scene_names WHERE name = ?", [name])
|
cache_results = cacheDB.select("SELECT * FROM scene_names WHERE name = ?", [name])
|
||||||
|
|
||||||
if cache_results:
|
if cache_results:
|
||||||
|
@ -59,20 +62,14 @@ def retrieveShowFromCache(name):
|
||||||
if indexerid:
|
if indexerid:
|
||||||
return sickbeard.helpers.findCertainShow(sickbeard.showList, int(indexerid))
|
return sickbeard.helpers.findCertainShow(sickbeard.showList, int(indexerid))
|
||||||
|
|
||||||
def syncNameCache():
|
def clearCache(show=None, season=-1, indexer_id=0):
|
||||||
cacheDB = db.DBConnection('cache.db')
|
|
||||||
|
|
||||||
for curShow in sickbeard.showList:
|
|
||||||
for show_name in set(sickbeard.show_name_helpers.allPossibleShowNames(curShow)):
|
|
||||||
sqlResult = cacheDB.action("DELETE FROM scene_names WHERE name = ? and indexer_id = ?", [show_name, 0])
|
|
||||||
if sqlResult.rowcount > 0:
|
|
||||||
logger.log(u"Removing invalid record for [" + show_name + "] from cache ...")
|
|
||||||
break
|
|
||||||
|
|
||||||
def clearCache():
|
|
||||||
"""
|
"""
|
||||||
Deletes all "unknown" entries from the cache (names with indexer_id of 0).
|
Deletes all "unknown" entries from the cache (names with indexer_id of 0).
|
||||||
"""
|
"""
|
||||||
cacheDB = db.DBConnection('cache.db')
|
cacheDB = db.DBConnection('cache.db')
|
||||||
cacheDB.action("DELETE FROM scene_names WHERE indexer_id = ?", [0])
|
if show:
|
||||||
|
showNames = sickbeard.show_name_helpers.allPossibleShowNames(show, season=season)
|
||||||
|
for showName in showNames:
|
||||||
|
cacheDB.action("DELETE FROM scene_names WHERE name = ? and indexer_id = ?", [showName, indexer_id])
|
||||||
|
else:
|
||||||
|
cacheDB.action("DELETE FROM scene_names WHERE indexer_id = ?", [indexer_id])
|