Add TVRage network name standardization
This commit is contained in:
parent d90af06c2c
commit a046729436

5 changed files with 80 additions and 2 deletions
@@ -3,6 +3,7 @@
 * Add failed database to unit tests tear down function
 * Fix purging of database files in tear down function during unit tests
 * Add ability to auto focus Search Show box on Home page and control this option via General Config/Interface
+* Add TVRage network name standardization

 [develop changelog]
 * Fix typo for commit "ShowData handler" i.e. SHA-1:3eec217
@@ -22,6 +22,7 @@ import datetime as dt
 import requests
 import requests.exceptions
 import xmltodict
+from sickbeard.network_timezones import standardize_network

 try:
     import xml.etree.cElementTree as ElementTree
@@ -446,7 +447,9 @@ class TVRage:
             if value:
                 if isinstance(value, dict):
                     if key == 'network':
-                        value = value['#text']
+                        network = value['#text']
+                        country = value['@country']
+                        value = standardize_network(network, country)
                     if key == 'genre':
                         value = value['genre']
                         if not value:
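For context on the new branch: xmltodict (imported in this module) maps an XML element that carries attributes to a dict, with attributes under '@'-prefixed keys and the element text under '#text'. A minimal sketch of the shape the handler now expects, with made-up values:

    import xmltodict

    doc = xmltodict.parse('<network country="US">CBS</network>')
    value = doc['network']       # {'@country': 'US', '#text': 'CBS'}
    network = value['#text']     # 'CBS'
    country = value['@country']  # 'US'
    # standardize_network(network, country) then maps the TVRage name/country
    # pair to the TheTVDB spelling, falling back to the raw name if unknown.

So where the old code only kept the element text, the new code also reads the country attribute and passes both through the conversion lookup.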
@@ -88,4 +88,15 @@ class AddSceneExceptionsRefresh(AddSceneExceptionsCustom):

     def execute(self):
         self.connection.action(
             "CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)")
+
+
+class AddNetworkConversions(AddSceneExceptionsRefresh):
+    def test(self):
+        return self.hasTable('network_conversions')
+
+    def execute(self):
+        self.connection.action('CREATE TABLE network_conversions (tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT,'
+                               ' tvrage_country TEXT)')
+
+        self.connection.action('CREATE INDEX tvrage_idx on network_conversions (tvrage_network, tvrage_country)')
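This migration gives cache.db a lookup table keyed by the TheTVDB network name, plus an index on the (tvrage_network, tvrage_country) pair that standardize_network() filters on. A stand-alone sketch of the resulting schema, using the plain sqlite3 module instead of SickGear's db wrapper (the sample row is made up):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE network_conversions (tvdb_network TEXT PRIMARY KEY,'
                 ' tvrage_network TEXT, tvrage_country TEXT)')
    conn.execute('CREATE INDEX tvrage_idx on network_conversions (tvrage_network, tvrage_country)')

    # illustrative mapping row: TVRage name + country -> TheTVDB name
    conn.execute('INSERT OR REPLACE INTO network_conversions VALUES (?, ?, ?)',
                 ('CBS', 'CBS Television Network', 'United States'))
    row = conn.execute('SELECT tvdb_network FROM network_conversions'
                       ' WHERE tvrage_network = ? and tvrage_country = ?',
                       ('CBS Television Network', 'United States')).fetchone()
    print(row[0])  # -> CBS
    conn.close()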
@@ -1423,3 +1423,6 @@ def get_size(start_path='.'):
             total_size += ek.ek(os.path.getsize, fp)
     return total_size
+
+
+def build_dict(seq, key):
+    return dict((d[key], dict(d, index=index)) for (index, d) in enumerate(seq))
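The new build_dict() helper keys a sequence of row dicts by one of their fields and records each row's original position under 'index'; load_network_conversions() (further below) uses it to index the existing network_conversions rows by tvdb_network. A tiny usage sketch with made-up rows:

    def build_dict(seq, key):
        return dict((d[key], dict(d, index=index)) for (index, d) in enumerate(seq))

    rows = [{'tvdb_network': 'CBS', 'tvrage_network': 'CBS Television Network'},
            {'tvdb_network': 'ABC (US)', 'tvrage_network': 'ABC'}]
    by_name = build_dict(rows, 'tvdb_network')
    print(by_name['ABC (US)'])
    # -> {'tvdb_network': 'ABC (US)', 'tvrage_network': 'ABC', 'index': 1}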
@@ -140,6 +140,7 @@ def _update_zoneinfo():
 def update_network_dict():
     _remove_old_zoneinfo()
     _update_zoneinfo()
+    load_network_conversions()

     d = {}

@@ -278,3 +279,62 @@ def test_timeformat(t):
         return False
     else:
         return True
+
+
+def standardize_network(network, country):
+    myDB = db.DBConnection('cache.db')
+    sqlResults = myDB.select('SELECT * FROM network_conversions WHERE tvrage_network = ? and tvrage_country = ?',
+                             [network, country])
+    if len(sqlResults) == 1:
+        return sqlResults[0]['tvdb_network']
+    else:
+        return network
+
+
+def load_network_conversions():
+
+    conversions = []
+
+    # network conversions are stored on github pages
+    url = 'https://raw.githubusercontent.com/prinz23/sg_network_conversions/master/conversions.txt'
+
+    url_data = helpers.getURL(url)
+    if url_data is None:
+        # When urlData is None, trouble connecting to github
+        logger.log(u'Updating network conversions failed, this can happen from time to time. URL: %s' % url, logger.WARNING)
+        return
+
+    try:
+        for line in url_data.splitlines():
+            (tvdb_network, tvrage_network, tvrage_country) = line.decode('utf-8').strip().rsplit(u'::', 2)
+            if not (tvdb_network and tvrage_network and tvrage_country):
+                continue
+            conversions.append({'tvdb_network': tvdb_network, 'tvrage_network': tvrage_network, 'tvrage_country': tvrage_country})
+    except (IOError, OSError):
+        pass
+
+    my_db = db.DBConnection('cache.db')
+
+    old_d = my_db.select('SELECT * FROM network_conversions')
+    old_d = helpers.build_dict(old_d, 'tvdb_network')
+
+    # list of sql commands to update the network_conversions table
+    cl = []
+
+    for n_w in conversions:
+        cl.append(['INSERT OR REPLACE INTO network_conversions (tvdb_network, tvrage_network, tvrage_country)'
+                   'VALUES (?,?,?)', [n_w['tvdb_network'], n_w['tvrage_network'], n_w['tvrage_country']]])
+        try:
+            del old_d[n_w['tvdb_network']]
+        except:
+            pass
+
+    # remove deleted records
+    if len(old_d) > 0:
+        old_items = list(va for va in old_d)
+        cl.append(['DELETE FROM network_conversions WHERE tvdb_network'
+                   ' IN (%s)' % ','.join(['?'] * len(old_items)), old_items])
+
+    # change all network conversion info at once (much faster)
+    if len(cl) > 0:
+        my_db.mass_action(cl)