mirror of
https://github.com/SickGear/SickGear.git
synced 2025-01-23 01:43:43 +00:00
Fixed robots.txt method.
This commit is contained in:
parent
037cee7820
commit
508c094f48
1 changed file with 5 additions and 1 deletion
|
@ -172,6 +172,10 @@ class MainHandler(RequestHandler):
|
||||||
path = self.request.uri.replace(sickbeard.WEB_ROOT, '').split('?')[0]
|
path = self.request.uri.replace(sickbeard.WEB_ROOT, '').split('?')[0]
|
||||||
|
|
||||||
method = path.strip('/').split('/')[-1]
|
method = path.strip('/').split('/')[-1]
|
||||||
|
|
||||||
|
if method == 'robots.txt':
|
||||||
|
method = 'robots_txt'
|
||||||
|
|
||||||
if path.startswith('/api') and method != 'builder':
|
if path.startswith('/api') and method != 'builder':
|
||||||
apikey = path.strip('/').split('/')[-1]
|
apikey = path.strip('/').split('/')[-1]
|
||||||
method = path.strip('/').split('/')[0]
|
method = path.strip('/').split('/')[0]
|
||||||
|
@ -225,7 +229,7 @@ class MainHandler(RequestHandler):
|
||||||
def robots_txt(self, *args, **kwargs):
    """Serve a robots.txt body that keeps all web crawlers out.

    Sets the response Content-Type to text/plain and returns the
    robots.txt payload disallowing every path for every user agent.
    *args and **kwargs are accepted to match the generic route-dispatch
    signature used by the handler and are ignored.

    NOTE(review): this span was a corrupted side-by-side diff (every line
    duplicated with separator junk); reconstructed here as the post-commit
    version of the method — confirm against upstream commit 508c094f48.
    """
    self.set_header('Content-Type', 'text/plain')
    return "User-agent: *\nDisallow: /"
|
||||||
|
|
||||||
def showPoster(self, show=None, which=None):
|
def showPoster(self, show=None, which=None):
|
||||||
# Redirect initial poster/banner thumb to default images
|
# Redirect initial poster/banner thumb to default images
|
||||||
|
|
Loading…
Reference in a new issue