diff --git a/CHANGES.md b/CHANGES.md index e06fbf85..79b28bc5 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,7 +1,63 @@ +### 0.4.0 (2014-12-04 10:50:00 UTC) + +* Change footer stats to not add newlines when copy/pasting from them +* Remove redundant references from Config/Help & Info +* Fix poster preview on small poster layout +* Change overhaul Config/Anime to be in line with General Configuration +* Change descriptions and layout on Config/Anime page +* Remove output of source code line when warnings highlight libraries not used with IMDb +* Add dropdown on Add Trending Shows to display all shows, shows not in library, or shows in library +* Change Help and Info icon sprites to color and text of Arguments if unused +* Change sharper looking heart image on the Add Show page +* Change Add Show on Add Trending Show page to use the full Add New Show flow +* Fix adding shows with titles that contain "&" on Add Trending Show page +* Fix unset vars on Add New Shows page used in the Add Existing Shows context +* Remove unneeded datetime convert from Coming Episodes page +* Fix the log output of the limited backlog search for episodes missed +* Remove unsupported t411 search provider +* Remove obsolete Animezb search provider +* Add option to treat anime releases that lack a quality tag as HDTV instead of "unknown" +* Remove old version checking code that no longer applies to SickGear's release system +* Fix pnotify notifications going full page +* Change overhaul Config Post Processing to be in line with General Configuration +* Change rearrange Config Post Processing items into sections for easier use +* Fix CSS overriding link colors on config pages +* Change Config Post Processing texts and descriptions throughout +* Fix Config Post Processing info icons in "Naming Legends" +* Change Config Post Processing naming sample lines to be more available +* Add Config Post Processing failed downloads Sabnzbd setup guide +* Fix Config Post Processing "Anime name pattern" 
custom javascript validation +* Add check that SSLv3 is available before use by requests lib +* Update Requests library 2.3.0 to 2.4.3 (9dc6602) +* Change suppress HTTPS verification InsecureRequestWarning as many sites use self-certified certificates +* Fix API endpoint Episode.SetStatus to "Wanted" +* Change airdateModifyStamp to handle hour that is "00:00" +* Fix a handler when ShowData is not available in TVDB and TVRage APIs +* Fix a handler when EpisodeData is not available in TVDB and TVRage APIs +* Add TVRage "Canceled/Ended" as "Ended" status to sort on Simple Layout of Show List page +* Fix qtips on Display Show and Config Post Processing +* Fix glitch above rating stars on Display Show page +* Change overhaul Config/Search Providers +* Change Config/Search Providers texts and descriptions +* Fix display when no providers are visible on Config/Search Providers +* Fix failing "Search Settings" link that is shown on Config/Search Providers when Torrents Search is not enabled +* Fix failing "Providers" link on Config/Search Settings/Episode Search +* Change case of labels in General Config/Interface/Timezone +* Split enabled from not enabled providers in the Configure Provider drop down on the Providers Options tab +* Fix typo on General Config/Misc +* Fix Add Trending Shows "Not In library" now filters tvrage added shows +* Add a hover over text "In library" on Add Trending Shows to display tv database show was added from +* Fix reduces time API endpoint Shows takes to return results +* Fix Coming Eps Page to include shows +/- 1 day for time zone corrections +* Fix season jumping dropdown menu for shows with over 15 seasons on Display Show +* Fix article sorting for Coming Eps, Manage, Show List, Display Show, API, and Trending Shows pages + + ### 0.3.1 (2014-11-19 16:40:00 UTC) * Fix failing travis test + ### 0.3.0 (2014-11-12 14:30:00 UTC) * Change logos, text etc. 
branding to SickGear diff --git a/HACKS.txt b/HACKS.txt index 8750d457..b1e6363d 100644 --- a/HACKS.txt +++ b/HACKS.txt @@ -1,3 +1,5 @@ Libs with customisations... /tornado +/lib/requests/packages/urllib3/contrib/pyopenssl.py +/lib/requests/packages/urllib3/connectionpool.py diff --git a/googlecode_upload.py b/googlecode_upload.py deleted file mode 100644 index 1b934925..00000000 --- a/googlecode_upload.py +++ /dev/null @@ -1,250 +0,0 @@ -#!/usr/bin/env python2 -# -# Copyright 2006, 2007 Google Inc. All Rights Reserved. -# Author: danderson@google.com (David Anderson) -# -# Script for uploading files to a Google Code project. -# -# This is intended to be both a useful script for people who want to -# streamline project uploads and a reference implementation for -# uploading files to Google Code projects. -# -# To upload a file to Google Code, you need to provide a path to the -# file on your local machine, a small summary of what the file is, a -# project name, and a valid account that is a member or owner of that -# project. You can optionally provide a list of labels that apply to -# the file. The file will be uploaded under the same name that it has -# in your local filesystem (that is, the "basename" or last path -# component). Run the script with '--help' to get the exact syntax -# and available options. -# -# Note that the upload script requests that you enter your -# googlecode.com password. This is NOT your Gmail account password! -# This is the password you use on googlecode.com for committing to -# Subversion and uploading files. You can find your password by going -# to http://code.google.com/hosting/settings when logged in with your -# Gmail account. If you have already committed to your project's -# Subversion repository, the script will automatically retrieve your -# credentials from there (unless disabled, see the output of '--help' -# for details). 
-# -# If you are looking at this script as a reference for implementing -# your own Google Code file uploader, then you should take a look at -# the upload() function, which is the meat of the uploader. You -# basically need to build a multipart/form-data POST request with the -# right fields and send it to https://PROJECT.googlecode.com/files . -# Authenticate the request using HTTP Basic authentication, as is -# shown below. -# -# Licensed under the terms of the Apache Software License 2.0: -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Questions, comments, feature requests and patches are most welcome. -# Please direct all of these to the Google Code users group: -# http://groups.google.com/group/google-code-hosting - -"""Google Code file uploader script. -""" - -__author__ = 'danderson@google.com (David Anderson)' - -import httplib -import os.path -import optparse -import getpass -import base64 -import sys - - -def upload(file, project_name, user_name, password, summary, labels=None): - """Upload a file to a Google Code project's file server. - - Args: - file: The local path to the file. - project_name: The name of your project on Google Code. - user_name: Your Google account name. - password: The googlecode.com password for your account. - Note that this is NOT your global Google Account password! - summary: A small description for the file. - labels: an optional list of label strings with which to tag the file. - - Returns: a tuple: - http_status: 201 if the upload succeeded, something else if an - error occured. - http_reason: The human-readable string associated with http_status - file_url: If the upload succeeded, the URL of the file on Google - Code, None otherwise. - """ - # The login is the user part of user@gmail.com. If the login provided - # is in the full user@domain form, strip it down. 
- if user_name.endswith('@gmail.com'): - user_name = user_name[:user_name.index('@gmail.com')] - - form_fields = [('summary', summary)] - if labels is not None: - form_fields.extend([('label', l.strip()) for l in labels]) - - content_type, body = encode_upload_request(form_fields, file) - - upload_host = '%s.googlecode.com' % project_name - upload_uri = '/files' - auth_token = base64.b64encode('%s:%s'% (user_name, password)) - headers = { - 'Authorization': 'Basic %s' % auth_token, - 'User-Agent': 'Googlecode.com uploader v0.9.4', - 'Content-Type': content_type, - } - - server = httplib.HTTPSConnection(upload_host) - server.request('POST', upload_uri, body, headers) - resp = server.getresponse() - server.close() - - if resp.status == 201: - location = resp.getheader('Location', None) - else: - location = None - return resp.status, resp.reason, location - - -def encode_upload_request(fields, file_path): - """Encode the given fields and file into a multipart form body. - - fields is a sequence of (name, value) pairs. file is the path of - the file to upload. The file will be uploaded to Google Code with - the same file name. - - Returns: (content_type, body) ready for httplib.HTTP instance - """ - BOUNDARY = '----------Googlecode_boundary_reindeer_flotilla' - CRLF = '\r\n' - - body = [] - - # Add the metadata about the upload first - for key, value in fields: - body.extend( - ['--' + BOUNDARY, - 'Content-Disposition: form-data; name="%s"' % key, - '', - value, - ]) - - # Now add the file itself - file_name = os.path.basename(file_path) - f = open(file_path, 'rb') - file_content = f.read() - f.close() - - body.extend( - ['--' + BOUNDARY, - 'Content-Disposition: form-data; name="filename"; filename="%s"' - % file_name, - # The upload server determines the mime-type, no need to set it. 
- 'Content-Type: application/octet-stream', - '', - file_content, - ]) - - # Finalize the form body - body.extend(['--' + BOUNDARY + '--', '']) - - return 'multipart/form-data; boundary=%s' % BOUNDARY, CRLF.join(body) - - -def upload_find_auth(file_path, project_name, summary, labels=None, - user_name=None, password=None, tries=3): - """Find credentials and upload a file to a Google Code project's file server. - - file_path, project_name, summary, and labels are passed as-is to upload. - - Args: - file_path: The local path to the file. - project_name: The name of your project on Google Code. - summary: A small description for the file. - labels: an optional list of label strings with which to tag the file. - config_dir: Path to Subversion configuration directory, 'none', or None. - user_name: Your Google account name. - tries: How many attempts to make. - """ - - while tries > 0: - if user_name is None: - # Read username if not specified or loaded from svn config, or on - # subsequent tries. - sys.stdout.write('Please enter your googlecode.com username: ') - sys.stdout.flush() - user_name = sys.stdin.readline().rstrip() - if password is None: - # Read password if not loaded from svn config, or on subsequent tries. - print 'Please enter your googlecode.com password.' - print '** Note that this is NOT your Gmail account password! **' - print 'It is the password you use to access Subversion repositories,' - print 'and can be found here: http://code.google.com/hosting/settings' - password = getpass.getpass() - - status, reason, url = upload(file_path, project_name, user_name, password, - summary, labels) - # Returns 403 Forbidden instead of 401 Unauthorized for bad - # credentials as of 2007-07-17. - if status in [httplib.FORBIDDEN, httplib.UNAUTHORIZED]: - # Rest for another try. - user_name = password = None - tries = tries - 1 - else: - # We're done. 
- break - - return status, reason, url - - -def main(): - parser = optparse.OptionParser(usage='googlecode-upload.py -s SUMMARY ' - '-p PROJECT [options] FILE') - parser.add_option('-s', '--summary', dest='summary', - help='Short description of the file') - parser.add_option('-p', '--project', dest='project', - help='Google Code project name') - parser.add_option('-u', '--user', dest='user', - help='Your Google Code username') - parser.add_option('-w', '--password', dest='password', - help='Your Google Code password') - parser.add_option('-l', '--labels', dest='labels', - help='An optional list of comma-separated labels to attach ' - 'to the file') - - options, args = parser.parse_args() - - if not options.summary: - parser.error('File summary is missing.') - elif not options.project: - parser.error('Project name is missing.') - elif len(args) < 1: - parser.error('File to upload not provided.') - elif len(args) > 1: - parser.error('Only one file may be specified.') - - file_path = args[0] - - if options.labels: - labels = options.labels.split(',') - else: - labels = None - -def upload_file(file, project, summary, labels, username, password): - - status, reason, url = upload_find_auth(file, project, - summary, labels, - username, password) - if url: - print 'The file was uploaded successfully.' - print 'URL: %s' % url - return 0 - else: - print 'An error occurred. Your file was not uploaded.' 
- print 'Google Code upload server said: %s (%s)' % (reason, status) - return 1 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/gui/slick/css/dark.css b/gui/slick/css/dark.css index 594333cd..d72fb0cf 100644 --- a/gui/slick/css/dark.css +++ b/gui/slick/css/dark.css @@ -4,139 +4,139 @@ fonts /* Open Sans */ /* Regular */ @font-face { - font-family: 'Open Sans'; - - src: url('fonts/OpenSans-Regular-webfont.eot'); - src: url('fonts/OpenSans-Regular-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Regular-webfont.woff') format('woff'), - url('fonts/OpenSans-Regular-webfont.ttf') format('truetype'), - url('fonts/OpenSans-Regular-webfont.svg#OpenSansRegular') format('svg'); - font-weight: normal; - font-weight: 400; - font-style: normal; + font-family: 'Open Sans'; + + src: url('fonts/OpenSans-Regular-webfont.eot'); + src: url('fonts/OpenSans-Regular-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Regular-webfont.woff') format('woff'), + url('fonts/OpenSans-Regular-webfont.ttf') format('truetype'), + url('fonts/OpenSans-Regular-webfont.svg#OpenSansRegular') format('svg'); + font-weight: normal; + font-weight: 400; + font-style: normal; } /* Italic */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-Italic-webfont.eot'); - src: url('fonts/OpenSans-Italic-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Italic-webfont.woff') format('woff'), - url('fonts/OpenSans-Italic-webfont.ttf') format('truetype'), - url('fonts/OpenSans-Italic-webfont.svg#OpenSansItalic') format('svg'); - font-weight: normal; - font-weight: 400; - font-style: italic; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-Italic-webfont.eot'); + src: url('fonts/OpenSans-Italic-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Italic-webfont.woff') format('woff'), + url('fonts/OpenSans-Italic-webfont.ttf') format('truetype'), + 
url('fonts/OpenSans-Italic-webfont.svg#OpenSansItalic') format('svg'); + font-weight: normal; + font-weight: 400; + font-style: italic; } /* Light */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-Light-webfont.eot'); - src: url('fonts/OpenSans-Light-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Light-webfont.woff') format('woff'), - url('fonts/OpenSans-Light-webfont.ttf') format('truetype'), - url('fonts/OpenSans-Light-webfont.svg#OpenSansLight') format('svg'); - font-weight: 200; - font-style: normal; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-Light-webfont.eot'); + src: url('fonts/OpenSans-Light-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Light-webfont.woff') format('woff'), + url('fonts/OpenSans-Light-webfont.ttf') format('truetype'), + url('fonts/OpenSans-Light-webfont.svg#OpenSansLight') format('svg'); + font-weight: 200; + font-style: normal; } /* Light Italic */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-LightItalic-webfont.eot'); - src: url('fonts/OpenSans-LightItalic-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-LightItalic-webfont.woff') format('woff'), - url('fonts/OpenSans-LightItalic-webfont.ttf') format('truetype'), - url('fonts/OpenSans-LightItalic-webfont.svg#OpenSansLightItalic') format('svg'); - font-weight: 200; - font-style: italic; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-LightItalic-webfont.eot'); + src: url('fonts/OpenSans-LightItalic-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-LightItalic-webfont.woff') format('woff'), + url('fonts/OpenSans-LightItalic-webfont.ttf') format('truetype'), + url('fonts/OpenSans-LightItalic-webfont.svg#OpenSansLightItalic') format('svg'); + font-weight: 200; + font-style: italic; } /* Semibold */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-Semibold-webfont.eot'); - src: 
url('fonts/OpenSans-Semibold-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Semibold-webfont.woff') format('woff'), - url('fonts/OpenSans-Semibold-webfont.ttf') format('truetype'), - url('fonts/OpenSans-Semibold-webfont.svg#OpenSansSemibold') format('svg'); - font-weight: 600; - font-style: normal; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-Semibold-webfont.eot'); + src: url('fonts/OpenSans-Semibold-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Semibold-webfont.woff') format('woff'), + url('fonts/OpenSans-Semibold-webfont.ttf') format('truetype'), + url('fonts/OpenSans-Semibold-webfont.svg#OpenSansSemibold') format('svg'); + font-weight: 600; + font-style: normal; } /* Semibold Italic */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-SemiboldItalic-webfont.eot'); - src: url('fonts/OpenSans-SemiboldItalic-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-SemiboldItalic-webfont.woff') format('woff'), - url('fonts/OpenSans-SemiboldItalic-webfont.ttf') format('truetype'), - url('fonts/OpenSans-SemiboldItalic-webfont.svg#OpenSansSemiboldItalic') format('svg'); - font-weight: 600; - font-style: italic; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-SemiboldItalic-webfont.eot'); + src: url('fonts/OpenSans-SemiboldItalic-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-SemiboldItalic-webfont.woff') format('woff'), + url('fonts/OpenSans-SemiboldItalic-webfont.ttf') format('truetype'), + url('fonts/OpenSans-SemiboldItalic-webfont.svg#OpenSansSemiboldItalic') format('svg'); + font-weight: 600; + font-style: italic; } /* Bold */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-Semibold-webfont.eot'); - src: url('fonts/OpenSans-Semibold-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Semibold-webfont.woff') format('woff'), - url('fonts/OpenSans-Semibold-webfont.ttf') format('truetype'), - 
url('fonts/OpenSans-Semibold-webfont.svg#OpenSansSemibold') format('svg'); - font-weight: bold; - font-weight: 700; - font-style: normal; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-Semibold-webfont.eot'); + src: url('fonts/OpenSans-Semibold-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Semibold-webfont.woff') format('woff'), + url('fonts/OpenSans-Semibold-webfont.ttf') format('truetype'), + url('fonts/OpenSans-Semibold-webfont.svg#OpenSansSemibold') format('svg'); + font-weight: bold; + font-weight: 700; + font-style: normal; } /* Bold Italic */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-SemiboldItalic-webfont.eot'); - src: url('fonts/OpenSans-SemiboldItalic-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-SemiboldItalic-webfont.woff') format('woff'), - url('fonts/OpenSans-SemiboldItalic-webfont.ttf') format('truetype'), - url('fonts/OpenSans-SemiboldItalic-webfont.svg#OpenSansSemiboldItalic') format('svg'); - font-weight: bold; - font-weight: 700; - font-style: italic; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-SemiboldItalic-webfont.eot'); + src: url('fonts/OpenSans-SemiboldItalic-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-SemiboldItalic-webfont.woff') format('woff'), + url('fonts/OpenSans-SemiboldItalic-webfont.ttf') format('truetype'), + url('fonts/OpenSans-SemiboldItalic-webfont.svg#OpenSansSemiboldItalic') format('svg'); + font-weight: bold; + font-weight: 700; + font-style: italic; } /* Extra Bold */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-Bold-webfont.eot'); - src: url('fonts/OpenSans-Bold-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Bold-webfont.woff') format('woff'), - url('fonts/OpenSans-Bold-webfont.ttf') format('truetype'), - url('fonts/OpenSans-Bold-webfont.svg#OpenSansBold') format('svg'); - font-weight: 900; - font-style: normal; + font-family: 'Open Sans'; + src: 
url('fonts/OpenSans-Bold-webfont.eot'); + src: url('fonts/OpenSans-Bold-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Bold-webfont.woff') format('woff'), + url('fonts/OpenSans-Bold-webfont.ttf') format('truetype'), + url('fonts/OpenSans-Bold-webfont.svg#OpenSansBold') format('svg'); + font-weight: 900; + font-style: normal; } /* Extra Bold Italic */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-BoldItalic-webfont.eot'); - src: url('fonts/OpenSans-BoldItalic-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-BoldItalic-webfont.woff') format('woff'), - url('fonts/OpenSans-BoldItalic-webfont.ttf') format('truetype'), - url('fonts/OpenSans-BoldItalic-webfont.svg#OpenSansBoldItalic') format('svg'); - font-weight: 900; - font-style: italic; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-BoldItalic-webfont.eot'); + src: url('fonts/OpenSans-BoldItalic-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-BoldItalic-webfont.woff') format('woff'), + url('fonts/OpenSans-BoldItalic-webfont.ttf') format('truetype'), + url('fonts/OpenSans-BoldItalic-webfont.svg#OpenSansBoldItalic') format('svg'); + font-weight: 900; + font-style: italic; } /* Droid Sans */ @font-face { - font-family: 'droid_sans_mono'; - src: url('fonts/droidsansmono-webfont.eot'); - src: url('fonts/droidsansmono-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/droidsansmono-webfont.woff') format('woff'), - url('fonts/droidsansmono-webfont.ttf') format('truetype'), - url('fonts/droidsansmono-webfont.svg#droid_sans_monoregular') format('svg'); - font-weight: normal; - font-style: normal; + font-family: 'droid_sans_mono'; + src: url('fonts/droidsansmono-webfont.eot'); + src: url('fonts/droidsansmono-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/droidsansmono-webfont.woff') format('woff'), + url('fonts/droidsansmono-webfont.ttf') format('truetype'), + 
url('fonts/droidsansmono-webfont.svg#droid_sans_monoregular') format('svg'); + font-weight: normal; + font-style: normal; } @@ -206,12 +206,11 @@ inc_top.tmpl } .ui-widget-content a { - color: #fff; + color: #2D8FBF; } .ui-widget-content a:hover { - color: #09A2FF; - text-decoration: none; + color: #09A2FF; } .ui-widget-header { @@ -293,32 +292,32 @@ inc_top.tmpl } .ui-state-active a, .ui-state-active a:link, .ui-state-active a:visited { - color: #140F06; - text-decoration: none; + color: #140F06; + text-decoration: none; } .ui-state-default a, .ui-state-default a:link, .ui-state-default a:visited { - color: #fff; - text-decoration: none; + color: #fff; + text-decoration: none; } .ui-dialog .ui-dialog-titlebar-close { - background: #333; + background: #333; } .ui-tabs { - padding: 0px; - background: none; - border-width: 0px; + padding: 0px; + background: none; + border-width: 0px; } .ui-tabs .ui-tabs-nav { - padding-left: 0px; - background: transparent; - border-width: 0px 0px 0px 0px; - -moz-border-radius: 0px; - -webkit-border-radius: 0px; - border-radius: 0px; + padding-left: 0px; + background: transparent; + border-width: 0px 0px 0px 0px; + -moz-border-radius: 0px; + -webkit-border-radius: 0px; + border-radius: 0px; } .ui-tabs .ui-tabs-panel { @@ -327,8 +326,8 @@ inc_top.tmpl } .ui-tabs .ui-tabs-nav li.ui-tabs-active { - border-top-left-radius: 5px; - border-top-right-radius: 5px; + border-top-left-radius: 5px; + border-top-right-radius: 5px; } .ui-tabs-nav > :not(.ui-tabs-active){ @@ -338,12 +337,12 @@ inc_top.tmpl } #content { - width: 95%; - min-width: 875px; - padding: 15px; - margin-left: auto; - margin-right: auto; - clear: both; + width: 95%; + min-width: 875px; + padding: 15px; + margin-left: auto; + margin-right: auto; + clear: both; } #SubMenu { @@ -363,7 +362,7 @@ inc_top.tmpl } [class^="menu-icon-"], [class*=" menu-icon-"] { - background: url("../images/menu/menu-icons-white.png"); + background: url("../images/menu/menu-icons-white.png"); height: 
16px; width: 16px; display: inline-block; @@ -465,7 +464,7 @@ inc_top.tmpl } [class^="submenu-icon-"], [class*=" submenu-icon-"] { - background: url("../images/menu/menu-icons-white.png"); + background: url("../images/menu/menu-icons-white.png"); height: 16px; width: 16px; } @@ -517,24 +516,24 @@ home.tmpl ========================================================================== */ .imgbanner .banner { - border: 1px solid #111; - overflow: hidden; - height: 66px; - overflow: hidden; - border-radius: 8px; - vertical-align: top; - width: 360px; + border: 1px solid #111; + overflow: hidden; + height: 66px; + overflow: hidden; + border-radius: 8px; + vertical-align: top; + width: 360px; display: block; margin-left: auto; margin-right: auto; } .imgsmallposter .small { - height: 66px; - overflow: hidden; - border-radius: 3px; - vertical-align: middle; - width: 45px; + height: 66px; + overflow: hidden; + border-radius: 3px; + vertical-align: middle; + width: 45px; border: 1px solid #111; margin-right: 5px; } @@ -549,7 +548,7 @@ home.tmpl } .ui-progressbar .ui-progressbar-value { - box-sizing: content-box !important; + box-sizing: content-box !important; } .progressbarText { @@ -610,7 +609,7 @@ home.tmpl } .show { - margin: 12px; + margin: 12px; width: 188px; height: 352px; background-color: #333; @@ -621,7 +620,7 @@ home.tmpl .show-image { overflow: hidden; height: 273px; - width: 186px; + width: 186px; border-top-left-radius: 5px; border-top-right-radius: 5px; } @@ -632,23 +631,23 @@ home.tmpl } .show .ui-corner-all, .ui-corner-bottom, .ui-corner-right, .ui-corner-br { - border-bottom-right-radius: 0px; + border-bottom-right-radius: 0px; } .show .ui-corner-all, .ui-corner-bottom, .ui-corner-left, .ui-corner-bl { - border-bottom-left-radius: 0px; + border-bottom-left-radius: 0px; } .show .ui-corner-all, .ui-corner-top, .ui-corner-right, .ui-corner-tr { - border-top-right-radius: 0px; + border-top-right-radius: 0px; } .show .ui-corner-all, .ui-corner-top, .ui-corner-left, 
.ui-corner-tl { - border-top-left-radius: 0px; + border-top-left-radius: 0px; } .show .ui-widget-content { - border-top: 1px solid #111; + border-top: 1px solid #111; border-bottom: 1px solid #111; border-left: 0px; border-right: 0px; @@ -756,7 +755,7 @@ home.tmpl #sort-by { display: inline; list-style-type: none; - padding: 0; + padding: 0; margin-left: 5px; } @@ -771,13 +770,13 @@ home.tmpl } td.tvShow a { - color: #fff; - text-decoration: none; + color: #fff; + text-decoration: none; } td.tvShow a:hover { - cursor: pointer; - color: #09A2FF; + cursor: pointer; + color: #09A2FF; } /* ======================================================================= @@ -796,8 +795,8 @@ home_addShows.tmpl } div.button { - display: table-cell; - vertical-align: middle; + display: table-cell; + vertical-align: middle; padding-left: 10px; } @@ -844,8 +843,8 @@ div.buttontext p { home_newShow.tmpl ========================================================================== */ #addShowForm, #recommendedShowsForm { - margin-left: auto; - margin-right: auto; + margin-left: auto; + margin-right: auto; } #newShowPortal { @@ -864,7 +863,7 @@ home_newShow.tmpl } #searchResults input[type="radio"] { - vertical-align: -2px; + vertical-align: -2px; } /* ======================================================================= @@ -876,10 +875,10 @@ home_addExistingShow.tmpl } ul#rootDirStaticList { - width: 90%; - margin-right: auto; - margin-left: auto; - text-align: left; + width: 90%; + margin-right: auto; + margin-left: auto; + text-align: left; } ul#rootDirStaticList li { @@ -891,12 +890,12 @@ ul#rootDirStaticList li { } ul#rootDirStaticList li label { - margin-top: 5px; + margin-top: 5px; margin-bottom: 5px; } ul#rootDirStaticList li input[type="checkbox"] { - vertical-align: -2px; + vertical-align: -2px; } /* ======================================================================= @@ -904,7 +903,7 @@ home_trendingShows.tmpl 
========================================================================== */ .traktShowTitleIcons { - float: right; + float: right; padding-right: 4px; padding-bottom: 4px; } @@ -919,7 +918,7 @@ home_trendingShows.tmpl } .traktContainer p, .traktContainer i { - white-space: nowrap; + white-space: nowrap; font-size: 12px; overflow: hidden; /* text-shadow: 1px 1px 0px #000;*/ @@ -928,7 +927,7 @@ home_trendingShows.tmpl } .traktContainer { - margin: 12px; + margin: 12px; width: 188px; background-color: #333; border: 1px solid #111; @@ -938,7 +937,7 @@ home_trendingShows.tmpl .trakt-image { overflow: hidden; height: 273px; - width: 186px; + width: 186px; border-top-left-radius: 5px; border-top-right-radius: 5px; border-bottom: 1px solid #111; @@ -953,7 +952,7 @@ home_postprocess.tmpl width: 800px; padding-top: 10px; margin-right: auto; - margin-left: auto; + margin-left: auto; } @@ -961,20 +960,14 @@ home_postprocess.tmpl displayShow.tmpl ========================================================================== */ -#posterCol { - float: left; - margin-right: 10px; - margin-bottom: 20px; -} - #showCol { - overflow: hidden; + overflow: hidden; margin-bottom: 20px; } .navShow { - display: inline; - cursor: pointer; + display: inline; + cursor: pointer; } #prevShow, @@ -985,12 +978,12 @@ displayShow.tmpl } h1.title { - padding-bottom: 12px; - margin-bottom: 15px; - line-height: 30px; - text-align: left; - text-rendering: optimizelegibility; - border-bottom: 1px solid #555; + padding-bottom: 12px; + margin-bottom: 15px; + line-height: 30px; + text-align: left; + text-rendering: optimizelegibility; + border-bottom: 1px solid #555; } .displayspecials { @@ -1008,48 +1001,32 @@ h1.title { top: -3px; } -span.imdbstars { - display: inline-block; - vertical-align: top; - cursor: help; - margin-top: 4px; -} - span.imdbstars, span.imdbstars > * { - height: 12px; - background: url(../images/rating.png) 0 -12px repeat-x; - width: 120px; - display: inline-block; - vertical-align: 
top; -} - -span.imdbstars > * { - background-position: 0 0; - max-width:120px; + background: url(../images/rating.png) 0 -12px repeat-x; } ul.tags { - list-style-type: none; + list-style-type: none; position: relative; top: -5px; margin-left: -40px; } ul.tags li { - margin-right: 4px; + margin-right: 4px; margin-bottom: 5px; - padding: 3px 4px 3px 25px; + padding: 3px 4px 3px 25px; background: url(../images/tag.png) no-repeat scroll 5px 4px #15528F; - border-radius: 3px; + border-radius: 3px; border: 1px solid #111; - color: #FFF; - font: 14px/18px "Open Sans", "Helvetica Neue", Helvetica, Arial, Geneva, sans-serif; + color: #FFF; + font: 14px/18px "Open Sans", "Helvetica Neue", Helvetica, Arial, Geneva, sans-serif; text-shadow: 0px 1px rgba(0, 0, 0, 0.8); - float: left; + float: left; } ul.tags li a{ - color: #FFF; + color: #FFF; } .tvshowImg { @@ -1087,12 +1064,12 @@ ul.tags li a{ } #checkboxControls label { - white-space: nowrap; - display: inline-block; + white-space: nowrap; + display: inline-block; } #checkboxControls input[type="checkbox"] { - vertical-align: -2px; + vertical-align: -2px; } .unaired { @@ -1115,27 +1092,27 @@ ul.tags li a{ } span.unaired { - color: #584b20; + color: #584b20; border: 1px solid #584b20; } span.skipped { - color: #1d5068; + color: #1d5068; border: 1px solid #1d5068; } span.good { - color: #295730; + color: #295730; border: 1px solid #295730; } span.qual { - color: #765100; + color: #765100; border: 1px solid #765100; } span.wanted { - color: #890000; + color: #890000; border: 1px solid #890000; } span.snatched { - color: #652164; + color: #652164; border: 1px solid #652164; } @@ -1145,22 +1122,22 @@ span.good b, span.qual b, span.wanted b, span.snatched b { - color: #000000; + color: #000000; font-weight: 800; } .plotInfo { - cursor: help; - float: right; - position: relative; - top: 2px; + cursor: help; + float: right; + position: relative; + top: 2px; } .plotInfoNone { - cursor: help; - float: right; - position: relative; - 
top: 2px; + cursor: help; + float: right; + position: relative; + top: 2px; opacity: 0.4; } @@ -1213,7 +1190,7 @@ td.col-checkbox { th.col-checkbox input[type="checkbox"], td.col-checkbox input[type="checkbox"] { - vertical-align: -2px; + vertical-align: -2px; } th.col-metadata, @@ -1271,16 +1248,16 @@ td.col-search { } .input-scene { - height: 20px; - line-height: 1.5; - border-radius: 3px; + height: 20px; + line-height: 1.5; + border-radius: 3px; } #editShow { - width: 700px; - padding-top: 10px; - margin-right: auto; - margin-left: auto; + width: 700px; + padding-top: 10px; + margin-right: auto; + margin-left: auto; } /* ======================================================================= @@ -1293,10 +1270,10 @@ comingEpisodes.tmpl .listing-key { - padding: 5px; - font-size: 13px; - font-weight: bold; - border-radius: 5px; + padding: 5px; + font-size: 13px; + font-weight: bold; + border-radius: 5px; } .listing-default { @@ -1316,22 +1293,22 @@ comingEpisodes.tmpl } span.listing-default { - color: #826f30; + color: #826f30; border: 1px solid #826f30; } span.listing-current { - color: #295730; + color: #295730; border: 1px solid #295730; } span.listing-overdue { - color: #890000; + color: #890000; border: 1px solid #890000; } span.listing-toofar { - color: #1d5068; + color: #1d5068; border: 1px solid #1d5068; } @@ -1348,13 +1325,13 @@ h2.day, h2.network { } .tvshowDiv { - display: block; - clear: both; - border: 1px solid #ccc; - margin: auto; - padding: 0px; - text-align: left; - width: 750px; + display: block; + clear: both; + border: 1px solid #ccc; + margin: auto; + padding: 0px; + text-align: left; + width: 750px; border-radius: 5px; background: #fff; cursor: default; @@ -1363,7 +1340,7 @@ h2.day, h2.network { } .tvshowDiv a:hover { - color: #09A2FF; + color: #09A2FF; } .tvshowDiv a, .tvshowDiv a:link, .tvshowDiv a:visited, .tvshowDiv a:hover { @@ -1372,54 +1349,54 @@ h2.day, h2.network { } .tvshowTitle a { - color: #000000; - float: left; - line-height: 
1.4em; - font-size: 1.4em; - text-shadow: -1px -1px 0 #FFF); + color: #000000; + float: left; + line-height: 1.4em; + font-size: 1.4em; + text-shadow: -1px -1px 0 #FFF); } .tvshowTitleIcons { - float: right; - padding: 3px 5px; + float: right; + padding: 3px 5px; } .tvshowDiv td { - padding: 5px 10px; + padding: 5px 10px; } .tvshowDiv td.next_episode { - width: 100%; - height: 90%; - border-bottom: 1px solid #ccc; - vertical-align: top; - color: #000; + width: 100%; + height: 90%; + border-bottom: 1px solid #ccc; + vertical-align: top; + color: #000; } .bannerThumb { - vertical-align: top; - height: auto; - width: 748px; + vertical-align: top; + height: auto; + width: 748px; border-bottom: 1px solid #ccc; } .posterThumb { - vertical-align: top; - height: auto; - width: 180px; + vertical-align: top; + height: auto; + width: 180px; border-right: 1px solid #ccc; } .ep_listing { - width: auto; - border: 1px solid #ccc; - margin-bottom: 10px; - padding: 10px; + width: auto; + border: 1px solid #ccc; + margin-bottom: 10px; + padding: 10px; } .ep_summary { - margin-left: 5px; - font-style: italic; + margin-left: 5px; + font-style: italic; } .ep_summaryTrigger { @@ -1437,21 +1414,21 @@ h2.day, h2.network { } #showListTable td.tvShow a:hover { - cursor: pointer; - color: #09A2FF; + cursor: pointer; + color: #09A2FF; } table.cal-odd { - background-color: #333; + background-color: #333; } table.cal-even { - background-color: #3d3d3d; + background-color: #3d3d3d; } .calendarShow .text .airtime { - color:#fff + color:#fff } .calendarShow .text .episode-title { - color:#aaa + color:#aaa } /* ======================================================================= @@ -1474,13 +1451,8 @@ config*.tmpl } .component-item { - border-bottom: 1px dotted #666; - min-height: 200px; -} - -.component-group-desc{ - float: left; - width: 250px; + border-bottom: 1px dotted #666; + min-height: 200px; } .component-group-desc h3{ @@ -1488,7 +1460,6 @@ config*.tmpl } .component-group-desc p { - 
width: 90%; margin: 10px 0; color: #ddd; } @@ -1525,16 +1496,16 @@ select .selected { } .testNotification { - padding: 5px; - margin-bottom: 10px; - line-height: 20px; - border: 1px dotted #CCC; + padding: 5px; + margin-bottom: 10px; + line-height: 20px; + border: 1px dotted #CCC; } #providerOrderList { - width: 250px; - padding-left: 20px; - list-style-type: none; + width: 250px; + padding-left: 20px; + list-style-type: none; } #provider_order_list, @@ -1546,27 +1517,19 @@ select .selected { #provider_order_list li, #service_order_list li { - padding: 5px; - margin: 5px 0; - font-size: 14px; background: #333 !important; color: #fff; } -#provider_order_list input, -#service_order_list input { - margin: 0px 2px; -} - #config .tip_scale label span.component-title { - width: 85px !important; - font-size: 12px !important; - margin-top: 2px !important; + width: 85px !important; + font-size: 12px !important; + margin-top: 2px !important; } #config .tip_scale label span.component-desc { - margin-left: 120px !important; - width: 220px !important; + margin-left: 120px !important; + width: 220px !important; } .infoTableHeader, @@ -1579,40 +1542,43 @@ select .selected { } [class^="icon16-"], [class*=" icon16-"] { - background-image: url("../images/glyphicons-config-white.png"); - background-position: -40px 0; - background-repeat: no-repeat; - display: inline-block; - height: 16px; - line-height: 16px; - vertical-align: text-top; - width: 16px; + background-image: url("../images/glyphicons-config.png"); + background-repeat: no-repeat; + display: inline-block; + height: 16px; + vertical-align: text-top; + width: 16px; + margin-top: 1px; } .icon16-github { - background-position: 0 0; + background-position: 0 0; } .icon16-mirc { - background-position: -20px 0; + background-position: -26px 0; } -.icon16-sb { - background-position: -40px 0; +.icon16-sg { + background-position: -52px 0; } .icon16-web { - background-position: -60px 0; + background-position: -78px 0; } .icon16-win { - 
background-position: -80px 0; + background-position: -104px 0; } /* ======================================================================= config_postProcessing.tmpl ========================================================================== */ +#config .episode-sample { + background-color: rgb(34, 34, 34); + border-color: rgb(17, 17, 17); +} + #config div.example { - padding: 10px; background-color: #333333; - border: 1px solid #111; + border-color: #111; } .Key { @@ -1654,19 +1620,7 @@ config_postProcessing.tmpl config_notifications.tmpl ========================================================================== */ -div.metadata_options_wrapper { - float: left; - width: 190px; -} - -div.metadata_example_wrapper { - float: right; - width: 325px; -} - div.metadata_options { - padding: 7px; - overflow: auto; background: #333; color: #fff; border: 1px solid #111; @@ -1675,34 +1629,22 @@ div.metadata_options { div.metadata_options label:hover { color: #fff; background-color: #15528F; - cursor: pointer; } div.metadata_options label { - display: block; - padding-left: 7px; - line-height: 20px; color: #fff; } div.metadata_example { - padding: 8px; + border: 1px solid rgb(61, 61, 61); } div.metadata_example label { - display: block; - line-height: 21px; color: #fff; - cursor: pointer; } div.metadataDiv .disabled { - color: #ccc; -} - -.notifier-icon { - float: left; - margin: 6px 4px 0px 0px; + color: #888; } .warning { @@ -1861,23 +1803,23 @@ option.flag { } #Anime { - clear: both; - overflow-x: hidden; - overflow-y: hidden; - font-size: 14px; + clear: both; + overflow-x: hidden; + overflow-y: hidden; + font-size: 14px; } #Anime div.component-group-desc { - float: left; - width: 165px; + float: left; + width: 165px; } #Anime div.component-group-desc p { - margin-bottom: 0.4em; - margin-left: 0; - margin-right: 0; - margin-top: 0.4em; - width: 95%; + margin-bottom: 0.4em; + margin-left: 0; + margin-right: 0; + margin-top: 0.4em; + width: 95%; } div.blackwhitelist{ @@ 
-1886,15 +1828,15 @@ div.blackwhitelist{ } div.blackwhitelist input { - margin: 5px 5px; + margin: 5px 5px; } div.blackwhitelist.pool select{ - width: 300px; + width: 300px; } div.blackwhitelist.pool { - margin:5px; + margin:5px; } div.blackwhitelist.white select, div.blackwhitelist.black select { @@ -1933,8 +1875,8 @@ html * { input[type="radio"] { - margin: 2px 0px 0px; - line-height: normal; + margin: 2px 0px 0px; + line-height: normal; } input, textarea, select, .uneditable-input { @@ -1943,20 +1885,20 @@ input, textarea, select, .uneditable-input { } .container-fluid { - margin-left: 10px; + margin-left: 10px; margin-right: 10px; } .navbar-brand { - padding: 0px; + padding: 0px; } /* navbar styling */ .navbar-default { - background-color: #15528F; - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#297AB8', endColorstr='#15528F'); - background: -webkit-gradient(linear, left top, left bottom, from(#297AB8), to(#15528F)); - background: -moz-linear-gradient(top, #297AB8, #15528F); + background-color: #15528F; + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#297AB8', endColorstr='#15528F'); + background: -webkit-gradient(linear, left top, left bottom, from(#297AB8), to(#15528F)); + background: -moz-linear-gradient(top, #297AB8, #15528F); border-color: #3e3f3a; } @@ -2071,46 +2013,35 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus { - color: #fff; - text-decoration: none; - background-color: #15528F; + color: #fff; + text-decoration: none; + background-color: #15528F; } .dropdown-menu > li > a { - padding: 4px 36px 4px 20px; + padding: 4px 36px 4px 20px; color: #fff; } .dropdown-menu { - background-color: #333; - border: 1px solid rgba(0, 0, 0, 0.15); - box-shadow: 0px 6px 12px rgba(0, 0, 0, 0.176); + background-color: #333; + border: 1px solid rgba(0, 0, 0, 0.15); + box-shadow: 0px 6px 12px rgba(0, 0, 0, 0.176); } .form-control { - color: #000000; + color: 
#000000; } .form-control-inline { - min-width: 0; - width: auto; - display: inline; + min-width: 0; + width: auto; + display: inline; } .btn { - display: inline-block; - *display: inline; - padding: 4px 10px 4px; - margin-bottom: 0; - *margin-left: .3em; - font-size: 12px; - line-height: 16px; - *line-height: 20px; color: #fff; - text-align: center; text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); - vertical-align: middle; - cursor: pointer; background-color: #2672B6; *background-color: #2672B6; background-image: -ms-linear-gradient(top, #297AB8, #15528F); @@ -2119,21 +2050,15 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-image: -o-linear-gradient(top, #297AB8, #15528F); background-image: linear-gradient(top, #297AB8, #15528F); background-image: -moz-linear-gradient(top, #297AB8, #15528F); - background-repeat: repeat-x; border: 1px solid #111; - *border: 0; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); border-color: #111 #111 #111; border-bottom-color: #111; - -webkit-border-radius: 4px; - -moz-border-radius: 4px; - border-radius: 4px; filter: progid:dximagetransform.microsoft.gradient(startColorstr='#297AB8', endColorstr='#15528F', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); - *zoom: 1; -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.0), 0 1px 2px rgba(0, 0, 0, 0.05); - -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.0), 0 1px 2px rgba(0, 0, 0, 0.05); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.0), 0 1px 2px rgba(0, 0, 0, 0.05); + -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.0), 0 1px 2px rgba(0, 0, 0, 0.05); + box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.0), 0 1px 2px rgba(0, 0, 0, 0.05); } .btn:hover, @@ -2163,10 +2088,10 @@ fieldset[disabled] .navbar-default .btn-link:focus { *background-color: #2672B6; background-position: 0 -150px; -webkit-transition: background-position 0.0s linear; - -moz-transition: background-position 0.0s linear; - 
-ms-transition: background-position 0.0s linear; - -o-transition: background-position 0.0s linear; - transition: background-position 0.0s linear; + -moz-transition: background-position 0.0s linear; + -ms-transition: background-position 0.0s linear; + -o-transition: background-position 0.0s linear; + transition: background-position 0.0s linear; } .btn:focus { @@ -2184,8 +2109,8 @@ fieldset[disabled] .navbar-default .btn-link:focus { color: #fff; outline: 0; -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); - -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); - box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); + -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); + box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); } .btn.disabled, @@ -2196,8 +2121,8 @@ fieldset[disabled] .navbar-default .btn-link:focus { opacity: 0.65; filter: alpha(opacity=65); -webkit-box-shadow: none; - -moz-box-shadow: none; - box-shadow: none; + -moz-box-shadow: none; + box-shadow: none; } .btn-large { @@ -2205,8 +2130,8 @@ fieldset[disabled] .navbar-default .btn-link:focus { font-size: 15px; line-height: normal; -webkit-border-radius: 5px; - -moz-border-radius: 5px; - border-radius: 5px; + -moz-border-radius: 5px; + border-radius: 5px; } .btn-large [class^="icon-"] { @@ -2435,10 +2360,10 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .btn-xs { - padding: 1px 5px; - font-size: 12px; - line-height: 1.5; - border-radius: 3px; + padding: 1px 5px; + font-size: 12px; + line-height: 1.5; + border-radius: 3px; } @media(min-width:768px){ @@ -2455,13 +2380,13 @@ fieldset[disabled] .navbar-default .btn-link:focus { } label { - font-weight: normal; + font-weight: normal; } pre { - color: #fff; - background-color: #3d3d3d; - border-color: #111; + color: #fff; + background-color: #3d3d3d; + border-color: #111; } .alert { @@ -2472,16 
+2397,25 @@ pre { /* ======================================================================= input sizing (for config pages) ========================================================================== */ +#editAProvider optgroup { + color: #eee; + background-color: rgb(51, 51, 51); +} + +#editAProvider optgroup option { + color: #222; + background-color: #fff; +} #config select { - min-width: 0; - width: auto; - display: inline; + min-width: 0; + width: auto; + display: inline; margin-top: -4px; } .btn-inline { - margin-top: -3px; + margin-top: -3px; } .input75 { @@ -2840,133 +2774,133 @@ token-input.css ========================================================================== */ ul.token-input-list { - overflow: hidden; - height: auto !important; - height: 1%; - width: 273px; - border: 1px solid #ccc; - cursor: text; - font-size: 10px; - font-family: Verdana; - z-index: 999; - margin: 0; - padding: 0 0 1px 0; - background-color: #fff; - list-style-type: none; + overflow: hidden; + height: auto !important; + height: 1%; + width: 273px; + border: 1px solid #ccc; + cursor: text; + font-size: 10px; + font-family: Verdana; + z-index: 999; + margin: 0; + padding: 0 0 1px 0; + background-color: #fff; + list-style-type: none; /* clear: left; */ - border-top-left-radius: 3px; - border-top-right-radius: 3px; - border-bottom-left-radius: 3px; - border-bottom-right-radius: 3px; + border-top-left-radius: 3px; + border-top-right-radius: 3px; + border-bottom-left-radius: 3px; + border-bottom-right-radius: 3px; } ul.token-input-list li { - list-style-type: none; + list-style-type: none; } ul.token-input-list li input { - border: 0; - padding: 3px 4px; - background-color: white; + border: 0; + padding: 3px 4px; + background-color: white; /* -webkit-appearance: caret; */ } li.token-input-token { - overflow: hidden; - height: auto !important; - height: 1%; - margin: 3px; - padding: 3px 5px 0 5px; - background-color: #d0efa0; - color: #000; - font-weight: bold; - cursor: default; - 
display: block; + overflow: hidden; + height: auto !important; + height: 1%; + margin: 3px; + padding: 3px 5px 0 5px; + background-color: #d0efa0; + color: #000; + font-weight: bold; + cursor: default; + display: block; } li.token-input-token img { - padding-top: 7px; - padding-right: 4px; - float: left; + padding-top: 7px; + padding-right: 4px; + float: left; } li.token-input-token input { - padding-top: 2px !important; - padding-right: 4px !important; - float: left; + padding-top: 2px !important; + padding-right: 4px !important; + float: left; } li.token-input-token p { - float: left; - padding: 0; - margin: 0; - line-height: 2.0 !important; + float: left; + padding: 0; + margin: 0; + line-height: 2.0 !important; } li.token-input-token span { - float: right; - color: #777; - cursor: pointer; + float: right; + color: #777; + cursor: pointer; } li.token-input-selected-token { - background-color: #08844e; - color: #fff; + background-color: #08844e; + color: #fff; } li.token-input-selected-token span { - color: #bbb; + color: #bbb; } li.token-input-input-token input { - margin: 3px 3px 3px 3px !important; + margin: 3px 3px 3px 3px !important; } div.token-input-dropdown { - background-color: #fff; - color: #000; - border-left-color: #ccc; - border-right-color: #ccc; - border-bottom-color: #ccc; + background-color: #fff; + color: #000; + border-left-color: #ccc; + border-right-color: #ccc; + border-bottom-color: #ccc; } div.token-input-dropdown p { - margin: 0; - padding: 3px; - font-weight: bold; - color: #777; + margin: 0; + padding: 3px; + font-weight: bold; + color: #777; } div.token-input-dropdown ul { - margin: 0; - padding: 0; + margin: 0; + padding: 0; } div.token-input-dropdown ul li { - background-color: #fff; - padding: 3px; - list-style-type: none; + background-color: #fff; + padding: 3px; + list-style-type: none; } div.token-input-dropdown ul li.token-input-dropdown-item { - background-color: #fafafa; + background-color: #fafafa; } div.token-input-dropdown 
ul li.token-input-dropdown-item2 { - background-color: #fff; + background-color: #fff; } div.token-input-dropdown ul li em { - font-weight: bold; - font-style: normal; + font-weight: bold; + font-style: normal; } div.token-input-dropdown ul li.token-input-selected-dropdown-item { - background-color: #6196c2; + background-color: #6196c2; } span.token-input-delete-token { - margin: 0 1px; + margin: 0 1px; } /* ======================================================================= @@ -2993,7 +2927,7 @@ jquery.confirm.css top: 50%; margin: -130px 0 0 -230px; border: 1px solid #111; - box-shadow: 0px 0px 12px 0px rgba(0, 0, 0, 0.175); + box-shadow: 0px 0px 12px 0px rgba(0, 0, 0, 0.175); } #confirmBox h1, @@ -3002,18 +2936,18 @@ jquery.confirm.css } #confirmBox h1 { - background-color: #15528F; + background-color: #15528F; border-bottom: 1px solid #111; color: #fff; - margin: 0; - font-size: 22px; - text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); + margin: 0; + font-size: 22px; + text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); } #confirmBox p { padding-top: 20px; - color: #fff; - text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); + color: #fff; + text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); } #confirmButtons { diff --git a/gui/slick/css/light.css b/gui/slick/css/light.css index e2cc2b76..a110113c 100644 --- a/gui/slick/css/light.css +++ b/gui/slick/css/light.css @@ -4,139 +4,139 @@ fonts /* Open Sans */ /* Regular */ @font-face { - font-family: 'Open Sans'; - - src: url('fonts/OpenSans-Regular-webfont.eot'); - src: url('fonts/OpenSans-Regular-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Regular-webfont.woff') format('woff'), - url('fonts/OpenSans-Regular-webfont.ttf') format('truetype'), - url('fonts/OpenSans-Regular-webfont.svg#OpenSansRegular') format('svg'); - font-weight: normal; - font-weight: 400; - font-style: normal; + font-family: 'Open Sans'; + + src: url('fonts/OpenSans-Regular-webfont.eot'); + src: 
url('fonts/OpenSans-Regular-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Regular-webfont.woff') format('woff'), + url('fonts/OpenSans-Regular-webfont.ttf') format('truetype'), + url('fonts/OpenSans-Regular-webfont.svg#OpenSansRegular') format('svg'); + font-weight: normal; + font-weight: 400; + font-style: normal; } /* Italic */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-Italic-webfont.eot'); - src: url('fonts/OpenSans-Italic-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Italic-webfont.woff') format('woff'), - url('fonts/OpenSans-Italic-webfont.ttf') format('truetype'), - url('fonts/OpenSans-Italic-webfont.svg#OpenSansItalic') format('svg'); - font-weight: normal; - font-weight: 400; - font-style: italic; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-Italic-webfont.eot'); + src: url('fonts/OpenSans-Italic-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Italic-webfont.woff') format('woff'), + url('fonts/OpenSans-Italic-webfont.ttf') format('truetype'), + url('fonts/OpenSans-Italic-webfont.svg#OpenSansItalic') format('svg'); + font-weight: normal; + font-weight: 400; + font-style: italic; } /* Light */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-Light-webfont.eot'); - src: url('fonts/OpenSans-Light-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Light-webfont.woff') format('woff'), - url('fonts/OpenSans-Light-webfont.ttf') format('truetype'), - url('fonts/OpenSans-Light-webfont.svg#OpenSansLight') format('svg'); - font-weight: 200; - font-style: normal; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-Light-webfont.eot'); + src: url('fonts/OpenSans-Light-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Light-webfont.woff') format('woff'), + url('fonts/OpenSans-Light-webfont.ttf') format('truetype'), + url('fonts/OpenSans-Light-webfont.svg#OpenSansLight') format('svg'); + 
font-weight: 200; + font-style: normal; } /* Light Italic */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-LightItalic-webfont.eot'); - src: url('fonts/OpenSans-LightItalic-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-LightItalic-webfont.woff') format('woff'), - url('fonts/OpenSans-LightItalic-webfont.ttf') format('truetype'), - url('fonts/OpenSans-LightItalic-webfont.svg#OpenSansLightItalic') format('svg'); - font-weight: 200; - font-style: italic; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-LightItalic-webfont.eot'); + src: url('fonts/OpenSans-LightItalic-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-LightItalic-webfont.woff') format('woff'), + url('fonts/OpenSans-LightItalic-webfont.ttf') format('truetype'), + url('fonts/OpenSans-LightItalic-webfont.svg#OpenSansLightItalic') format('svg'); + font-weight: 200; + font-style: italic; } /* Semibold */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-Semibold-webfont.eot'); - src: url('fonts/OpenSans-Semibold-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Semibold-webfont.woff') format('woff'), - url('fonts/OpenSans-Semibold-webfont.ttf') format('truetype'), - url('fonts/OpenSans-Semibold-webfont.svg#OpenSansSemibold') format('svg'); - font-weight: 600; - font-style: normal; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-Semibold-webfont.eot'); + src: url('fonts/OpenSans-Semibold-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Semibold-webfont.woff') format('woff'), + url('fonts/OpenSans-Semibold-webfont.ttf') format('truetype'), + url('fonts/OpenSans-Semibold-webfont.svg#OpenSansSemibold') format('svg'); + font-weight: 600; + font-style: normal; } /* Semibold Italic */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-SemiboldItalic-webfont.eot'); - src: url('fonts/OpenSans-SemiboldItalic-webfont.eot?#iefix') format('embedded-opentype'), - 
url('fonts/OpenSans-SemiboldItalic-webfont.woff') format('woff'), - url('fonts/OpenSans-SemiboldItalic-webfont.ttf') format('truetype'), - url('fonts/OpenSans-SemiboldItalic-webfont.svg#OpenSansSemiboldItalic') format('svg'); - font-weight: 600; - font-style: italic; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-SemiboldItalic-webfont.eot'); + src: url('fonts/OpenSans-SemiboldItalic-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-SemiboldItalic-webfont.woff') format('woff'), + url('fonts/OpenSans-SemiboldItalic-webfont.ttf') format('truetype'), + url('fonts/OpenSans-SemiboldItalic-webfont.svg#OpenSansSemiboldItalic') format('svg'); + font-weight: 600; + font-style: italic; } /* Bold */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-Semibold-webfont.eot'); - src: url('fonts/OpenSans-Semibold-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Semibold-webfont.woff') format('woff'), - url('fonts/OpenSans-Semibold-webfont.ttf') format('truetype'), - url('fonts/OpenSans-Semibold-webfont.svg#OpenSansSemibold') format('svg'); - font-weight: bold; - font-weight: 700; - font-style: normal; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-Semibold-webfont.eot'); + src: url('fonts/OpenSans-Semibold-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Semibold-webfont.woff') format('woff'), + url('fonts/OpenSans-Semibold-webfont.ttf') format('truetype'), + url('fonts/OpenSans-Semibold-webfont.svg#OpenSansSemibold') format('svg'); + font-weight: bold; + font-weight: 700; + font-style: normal; } /* Bold Italic */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-SemiboldItalic-webfont.eot'); - src: url('fonts/OpenSans-SemiboldItalic-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-SemiboldItalic-webfont.woff') format('woff'), - url('fonts/OpenSans-SemiboldItalic-webfont.ttf') format('truetype'), - 
url('fonts/OpenSans-SemiboldItalic-webfont.svg#OpenSansSemiboldItalic') format('svg'); - font-weight: bold; - font-weight: 700; - font-style: italic; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-SemiboldItalic-webfont.eot'); + src: url('fonts/OpenSans-SemiboldItalic-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-SemiboldItalic-webfont.woff') format('woff'), + url('fonts/OpenSans-SemiboldItalic-webfont.ttf') format('truetype'), + url('fonts/OpenSans-SemiboldItalic-webfont.svg#OpenSansSemiboldItalic') format('svg'); + font-weight: bold; + font-weight: 700; + font-style: italic; } /* Extra Bold */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-Bold-webfont.eot'); - src: url('fonts/OpenSans-Bold-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-Bold-webfont.woff') format('woff'), - url('fonts/OpenSans-Bold-webfont.ttf') format('truetype'), - url('fonts/OpenSans-Bold-webfont.svg#OpenSansBold') format('svg'); - font-weight: 900; - font-style: normal; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-Bold-webfont.eot'); + src: url('fonts/OpenSans-Bold-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-Bold-webfont.woff') format('woff'), + url('fonts/OpenSans-Bold-webfont.ttf') format('truetype'), + url('fonts/OpenSans-Bold-webfont.svg#OpenSansBold') format('svg'); + font-weight: 900; + font-style: normal; } /* Extra Bold Italic */ @font-face { - font-family: 'Open Sans'; - src: url('fonts/OpenSans-BoldItalic-webfont.eot'); - src: url('fonts/OpenSans-BoldItalic-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/OpenSans-BoldItalic-webfont.woff') format('woff'), - url('fonts/OpenSans-BoldItalic-webfont.ttf') format('truetype'), - url('fonts/OpenSans-BoldItalic-webfont.svg#OpenSansBoldItalic') format('svg'); - font-weight: 900; - font-style: italic; + font-family: 'Open Sans'; + src: url('fonts/OpenSans-BoldItalic-webfont.eot'); + src: 
url('fonts/OpenSans-BoldItalic-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/OpenSans-BoldItalic-webfont.woff') format('woff'), + url('fonts/OpenSans-BoldItalic-webfont.ttf') format('truetype'), + url('fonts/OpenSans-BoldItalic-webfont.svg#OpenSansBoldItalic') format('svg'); + font-weight: 900; + font-style: italic; } /* Droid Sans */ @font-face { - font-family: 'droid_sans_mono'; - src: url('fonts/droidsansmono-webfont.eot'); - src: url('fonts/droidsansmono-webfont.eot?#iefix') format('embedded-opentype'), - url('fonts/droidsansmono-webfont.woff') format('woff'), - url('fonts/droidsansmono-webfont.ttf') format('truetype'), - url('fonts/droidsansmono-webfont.svg#droid_sans_monoregular') format('svg'); - font-weight: normal; - font-style: normal; + font-family: 'droid_sans_mono'; + src: url('fonts/droidsansmono-webfont.eot'); + src: url('fonts/droidsansmono-webfont.eot?#iefix') format('embedded-opentype'), + url('fonts/droidsansmono-webfont.woff') format('woff'), + url('fonts/droidsansmono-webfont.ttf') format('truetype'), + url('fonts/droidsansmono-webfont.svg#droid_sans_monoregular') format('svg'); + font-weight: normal; + font-style: normal; } @@ -199,7 +199,15 @@ inc_top.tmpl background: #dcdcdc url("../css/lib/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png") 50% top repeat-x; } -.ui-widget-header { +.ui-widget-content a { + color: rgb(42, 100, 150); +} + +.ui-widget-content a:hover { + color: #09A2FF; +} + +.ui-widget-header { background: #ffffff url("../css/lib/images/ui-bg_flat_0_ffffff_40x100.png") 50% 50% repeat-x; } @@ -277,27 +285,27 @@ inc_top.tmpl } .ui-state-active a, .ui-state-active a:link, .ui-state-active a:visited { - color: #140F06; - text-decoration: none; + color: #140F06; + text-decoration: none; } .ui-state-default a, .ui-state-default a:link, .ui-state-default a:visited { - color: #222; - text-decoration: none; + color: #222; + text-decoration: none; } .ui-tabs { - padding: 0px; - background: none; - border-width: 0px; + 
padding: 0px; + background: none; + border-width: 0px; } .ui-tabs .ui-tabs-nav { - padding-left: 0px; - background: transparent; - border-width: 0px 0px 0px 0px; - -moz-border-radius: 0px; - -webkit-border-radius: 0px; - border-radius: 0px; + padding-left: 0px; + background: transparent; + border-width: 0px 0px 0px 0px; + -moz-border-radius: 0px; + -webkit-border-radius: 0px; + border-radius: 0px; } .ui-tabs .ui-tabs-panel { @@ -306,8 +314,8 @@ inc_top.tmpl } .ui-tabs .ui-tabs-nav li.ui-tabs-active { - border-top-left-radius: 5px; - border-top-right-radius: 5px; + border-top-left-radius: 5px; + border-top-right-radius: 5px; } .ui-tabs-nav > :not(.ui-tabs-active){ @@ -316,12 +324,12 @@ inc_top.tmpl } #content { - width: 95%; - min-width: 875px; - padding: 15px; - margin-left: auto; - margin-right: auto; - clear: both; + width: 95%; + min-width: 875px; + padding: 15px; + margin-left: auto; + margin-right: auto; + clear: both; } #SubMenu { @@ -341,7 +349,7 @@ inc_top.tmpl } [class^="menu-icon-"], [class*=" menu-icon-"] { - background: url("../images/menu/menu-icons-black.png"); + background: url("../images/menu/menu-icons-black.png"); height: 16px; width: 16px; display: inline-block; @@ -443,7 +451,7 @@ inc_top.tmpl } [class^="submenu-icon-"], [class*=" submenu-icon-"] { - background: url("../images/menu/menu-icons-black.png"); + background: url("../images/menu/menu-icons-black.png"); height: 16px; width: 16px; } @@ -495,24 +503,24 @@ home.tmpl ========================================================================== */ .imgbanner .banner { - border: 1px solid #ccc; - overflow: hidden; - height: 66px; - overflow: hidden; - border-radius: 8px; - vertical-align: top; - width: 360px; + border: 1px solid #ccc; + overflow: hidden; + height: 66px; + overflow: hidden; + border-radius: 8px; + vertical-align: top; + width: 360px; display: block; margin-left: auto; margin-right: auto; } .imgsmallposter .small { - height: 66px; - overflow: hidden; - border-radius: 3px; - 
vertical-align: middle; - width: 45px; + height: 66px; + overflow: hidden; + border-radius: 3px; + vertical-align: middle; + width: 45px; border: 1px solid #ccc; margin-right: 5px; } @@ -527,7 +535,7 @@ home.tmpl } .ui-progressbar .ui-progressbar-value { - box-sizing: content-box !important; + box-sizing: content-box !important; } .progressbarText { @@ -588,7 +596,7 @@ home.tmpl } .show { - margin: 12px; + margin: 12px; width: 188px; height: 352px; background-color: #DFDACF; @@ -599,7 +607,7 @@ home.tmpl .show-image { overflow: hidden; height: 273px; - width: 186px; + width: 186px; border-top-left-radius: 5px; border-top-right-radius: 5px; } @@ -610,23 +618,23 @@ home.tmpl } .show .ui-corner-all, .ui-corner-bottom, .ui-corner-right, .ui-corner-br { - border-bottom-right-radius: 0px; + border-bottom-right-radius: 0px; } .show .ui-corner-all, .ui-corner-bottom, .ui-corner-left, .ui-corner-bl { - border-bottom-left-radius: 0px; + border-bottom-left-radius: 0px; } .show .ui-corner-all, .ui-corner-top, .ui-corner-right, .ui-corner-tr { - border-top-right-radius: 0px; + border-top-right-radius: 0px; } .show .ui-corner-all, .ui-corner-top, .ui-corner-left, .ui-corner-tl { - border-top-left-radius: 0px; + border-top-left-radius: 0px; } .show .ui-widget-content { - border-top: 1px solid #111; + border-top: 1px solid #111; border-bottom: 1px solid #111; border-left: 0px; border-right: 0px; @@ -734,7 +742,7 @@ home.tmpl #sort-by { display: inline; list-style-type: none; - padding: 0; + padding: 0; margin-left: 5px; } @@ -749,13 +757,13 @@ home.tmpl } td.tvShow a { - color: #000; - text-decoration: none; + color: #000; + text-decoration: none; } td.tvShow a:hover { - cursor: pointer; - color: #428BCA; + cursor: pointer; + color: #428BCA; } /* ======================================================================= @@ -774,8 +782,8 @@ home_addShows.tmpl } div.button { - display: table-cell; - vertical-align: middle; + display: table-cell; + vertical-align: middle; padding-left: 
10px; } @@ -822,8 +830,8 @@ div.buttontext p { home_newShow.tmpl ========================================================================== */ #addShowForm, #recommendedShowsForm { - margin-left: auto; - margin-right: auto; + margin-left: auto; + margin-right: auto; } #newShowPortal { @@ -842,7 +850,7 @@ home_newShow.tmpl } #searchResults input[type="radio"] { - vertical-align: -2px; + vertical-align: -2px; } /* ======================================================================= @@ -854,10 +862,10 @@ home_addExistingShow.tmpl } ul#rootDirStaticList { - width: 90%; - margin-right: auto; - margin-left: auto; - text-align: left; + width: 90%; + margin-right: auto; + margin-left: auto; + text-align: left; } ul#rootDirStaticList li { @@ -869,12 +877,12 @@ ul#rootDirStaticList li { } ul#rootDirStaticList li label { - margin-top: 5px; + margin-top: 5px; margin-bottom: 5px; } ul#rootDirStaticList li input[type="checkbox"] { - vertical-align: -2px; + vertical-align: -2px; } /* ======================================================================= @@ -882,7 +890,7 @@ home_trendingShows.tmpl ========================================================================== */ .traktShowTitleIcons { - float: right; + float: right; padding-right: 4px; padding-bottom: 4px; } @@ -897,7 +905,7 @@ home_trendingShows.tmpl } .traktContainer p, .traktContainer i { - white-space: nowrap; + white-space: nowrap; font-size: 12px; overflow: hidden; /* text-shadow: 1px 1px 0px #000;*/ @@ -906,7 +914,7 @@ home_trendingShows.tmpl } .traktContainer { - margin: 12px; + margin: 12px; width: 188px; background-color: #DFDACF; border: 1px solid #111; @@ -916,7 +924,7 @@ home_trendingShows.tmpl .trakt-image { overflow: hidden; height: 273px; - width: 186px; + width: 186px; border-top-left-radius: 5px; border-top-right-radius: 5px; border-bottom: 1px solid #111; @@ -931,7 +939,7 @@ home_postprocess.tmpl width: 800px; padding-top: 10px; margin-right: auto; - margin-left: auto; + margin-left: auto; } @@ 
-939,20 +947,14 @@ home_postprocess.tmpl displayShow.tmpl ========================================================================== */ -#posterCol { - float: left; - margin-right: 10px; - margin-bottom: 20px; -} - #showCol { - overflow: hidden; + overflow: hidden; margin-bottom: 20px; } .navShow { - display: inline; - cursor: pointer; + display: inline; + cursor: pointer; } #prevShow, @@ -965,12 +967,12 @@ displayShow.tmpl } h1.title { - padding-bottom: 12px; - margin-bottom: 15px; - line-height: 30px; - text-align: left; - text-rendering: optimizelegibility; - border-bottom: 1px solid #888; + padding-bottom: 12px; + margin-bottom: 15px; + line-height: 30px; + text-align: left; + text-rendering: optimizelegibility; + border-bottom: 1px solid #888; } .displayspecials { @@ -988,48 +990,32 @@ h1.title { top: -3px; } -span.imdbstars { - display: inline-block; - vertical-align: top; - cursor: help; - margin-top: 4px; -} - span.imdbstars, span.imdbstars > * { - height: 12px; - background: url(../images/rating.png) 0 -12px repeat-x; - width: 120px; - display: inline-block; - vertical-align: top; -} - -span.imdbstars > * { - background-position: 0 0; - max-width:120px; + background: url(../images/rating.png) 0 -12px repeat-x; } ul.tags { - list-style-type: none; + list-style-type: none; position: relative; top: -5px; margin-left: -40px; } ul.tags li { - margin-right: 4px; + margin-right: 4px; margin-bottom: 5px; - padding: 3px 4px 3px 25px; + padding: 3px 4px 3px 25px; background: url(../images/tag.png) no-repeat scroll 5px 4px #555; - border-radius: 3px; + border-radius: 3px; border: 1px solid #111; - color: #FFF; - font: 14px/18px "Open Sans", "Helvetica Neue", Helvetica, Arial, Geneva, sans-serif; + color: #FFF; + font: 14px/18px "Open Sans", "Helvetica Neue", Helvetica, Arial, Geneva, sans-serif; text-shadow: 0px 1px rgba(0, 0, 0, 0.8); - float: left; + float: left; } ul.tags li a{ - color: #FFF; + color: #FFF; } .tvshowImg { @@ -1067,12 +1053,12 @@ ul.tags li a{ } 
#checkboxControls label { - white-space: nowrap; - display: inline-block; + white-space: nowrap; + display: inline-block; } #checkboxControls input[type="checkbox"] { - vertical-align: -2px; + vertical-align: -2px; } .unaired { @@ -1095,27 +1081,27 @@ ul.tags li a{ } span.unaired { - color: #584b20; + color: #584b20; border: 1px solid #584b20; } span.skipped { - color: #1d5068; + color: #1d5068; border: 1px solid #1d5068; } span.good { - color: #295730; + color: #295730; border: 1px solid #295730; } span.qual { - color: #765100; + color: #765100; border: 1px solid #765100; } span.wanted { - color: #890000; + color: #890000; border: 1px solid #890000; } span.snatched { - color: #652164; + color: #652164; border: 1px solid #652164; } @@ -1125,22 +1111,22 @@ span.good b, span.qual b, span.wanted b, span.snatched b { - color: #000000; + color: #000000; font-weight: 800; } .plotInfo { - cursor: help; - float: right; - position: relative; - top: 2px; + cursor: help; + float: right; + position: relative; + top: 2px; } .plotInfoNone { - cursor: help; - float: right; - position: relative; - top: 2px; + cursor: help; + float: right; + position: relative; + top: 2px; opacity: 0.4; } @@ -1193,7 +1179,7 @@ td.col-checkbox { th.col-checkbox input[type="checkbox"], td.col-checkbox input[type="checkbox"] { - vertical-align: -2px; + vertical-align: -2px; } th.col-metadata, @@ -1251,16 +1237,16 @@ td.col-search { } .input-scene { - height: 20px; - line-height: 1.5; - border-radius: 3px; + height: 20px; + line-height: 1.5; + border-radius: 3px; } #editShow { - width: 700px; - padding-top: 10px; - margin-right: auto; - margin-left: auto; + width: 700px; + padding-top: 10px; + margin-right: auto; + margin-left: auto; } /* ======================================================================= @@ -1276,10 +1262,10 @@ comingEpisodes.tmpl } .listing-key { - padding: 5px; - font-size: 13px; - font-weight: bold; - border-radius: 5px; + padding: 5px; + font-size: 13px; + font-weight: bold; + 
border-radius: 5px; } .listing-default { @@ -1299,22 +1285,22 @@ comingEpisodes.tmpl } span.listing-default { - color: #826f30; + color: #826f30; border: 1px solid #826f30; } span.listing-current { - color: #295730; + color: #295730; border: 1px solid #295730; } span.listing-overdue { - color: #890000; + color: #890000; border: 1px solid #890000; } span.listing-toofar { - color: #1d5068; + color: #1d5068; border: 1px solid #1d5068; } @@ -1331,13 +1317,13 @@ h2.day, h2.network { } .tvshowDiv { - display: block; - clear: both; - border: 1px solid #ccc; - margin: auto; - padding: 0px; - text-align: left; - width: 750px; + display: block; + clear: both; + border: 1px solid #ccc; + margin: auto; + padding: 0px; + text-align: left; + width: 750px; border-radius: 5px; background: #fff; cursor: default; @@ -1345,7 +1331,7 @@ h2.day, h2.network { } .tvshowDiv a:hover { - color: #428BCA; + color: #428BCA; } .tvshowDiv a, .tvshowDiv a:link, .tvshowDiv a:visited, .tvshowDiv a:hover { @@ -1354,54 +1340,54 @@ h2.day, h2.network { } .tvshowTitle a { - color: #000000; - float: left; - line-height: 1.4em; - font-size: 1.4em; - text-shadow: -1px -1px 0 #FFF); + color: #000000; + float: left; + line-height: 1.4em; + font-size: 1.4em; + text-shadow: -1px -1px 0 #FFF); } .tvshowTitleIcons { - float: right; - padding: 3px 5px; + float: right; + padding: 3px 5px; } .tvshowDiv td { - padding: 5px 10px; + padding: 5px 10px; } .tvshowDiv td.next_episode { - width: 100%; - height: 90%; - border-bottom: 1px solid #ccc; - vertical-align: top; - color: #000; + width: 100%; + height: 90%; + border-bottom: 1px solid #ccc; + vertical-align: top; + color: #000; } .bannerThumb { - vertical-align: top; - height: auto; - width: 748px; + vertical-align: top; + height: auto; + width: 748px; border-bottom: 1px solid #ccc; } .posterThumb { - vertical-align: top; - height: auto; - width: 180px; + vertical-align: top; + height: auto; + width: 180px; border-right: 1px solid #ccc; } .ep_listing { - width: 
auto; - border: 1px solid #ccc; - margin-bottom: 10px; - padding: 10px; + width: auto; + border: 1px solid #ccc; + margin-bottom: 10px; + padding: 10px; } .ep_summary { - margin-left: 5px; - font-style: italic; + margin-left: 5px; + font-style: italic; } .ep_summaryTrigger { @@ -1415,16 +1401,16 @@ h2.day, h2.network { } table.cal-odd { - background-color: #ddd; + background-color: #ddd; } table.cal-even { - background-color: #d2d2d2; + background-color: #d2d2d2; } .calendarShow .text .airtime { - color:#000 + color:#000 } .calendarShow .text .episode-title { - color:#888 + color:#888 } /* ======================================================================= @@ -1447,21 +1433,16 @@ config*.tmpl } .component-item { - border-bottom: 1px dotted #666; - min-height: 200px; + border-bottom: 1px dotted #666; + min-height: 200px; } -.component-group-desc{ - float: left; - width: 250px; -} .component-group-desc h3{ margin-top: 5px; } .component-group-desc p { - width: 90%; margin: 10px 0; color: #666; } @@ -1498,16 +1479,16 @@ select .selected { } .testNotification { - padding: 5px; - margin-bottom: 10px; - line-height: 20px; - border: 1px dotted #CCC; + padding: 5px; + margin-bottom: 10px; + line-height: 20px; + border: 1px dotted #CCC; } #providerOrderList { - width: 250px; - padding-left: 20px; - list-style-type: none; + width: 250px; + padding-left: 20px; + list-style-type: none; } #provider_order_list, @@ -1517,27 +1498,15 @@ select .selected { list-style-type: none; } -#provider_order_list li, -#service_order_list li { - padding: 5px; - margin: 5px 0; - font-size: 14px; -} - -#provider_order_list input, -#service_order_list input { - margin: 0px 2px; -} - #config .tip_scale label span.component-title { - width: 85px !important; - font-size: 12px !important; - margin-top: 2px !important; + width: 85px !important; + font-size: 12px !important; + margin-top: 2px !important; } #config .tip_scale label span.component-desc { - margin-left: 120px !important; - width: 220px 
!important; + margin-left: 120px !important; + width: 220px !important; } .infoTableHeader, @@ -1549,43 +1518,44 @@ select .selected { border-top: 1px dotted #666666; } -.infoTableHeader .icon16-sb { - background: url("../images/ico/favicon-16x16.png") 0 0 no-repeat; -} - [class^="icon16-"], [class*=" icon16-"] { - background-image: url("../images/glyphicons-config-black.png"); - background-position: -40px 0; - background-repeat: no-repeat; - display: inline-block; - height: 16px; - line-height: 16px; - vertical-align: text-top; - width: 16px; + background-image: url("../images/glyphicons-config.png"); + background-repeat: no-repeat; + display: inline-block; + height: 16px; + vertical-align: text-top; + width: 16px; + margin-top: 1px; } .icon16-github { - background-position: 0 0; + background-position: 0 0; } .icon16-mirc { - background-position: -20px 0; + background-position: -26px 0; } -.icon16-sb { - background-position: -40px 0; +.icon16-sg { + background-position: -52px 0; } .icon16-web { - background-position: -60px 0; + background-position: -78px 0; } .icon16-win { - background-position: -80px 0; + background-position: -104px 0; } /* ======================================================================= config_postProcessing.tmpl ========================================================================== */ +#config .episode-sample { + background-color: rgb(255, 255, 255); + border-color: rgb(204, 204, 204); +} + #config div.example { - padding: 10px; background-color: #efefef; + background-color: #efefef; + border-color: rgb(204, 204, 204); } .Key { @@ -1627,19 +1597,7 @@ config_postProcessing.tmpl config_notifications.tmpl ========================================================================== */ -div.metadata_options_wrapper { - float: left; - width: 190px; -} - -div.metadata_example_wrapper { - float: right; - width: 325px; -} - div.metadata_options { - padding: 7px; - overflow: auto; background: #f5f1e4; border: 1px solid #ccc; } @@ -1647,34 
+1605,22 @@ div.metadata_options { div.metadata_options label:hover { color: #fff; background-color: #57442b; - cursor: pointer; } div.metadata_options label { - display: block; - padding-left: 7px; - line-height: 20px; color: #036; } div.metadata_example { - padding: 8px; + border: 1px solid rgb(247, 247, 247); } div.metadata_example label { - display: block; - line-height: 21px; color: #000; - cursor: pointer; } div.metadataDiv .disabled { - color: #ccc; -} - -.notifier-icon { - float: left; - margin: 6px 4px 0px 0px; + color: #aaa; } .warning { @@ -1837,23 +1783,23 @@ option.flag { } #Anime { - clear: both; - overflow-x: hidden; - overflow-y: hidden; - font-size: 14px; + clear: both; + overflow-x: hidden; + overflow-y: hidden; + font-size: 14px; } #Anime div.component-group-desc { - float: left; - width: 165px; + float: left; + width: 165px; } #Anime div.component-group-desc p { - margin-bottom: 0.4em; - margin-left: 0; - margin-right: 0; - margin-top: 0.4em; - width: 95%; + margin-bottom: 0.4em; + margin-left: 0; + margin-right: 0; + margin-top: 0.4em; + width: 95%; } div.blackwhitelist{ @@ -1862,15 +1808,15 @@ div.blackwhitelist{ } div.blackwhitelist input { - margin: 5px 5px; + margin: 5px 5px; } div.blackwhitelist.pool select{ - width: 300px; + width: 300px; } div.blackwhitelist.pool { - margin:5px; + margin:5px; } div.blackwhitelist.white select, div.blackwhitelist.black select { @@ -1907,8 +1853,8 @@ html * { } input[type="radio"] { - margin: 2px 0px 0px; - line-height: normal; + margin: 2px 0px 0px; + line-height: normal; } input, textarea, select, .uneditable-input { @@ -1917,20 +1863,20 @@ input, textarea, select, .uneditable-input { } .container-fluid { - margin-left: 10px; + margin-left: 10px; margin-right: 10px; } .navbar-brand { - padding: 0px; + padding: 0px; } /* navbar styling */ .navbar-default { - background-color: #333333; - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#555555', endColorstr='#333333'); - background: 
-webkit-gradient(linear, left top, left bottom, from(#555), to(#333)); - background: -moz-linear-gradient(top, #555, #333); + background-color: #333333; + filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#555555', endColorstr='#333333'); + background: -webkit-gradient(linear, left top, left bottom, from(#555), to(#333)); + background: -moz-linear-gradient(top, #555, #333); border-color: #3e3f3a; } @@ -2045,45 +1991,34 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus { - color: #fff; - text-decoration: none; - background-color: #333; + color: #fff; + text-decoration: none; + background-color: #333; } .dropdown-menu > li > a { - padding: 4px 36px 4px 20px; + padding: 4px 36px 4px 20px; } .dropdown-menu { - background-color: #F5F1E4; - border: 1px solid rgba(0, 0, 0, 0.15); - box-shadow: 0px 6px 12px rgba(0, 0, 0, 0.176); + background-color: #F5F1E4; + border: 1px solid rgba(0, 0, 0, 0.15); + box-shadow: 0px 6px 12px rgba(0, 0, 0, 0.176); } .form-control { - color: #000000; + color: #000000; } .form-control-inline { - min-width: 0; - width: auto; - display: inline; + min-width: 0; + width: auto; + display: inline; } .btn { - display: inline-block; - *display: inline; - padding: 4px 10px 4px; - margin-bottom: 0; - *margin-left: .3em; - font-size: 12px; - line-height: 16px; - *line-height: 20px; color: #333333; - text-align: center; text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75); - vertical-align: middle; - cursor: pointer; background-color: #f5f5f5; *background-color: #e6e6e6; background-image: -ms-linear-gradient(top, #ffffff, #e6e6e6); @@ -2092,21 +2027,15 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-image: -o-linear-gradient(top, #ffffff, #e6e6e6); background-image: linear-gradient(top, #ffffff, #e6e6e6); background-image: -moz-linear-gradient(top, #ffffff, #e6e6e6); - background-repeat: repeat-x; border: 1px solid #cccccc; - *border: 0; border-color: 
rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); border-color: #e6e6e6 #e6e6e6 #bfbfbf; border-bottom-color: #b3b3b3; - -webkit-border-radius: 4px; - -moz-border-radius: 4px; - border-radius: 4px; filter: progid:dximagetransform.microsoft.gradient(startColorstr='#ffffff', endColorstr='#e6e6e6', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); - *zoom: 1; -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); - -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); + -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); + box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); } .btn:hover, @@ -2134,10 +2063,10 @@ fieldset[disabled] .navbar-default .btn-link:focus { *background-color: #d9d9d9; background-position: 0 -15px; -webkit-transition: background-position 0.1s linear; - -moz-transition: background-position 0.1s linear; - -ms-transition: background-position 0.1s linear; - -o-transition: background-position 0.1s linear; - transition: background-position 0.1s linear; + -moz-transition: background-position 0.1s linear; + -ms-transition: background-position 0.1s linear; + -o-transition: background-position 0.1s linear; + transition: background-position 0.1s linear; } .btn:focus { @@ -2153,8 +2082,8 @@ fieldset[disabled] .navbar-default .btn-link:focus { background-image: none; outline: 0; -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); - -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); - box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); + -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); + box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); } 
.btn.disabled, @@ -2165,8 +2094,8 @@ fieldset[disabled] .navbar-default .btn-link:focus { opacity: 0.65; filter: alpha(opacity=65); -webkit-box-shadow: none; - -moz-box-shadow: none; - box-shadow: none; + -moz-box-shadow: none; + box-shadow: none; } .btn-large { @@ -2174,8 +2103,8 @@ fieldset[disabled] .navbar-default .btn-link:focus { font-size: 15px; line-height: normal; -webkit-border-radius: 5px; - -moz-border-radius: 5px; - border-radius: 5px; + -moz-border-radius: 5px; + border-radius: 5px; } .btn-large [class^="icon-"] { @@ -2404,10 +2333,10 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .btn-xs { - padding: 1px 5px; - font-size: 12px; - line-height: 1.5; - border-radius: 3px; + padding: 1px 5px; + font-size: 12px; + line-height: 1.5; + border-radius: 3px; } @media(min-width:768px){ @@ -2424,13 +2353,13 @@ fieldset[disabled] .navbar-default .btn-link:focus { } label { - font-weight: normal; + font-weight: normal; } pre { - color: #000; - background-color: #F5F5F5; - border-color: #ccc; + color: #000; + background-color: #F5F5F5; + border-color: #ccc; } .alert { @@ -2441,16 +2370,25 @@ pre { /* ======================================================================= input sizing (for config pages) ========================================================================== */ +#editAProvider optgroup { + color: #eee; + background-color: #888; +} + +#editAProvider optgroup option { + color: #222; + background-color: #fff; +} #config select { - min-width: 0; - width: auto; - display: inline; + min-width: 0; + width: auto; + display: inline; margin-top: -4px; } .btn-inline { - margin-top: -3px; + margin-top: -3px; } .input75 { @@ -2777,133 +2715,133 @@ token-input.css ========================================================================== */ ul.token-input-list { - overflow: hidden; - height: auto !important; - height: 1%; - width: 273px; - border: 1px solid #ccc; - cursor: text; - font-size: 10px; - font-family: Verdana; - z-index: 999; - margin: 0; 
- padding: 0 0 1px 0; - background-color: #fff; - list-style-type: none; + overflow: hidden; + height: auto !important; + height: 1%; + width: 273px; + border: 1px solid #ccc; + cursor: text; + font-size: 10px; + font-family: Verdana; + z-index: 999; + margin: 0; + padding: 0 0 1px 0; + background-color: #fff; + list-style-type: none; /* clear: left; */ - border-top-left-radius: 3px; - border-top-right-radius: 3px; - border-bottom-left-radius: 3px; - border-bottom-right-radius: 3px; + border-top-left-radius: 3px; + border-top-right-radius: 3px; + border-bottom-left-radius: 3px; + border-bottom-right-radius: 3px; } ul.token-input-list li { - list-style-type: none; + list-style-type: none; } ul.token-input-list li input { - border: 0; - padding: 3px 4px; - background-color: white; + border: 0; + padding: 3px 4px; + background-color: white; /* -webkit-appearance: caret; */ } li.token-input-token { - overflow: hidden; - height: auto !important; - height: 1%; - margin: 3px; - padding: 3px 5px 0 5px; - background-color: #d0efa0; - color: #000; - font-weight: bold; - cursor: default; - display: block; + overflow: hidden; + height: auto !important; + height: 1%; + margin: 3px; + padding: 3px 5px 0 5px; + background-color: #d0efa0; + color: #000; + font-weight: bold; + cursor: default; + display: block; } li.token-input-token img { - padding-top: 7px; - padding-right: 4px; - float: left; + padding-top: 7px; + padding-right: 4px; + float: left; } li.token-input-token input { - padding-top: 2px !important; - padding-right: 4px !important; - float: left; + padding-top: 2px !important; + padding-right: 4px !important; + float: left; } li.token-input-token p { - float: left; - padding: 0; - margin: 0; - line-height: 2.0 !important; + float: left; + padding: 0; + margin: 0; + line-height: 2.0 !important; } li.token-input-token span { - float: right; - color: #777; - cursor: pointer; + float: right; + color: #777; + cursor: pointer; } li.token-input-selected-token { - 
background-color: #08844e; - color: #fff; + background-color: #08844e; + color: #fff; } li.token-input-selected-token span { - color: #bbb; + color: #bbb; } li.token-input-input-token input { - margin: 3px 3px 3px 3px !important; + margin: 3px 3px 3px 3px !important; } div.token-input-dropdown { - background-color: #fff; - color: #000; - border-left-color: #ccc; - border-right-color: #ccc; - border-bottom-color: #ccc; + background-color: #fff; + color: #000; + border-left-color: #ccc; + border-right-color: #ccc; + border-bottom-color: #ccc; } div.token-input-dropdown p { - margin: 0; - padding: 3px; - font-weight: bold; - color: #777; + margin: 0; + padding: 3px; + font-weight: bold; + color: #777; } div.token-input-dropdown ul { - margin: 0; - padding: 0; + margin: 0; + padding: 0; } div.token-input-dropdown ul li { - background-color: #fff; - padding: 3px; - list-style-type: none; + background-color: #fff; + padding: 3px; + list-style-type: none; } div.token-input-dropdown ul li.token-input-dropdown-item { - background-color: #fafafa; + background-color: #fafafa; } div.token-input-dropdown ul li.token-input-dropdown-item2 { - background-color: #fff; + background-color: #fff; } div.token-input-dropdown ul li em { - font-weight: bold; - font-style: normal; + font-weight: bold; + font-style: normal; } div.token-input-dropdown ul li.token-input-selected-dropdown-item { - background-color: #6196c2; + background-color: #6196c2; } span.token-input-delete-token { - margin: 0 1px; + margin: 0 1px; } /* ======================================================================= @@ -2930,7 +2868,7 @@ jquery.confirm.css top: 50%; margin: -130px 0 0 -230px; border: 1px solid #111; - box-shadow: 0px 0px 12px 0px rgba(0, 0, 0, 0.175); + box-shadow: 0px 0px 12px 0px rgba(0, 0, 0, 0.175); } #confirmBox h1, @@ -2939,18 +2877,18 @@ jquery.confirm.css } #confirmBox h1 { - background-color: #333; + background-color: #333; border-bottom: 1px solid #111; color: #fff; - margin: 0; - 
font-size: 22px; - text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); + margin: 0; + font-size: 22px; + text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); } #confirmBox p { padding-top: 20px; - color: #000; - text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.75); + color: #000; + text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.75); } #confirmButtons { diff --git a/gui/slick/css/style.css b/gui/slick/css/style.css index a5ca2119..5a3870a8 100644 --- a/gui/slick/css/style.css +++ b/gui/slick/css/style.css @@ -199,6 +199,10 @@ inc_top.tmpl background: #dcdcdc url("../css/lib/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png") 50% top repeat-x; } +.ui-widget-content a { + text-decoration: none; +} + .ui-widget-header { background: #ffffff url("../css/lib/images/ui-bg_flat_0_ffffff_40x100.png") 50% 50% repeat-x; } @@ -967,8 +971,7 @@ displayShow.tmpl #posterCol { float: left; - margin-right: 10px; - margin-bottom: 20px; + margin: 3px 10px 20px 0; } #showCol { @@ -1015,23 +1018,34 @@ h1.title { } span.imdbstars { - display: inline-block; - vertical-align: top; + display: inline-block; + margin: 0 3px 0 0; + vertical-align: baseline; cursor: help; - margin-top: 4px; } - +span.imdbstars > * { + background-position: 0 0 !important; + max-width:120px; +} span.imdbstars, span.imdbstars > * { height: 12px; - background: url(../images/rating.png) 0 -12px repeat-x; width: 120px; display: inline-block; - vertical-align: top; + font-size:10px } -span.imdbstars > * { - background-position: 0 0; - max-width:120px; +#showinfo .flag { + margin: 0 3px 0 0; + vertical-align: baseline; +} + +#showinfo .imdb-info { + margin: 0 3px 0 0; +} + +#showinfo a.service { + margin: 0 3px 0 0; + font-size: 16px; } ul.tags { @@ -1512,6 +1526,7 @@ config*.tmpl .component-group-desc{ float: left; width: 250px; + padding-right: 10px; } .component-group-desc h3{ @@ -1519,7 +1534,6 @@ config*.tmpl } .component-group-desc p { - width: 90%; margin: 10px 0; color: #666; } @@ -1530,7 +1544,7 @@ config*.tmpl #config 
div.field-pair select, #config div.field-pair input { - margin-right: 6px; + margin-right: 15px; } #config div.field-pair input { @@ -1558,7 +1572,7 @@ config*.tmpl } #config label.space-right { - margin-right:10px + margin-right:20px } #config .metadataDiv { display: none; @@ -1598,16 +1612,56 @@ select .selected { list-style-type: none; } -#provider_order_list li, +#config.search_providers #core-component-group1 #provider_key h4 { + display: inline-block; + float: left; + margin: 0; +} + +#config.search_providers #core-component-group1 #provider_key p { + margin: 0 0 20px 30px; +} + +#config.search_providers #core-component-group1 .component-group-desc, +#config.search_providers #provider_order_list, +#config.search_providers #core-component-group1 #provider_key { + width: 300px +} + +#config.search_providers #provider_order_list { + padding: 0; + float: left +} + +#config.search_providers #provider_order_list, +#config.search_providers #core-component-group1 .btn { + margin: 0 auto +} + +#config.search_providers #core-component-group1 .btn { + display: block +} + +#config.search_providers #core-component-group1 #provider_key { + float: right; + margin-bottom:25px +} + +#provider_order_list li, #service_order_list li { padding: 5px; - margin: 5px 0; + margin: 0 0 5px; font-size: 14px; } #provider_order_list input, #service_order_list input { - margin: 0px 2px; + margin: 0 5px 0 2px; + vertical-align: middle; +} + +#provider_order_list a.imgLink { + margin-right: 3px } #config .tip_scale label span.component-title { @@ -1631,38 +1685,50 @@ select .selected { } [class^="icon16-"], [class*=" icon16-"] { - background-image: url("../images/glyphicons-config-black.png"); - background-position: -40px 0; + background-image: url("../images/glyphicons-config.png"); background-repeat: no-repeat; display: inline-block; height: 16px; - line-height: 16px; vertical-align: text-top; width: 16px; + margin-top: 1px; } .icon16-github { background-position: 0 0; } .icon16-mirc { - 
background-position: -20px 0; + background-position: -26px 0; } -.icon16-sb { - background-position: -40px 0; +.icon16-sg { + background-position: -52px 0; } .icon16-web { - background-position: -60px 0; + background-position: -78px 0; } .icon16-win { - background-position: -80px 0; + background-position: -104px 0; } /* ======================================================================= config_postProcessing.tmpl ========================================================================== */ +#config .episode-sample { + width: 240px; + margin-right: 10px; + border: 1px solid; +} + +#config .episode-sample h3 { + margin: 10px; + font-size: 18px; + line-height: 24px; +} + #config div.example { - padding: 10px; background-color: #efefef; + padding: 10px; + border: 1px solid; } .Key { @@ -1710,43 +1776,47 @@ div.metadata_options_wrapper { } div.metadata_example_wrapper { - float: right; - width: 325px; + margin-left: 220px; +} + +div.metadata_options_wrapper h4, +div.metadata_example_wrapper h4 { + margin: 0 0 10px; } div.metadata_options { - padding: 7px; overflow: auto; background: #f5f1e4; border: 1px solid #ccc; } div.metadata_options label:hover { - color: #fff; - background-color: #57442b; cursor: pointer; } div.metadata_options label { - display: block; - padding-left: 7px; - line-height: 20px; color: #036; } -div.metadata_example { - padding: 8px; +div.metadata_example label { + cursor: pointer; + font-weight: 600; } +div.metadata_options label, div.metadata_example label { - display: block; line-height: 21px; - color: #000; - cursor: pointer; + display: block; + padding: 3px; + margin: 0px; +} +div.metadata_options input { + margin-right: 3px; + vertical-align: baseline; } div.metadataDiv .disabled { - color: #ccc; + font-weight: normal; } .notifier-icon { @@ -1754,11 +1824,6 @@ div.metadataDiv .disabled { margin: 6px 4px 0px 0px; } -.warning { - border-color: #F89406; - background: url("../images/warning16.png") no-repeat right 5px center #fff; -} - /* 
======================================================================= manage*.tmpl ========================================================================== */ @@ -2153,34 +2218,15 @@ fieldset[disabled] .navbar-default .btn-link:focus { font-size: 12px; line-height: 16px; *line-height: 20px; - color: #333333; text-align: center; - text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75); vertical-align: middle; cursor: pointer; - background-color: #f5f5f5; - *background-color: #e6e6e6; - background-image: -ms-linear-gradient(top, #ffffff, #e6e6e6); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), to(#e6e6e6)); - background-image: -webkit-linear-gradient(top, #ffffff, #e6e6e6); - background-image: -o-linear-gradient(top, #ffffff, #e6e6e6); - background-image: linear-gradient(top, #ffffff, #e6e6e6); - background-image: -moz-linear-gradient(top, #ffffff, #e6e6e6); background-repeat: repeat-x; - border: 1px solid #cccccc; *border: 0; - border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); - border-color: #e6e6e6 #e6e6e6 #bfbfbf; - border-bottom-color: #b3b3b3; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; - filter: progid:dximagetransform.microsoft.gradient(startColorstr='#ffffff', endColorstr='#e6e6e6', GradientType=0); - filter: progid:dximagetransform.microsoft.gradient(enabled=false); *zoom: 1; - -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); - -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05); } .btn:hover, @@ -3007,16 +3053,37 @@ span.token-input-delete-token { margin: 0 1px; } +.boldest {font-weight: 900} .red-text {color:#d33} .clear-left {clear:left} +.float-left {float:left} .nextline-block {display:block} +#failed-guide, +#failed-guide .title, +#failed-guide li {margin:0; padding:0} +#failed-guide .title {list-style-type: none} 
+#failed-guide li {margin-left:15px} + +.icon-info-sign { + display: block; + width: 16px; + height: 16px; + margin: 2px 5px; + float: left; +} + +.pp .component-group-list.right, +.pp .field-pair.right { + margin: 0 0 0 250px; +} + .trakt-image { display: block; width: 100%; height: 100%; z-index: 0; - background-image: url(/images/poster-dark.jpg) + background-image: url(../images/poster-dark.jpg) } /* ======================================================================= jquery.confirm.css @@ -3028,7 +3095,7 @@ jquery.confirm.css position: fixed; top: 0; left: 0; - background: url('../images/bg.gif'); + background: url(../images/bg.gif); background: -moz-linear-gradient(rgba(0,0,0,0.5), rgba(0,0,0,0.5)) repeat-x rgba(0,0,0,0.5); background:-webkit-gradient(linear, 0% 0%, 0% 100%, from(rgba(0,0,0,0.5)), to(rgba(0,0,0,0.5))) repeat-x rgba(0,0,0,0.5); z-index: 100000; @@ -3056,13 +3123,13 @@ jquery.confirm.css color: #fff; margin: 0; font-size: 22px; - text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); + text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); } #confirmBox p { padding-top: 20px; color: #000; - text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.75); + text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75); } #confirmButtons { @@ -3118,10 +3185,10 @@ pnotify.css ========================================================================== */ .ui-pnotify-sticker { - margin-top: -12px; + margin-top: -12px; } .ui-pnotify-closer { - margin-top: -12px; - margin-right: -10px; + margin-top: -12px; + margin-right: -10px; } \ No newline at end of file diff --git a/gui/slick/images/addshows/add-trending32-black.png b/gui/slick/images/addshows/add-trending32-black.png index 136db480..8f14e668 100644 Binary files a/gui/slick/images/addshows/add-trending32-black.png and b/gui/slick/images/addshows/add-trending32-black.png differ diff --git a/gui/slick/images/addshows/add-trending32-white.png b/gui/slick/images/addshows/add-trending32-white.png index 9dae6063..5c5b460b 100644 Binary files 
a/gui/slick/images/addshows/add-trending32-white.png and b/gui/slick/images/addshows/add-trending32-white.png differ diff --git a/gui/slick/images/glyphicons-config-black.png b/gui/slick/images/glyphicons-config-black.png deleted file mode 100644 index 53c24e48..00000000 Binary files a/gui/slick/images/glyphicons-config-black.png and /dev/null differ diff --git a/gui/slick/images/glyphicons-config-white.png b/gui/slick/images/glyphicons-config-white.png deleted file mode 100644 index bd3834c2..00000000 Binary files a/gui/slick/images/glyphicons-config-white.png and /dev/null differ diff --git a/gui/slick/images/glyphicons-config.png b/gui/slick/images/glyphicons-config.png new file mode 100644 index 00000000..3c1b55b8 Binary files /dev/null and b/gui/slick/images/glyphicons-config.png differ diff --git a/gui/slick/images/providers/animezb.png b/gui/slick/images/providers/animezb.png deleted file mode 100644 index 59412e68..00000000 Binary files a/gui/slick/images/providers/animezb.png and /dev/null differ diff --git a/gui/slick/images/providers/t411.png b/gui/slick/images/providers/t411.png deleted file mode 100644 index cffc3785..00000000 Binary files a/gui/slick/images/providers/t411.png and /dev/null differ diff --git a/gui/slick/interfaces/default/comingEpisodes.tmpl b/gui/slick/interfaces/default/comingEpisodes.tmpl index e278593b..9e012219 100644 --- a/gui/slick/interfaces/default/comingEpisodes.tmpl +++ b/gui/slick/interfaces/default/comingEpisodes.tmpl @@ -82,7 +82,7 @@ if (0 == s.indexOf('Loading...')) return s.replace('Loading...', '000') #if not $sickbeard.SORT_ARTICLE: - return (s || '').replace(/^(The|A|An)\s/i, '') + return (s || '').replace(/^(?:(?:A(?!\s+to)n?)|The)\s(\w)/i, '$1') #else: return (s || '') #end if @@ -359,10 +359,10 @@ #set $too_late_header = True #elif $cur_ep_enddate >= $today and $cur_ep_airdate < $next_week.date(): #if $cur_ep_airdate == $today.date(): -

$datetime.date.fromordinal($cur_ep_airdate.toordinal).strftime('%A').decode($sickbeard.SYS_ENCODING).capitalize() [Today]

+

$sbdatetime.sbdatetime.sbfdate($cur_ep_airdate, '%A').decode($sickbeard.SYS_ENCODING).capitalize() [Today]

#set $today_header = True #else: -

$datetime.date.fromordinal($cur_ep_airdate.toordinal).strftime('%A').decode($sickbeard.SYS_ENCODING).capitalize()

+

$sbdatetime.sbdatetime.sbfdate($cur_ep_airdate, '%A').decode($sickbeard.SYS_ENCODING).capitalize()

#end if #end if #end if @@ -371,10 +371,11 @@ #if $cur_ep_airdate == $today.date() and not $today_header:
-

$datetime.date.fromordinal($cur_ep_airdate.toordinal).strftime('%A').decode($sickbeard.SYS_ENCODING).capitalize() [Today]

+

$sbdatetime.sbdatetime.sbfdate($cur_ep_airdate, '%A').decode($sickbeard.SYS_ENCODING).capitalize() [Today]

#set $today_header = True #end if #if $runtime: + #set $cur_ep_enddate = $cur_result['localtime'] + datetime.timedelta(minutes = $runtime) #if $cur_ep_enddate < $today: #set $show_div = 'ep_listing listing-overdue' #elif $cur_ep_airdate >= $next_week.date(): @@ -489,7 +490,7 @@ #for $day in $dates #set $tbl_day += 1 - + #set $day_has_show = False #for $cur_result in $sql_results: @@ -503,7 +504,7 @@ #if $airday == $day: #set $day_has_show = True - #set $airtime = $sbdatetime.sbdatetime.fromtimestamp($time.mktime($cur_result['localtime'].timetuple())).sbftime().decode($sickbeard.SYS_ENCODING) + #set $airtime = $sbdatetime.sbdatetime.sbftime($cur_result['localtime']).decode($sickbeard.SYS_ENCODING) #if $sickbeard.TRIM_ZERO: #set $airtime = re.sub(r'0(\d:\d\d)', r'\1', $airtime, 0, re.IGNORECASE | re.MULTILINE) #end if diff --git a/gui/slick/interfaces/default/config.tmpl b/gui/slick/interfaces/default/config.tmpl index 75ca89ab..b71dce72 100644 --- a/gui/slick/interfaces/default/config.tmpl +++ b/gui/slick/interfaces/default/config.tmpl @@ -3,18 +3,18 @@ #from sickbeard.helpers import anon_url #import os.path -#set global $title="Configuration" -#set global $header="Configuration" +#set global $title = 'Configuration' +#set global $header = 'Configuration' -#set global $sbPath=".." +#set global $sbPath = '..' -#set global $topmenu="config"# -#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl") +#set global $topmenu = 'config' +#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl') -#if $varExists('header') -

$header

-#else -

$title

+#if $varExists('header') +

$header

+#else +

$title

#end if ##set cpu_usage = $psutil.cpu_percent() @@ -31,24 +31,24 @@
$day.strftime('%A').decode($sickbeard.SYS_ENCODING).capitalize()
$sbdatetime.sbdatetime.sbfdate($day, '%A').decode($sickbeard.SYS_ENCODING).capitalize()
- - - - - - - - - - + + + + + + + + +
Version: +
Version: #if $sickbeard.VERSION_NOTIFY - BRANCH: ($sickbeard.BRANCH) / COMMIT: ($sickbeard.CUR_COMMIT_HASH)
+ BRANCH: ($sickbeard.BRANCH) / COMMIT: ($sickbeard.CUR_COMMIT_HASH)
#else - You don't have version checking turned on. Please turn on "Check for Update" in Config > General.
+ You don't have version checking turned on, see "Check software updates" in Config > General.
#end if - You are using BETA software + This is BETA software.
SR Config file:$sickbeard.CONFIG_FILE
SR Database file:$db.dbFilename()
SR Cache Dir:$sickbeard.CACHE_DIR
SR Arguments:$sickbeard.MY_ARGS
SR Web Root:$sickbeard.WEB_ROOT
Python Version:$sys.version[:120]
Homepagehttps://github.com/SickGear/SickGear/wiki
Sourcehttps://github.com/SickGear/SickGear/
Internet Relay Chat#SickGear on irc.freenode.net
Config file:$sickbeard.CONFIG_FILE
Database file:$db.dbFilename()
Cache Dir:$sickbeard.CACHE_DIR
Arguments:<%= (sickbeard.MY_ARGS, 'None used')[0 == len(sickbeard.MY_ARGS)] %>
Web Root:$sickbeard.WEB_ROOT
Python Version:$sys.version[:120]
Homepagehttps://github.com/SickGear/SickGear/wiki
Sourcehttps://github.com/SickGear/SickGear/
Internet Relay Chat#SickGear on irc.freenode.net
-#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl") +#include $os.path.join($sickbeard.PROG_DIR,'gui/slick/interfaces/default/inc_bottom.tmpl') diff --git a/gui/slick/interfaces/default/config_anime.tmpl b/gui/slick/interfaces/default/config_anime.tmpl index 17f56de5..d3d8a437 100644 --- a/gui/slick/interfaces/default/config_anime.tmpl +++ b/gui/slick/interfaces/default/config_anime.tmpl @@ -1,109 +1,128 @@ #import sickbeard #from sickbeard.helpers import anon_url -#set global $title="Config - Anime" -#set global $header="Anime" +#set global $title = 'Config - Anime' +#set global $header = 'Anime' -#set global $sbPath="../.." +#set global $sbPath = '../..' -#set global $topmenu="config"# -#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl") +#set global $topmenu = 'config' +#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl') - -
+ #if $varExists('header')

$header

#else

$title

#end if +
-
+
- - -
-
- AniDB -

AniDB

-

AniDB is non-profit database of anime information that is freely open to the public

-
+ -
-
- - -
- -
-
- - -
- -
- - -
-
- - -
+
+
+ +
+

Misc

+

User interface and general configuration.

+
+ +
+
+
- -
-
- -
+
+ +
+ + +
+
+ +
+ +
+ AniDB +

AniDB

+

Manage anime releases with AniDB.

+
+ +
+
+ +
+ +
+
+ +
+ +
+ +
+ +
+ +
+
+ + +
+
+ +
-
-

Look and Feel

-
-
-
- - -
- -
-
-

+
- -#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_bottom.tmpl") +#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl') \ No newline at end of file diff --git a/gui/slick/interfaces/default/config_general.tmpl b/gui/slick/interfaces/default/config_general.tmpl index 708fbfd2..1b2cd342 100644 --- a/gui/slick/interfaces/default/config_general.tmpl +++ b/gui/slick/interfaces/default/config_general.tmpl @@ -68,7 +68,7 @@ Update shows on startup -

with information such as next air dates, show ended, etc. Disable for a faster startup as show info is sheduled to update in the background anyway

+

with information such as next air dates, show ended, etc. Disable for a faster startup as show info is scheduled to update in the background anyway

@@ -282,10 +282,10 @@ Timezone:

display dates and times in either your timezone or the shows network timezone

diff --git a/gui/slick/interfaces/default/config_postProcessing.tmpl b/gui/slick/interfaces/default/config_postProcessing.tmpl index c9199aa4..a20ed3c1 100644 --- a/gui/slick/interfaces/default/config_postProcessing.tmpl +++ b/gui/slick/interfaces/default/config_postProcessing.tmpl @@ -1,1062 +1,1190 @@ #import os.path #import sickbeard #from sickbeard.common import * +#from sickbeard.helpers import anon_url #from sickbeard import config #from sickbeard import metadata #from sickbeard.metadata.generic import GenericMetadata #from sickbeard import naming -#set global $title = "Config - Post Processing" -#set global $header = "Post Processing" +#set global $title = 'Config - Post Processing' +#set global $header = 'Post Processing' -#set global $sbPath="../.." +#set global $sbPath = '../..' -#set global $topmenu="config"# -#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl") +#set global $topmenu = 'config' +#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl') -
+ #if $varExists('header')

$header

#else

$title

#end if -
-
+#set $checked = 'checked="checked"' +#set $selected = 'selected="selected"' -
+
+
-
- - -
+ -
-

Post-Processing

-

Settings that dictate how SickGear should process completed downloads.

-
+
+ -
-
- - - - -
- -
- - -
-
- - - -
+
+
+ +
+

Post-Processing

+

SickGear options to process completed downloads.

+
+ +
- - - +
-
- - -
- -
- - -
+
+ +
-
- - -
+
+ +
-
- - -
+
+ +
-
- - - -
+
+ +
-
- - - - -
+
+ +
-
- - -
+
-
- - - -
- -
- - - -
- -
-
- - - -
+
+
-
+
-
+
+

File Handling

+

More file processing controls.

+
-
-
+
-
+
+ +
-
-

Episode Naming

-

How SickGear will name and sort your episodes.

-
+
+ +
-
+
+ +
-
- -
+
+ +
-
-
- -
+
+ +
- -
- -
- -
+
+ +
-
-

Single-EP Sample:

-
-   -
-
-
+
-
-

Multi-EP sample:

-
-   -
-
-
+
+
-
- - - -
+
-
- - -
+
+

Failed Downloads

+

SickGear can attempt a different release if a download is found to fail

+
-
-
- -
+
-
-
- -
+
+ +
- -
- -
-

Sample:

-
-   -
-
-
+
+
+ +
    +
  • SABnzbd setup guide
  • +
  • menu "Switches" ... [disable] "Abort jobs that cannot be completed"
  • +
  • menu "Switches" ... [disable] "Post-Process Only Verified Jobs"
  • +
  • menu "Special" ... [enable] "empty_postproc"
  • +
+
+
-
+
+
+
-
- - -
+
-
-
- -
+
+
-
-
- -
+
- -
- -
-

Sample:

-
-   -
-
-
+<% ######################################################################################### %> +<% ######################################################################################### %> +<% ## TAB 2 ## %> +<% ######################################################################################### %> +<% ######################################################################################### %> -
+<% ########################### %> +<% ## Start of Regular Show ## %> +<% ########################### %> - -
- - -
+
-
-
- -
+
+

Episode Naming

+

Regular episode naming.

+
-
-
- -
+
- -
+
+ +
-
- -
+
+
+ +
-
-

Single-EP Anime Sample:

-
-   -
-
-
+ +
-
-

Multi-EP Anime sample:

-
-   -
-
-
+
+ +
-
- - - -
+
+ +
-
- - - -
+ -
- - - -
+
-
+
+
+

Single episode sample:

+
+
+   +
+
+
-
-
+
+
+

Multi episode sample:

+
+
+   +
+
+
- -
+ + +<% ########################## %> +<% ## End of Regular Show ## %> +<% ## ## %> +<% ## Start of Air-by-Date ## %> +<% ########################## %> +
+

Air-by-date episode naming.

+
+ +
+ +
+
+ +
+
+ + +
+
+ +
+ + +
+
+
+ +
+
+
+
+

Air-by-date sample:

+
+
+   +
+
+
+
+
+ +
+
+
+ +
+ +<% ######################## %> +<% ## End of Air-by-Date ## %> +<% ## ## %> +<% ## Start of Sports ## %> +<% ######################## %> +
+

Sport episode naming.

+
+ +
+ +
+
+ +
+
+ + +
+
+ +
+ + +
+
+ +
+ +
+ +
+
+
+

Sports sample:

+
+
+   +
+
+
+ +
+ +
+ +
+
+
+ +
+ +<% #################### %> +<% ## End of Sports ## %> +<% ## ## %> +<% ## Start of Anime ## %> +<% #################### %> +
+

Anime episode naming.

+
+ + +
+ +
+
+ +
+
+ + +
+
+ +
+ + +
+ +
+ +
+ +
+ +
+ +
+ +
+
+

Single anime sample:

+
+
+   +
+
+
+ +
+
+

Multi anime sample:

+
+
+   +
+
+
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+
+
+ +
+ +
+ +
+ +<% ######################################################################################### %> +<% ######################################################################################### %> +<% ## TAB 3 ## %> +<% ######################################################################################### %> +<% ######################################################################################### %>
-
-

Metadata

-

The data associated to the data. These are files associated to a TV show in the form of images and text that, when supported, will enhance the viewing experience.

-
+
+

Metadata

+

These are files associated to a TV show in the form of images and text that, where supported, enhance the viewing experience.

+
-
-
- - Toggle the metadata options that you wish to be created. Multiple targets may be used. -
+
+
+ +
- #for ($cur_name, $cur_generator) in $m_dict.items(): - #set $cur_metadata_inst = $sickbeard.metadata_provider_dict[$cur_generator.name] - #set $cur_id = $cur_generator.get_id() +
+ Choose data files to create with the following toggle options... +
+ +
+#for ($cur_name, $cur_generator) in $m_dict.items(): + #set $cur_metadata_inst = $sickbeard.metadata_provider_dict[$cur_generator.name] + #set $cur_id = $cur_generator.get_id()
- #end for - +#end for

- +
-
-
- + +
+
All non-absolute folder locations are relative to $sickbeard.DATA_DIR
@@ -1096,8 +1223,8 @@ -#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl") +#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl') diff --git a/gui/slick/interfaces/default/config_providers.tmpl b/gui/slick/interfaces/default/config_providers.tmpl index f97624e2..7eaccdf6 100644 --- a/gui/slick/interfaces/default/config_providers.tmpl +++ b/gui/slick/interfaces/default/config_providers.tmpl @@ -6,710 +6,766 @@ #set global $title="Config - Providers" #set global $header="Search Providers" -#set global $sbPath="../.." +#set global $sbPath = '../..' -#set global $topmenu="config"# +#set global $topmenu = 'config' #import os.path -#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl") +#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl') #if $varExists('header') -

$header

+

$header

#else -

$title

+

$title

#end if + +#set $html_selected = ' selected="selected"' +#set $html_checked = 'checked="checked" ' + -#if $sickbeard.USE_NZBS + +#set $methods_notused = [] +#if not $sickbeard.USE_NZBS + $methods_notused.append('Newznab') +#end if +#if not $sickbeard.USE_TORRENTS + $methods_notused.append('Torrent') +#end if + +#if $sickbeard.USE_NZBS or $sickbeard.USE_TORRENTS #end if +
+
+ + + +
+ + + + +
+
+
+

Provider Priorities

+

Check off and drag the providers into the order you want them to be used.

+

At least one provider is required but two are recommended.

+ +#if $methods_notused +
<%= '/'.join(x for x in methods_notused) %> providers can be enabled in Search Settings
+#else +
+#end if +
+ + +
    +#for $curProvider in $sickbeard.providers.sortedProviderList() + #if $curProvider.providerType == $GenericProvider.NZB and not $sickbeard.USE_NZBS + #continue + #elif $curProvider.providerType == $GenericProvider.TORRENT and not $sickbeard.USE_TORRENTS + #continue + #end if + #set $curName = $curProvider.getID() +
  • + /> + $curProvider.name + $curProvider.name +<%= '*' if not curProvider.supportsBacklog else '' %> +<%= '**' if 'EZRSS' == curProvider.name else '' %> + +
  • +#end for +
+ + +
+

*

Provider does not support backlog searches at this time

+#if $sickbeard.USE_TORRENTS +

**

Provider supports limited backlog searches, some episodes/qualities may not be available

+#end if + ##

!

Provider is NOT WORKING

+
+ + "/> +
+
+ +
+
+
+ +
+ + + +
+ +
+

Provider Options

+

Configure individual provider settings here.

+
+ +
+
+ +
+ + + +#for $curNewznabProvider in [$curProvider for $curProvider in $sickbeard.newznabProviderList] +
+ #if $curNewznabProvider.default and $curNewznabProvider.needs_auth +
+ +
+ +
+ +
+ #end if + + #if $hasattr($curNewznabProvider, 'enable_daily'): +
+ +
+ #end if + + #if $hasattr($curNewznabProvider, 'enable_backlog'): +
+ +
+ #end if + + #if $hasattr($curNewznabProvider, 'search_mode'): +
+ Season search mode + + + +

when searching for complete seasons, search for packs or collect single episodes

+
+
+ #end if + + #if $hasattr($curNewznabProvider, 'search_fallback'): +
+ +
+ #end if +
+#end for + +#for $curNzbProvider in [$curProvider for $curProvider in $sickbeard.providers.sortedProviderList() if $curProvider.providerType == $GenericProvider.NZB and $curProvider not in $sickbeard.newznabProviderList]: +
+ #if $hasattr($curNzbProvider, 'username'): +
+ +
+ #end if + + #if $hasattr($curNzbProvider, 'api_key'): +
+ +
+ #end if + + + #if $hasattr($curNzbProvider, 'enable_daily'): +
+ +
+ #end if + + #if $hasattr($curNzbProvider, 'enable_backlog'): +
+ +
+ #end if + + #if $hasattr($curNzbProvider, 'search_fallback'): +
+ +
+ #end if + + #if $hasattr($curNzbProvider, 'search_mode'): +
+ + + +
+ #end if + +
+#end for + +#for $curTorrentProvider in [$curProvider for $curProvider in $sickbeard.providers.sortedProviderList() if $curProvider.providerType == $GenericProvider.TORRENT]: +
+ #if $hasattr($curTorrentProvider, 'api_key'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'digest'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'hash'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'username'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'password'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'passkey'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'ratio'): +
+ + +
+ #end if + + #if $hasattr($curTorrentProvider, 'minseed'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'minleech'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'proxy'): +
+ +
+ + #if $hasattr($curTorrentProvider.proxy, 'url'): +
+ +
+ #end if + #end if + + #if $hasattr($curTorrentProvider, 'confirmed'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'freeleech'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'enable_daily'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'enable_backlog'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'search_fallback'): +
+ +
+ #end if + + #if $hasattr($curTorrentProvider, 'search_mode'): +
+ + + +
+ #end if + + #if $hasattr($curTorrentProvider, 'options'): +
+ +
+ Advanced options +
+ +
+
+ +
+
+ +
+
+
+ #end if + +
+#end for + +#if $provider_config_list +
+#end if +
+
+ + + +#if $sickbeard.USE_NZBS + +
+ +
+

Configure Custom
Newznab Providers

+

Add and setup or remove custom newznab providers.

+
+ +
+
+ +
+ +
+
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+
+#end if + + + #if $sickbeard.USE_TORRENTS - + +
+ +
+

Configure Custom Torrent Providers

+

Add or remove custom RSS providers.

+
+ +
+
+ +
+ +
+
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+
+
#end if -
-
- +

-
- - -
+
-
-

Provider Priorities

-

Check off and drag the providers into the order you want them to be used.

-

At least one provider is required but two are recommended.

- - #if not $sickbeard.USE_NZBS or not $sickbeard.USE_TORRENTS: -
NZB/Torrent providers can be toggled in Search Settings
- #else: -
- #end if - -
-

*

Provider does not support backlog searches at this time.

-

**

Provider supports limited backlog searches, all episodes/qualities may not be available.

-

!

Provider is NOT WORKING.

-
-
- -
-
    - #for $curProvider in $sickbeard.providers.sortedProviderList(): - #if $curProvider.providerType == $GenericProvider.NZB and not $sickbeard.USE_NZBS: - #continue - #elif $curProvider.providerType == $GenericProvider.TORRENT and not $sickbeard.USE_TORRENTS: - #continue - #end if - #set $curName = $curProvider.getID() -
  • - - $curProvider.name - $curProvider.name - #if not $curProvider.supportsBacklog then "*" else ""# - #if $curProvider.name == "EZRSS" then "**" else ""# - -
  • - #end for -
- "/> -

-
-
- -
- -
-

Provider Options

-

Configure individual provider settings here.

-

Check with provider's website on how to obtain an API key if needed.

-
- -
-
- -
- - - - #for $curNewznabProvider in [$curProvider for $curProvider in $sickbeard.newznabProviderList]: -
- #if $curNewznabProvider.default and $curNewznabProvider.needs_auth -
- -
-
- -
- #end if - - #if $hasattr($curNewznabProvider, 'enable_daily'): -
- -
- #end if - - #if $hasattr($curNewznabProvider, 'enable_backlog'): -
- -
- #end if - - #if $hasattr($curNewznabProvider, 'search_fallback'): -
- -
- #end if - - #if $hasattr($curNewznabProvider, 'search_mode'): -
- - - -
- #end if - -
- #end for - - #for $curNzbProvider in [$curProvider for $curProvider in $sickbeard.providers.sortedProviderList() if $curProvider.providerType == $GenericProvider.NZB and $curProvider not in $sickbeard.newznabProviderList]: -
- #if $hasattr($curNzbProvider, 'username'): -
- -
- #end if - - #if $hasattr($curNzbProvider, 'api_key'): -
- -
- #end if - - - #if $hasattr($curNzbProvider, 'enable_daily'): -
- -
- #end if - - #if $hasattr($curNzbProvider, 'enable_backlog'): -
- -
- #end if - - #if $hasattr($curNzbProvider, 'search_fallback'): -
- -
- #end if - - #if $hasattr($curNzbProvider, 'search_mode'): -
- - - -
- #end if - -
- #end for - - #for $curTorrentProvider in [$curProvider for $curProvider in $sickbeard.providers.sortedProviderList() if $curProvider.providerType == $GenericProvider.TORRENT]: -
- #if $hasattr($curTorrentProvider, 'api_key'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'digest'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'hash'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'username'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'password'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'passkey'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'ratio'): -
- - -
- #end if - - #if $hasattr($curTorrentProvider, 'minseed'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'minleech'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'proxy'): -
- -
- - #if $hasattr($curTorrentProvider.proxy, 'url'): -
- -
- #end if - #end if - - #if $hasattr($curTorrentProvider, 'confirmed'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'freeleech'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'enable_daily'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'enable_backlog'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'search_fallback'): -
- -
- #end if - - #if $hasattr($curTorrentProvider, 'search_mode'): -
- - - -
- #end if - - #if $hasattr($curTorrentProvider, 'options'): -
- -
- Advanced options -
- -
-
- -
-
- -
-
-
- #end if - -
- #end for - - - - -
- -
-
- - #if $sickbeard.USE_NZBS -
- -
-

Configure Custom
Newznab Providers

-

Add and setup or remove custom Newznab providers.

-
- -
-
- -
- -
-
- -
-
- -
-
- - -
- -
- - - -
- -
- -
- -
- -
-
- #end if - - #if $sickbeard.USE_TORRENTS: - -
- -
-

Configure Custom Torrent Providers

-

Add and setup or remove custom RSS providers.

-
- -
-
- -
- -
-
- -
-
- -
-
- - -
- -
- -
- -
-
-
- #end if - -

- -
- - -
+ +
-#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl") +#include $os.path.join($sickbeard.PROG_DIR,'gui/slick/interfaces/default/inc_bottom.tmpl') diff --git a/gui/slick/interfaces/default/config_search.tmpl b/gui/slick/interfaces/default/config_search.tmpl index c1115ea4..96fa4c25 100755 --- a/gui/slick/interfaces/default/config_search.tmpl +++ b/gui/slick/interfaces/default/config_search.tmpl @@ -38,7 +38,7 @@

Episode Search

-

How to manage searching with providers.

+

How to manage searching with providers.

diff --git a/gui/slick/interfaces/default/displayShow.tmpl b/gui/slick/interfaces/default/displayShow.tmpl index b4d87707..38a40221 100644 --- a/gui/slick/interfaces/default/displayShow.tmpl +++ b/gui/slick/interfaces/default/displayShow.tmpl @@ -82,7 +82,7 @@
-

$show.name

+

$show.name

@@ -163,17 +163,17 @@ #else #if 'country_codes' in $show.imdb_info: #for $country in $show.imdb_info['country_codes'].split('|') - + #end for #end if #if 'year' in $show.imdb_info: - ($show.imdb_info['year']) - $show.imdb_info['runtimes'] minutes - + ($show.imdb_info['year']) - $show.imdb_info['runtimes'] minutes #end if - [imdb] + [imdb] #end if - $sickbeard.indexerApi($show.indexer).name + $sickbeard.indexerApi($show.indexer).name #if $xem_numbering or $xem_absolute_numbering: - [xem] + [xem] #end if
@@ -349,7 +349,7 @@ #end if #if int($epResult["season"]) != $curSeason: - +

#if int($epResult["season"]) == 0 then "Specials" else "Season " + str($epResult["season"])#

diff --git a/gui/slick/interfaces/default/home.tmpl b/gui/slick/interfaces/default/home.tmpl index 17520e79..e9b988f0 100644 --- a/gui/slick/interfaces/default/home.tmpl +++ b/gui/slick/interfaces/default/home.tmpl @@ -58,7 +58,7 @@ return s.replace('Loading...','000'); else #if not $sickbeard.SORT_ARTICLE: - return (s || '').replace(/^(The|A|An)\s/i,''); + return (s || '').replace(/^(?:(?:A(?!\s+to)n?)|The)\s(\w)/i, '$1'); #else: return (s || ''); #end if @@ -162,7 +162,7 @@ name: function( itemElem ) { var name = \$( itemElem ).attr('data-name'); #if not $sickbeard.SORT_ARTICLE: - return (name || '').replace(/^(The|A|An)\s/i,''); + return (name || '').replace(/^(?:(?:A(?!\s+to)n?)|The)\s(\w)/i, '$1'); #else: return (name || ''); #end if @@ -280,10 +280,13 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name)) #set $cur_downloaded = 0 #set $cur_total = 0 #set $download_stat_tip = '' - #if None is not $curShow.status and re.search(r'(?i)(?:new|returning)\s*series', $curShow.status) - #set $display_status = 'Continuing' - #else - #set $display_status = $curShow.status + #set $display_status = $curShow.status + #if None is not $display_status + #if re.search(r'(?i)(?:new|returning)\s*series', $curShow.status) + #set $display_status = 'Continuing' + #else if re.search(r'(?i)(?:nded)', $curShow.status) + #set $display_status = 'Ended' + #end if #end if #if $curShow.indexerid in $show_stat: @@ -396,7 +399,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name)) - #if $layout != 'simple': + #if $layout != 'simple': #if $curShow.network: $curShow.network #else: @@ -404,7 +407,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name)) #end if #else: $curShow.network - #end if + #end if @@ -420,8 +423,8 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
- -#end for + +#end for
@@ -535,7 +538,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name)) #if $layout == 'small':
- + $curShow.indexerid $curShow.name @@ -604,11 +607,16 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name)) -#if None is not $curShow.status and re.search(r'(?i)(?:new|returning)\s*series', $curShow.status) - Continuing -#else: - $curShow.status + +#set $display_status = $curShow.status +#if None is not $display_status + #if re.search(r'(?i)(?:new|returning)\s*series', $curShow.status) + #set $display_status = 'Continuing' + #else if re.search(r'(?i)(?:nded)', $curShow.status) + #set $display_status = 'Ended' + #end if #end if + $display_status diff --git a/gui/slick/interfaces/default/home_newShow.tmpl b/gui/slick/interfaces/default/home_newShow.tmpl index e18ec6fb..715f8e90 100644 --- a/gui/slick/interfaces/default/home_newShow.tmpl +++ b/gui/slick/interfaces/default/home_newShow.tmpl @@ -2,28 +2,28 @@ #import sickbeard #from sickbeard.helpers import anon_url -#set global $header="New Show" -#set global $title="New Show" +#set global $header = 'New Show' +#set global $title = 'New Show' -#set global $sbPath="../.." +#set global $sbPath = '../..' -#set global $statpath="../.."# -#set global $topmenu="home"# +#set global $statpath = '../..' +#set global $topmenu = 'home' #import os.path -#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl") +#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl') - #if $varExists('header') -

$header

- #else -

$title

- #end if - +#if $varExists('header') +

$header

+#else +

$title

+#end if +
aoeu
@@ -31,20 +31,21 @@
-
- Find a show on the TVDB or TVRAGE +
+ Find a show on the TVDB or TVRAGE -
- +
+ - #if $use_provided_info: - Show retrieved from existing metadata: $provided_indexer_name +#if $use_provided_info + #set $provided_indexer_local = $provided_indexer + #set $provided_indexer_id_local = $provided_indexer_id + Show retrieved from existing metadata: $provided_indexer_name - #else: - +#else   - #for $indexer in $indexers - - #end for + #for $indexer in $indexers + + #end for   - +
* This will only affect the language of the retrieved metadata file contents and episode filenames.
This DOES NOT allow SickGear to download non-english TV episodes!


- #end if +#end if -
-
+
+ -
- Pick the parent folder +
+ Pick the parent folder -
- #if $provided_show_dir: +
+#if $provided_show_dir Pre-chosen Destination Folder: $provided_show_dir

- #else - #include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_rootDirs.tmpl") - #end if -
-
- -
- Customize options -
- #include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_addShowOptions.tmpl") +#else + #include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_rootDirs.tmpl') +#end if
-
+
- #for $curNextDir in $other_shows: - - #end for - +
+ Customize options +
+#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_addShowOptions.tmpl') +
+
+ +#for $curNextDir in $other_shows + +#end for + -
+
-
- -#if $provided_show_dir: - +
+ +#if $provided_show_dir + #end if -
+
-#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl") +#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl') diff --git a/gui/slick/interfaces/default/home_trendingShows.tmpl b/gui/slick/interfaces/default/home_trendingShows.tmpl index 011f170f..6bc04d26 100644 --- a/gui/slick/interfaces/default/home_trendingShows.tmpl +++ b/gui/slick/interfaces/default/home_trendingShows.tmpl @@ -1,6 +1,7 @@ #import sickbeard #import datetime #import re +#import urllib #from sickbeard.common import * #from sickbeard import sbdatetime #from sickbeard.helpers import anon_url @@ -23,6 +24,7 @@ // initialise combos for dirty page refreshes \$('#showsort').val('original'); \$('#showsortdirection').val('asc'); + \$('#showfilter').val('*'); var \$container = [\$('#container')]; jQuery.each(\$container, function (j) { @@ -34,7 +36,7 @@ name: function( itemElem ) { var name = \$( itemElem ).attr('data-name') || ''; #if not $sickbeard.SORT_ARTICLE: - name = name.replace(/^(The|A|An)\s/i, ''); + name = name.replace(/^(?:(?:A(?!\s+to)n?)|The)\s(\w)/i, '$1'); #end if return name.toLowerCase(); }, @@ -73,6 +75,11 @@ \$('#showsortdirection').on( 'change', function() { \$('#container').isotope({sortAscending: ('asc' == this.value)}); }); + + \$('#showfilter').on( 'change', function() { + var filterValue = this.value; + \$('#container').isotope({ filter: filterValue }); + }); }); //--> @@ -86,7 +93,16 @@ #if $trending_shows
- Sort By: + Show: + + + Sort By: ' + name + ' ' + name + '' $('#provider_order_list').append(toAdd); - $('#provider_order_list').sortable("refresh"); + $('#provider_order_list').sortable('refresh'); } $(this).makeNewznabProviderString(); @@ -84,12 +84,12 @@ $(document).ready(function(){ $('#editATorrentRssProvider').addOption(id, name); $(this).populateTorrentRssSection(); - if ($('#provider_order_list > #'+id).length == 0) { + if ($('#provider_order_list > #' + id).length == 0) { var toAdd = '
  • ' + name + ' ' + name + '
  • ' $('#provider_order_list').append(toAdd); - $('#provider_order_list').sortable("refresh"); + $('#provider_order_list').sortable('refresh'); } $(this).makeTorrentRssProviderString(); @@ -113,7 +113,7 @@ $(document).ready(function(){ $('#editANewznabProvider').removeOption(id); delete newznabProviders[id]; $(this).populateNewznabSection(); - $('li').remove('#'+id); + $('li').remove('#' + id); $(this).makeNewznabProviderString(); } @@ -129,7 +129,7 @@ $(document).ready(function(){ $('#editATorrentRssProvider').removeOption(id); delete torrentRssProviders[id]; $(this).populateTorrentRssSection(); - $('li').remove('#'+id); + $('li').remove('#' + id); $(this).makeTorrentRssProviderString(); } @@ -144,115 +144,115 @@ $(document).ready(function(){ $('#newznab_update_div').hide(); $('#newznab_cat').attr('disabled','disabled'); $('#newznab_cap').attr('disabled','disabled'); - - $("#newznab_cat option").each(function() { - $(this).remove(); - return; + + $('#newznab_cat option').each(function() { + $(this).remove(); + return; }); - - $("#newznab_cap option").each(function() { - $(this).remove(); - return; + + $('#newznab_cap option').each(function() { + $(this).remove(); + return; }); - + } else { var data = newznabProviders[selectedProvider][1]; var isDefault = newznabProviders[selectedProvider][0]; $('#newznab_add_div').hide(); $('#newznab_update_div').show(); - $('#newznab_cat').removeAttr("disabled"); - $('#newznab_cap').removeAttr("disabled"); + $('#newznab_cat').removeAttr('disabled'); + $('#newznab_cap').removeAttr('disabled'); } $('#newznab_name').val(data[0]); $('#newznab_url').val(data[1]); $('#newznab_key').val(data[2]); - + //Check if not already array if (typeof data[3] === 'string') { - rrcat = data[3].split(",") - } - else { - rrcat = data[3]; + rrcat = data[3].split(',') } - + else { + rrcat = data[3]; + } + // Update the category select box (on the right) var newCatOptions = []; if (rrcat) { - rrcat.forEach(function (cat) { - newCatOptions.push({text : 
cat, value : cat}); + rrcat.forEach(function (cat) { + newCatOptions.push({text : cat, value : cat}); }); - $("#newznab_cat").replaceOptions(newCatOptions); + $('#newznab_cat').replaceOptions(newCatOptions); }; - + if (selectedProvider == 'addNewznab') { - $('#newznab_name').removeAttr("disabled"); - $('#newznab_url').removeAttr("disabled"); + $('#newznab_name').removeAttr('disabled'); + $('#newznab_url').removeAttr('disabled'); } else { - $('#newznab_name').attr("disabled", "disabled"); + $('#newznab_name').attr('disabled', 'disabled'); if (isDefault) { - $('#newznab_url').attr("disabled", "disabled"); - $('#newznab_delete').attr("disabled", "disabled"); + $('#newznab_url').attr('disabled', 'disabled'); + $('#newznab_delete').attr('disabled', 'disabled'); } else { - $('#newznab_url').removeAttr("disabled"); - $('#newznab_delete').removeAttr("disabled"); - + $('#newznab_url').removeAttr('disabled'); + $('#newznab_delete').removeAttr('disabled'); + //Get Categories Capabilities if (data[0] && data[1] && data[2] && !ifExists($.fn.newznabProvidersCapabilities, data[0])) { - $(this).getCategories(isDefault, data); + $(this).getCategories(isDefault, data); } else { - updateNewznabCaps( null, data ); + updateNewznabCaps( null, data ); } } } } ifExists = function(loopThroughArray, searchFor) { - var found = false; - - loopThroughArray.forEach(function(rootObject) { - if (rootObject.name == searchFor) { - found = true; - } - console.log(rootObject.name + " while searching for: "+ searchFor); - }); - return found; + var found = false; + + loopThroughArray.forEach(function(rootObject) { + if (rootObject.name == searchFor) { + found = true; + } + console.log(rootObject.name + ' while searching for: ' + searchFor); + }); + return found; }; - + /** - * Updates the Global array $.fn.newznabProvidersCapabilities with a combination of newznab prov name + * Updates the Global array $.fn.newznabProvidersCapabilities with a combination of newznab prov name * and category 
capabilities. Return * @param {Array} newzNabCaps, is the returned object with newzNabprod Name and Capabilities. * @param {Array} selectedProvider * @return no return data. The multiselect input $("#newznab_cap") is updated, as a result. */ updateNewznabCaps = function( newzNabCaps, selectedProvider ) { - - if (newzNabCaps && !ifExists($.fn.newznabProvidersCapabilities, selectedProvider[0])) { - $.fn.newznabProvidersCapabilities.push({'name' : selectedProvider[0], 'categories' : newzNabCaps.tv_categories}); - } - - //Loop through the array and if currently selected newznab provider name matches one in the array, use it to + + if (newzNabCaps && !ifExists($.fn.newznabProvidersCapabilities, selectedProvider[0])) { + $.fn.newznabProvidersCapabilities.push({'name' : selectedProvider[0], 'categories' : newzNabCaps.tv_categories}); + } + + //Loop through the array and if currently selected newznab provider name matches one in the array, use it to //update the capabilities select box (on the left). 
if (selectedProvider[0]) { - $.fn.newznabProvidersCapabilities.forEach(function(newzNabCap) { - - if (newzNabCap.name && newzNabCap.name == selectedProvider[0] && newzNabCap.categories instanceof Array) { - var newCapOptions = []; - newzNabCap.categories.forEach(function(category_set) { - if (category_set.id && category_set.name) { - newCapOptions.push({value : category_set.id, text : category_set.name + "(" + category_set.id + ")"}); - }; - }); - $("#newznab_cap").replaceOptions(newCapOptions); - } + $.fn.newznabProvidersCapabilities.forEach(function(newzNabCap) { + + if (newzNabCap.name && newzNabCap.name == selectedProvider[0] && newzNabCap.categories instanceof Array) { + var newCapOptions = []; + newzNabCap.categories.forEach(function(category_set) { + if (category_set.id && category_set.name) { + newCapOptions.push({value : category_set.id, text : category_set.name + '(' + category_set.id + ')'}); + }; + }); + $('#newznab_cap').replaceOptions(newCapOptions); + } }); }; } - + $.fn.makeNewznabProviderString = function() { var provStrings = new Array(); @@ -284,14 +284,14 @@ $(document).ready(function(){ $('#torrentrss_cookies').val(data[2]); if (selectedProvider == 'addTorrentRss') { - $('#torrentrss_name').removeAttr("disabled"); - $('#torrentrss_url').removeAttr("disabled"); - $('#torrentrss_cookies').removeAttr("disabled"); + $('#torrentrss_name').removeAttr('disabled'); + $('#torrentrss_url').removeAttr('disabled'); + $('#torrentrss_cookies').removeAttr('disabled'); } else { - $('#torrentrss_name').attr("disabled", "disabled"); - $('#torrentrss_url').removeAttr("disabled"); - $('#torrentrss_cookies').removeAttr("disabled"); - $('#torrentrss_delete').removeAttr("disabled"); + $('#torrentrss_name').attr('disabled', 'disabled'); + $('#torrentrss_url').removeAttr('disabled'); + $('#torrentrss_cookies').removeAttr('disabled'); + $('#torrentrss_delete').removeAttr('disabled'); } } @@ -309,26 +309,26 @@ $(document).ready(function(){ $.fn.refreshProviderList = 
function() { - var idArr = $("#provider_order_list").sortable('toArray'); + var idArr = $('#provider_order_list').sortable('toArray'); var finalArr = new Array(); $.each(idArr, function(key, val) { - var checked = + $('#enable_'+val).prop('checked') ? '1' : '0'; + var checked = + $('#enable_' + val).prop('checked') ? '1' : '0'; finalArr.push(val + ':' + checked); }); - $("#provider_order").val(finalArr.join(' ')); + $('#provider_order').val(finalArr.join(' ')); } var newznabProviders = new Array(); var torrentRssProviders = new Array(); - + $(this).on('change', '.newznab_key', function(){ var provider_id = $(this).attr('id'); provider_id = provider_id.substring(0, provider_id.length-'_hash'.length); - var url = $('#'+provider_id+'_url').val(); - var cat = $('#'+provider_id+'_cat').val(); + var url = $('#' + provider_id + '_url').val(); + var cat = $('#' + provider_id + '_cat').val(); var key = $(this).val(); $(this).updateProvider(provider_id, url, key, cat); @@ -339,16 +339,16 @@ $(document).ready(function(){ var selectedProvider = $('#editANewznabProvider :selected').val(); - if (selectedProvider == "addNewznab") + if (selectedProvider == 'addNewznab') return; - + var url = $('#newznab_url').val(); var key = $('#newznab_key').val(); var cat = $('#newznab_cat option').map(function(i, opt) { - return $(opt).text(); - }).toArray().join(','); - + return $(opt).text(); + }).toArray().join(','); + $(this).updateProvider(selectedProvider, url, key, cat); }); @@ -357,7 +357,7 @@ $(document).ready(function(){ var selectedProvider = $('#editATorrentRssProvider :selected').val(); - if (selectedProvider == "addTorrentRss") + if (selectedProvider == 'addTorrentRss') return; var url = $('#torrentrss_url').val(); @@ -385,46 +385,46 @@ $(document).ready(function(){ $(this).on('click', '#newznab_cat_update', function(){ console.debug('Clicked Button'); - + //Maybe check if there is anything selected? 
- $("#newznab_cat option").each(function() { - $(this).remove(); - return; + $('#newznab_cat option').each(function() { + $(this).remove(); + return; }); - + var newOptions = []; - - // When the update botton is clicked, loop through the capabilities list + + // When the update botton is clicked, loop through the capabilities list // and copy the selected category id's to the category list on the right. - $("#newznab_cap option").each(function(){ + $('#newznab_cap option').each(function(){ if($(this).attr('selected') == 'selected') { - var selected_cat = $(this).val(); - console.debug(selected_cat); - newOptions.push({text: selected_cat, value: selected_cat}) + var selected_cat = $(this).val(); + console.debug(selected_cat); + newOptions.push({text: selected_cat, value: selected_cat}) }; }); - - $("#newznab_cat").replaceOptions(newOptions); - - var selectedProvider = $('#editANewznabProvider :selected').val(); - if (selectedProvider == "addNewznab") - return; - - var url = $('#newznab_url').val(); - var key = $('#newznab_key').val(); - var cat = $('#newznab_cat option').map(function(i, opt) { - return $(opt).text(); - }).toArray().join(','); - - $("#newznab_cat option:not([value])").remove(); - + $('#newznab_cat').replaceOptions(newOptions); + + var selectedProvider = $('#editANewznabProvider :selected').val(); + if (selectedProvider == 'addNewznab') + return; + + var url = $('#newznab_url').val(); + var key = $('#newznab_key').val(); + + var cat = $('#newznab_cat option').map(function(i, opt) { + return $(opt).text(); + }).toArray().join(','); + + $('#newznab_cat option:not([value])').remove(); + $(this).updateProvider(selectedProvider, url, key, cat); }); - - + + $('#newznab_add').click(function(){ var selectedProvider = $('#editANewznabProvider :selected').val(); @@ -433,19 +433,19 @@ $(document).ready(function(){ var url = $.trim($('#newznab_url').val()); var key = $.trim($('#newznab_key').val()); //var cat = $.trim($('#newznab_cat').val()); - + var cat = 
$.trim($('#newznab_cat option').map(function(i, opt) { - return $(opt).text();}).toArray().join(',')); - - + return $(opt).text();}).toArray().join(',')); + + if (!name) - return; + return; if (!url) - return; + return; if (!key) - return; + return; var params = {name: name}; @@ -496,12 +496,12 @@ $(document).ready(function(){ }); - $(this).on('change', "[class='providerDiv_tip'] input", function(){ - $('div .providerDiv ' + "[name=" + $(this).attr('name') + "]").replaceWith($(this).clone()); - $('div .providerDiv ' + "[newznab_name=" + $(this).attr('id') + "]").replaceWith($(this).clone()); + $(this).on('change', '[class="providerDiv_tip"] input', function(){ + $('div .providerDiv ' + '[name=' + $(this).attr('name') + ']').replaceWith($(this).clone()); + $('div .providerDiv ' + '[newznab_name=' + $(this).attr('id') + ']').replaceWith($(this).clone()); }); - $(this).on('change', "[class='providerDiv_tip'] select", function(){ + $(this).on('change', '[class="providerDiv_tip"] select', function(){ $(this).find('option').each( function() { if ($(this).is(':selected')) { @@ -511,34 +511,34 @@ $(document).ready(function(){ } }); - $('div .providerDiv ' + "[name=" + $(this).attr('name') + "]").empty().replaceWith($(this).clone())}); + $('div .providerDiv ' + '[name=' + $(this).attr('name') + ']').empty().replaceWith($(this).clone())}); $(this).on('change', '.enabler', function(){ if ($(this).is(':checked')) { - $('.content_'+$(this).attr('id')).each( function() { + $('.content_' + $(this).attr('id')).each( function() { $(this).show() }) } else { - $('.content_'+$(this).attr('id')).each( function() { + $('.content_' + $(this).attr('id')).each( function() { $(this).hide() }) } }); - $(".enabler").each(function(){ + $('.enabler').each(function(){ if (!$(this).is(':checked')) { - $('.content_'+$(this).attr('id')).hide(); + $('.content_' + $(this).attr('id')).hide(); } else { - $('.content_'+$(this).attr('id')).show(); + $('.content_' + $(this).attr('id')).show(); } }); 
$.fn.makeTorrentOptionString = function(provider_id) { - var seed_ratio = $('.providerDiv_tip #'+provider_id+'_seed_ratio').prop('value'); - var seed_time = $('.providerDiv_tip #'+provider_id+'_seed_time').prop('value'); - var process_met = $('.providerDiv_tip #'+provider_id+'_process_method').prop('value'); - var option_string = $('.providerDiv_tip #'+provider_id+'_option_string'); + var seed_ratio = $('.providerDiv_tip #' + provider_id + '_seed_ratio').prop('value'); + var seed_time = $('.providerDiv_tip #' + provider_id + '_seed_time').prop('value'); + var process_met = $('.providerDiv_tip #' + provider_id + '_process_method').prop('value'); + var option_string = $('.providerDiv_tip #' + provider_id + '_option_string'); option_string.val([seed_ratio, seed_time, process_met].join('|')) @@ -548,11 +548,11 @@ $(document).ready(function(){ var provider_id = $(this).attr('id').split('_')[0]; - $(this).makeTorrentOptionString(provider_id); + $(this).makeTorrentOptionString(provider_id); }); - - + + $.fn.replaceOptions = function(options) { var self, $option; @@ -560,8 +560,8 @@ $(document).ready(function(){ self = this; $.each(options, function(index, option) { - $option = $("") - .attr("value", option.value) + $option = $('') + .attr('value', option.value) .text(option.text); self.append($option); }); @@ -569,18 +569,18 @@ $(document).ready(function(){ // initialization stuff - + $.fn.newznabProvidersCapabilities = []; $(this).showHideProviders(); - $("#provider_order_list").sortable({ + $('#provider_order_list').sortable({ placeholder: 'ui-state-highlight', update: function (event, ui) { $(this).refreshProviderList(); } }); - $("#provider_order_list").disableSelection(); + $('#provider_order_list').disableSelection(); }); \ No newline at end of file diff --git a/gui/slick/js/sceneExceptionsTooltip.js b/gui/slick/js/sceneExceptionsTooltip.js index 20c04e88..46e525a3 100644 --- a/gui/slick/js/sceneExceptionsTooltip.js +++ b/gui/slick/js/sceneExceptionsTooltip.js @@ 
-1,6 +1,5 @@ $(function () { - $('.title a').each(function () { - match = $(this).parent().attr("id").match(/^scene_exception_(\d+)$/); + $('.title span').each(function () { $(this).qtip({ content: { text: 'Loading...', @@ -20,11 +19,11 @@ $(function () { }, position: { viewport: $(window), - my: 'bottom center', - at: 'top center', + my: 'left middle', + at: 'right middle', adjust: { - y: 10, - x: 0 + y: 0, + x: 10 } }, style: { diff --git a/lib/cachecontrol/compat.py b/lib/cachecontrol/compat.py index cb6e1b0b..aa117d02 100644 --- a/lib/cachecontrol/compat.py +++ b/lib/cachecontrol/compat.py @@ -21,6 +21,6 @@ except ImportError: # Handle the case where the requests has been patched to not have urllib3 # bundled as part of it's source. try: - from requests.packages.urllib3.response import HTTPResponse + from lib.requests.packages.urllib3.response import HTTPResponse except ImportError: from urllib3.response import HTTPResponse diff --git a/lib/imdb/parser/http/utils.py b/lib/imdb/parser/http/utils.py index 8b4e17e3..031a4d3a 100644 --- a/lib/imdb/parser/http/utils.py +++ b/lib/imdb/parser/http/utils.py @@ -441,6 +441,12 @@ class DOMParserBase(object): self._useModule = useModule nrMods = len(useModule) _gotError = False + + # Force warnings.warn() to omit the source code line in the message + formatwarning_orig = warnings.formatwarning + warnings.formatwarning = lambda message, category, filename, lineno, line=None: \ + formatwarning_orig(message, category, filename, lineno, line='') + for idx, mod in enumerate(useModule): mod = mod.strip().lower() try: diff --git a/lib/requests/__init__.py b/lib/requests/__init__.py index bba19002..d5e1956e 100644 --- a/lib/requests/__init__.py +++ b/lib/requests/__init__.py @@ -13,7 +13,7 @@ Requests is an HTTP library, written in Python, for human beings. 
Basic GET usage: >>> import requests - >>> r = requests.get('http://python.org') + >>> r = requests.get('https://www.python.org') >>> r.status_code 200 >>> 'Python is a programming language' in r.content @@ -22,7 +22,7 @@ usage: ... or POST: >>> payload = dict(key1='value1', key2='value2') - >>> r = requests.post("http://httpbin.org/post", data=payload) + >>> r = requests.post('http://httpbin.org/post', data=payload) >>> print(r.text) { ... @@ -42,8 +42,8 @@ is at . """ __title__ = 'requests' -__version__ = '2.3.0' -__build__ = 0x020300 +__version__ = '2.4.3' +__build__ = 0x020403 __author__ = 'Kenneth Reitz' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2014 Kenneth Reitz' diff --git a/lib/requests/adapters.py b/lib/requests/adapters.py index 0f297ab2..40088900 100644 --- a/lib/requests/adapters.py +++ b/lib/requests/adapters.py @@ -9,23 +9,26 @@ and maintain connections. """ import socket -import copy from .models import Response +from .packages.urllib3 import Retry from .packages.urllib3.poolmanager import PoolManager, proxy_from_url from .packages.urllib3.response import HTTPResponse from .packages.urllib3.util import Timeout as TimeoutSauce -from .compat import urlparse, basestring, urldefrag, unquote +from .compat import urlparse, basestring from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, - except_on_missing_scheme, get_auth_from_url) + prepend_scheme_if_needed, get_auth_from_url, urldefragauth) from .structures import CaseInsensitiveDict -from .packages.urllib3.exceptions import MaxRetryError -from .packages.urllib3.exceptions import TimeoutError -from .packages.urllib3.exceptions import SSLError as _SSLError +from .packages.urllib3.exceptions import ConnectTimeoutError from .packages.urllib3.exceptions import HTTPError as _HTTPError +from .packages.urllib3.exceptions import MaxRetryError from .packages.urllib3.exceptions import ProxyError as _ProxyError +from .packages.urllib3.exceptions import ProtocolError +from 
.packages.urllib3.exceptions import ReadTimeoutError +from .packages.urllib3.exceptions import SSLError as _SSLError from .cookies import extract_cookies_to_jar -from .exceptions import ConnectionError, Timeout, SSLError, ProxyError +from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, + ProxyError) from .auth import _basic_auth_str DEFAULT_POOLBLOCK = False @@ -57,13 +60,15 @@ class HTTPAdapter(BaseAdapter): :param pool_connections: The number of urllib3 connection pools to cache. :param pool_maxsize: The maximum number of connections to save in the pool. :param int max_retries: The maximum number of retries each connection - should attempt. Note, this applies only to failed connections and - timeouts, never to requests where the server returns a response. + should attempt. Note, this applies only to failed DNS lookups, socket + connections and connection timeouts, never to requests where data has + made it to the server. By default, Requests does not retry failed + connections. :param pool_block: Whether the connection pool should block for connections. Usage:: - >>> import lib.requests + >>> import requests >>> s = requests.Session() >>> a = requests.adapters.HTTPAdapter(max_retries=3) >>> s.mount('http://', a) @@ -102,14 +107,17 @@ class HTTPAdapter(BaseAdapter): self.init_poolmanager(self._pool_connections, self._pool_maxsize, block=self._pool_block) - def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK): - """Initializes a urllib3 PoolManager. This method should not be called - from user code, and is only exposed for use when subclassing the + def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs): + """Initializes a urllib3 PoolManager. + + This method should not be called from user code, and is only + exposed for use when subclassing the :class:`HTTPAdapter `. :param connections: The number of urllib3 connection pools to cache. 
:param maxsize: The maximum number of connections to save in the pool. :param block: Block when no free connections are available. + :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. """ # save these values for pickling self._pool_connections = connections @@ -117,7 +125,30 @@ class HTTPAdapter(BaseAdapter): self._pool_block = block self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, - block=block) + block=block, strict=True, **pool_kwargs) + + def proxy_manager_for(self, proxy, **proxy_kwargs): + """Return urllib3 ProxyManager for the given proxy. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The proxy to return a urllib3 ProxyManager for. + :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. + :returns: ProxyManager + """ + if not proxy in self.proxy_manager: + proxy_headers = self.proxy_headers(proxy) + self.proxy_manager[proxy] = proxy_from_url( + proxy, + proxy_headers=proxy_headers, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs) + + return self.proxy_manager[proxy] def cert_verify(self, conn, url, verify, cert): """Verify a SSL certificate. 
This method should not be called from user @@ -204,18 +235,9 @@ class HTTPAdapter(BaseAdapter): proxy = proxies.get(urlparse(url.lower()).scheme) if proxy: - except_on_missing_scheme(proxy) - proxy_headers = self.proxy_headers(proxy) - - if not proxy in self.proxy_manager: - self.proxy_manager[proxy] = proxy_from_url( - proxy, - proxy_headers=proxy_headers, - num_pools=self._pool_connections, - maxsize=self._pool_maxsize, - block=self._pool_block) - - conn = self.proxy_manager[proxy].connection_from_url(url) + proxy = prepend_scheme_if_needed(proxy, 'http') + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_url(url) else: # Only scheme should be lower case parsed = urlparse(url) @@ -250,7 +272,7 @@ class HTTPAdapter(BaseAdapter): proxy = proxies.get(scheme) if proxy and scheme != 'https': - url, _ = urldefrag(request.url) + url = urldefragauth(request.url) else: url = request.path_url @@ -297,7 +319,10 @@ class HTTPAdapter(BaseAdapter): :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. - :param timeout: (optional) The timeout on the request. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a (`connect timeout, read + timeout `_) tuple. + :type timeout: float or tuple :param verify: (optional) Whether to verify SSL certificates. :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. @@ -311,7 +336,18 @@ class HTTPAdapter(BaseAdapter): chunked = not (request.body is None or 'Content-Length' in request.headers) - timeout = TimeoutSauce(connect=timeout, read=timeout) + if isinstance(timeout, tuple): + try: + connect, read = timeout + timeout = TimeoutSauce(connect=connect, read=read) + except ValueError as e: + # this may raise a string formatting error. + err = ("Invalid timeout {0}. 
Pass a (connect, read) " + "timeout tuple, or a single float to set " + "both timeouts to the same value".format(timeout)) + raise ValueError(err) + else: + timeout = TimeoutSauce(connect=timeout, read=timeout) try: if not chunked: @@ -324,7 +360,7 @@ class HTTPAdapter(BaseAdapter): assert_same_host=False, preload_content=False, decode_content=False, - retries=self.max_retries, + retries=Retry(self.max_retries, read=False), timeout=timeout ) @@ -369,10 +405,13 @@ class HTTPAdapter(BaseAdapter): # All is well, return the connection to the pool. conn._put_conn(low_conn) - except socket.error as sockerr: - raise ConnectionError(sockerr, request=request) + except (ProtocolError, socket.error) as err: + raise ConnectionError(err, request=request) except MaxRetryError as e: + if isinstance(e.reason, ConnectTimeoutError): + raise ConnectTimeout(e, request=request) + raise ConnectionError(e, request=request) except _ProxyError as e: @@ -381,14 +420,9 @@ class HTTPAdapter(BaseAdapter): except (_SSLError, _HTTPError) as e: if isinstance(e, _SSLError): raise SSLError(e, request=request) - elif isinstance(e, TimeoutError): - raise Timeout(e, request=request) + elif isinstance(e, ReadTimeoutError): + raise ReadTimeout(e, request=request) else: raise - r = self.build_response(request, resp) - - if not stream: - r.content - - return r \ No newline at end of file + return self.build_response(request, resp) diff --git a/lib/requests/api.py b/lib/requests/api.py index 01d853d5..1469b05c 100644 --- a/lib/requests/api.py +++ b/lib/requests/api.py @@ -22,12 +22,17 @@ def request(method, url, **kwargs): :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. 
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. - :param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. - :param timeout: (optional) Float describing the timeout of the request in seconds. + :param timeout: (optional) How long to wait for the server to send data + before giving up, as a float, or a (`connect timeout, read timeout + `_) tuple. + :type timeout: float or tuple :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. + :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. :param stream: (optional) if ``False``, the response content will be immediately downloaded. @@ -41,7 +46,12 @@ def request(method, url, **kwargs): """ session = sessions.Session() - return session.request(method=method, url=url, **kwargs) + response = session.request(method=method, url=url, **kwargs) + # By explicitly closing the session, we avoid leaving sockets open which + # can trigger a ResourceWarning in some cases, and look like a memory leak + # in others. + session.close() + return response def get(url, **kwargs): @@ -77,15 +87,16 @@ def head(url, **kwargs): return request('head', url, **kwargs) -def post(url, data=None, **kwargs): +def post(url, data=None, json=None, **kwargs): """Sends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. 
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. """ - return request('post', url, data=data, **kwargs) + return request('post', url, data=data, json=json, **kwargs) def put(url, data=None, **kwargs): diff --git a/lib/requests/auth.py b/lib/requests/auth.py index 9f831b7a..618a902a 100644 --- a/lib/requests/auth.py +++ b/lib/requests/auth.py @@ -16,7 +16,8 @@ from base64 import b64encode from .compat import urlparse, str from .cookies import extract_cookies_to_jar -from .utils import parse_dict_header +from .utils import parse_dict_header, to_native_string +from .status_codes import codes CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' CONTENT_TYPE_MULTI_PART = 'multipart/form-data' @@ -25,7 +26,11 @@ CONTENT_TYPE_MULTI_PART = 'multipart/form-data' def _basic_auth_str(username, password): """Returns a Basic Auth string.""" - return 'Basic ' + b64encode(('%s:%s' % (username, password)).encode('latin1')).strip().decode('latin1') + authstr = 'Basic ' + to_native_string( + b64encode(('%s:%s' % (username, password)).encode('latin1')).strip() + ) + + return authstr class AuthBase(object): @@ -146,6 +151,11 @@ class HTTPDigestAuth(AuthBase): return 'Digest %s' % (base) + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + setattr(self, 'num_401_calls', 1) + def handle_401(self, r, **kwargs): """Takes the given response and tries digest-auth, if needed.""" @@ -178,7 +188,7 @@ class HTTPDigestAuth(AuthBase): return _r - setattr(self, 'num_401_calls', 1) + setattr(self, 'num_401_calls', num_401_calls + 1) return r def __call__(self, r): @@ -188,6 +198,11 @@ class HTTPDigestAuth(AuthBase): try: self.pos = r.body.tell() except AttributeError: - pass + # In the case of HTTPDigestAuth being reused and 
the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. + self.pos = None r.register_hook('response', self.handle_401) + r.register_hook('response', self.handle_redirect) return r diff --git a/lib/requests/certs.py b/lib/requests/certs.py index bc008261..07e64750 100644 --- a/lib/requests/certs.py +++ b/lib/requests/certs.py @@ -11,14 +11,15 @@ If you are packaging Requests, e.g., for a Linux distribution or a managed environment, you can change the definition of where() to return a separately packaged CA bundle. """ - import os.path - -def where(): - """Return the preferred certificate bundle.""" - # vendored bundle inside Requests - return os.path.join(os.path.dirname(__file__), 'cacert.pem') +try: + from certifi import where +except ImportError: + def where(): + """Return the preferred certificate bundle.""" + # vendored bundle inside Requests + return os.path.join(os.path.dirname(__file__), 'cacert.pem') if __name__ == '__main__': print(where()) diff --git a/lib/requests/compat.py b/lib/requests/compat.py index bdf10d6a..be5a1ed6 100644 --- a/lib/requests/compat.py +++ b/lib/requests/compat.py @@ -75,7 +75,9 @@ is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess. try: import simplejson as json -except ImportError: +except (ImportError, SyntaxError): + # simplejson does not support Python 3.2, it thows a SyntaxError + # because of u'...' Unicode literals. 
import json # --------- @@ -90,7 +92,6 @@ if is_py2: from Cookie import Morsel from StringIO import StringIO from .packages.urllib3.packages.ordered_dict import OrderedDict - from httplib import IncompleteRead builtin_str = str bytes = str @@ -106,7 +107,6 @@ elif is_py3: from http.cookies import Morsel from io import StringIO from collections import OrderedDict - from http.client import IncompleteRead builtin_str = str str = str diff --git a/lib/requests/exceptions.py b/lib/requests/exceptions.py index a4ee9d63..34c7a0db 100644 --- a/lib/requests/exceptions.py +++ b/lib/requests/exceptions.py @@ -44,7 +44,23 @@ class SSLError(ConnectionError): class Timeout(RequestException): - """The request timed out.""" + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. 
+ """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" class URLRequired(RequestException): @@ -73,3 +89,6 @@ class ChunkedEncodingError(RequestException): class ContentDecodingError(RequestException, BaseHTTPError): """Failed to decode response content""" + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed""" diff --git a/lib/requests/models.py b/lib/requests/models.py index e2fa09f8..2370b67f 100644 --- a/lib/requests/models.py +++ b/lib/requests/models.py @@ -19,31 +19,36 @@ from .cookies import cookiejar_from_dict, get_cookie_header from .packages.urllib3.fields import RequestField from .packages.urllib3.filepost import encode_multipart_formdata from .packages.urllib3.util import parse_url -from .packages.urllib3.exceptions import DecodeError +from .packages.urllib3.exceptions import ( + DecodeError, ReadTimeoutError, ProtocolError) from .exceptions import ( - HTTPError, RequestException, MissingSchema, InvalidURL, - ChunkedEncodingError, ContentDecodingError) + HTTPError, RequestException, MissingSchema, InvalidURL, + ChunkedEncodingError, ContentDecodingError, ConnectionError, + StreamConsumedError) from .utils import ( guess_filename, get_auth_from_url, requote_uri, stream_decode_response_unicode, to_key_val_list, parse_header_links, iter_slices, guess_json_utf, super_len, to_native_string) from .compat import ( cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO, - is_py2, chardet, json, builtin_str, basestring, IncompleteRead) + is_py2, chardet, json, builtin_str, basestring) from .status_codes import codes #: The set of HTTP status codes that indicate an automatically #: processable redirect. 
REDIRECT_STATI = ( - codes.moved, # 301 - codes.found, # 302 - codes.other, # 303 - codes.temporary_moved, # 307 + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 ) DEFAULT_REDIRECT_LIMIT = 30 CONTENT_CHUNK_SIZE = 10 * 1024 ITER_CHUNK_SIZE = 512 +json_dumps = json.dumps + class RequestEncodingMixin(object): @property @@ -187,7 +192,8 @@ class Request(RequestHooksMixin): :param url: URL to send. :param headers: dictionary of headers to send. :param files: dictionary of {filename: fileobject} files to multipart upload. - :param data: the body to attach the request. If a dictionary is provided, form-encoding will take place. + :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place. + :param json: json for the body to attach to the request (if data is not specified). :param params: dictionary of URL parameters to append to the URL. :param auth: Auth handler or (user, pass) tuple. :param cookies: dictionary or CookieJar of cookies to attach to this request. @@ -210,7 +216,8 @@ class Request(RequestHooksMixin): params=None, auth=None, cookies=None, - hooks=None): + hooks=None, + json=None): # Default empty dicts for dict params. 
data = [] if data is None else data @@ -228,6 +235,7 @@ class Request(RequestHooksMixin): self.headers = headers self.files = files self.data = data + self.json = json self.params = params self.auth = auth self.cookies = cookies @@ -244,6 +252,7 @@ class Request(RequestHooksMixin): headers=self.headers, files=self.files, data=self.data, + json=self.json, params=self.params, auth=self.auth, cookies=self.cookies, @@ -287,14 +296,15 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.hooks = default_hooks() def prepare(self, method=None, url=None, headers=None, files=None, - data=None, params=None, auth=None, cookies=None, hooks=None): + data=None, params=None, auth=None, cookies=None, hooks=None, + json=None): """Prepares the entire request with the given parameters.""" self.prepare_method(method) self.prepare_url(url, params) self.prepare_headers(headers) self.prepare_cookies(cookies) - self.prepare_body(data, files) + self.prepare_body(data, files, json) self.prepare_auth(auth, url) # Note that prepare_auth must be last to enable authentication schemes # such as OAuth to work on a fully prepared request. @@ -309,8 +319,8 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): p = PreparedRequest() p.method = self.method p.url = self.url - p.headers = self.headers.copy() - p._cookies = self._cookies.copy() + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = self._cookies.copy() if self._cookies is not None else None p.body = self.body p.hooks = self.hooks return p @@ -324,15 +334,18 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): def prepare_url(self, url, params): """Prepares the given HTTP URL.""" #: Accept objects that have string representations. - try: - url = unicode(url) - except NameError: - # We're on Python 3. 
- url = str(url) - except UnicodeDecodeError: - pass + #: We're unable to blindy call unicode/str functions + #: as this will include the bytestring indicator (b'') + #: on python 3.x. + #: https://github.com/kennethreitz/requests/pull/2238 + if isinstance(url, bytes): + url = url.decode('utf8') + else: + url = unicode(url) if is_py2 else str(url) - # Don't do any URL preparation for oddball schemes + # Don't do any URL preparation for non-HTTP schemes like `mailto`, + # `data` etc to work around exceptions from `url_parse`, which + # handles RFC 3986 only. if ':' in url and not url.lower().startswith('http'): self.url = url return @@ -395,7 +408,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): else: self.headers = CaseInsensitiveDict() - def prepare_body(self, data, files): + def prepare_body(self, data, files, json=None): """Prepares the given HTTP body data.""" # Check if file, fo, generator, iterator. @@ -406,11 +419,13 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): content_type = None length = None + if json is not None: + content_type = 'application/json' + body = json_dumps(json) + is_stream = all([ hasattr(data, '__iter__'), - not isinstance(data, basestring), - not isinstance(data, list), - not isinstance(data, dict) + not isinstance(data, (basestring, list, tuple, dict)) ]) try: @@ -433,9 +448,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): if files: (body, content_type) = self._encode_files(files, data) else: - if data: + if data and json is None: body = self._encode_params(data) - if isinstance(data, str) or isinstance(data, builtin_str) or hasattr(data, 'read'): + if isinstance(data, basestring) or hasattr(data, 'read'): content_type = None else: content_type = 'application/x-www-form-urlencoded' @@ -443,7 +458,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.prepare_content_length(body) # Add content-type if it wasn't explicitly provided. 
- if (content_type) and (not 'content-type' in self.headers): + if content_type and ('content-type' not in self.headers): self.headers['Content-Type'] = content_type self.body = body @@ -457,7 +472,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): l = super_len(body) if l: self.headers['Content-Length'] = builtin_str(l) - elif self.method not in ('GET', 'HEAD'): + elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None): self.headers['Content-Length'] = '0' def prepare_auth(self, auth, url=''): @@ -558,6 +573,10 @@ class Response(object): #: and the arrival of the response (as a timedelta) self.elapsed = datetime.timedelta(0) + #: The :class:`PreparedRequest ` object to which this + #: is a response. + self.request = None + def __getstate__(self): # Consume everything; accessing the content attribute makes # sure the content has been fully read. @@ -607,6 +626,11 @@ class Response(object): """ return ('location' in self.headers and self.status_code in REDIRECT_STATI) + @property + def is_permanent_redirect(self): + """True if this Response one of the permanant versions of redirect""" + return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) + @property def apparent_encoding(self): """The apparent encoding, provided by the chardet library""" @@ -618,21 +642,22 @@ class Response(object): large responses. The chunk size is the number of bytes it should read into memory. This is not necessarily the length of each item returned as decoding can take place. - """ - if self._content_consumed: - # simulate reading small chunks of the content - return iter_slices(self._content, chunk_size) + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ def generate(): try: # Special case for urllib3. 
try: for chunk in self.raw.stream(chunk_size, decode_content=True): yield chunk - except IncompleteRead as e: + except ProtocolError as e: raise ChunkedEncodingError(e) except DecodeError as e: raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) except AttributeError: # Standard file-like object. while True: @@ -643,14 +668,21 @@ class Response(object): self._content_consumed = True - gen = generate() + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() + # simulate reading small chunks of the content + reused_chunks = iter_slices(self._content, chunk_size) + + stream_chunks = generate() + + chunks = reused_chunks if self._content_consumed else stream_chunks if decode_unicode: - gen = stream_decode_response_unicode(gen, self) + chunks = stream_decode_response_unicode(chunks, self) - return gen + return chunks - def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None): + def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None): """Iterates over the response data, one line at a time. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. @@ -662,7 +694,11 @@ class Response(object): if pending is not None: chunk = pending + chunk - lines = chunk.splitlines() + + if delimiter: + lines = chunk.split(delimiter) + else: + lines = chunk.splitlines() if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: pending = lines.pop() @@ -793,8 +829,8 @@ class Response(object): raise HTTPError(http_error_msg, response=self) def close(self): - """Closes the underlying file descriptor and releases the connection - back to the pool. + """Releases the connection back to the pool. Once this method has been + called the underlying ``raw`` object must not be accessed again. 
*Note: Should not normally need to be called explicitly.* """ diff --git a/lib/requests/packages/urllib3/__init__.py b/lib/requests/packages/urllib3/__init__.py index 73071f70..4b36b5ae 100644 --- a/lib/requests/packages/urllib3/__init__.py +++ b/lib/requests/packages/urllib3/__init__.py @@ -1,9 +1,3 @@ -# urllib3/__init__.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - """ urllib3 - Thread-safe connection pooling and re-using. """ @@ -23,7 +17,10 @@ from . import exceptions from .filepost import encode_multipart_formdata from .poolmanager import PoolManager, ProxyManager, proxy_from_url from .response import HTTPResponse -from .util import make_headers, get_host, Timeout +from .util.request import make_headers +from .util.url import get_host +from .util.timeout import Timeout +from .util.retry import Retry # Set default logging handler to avoid "No handler found" warnings. @@ -51,8 +48,19 @@ def add_stderr_logger(level=logging.DEBUG): handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) logger.addHandler(handler) logger.setLevel(level) - logger.debug('Added an stderr logging handler to logger: %s' % __name__) + logger.debug('Added a stderr logging handler to logger: %s' % __name__) return handler # ... Clean up. del NullHandler + + +# Set security warning to only go off once by default. +import warnings +warnings.simplefilter('module', exceptions.SecurityWarning) + +def disable_warnings(category=exceptions.HTTPWarning): + """ + Helper for quickly disabling all urllib3 warnings. 
+ """ + warnings.simplefilter('ignore', category) diff --git a/lib/requests/packages/urllib3/_collections.py b/lib/requests/packages/urllib3/_collections.py index 5907b0dc..d77ebb8d 100644 --- a/lib/requests/packages/urllib3/_collections.py +++ b/lib/requests/packages/urllib3/_collections.py @@ -1,10 +1,4 @@ -# urllib3/_collections.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - -from collections import MutableMapping +from collections import Mapping, MutableMapping try: from threading import RLock except ImportError: # Platform-specific: No threads available @@ -20,9 +14,10 @@ try: # Python 2.7+ from collections import OrderedDict except ImportError: from .packages.ordered_dict import OrderedDict +from .packages.six import itervalues -__all__ = ['RecentlyUsedContainer'] +__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] _Null = object() @@ -101,3 +96,104 @@ class RecentlyUsedContainer(MutableMapping): def keys(self): with self.lock: return self._container.keys() + + +class HTTPHeaderDict(MutableMapping): + """ + :param headers: + An iterable of field-value pairs. Must not contain multiple field names + when compared case-insensitively. + + :param kwargs: + Additional field-value pairs to pass in to ``dict.update``. + + A ``dict`` like container for storing HTTP Headers. + + Field names are stored and compared case-insensitively in compliance with + RFC 7230. Iteration provides the first case-sensitive key seen for each + case-insensitive pair. + + Using ``__setitem__`` syntax overwrites fields that compare equal + case-insensitively in order to maintain ``dict``'s api. For fields that + compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` + in a loop. 
+ + If multiple fields that are equal case-insensitively are passed to the + constructor or ``.update``, the behavior is undefined and some will be + lost. + + >>> headers = HTTPHeaderDict() + >>> headers.add('Set-Cookie', 'foo=bar') + >>> headers.add('set-cookie', 'baz=quxx') + >>> headers['content-length'] = '7' + >>> headers['SET-cookie'] + 'foo=bar, baz=quxx' + >>> headers['Content-Length'] + '7' + + If you want to access the raw headers with their original casing + for debugging purposes you can access the private ``._data`` attribute + which is a normal python ``dict`` that maps the case-insensitive key to a + list of tuples stored as (case-sensitive-original-name, value). Using the + structure from above as our example: + + >>> headers._data + {'set-cookie': [('Set-Cookie', 'foo=bar'), ('set-cookie', 'baz=quxx')], + 'content-length': [('content-length', '7')]} + """ + + def __init__(self, headers=None, **kwargs): + self._data = {} + if headers is None: + headers = {} + self.update(headers, **kwargs) + + def add(self, key, value): + """Adds a (name, value) pair, doesn't overwrite the value if it already + exists. + + >>> headers = HTTPHeaderDict(foo='bar') + >>> headers.add('Foo', 'baz') + >>> headers['foo'] + 'bar, baz' + """ + self._data.setdefault(key.lower(), []).append((key, value)) + + def getlist(self, key): + """Returns a list of all the values for the named field. 
Returns an + empty list if the key doesn't exist.""" + return self[key].split(', ') if key in self else [] + + def copy(self): + h = HTTPHeaderDict() + for key in self._data: + for rawkey, value in self._data[key]: + h.add(rawkey, value) + return h + + def __eq__(self, other): + if not isinstance(other, Mapping): + return False + other = HTTPHeaderDict(other) + return dict((k1, self[k1]) for k1 in self._data) == \ + dict((k2, other[k2]) for k2 in other._data) + + def __getitem__(self, key): + values = self._data[key.lower()] + return ', '.join(value[1] for value in values) + + def __setitem__(self, key, value): + self._data[key.lower()] = [(key, value)] + + def __delitem__(self, key): + del self._data[key.lower()] + + def __len__(self): + return len(self._data) + + def __iter__(self): + for headers in itervalues(self._data): + yield headers[0][0] + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, dict(self.items())) diff --git a/lib/requests/packages/urllib3/connection.py b/lib/requests/packages/urllib3/connection.py index c7d5b77d..c6e1959a 100644 --- a/lib/requests/packages/urllib3/connection.py +++ b/lib/requests/packages/urllib3/connection.py @@ -1,88 +1,146 @@ -# urllib3/connection.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - +import datetime +import sys import socket from socket import timeout as SocketTimeout +import warnings -try: # Python 3 +try: # Python 3 from http.client import HTTPConnection as _HTTPConnection, HTTPException except ImportError: from httplib import HTTPConnection as _HTTPConnection, HTTPException + class DummyConnection(object): "Used to detect a failed ConnectionCls import." pass -try: # Compiled with SSL? - ssl = None + +try: # Compiled with SSL? 
HTTPSConnection = DummyConnection + import ssl + BaseSSLError = ssl.SSLError +except (ImportError, AttributeError): # Platform-specific: No SSL. + ssl = None class BaseSSLError(BaseException): pass - try: # Python 3 - from http.client import HTTPSConnection as _HTTPSConnection - except ImportError: - from httplib import HTTPSConnection as _HTTPSConnection - - import ssl - BaseSSLError = ssl.SSLError - -except (ImportError, AttributeError): # Platform-specific: No SSL. - pass from .exceptions import ( ConnectTimeoutError, + SystemTimeWarning, ) from .packages.ssl_match_hostname import match_hostname -from .util import ( - assert_fingerprint, +from .packages import six + +from .util.ssl_ import ( resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket, + assert_fingerprint, ) +from .util import connection + port_by_scheme = { 'http': 80, 'https': 443, } +RECENT_DATE = datetime.date(2014, 1, 1) + class HTTPConnection(_HTTPConnection, object): + """ + Based on httplib.HTTPConnection but provides an extra constructor + backwards-compatibility layer between older and newer Pythons. + + Additional keyword parameters are used to configure attributes of the connection. + Accepted parameters include: + + - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` + - ``source_address``: Set the source address for the current connection. + + .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x + + - ``socket_options``: Set specific options on the underlying socket. If not specified, then + defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling + Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. 
+ + For example, if you wish to enable TCP Keep Alive in addition to the defaults, + you might pass:: + + HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + ] + + Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). + """ + default_port = port_by_scheme['http'] - # By default, disable Nagle's Algorithm. - tcp_nodelay = 1 + #: Disable Nagle's algorithm by default. + #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` + default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] + + #: Whether this connection verifies the host's certificate. + is_verified = False + + def __init__(self, *args, **kw): + if six.PY3: # Python 3 + kw.pop('strict', None) + + # Pre-set source_address in case we have an older Python like 2.6. + self.source_address = kw.get('source_address') + + if sys.version_info < (2, 7): # Python 2.6 + # _HTTPConnection on Python 2.6 will balk at this keyword arg, but + # not newer versions. We can still use it when creating a + # connection though, so we pop it *after* we have saved it as + # self.source_address. + kw.pop('source_address', None) + + #: The socket options provided by the user. If no options are + #: provided, we use the default options. + self.socket_options = kw.pop('socket_options', self.default_socket_options) + + # Superclass also sets self.source_address in Python 2.7+. + _HTTPConnection.__init__(self, *args, **kw) def _new_conn(self): - """ Establish a socket connection and set nodelay settings on it + """ Establish a socket connection and set nodelay settings on it. - :return: a new socket connection + :return: New socket connection. 
""" + extra_kw = {} + if self.source_address: + extra_kw['source_address'] = self.source_address + + if self.socket_options: + extra_kw['socket_options'] = self.socket_options + try: - conn = socket.create_connection( - (self.host, self.port), - self.timeout, - self.source_address, - ) - except AttributeError: # Python 2.6 - conn = socket.create_connection( - (self.host, self.port), - self.timeout, - ) - conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, - self.tcp_nodelay) + conn = connection.create_connection( + (self.host, self.port), self.timeout, **extra_kw) + + except SocketTimeout: + raise ConnectTimeoutError( + self, "Connection to %s timed out. (connect timeout=%s)" % + (self.host, self.timeout)) + return conn def _prepare_conn(self, conn): self.sock = conn + # the _tunnel_host attribute was added in python 2.6.3 (via + # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do + # not have them. if getattr(self, '_tunnel_host', None): # TODO: Fix tunnel so it doesn't depend on self.sock state. self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 def connect(self): conn = self._new_conn() @@ -93,15 +151,18 @@ class HTTPSConnection(HTTPConnection): default_port = port_by_scheme['https'] def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None): - try: - HTTPConnection.__init__(self, host, port, strict, timeout, source_address) - except TypeError: # Python 2.6 - HTTPConnection.__init__(self, host, port, strict, timeout) + strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw): + + HTTPConnection.__init__(self, host, port, strict=strict, + timeout=timeout, **kw) + self.key_file = key_file self.cert_file = cert_file + # Required property for Google AppEngine 1.9.0 which otherwise causes + # HTTPS requests to go out as HTTP. 
(See Issue #356) + self._protocol = 'https' + def connect(self): conn = self._new_conn() self._prepare_conn(conn) @@ -116,6 +177,7 @@ class VerifiedHTTPSConnection(HTTPSConnection): cert_reqs = None ca_certs = None ssl_version = None + assert_fingerprint = None def set_cert(self, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, @@ -130,46 +192,52 @@ class VerifiedHTTPSConnection(HTTPSConnection): def connect(self): # Add certificate verification - try: - sock = socket.create_connection( - address=(self.host, self.port), - timeout=self.timeout, - ) - except SocketTimeout: - raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) - - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, - self.tcp_nodelay) + conn = self._new_conn() resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs) resolved_ssl_version = resolve_ssl_version(self.ssl_version) - # the _tunnel_host attribute was added in python 2.6.3 (via - # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do - # not have them. + hostname = self.host if getattr(self, '_tunnel_host', None): - self.sock = sock + # _tunnel_host was added in Python 2.6.3 + # (See: http://hg.python.org/cpython/rev/0f57b30a152f) + + self.sock = conn # Calls self._set_hostport(), so self.host is # self._tunnel_host below. self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 + + # Override the host with the one we're requesting data from. + hostname = self._tunnel_host + + is_time_off = datetime.date.today() < RECENT_DATE + if is_time_off: + warnings.warn(( + 'System time is way off (before {0}). 
This will probably ' + 'lead to SSL verification errors').format(RECENT_DATE), + SystemTimeWarning + ) # Wrap socket using verification with the root certs in # trusted_root_certs - self.sock = ssl_wrap_socket(sock, self.key_file, self.cert_file, + self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file, cert_reqs=resolved_cert_reqs, ca_certs=self.ca_certs, - server_hostname=self.host, + server_hostname=hostname, ssl_version=resolved_ssl_version) - if resolved_cert_reqs != ssl.CERT_NONE: - if self.assert_fingerprint: - assert_fingerprint(self.sock.getpeercert(binary_form=True), - self.assert_fingerprint) - elif self.assert_hostname is not False: - match_hostname(self.sock.getpeercert(), - self.assert_hostname or self.host) + if self.assert_fingerprint: + assert_fingerprint(self.sock.getpeercert(binary_form=True), + self.assert_fingerprint) + elif resolved_cert_reqs != ssl.CERT_NONE \ + and self.assert_hostname is not False: + match_hostname(self.sock.getpeercert(), + self.assert_hostname or hostname) + + self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED + or self.assert_fingerprint is not None) if ssl: diff --git a/lib/requests/packages/urllib3/connectionpool.py b/lib/requests/packages/urllib3/connectionpool.py index 243d700e..e693bbd8 100644 --- a/lib/requests/packages/urllib3/connectionpool.py +++ b/lib/requests/packages/urllib3/connectionpool.py @@ -1,16 +1,12 @@ -# urllib3/connectionpool.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - import errno import logging +import sys +import warnings from socket import error as SocketError, timeout as SocketTimeout import socket -try: # Python 3 +try: # Python 3 from queue import LifoQueue, Empty, Full except ImportError: from Queue import LifoQueue, Empty, Full @@ -19,14 +15,16 @@ except ImportError: from .exceptions import ( 
ClosedPoolError, - ConnectTimeoutError, + ProtocolError, EmptyPoolError, HostChangedError, + LocationValueError, MaxRetryError, + ProxyError, + ReadTimeoutError, SSLError, TimeoutError, - ReadTimeoutError, - ProxyError, + InsecureRequestWarning, ) from .packages.ssl_match_hostname import CertificateError from .packages import six @@ -38,12 +36,11 @@ from .connection import ( ) from .request import RequestMethods from .response import HTTPResponse -from .util import ( - assert_fingerprint, - get_host, - is_connection_dropped, - Timeout, -) + +from .util.connection import is_connection_dropped +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import get_host xrange = six.moves.xrange @@ -52,8 +49,8 @@ log = logging.getLogger(__name__) _Default = object() -## Pool objects +## Pool objects class ConnectionPool(object): """ Base class for all connection pools, such as @@ -64,10 +61,11 @@ class ConnectionPool(object): QueueCls = LifoQueue def __init__(self, host, port=None): - # httplib doesn't like it when we include brackets in ipv6 addresses - host = host.strip('[]') + if not host: + raise LocationValueError("No host specified.") - self.host = host + # httplib doesn't like it when we include brackets in ipv6 addresses + self.host = host.strip('[]') self.port = port def __str__(self): @@ -77,6 +75,7 @@ class ConnectionPool(object): # This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 _blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK]) + class HTTPConnectionPool(ConnectionPool, RequestMethods): """ Thread-safe connection pool for one host. @@ -121,6 +120,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): Headers to include with all requests, unless other headers are given explicitly. + :param retries: + Retry configuration to use by default with requests in this pool. 
+ :param _proxy: Parsed proxy URL, should not be used directly, instead, see :class:`urllib3.connectionpool.ProxyManager`" @@ -128,6 +130,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): :param _proxy_headers: A dictionary with proxy headers, should not be used directly, instead, see :class:`urllib3.connectionpool.ProxyManager`" + + :param \**conn_kw: + Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, + :class:`urllib3.connection.HTTPSConnection` instances. """ scheme = 'http' @@ -135,18 +141,22 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): def __init__(self, host, port=None, strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, - headers=None, _proxy=None, _proxy_headers=None): + headers=None, retries=None, + _proxy=None, _proxy_headers=None, + **conn_kw): ConnectionPool.__init__(self, host, port) RequestMethods.__init__(self, headers) self.strict = strict - # This is for backwards compatibility and can be removed once a timeout - # can only be set to a Timeout object if not isinstance(timeout, Timeout): timeout = Timeout.from_float(timeout) + if retries is None: + retries = Retry.DEFAULT + self.timeout = timeout + self.retries = retries self.pool = self.QueueCls(maxsize) self.block = block @@ -161,6 +171,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # These are mostly for testing and debugging purposes. self.num_connections = 0 self.num_requests = 0 + self.conn_kw = conn_kw + + if self.proxy: + # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. + # We cannot know if the user has added default socket options, so we cannot replace the + # list. 
+ self.conn_kw.setdefault('socket_options', []) def _new_conn(self): """ @@ -170,17 +187,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): log.info("Starting new HTTP connection (%d): %s" % (self.num_connections, self.host)) - extra_params = {} - if not six.PY3: # Python 2 - extra_params['strict'] = self.strict - conn = self.ConnectionCls(host=self.host, port=self.port, timeout=self.timeout.connect_timeout, - **extra_params) - if self.proxy is not None: - # Enable Nagle's algorithm for proxies, to avoid packet - # fragmentation. - conn.tcp_nodelay = 0 + strict=self.strict, **self.conn_kw) return conn def _get_conn(self, timeout=None): @@ -199,7 +208,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): try: conn = self.pool.get(block=self.block, timeout=timeout) - except AttributeError: # self.pool is None + except AttributeError: # self.pool is None raise ClosedPoolError(self, "Pool is closed.") except Empty: @@ -213,6 +222,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if conn and is_connection_dropped(conn): log.info("Resetting dropped connection: %s" % self.host) conn.close() + if getattr(conn, 'auto_open', 1) == 0: + # This is a proxied connection that has been mutated by + # httplib._tunnel() and cannot be reused (since it would + # attempt to bypass the proxy) + conn = None return conn or self._new_conn() @@ -232,19 +246,26 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): """ try: self.pool.put(conn, block=False) - return # Everything is dandy, done. + return # Everything is dandy, done. except AttributeError: # self.pool is None. pass except Full: # This should never happen if self.block == True - log.warning("HttpConnectionPool is full, discarding connection: %s" - % self.host) + log.warning( + "Connection pool is full, discarding connection: %s" % + self.host) # Connection never got put back into the pool, close it. 
if conn: conn.close() + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + pass + def _get_timeout(self, timeout): """ Helper that always returns a :class:`urllib3.util.Timeout` """ if timeout is _Default: @@ -276,23 +297,21 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): self.num_requests += 1 timeout_obj = self._get_timeout(timeout) + timeout_obj.start_connect() + conn.timeout = timeout_obj.connect_timeout - try: - timeout_obj.start_connect() - conn.timeout = timeout_obj.connect_timeout - # conn.request() calls httplib.*.request, not the method in - # urllib3.request. It also calls makefile (recv) on the socket. - conn.request(method, url, **httplib_request_kw) - except SocketTimeout: - raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, timeout_obj.connect_timeout)) + # Trigger any extra validation we need to do. + self._validate_conn(conn) + + # conn.request() calls httplib.*.request, not the method in + # urllib3.request. It also calls makefile (recv) on the socket. + conn.request(method, url, **httplib_request_kw) # Reset the timeout for the recv() on the socket read_timeout = timeout_obj.read_timeout # App Engine doesn't have a sock attr - if hasattr(conn, 'sock'): + if getattr(conn, 'sock', None): # In Python 3 socket.py will catch EAGAIN and return None when you # try and read into the file pointer created by http.client, which # instead raises a BadStatusLine exception. Instead of catching @@ -300,18 +319,17 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # timeouts, check for a zero timeout before making the request. if read_timeout == 0: raise ReadTimeoutError( - self, url, - "Read timed out. (read timeout=%s)" % read_timeout) + self, url, "Read timed out. 
(read timeout=%s)" % read_timeout) if read_timeout is Timeout.DEFAULT_TIMEOUT: conn.sock.settimeout(socket.getdefaulttimeout()) - else: # None or a value + else: # None or a value conn.sock.settimeout(read_timeout) # Receive the response from the server try: - try: # Python 2.7+, use buffering of HTTP responses + try: # Python 2.7+, use buffering of HTTP responses httplib_response = conn.getresponse(buffering=True) - except TypeError: # Python 2.6 and older + except TypeError: # Python 2.6 and older httplib_response = conn.getresponse() except SocketTimeout: raise ReadTimeoutError( @@ -323,17 +341,17 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # http://bugs.python.org/issue10272 if 'timed out' in str(e) or \ 'did not complete (read)' in str(e): # Python 2.6 - raise ReadTimeoutError(self, url, "Read timed out.") + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % read_timeout) raise - except SocketError as e: # Platform-specific: Python 2 + except SocketError as e: # Platform-specific: Python 2 # See the above comment about EAGAIN in Python 3. In Python 2 we # have to specifically catch it and throw the timeout error if e.errno in _blocking_errnos: raise ReadTimeoutError( - self, url, - "Read timed out. (read timeout=%s)" % read_timeout) + self, url, "Read timed out. (read timeout=%s)" % read_timeout) raise @@ -358,7 +376,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): conn.close() except Empty: - pass # Done. + pass # Done. 
def is_same_host(self, url): """ @@ -379,7 +397,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): return (scheme, host, port) == (self.scheme, self.host, self.port) - def urlopen(self, method, url, body=None, headers=None, retries=3, + def urlopen(self, method, url, body=None, headers=None, retries=None, redirect=True, assert_same_host=True, timeout=_Default, pool_timeout=None, release_conn=None, **response_kw): """ @@ -413,11 +431,25 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): these headers completely replace any pool-specific headers. :param retries: - Number of retries to allow before raising a MaxRetryError exception. + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + Pass ``None`` to retry until you receive a response. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. :param redirect: If True, automatically handle redirects (status codes 301, 302, - 303, 307, 308). Each redirect counts as a retry. + 303, 307, 308). Each redirect counts as a retry. Disabling retries + will disable redirect, too. 
:param assert_same_host: If ``True``, will make sure that the host of the pool requests is @@ -451,15 +483,15 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if headers is None: headers = self.headers - if retries < 0: - raise MaxRetryError(self, url) + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if release_conn is None: release_conn = response_kw.get('preload_content', True) # Check host if assert_same_host and not self.is_same_host(url): - raise HostChangedError(self, url, retries - 1) + raise HostChangedError(self, url, retries) conn = None @@ -470,11 +502,15 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): headers = headers.copy() headers.update(self.proxy_headers) + # Must keep the exception bound to a separate variable or else Python 3 + # complains about UnboundLocalError. + err = None + try: - # Request a connection from the queue + # Request a connection from the queue. conn = self._get_conn(timeout=pool_timeout) - # Make the request on the httplib connection object + # Make the request on the httplib connection object. httplib_response = self._make_request(conn, method, url, timeout=timeout, body=body, headers=headers) @@ -497,38 +533,35 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # ``response.read()``) except Empty: - # Timed out by queue + # Timed out by queue. raise EmptyPoolError(self, "No pool connections are available.") - except BaseSSLError as e: + except (BaseSSLError, CertificateError) as e: + # Release connection unconditionally because there is no way to + # close it externally in case of exception. + release_conn = True raise SSLError(e) - except CertificateError as e: - # Name mismatch - raise SSLError(e) + except (TimeoutError, HTTPException, SocketError) as e: + if conn: + # Discard the connection for these exceptions. It will be + # be replaced during the next _get_conn() call. 
+ conn.close() + conn = None - except TimeoutError as e: - # Connection broken, discard. - conn = None - # Save the error off for retry logic. + stacktrace = sys.exc_info()[2] + if isinstance(e, SocketError) and self.proxy: + e = ProxyError('Cannot connect to proxy.', e) + elif isinstance(e, (SocketError, HTTPException)): + e = ProtocolError('Connection aborted.', e) + + retries = retries.increment(method, url, error=e, + _pool=self, _stacktrace=stacktrace) + retries.sleep() + + # Keep track of the error for the retry warning. err = e - if retries == 0: - raise - - except (HTTPException, SocketError) as e: - # Connection broken, discard. It will be replaced next _get_conn(). - conn = None - # This is necessary so we can access e below - err = e - - if retries == 0: - if isinstance(e, SocketError) and self.proxy is not None: - raise ProxyError('Cannot connect to proxy. ' - 'Socket error: %s.' % e) - else: - raise MaxRetryError(self, url, e) - finally: if release_conn: # Put the connection back to be reused. 
If the connection is @@ -538,9 +571,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if not conn: # Try again - log.warn("Retrying (%d attempts remain) after connection " - "broken by '%r': %s" % (retries, err, url)) - return self.urlopen(method, url, body, headers, retries - 1, + log.warning("Retrying (%r) after connection " + "broken by '%r': %s" % (retries, err, url)) + return self.urlopen(method, url, body, headers, retries, redirect, assert_same_host, timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, **response_kw) @@ -550,11 +583,31 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if redirect_location: if response.status == 303: method = 'GET' + + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + raise + return response + log.info("Redirecting %s -> %s" % (url, redirect_location)) return self.urlopen(method, redirect_location, body, headers, - retries - 1, redirect, assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, **response_kw) + retries=retries, redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, **response_kw) + + # Check if we should retry the HTTP response. 
+ if retries.is_forced_retry(method, status_code=response.status): + retries = retries.increment(method, url, response=response, _pool=self) + retries.sleep() + log.info("Forced retry: %s" % url) + return self.urlopen(method, url, body, headers, + retries=retries, redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, **response_kw) return response @@ -581,15 +634,17 @@ class HTTPSConnectionPool(HTTPConnectionPool): ConnectionCls = HTTPSConnection def __init__(self, host, port=None, - strict=False, timeout=None, maxsize=1, - block=False, headers=None, + strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, + block=False, headers=None, retries=None, _proxy=None, _proxy_headers=None, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, ssl_version=None, - assert_hostname=None, assert_fingerprint=None): + assert_hostname=None, assert_fingerprint=None, + **conn_kw): HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, - block, headers, _proxy, _proxy_headers) + block, headers, retries, _proxy, _proxy_headers, + **conn_kw) self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs @@ -619,7 +674,12 @@ class HTTPSConnectionPool(HTTPConnectionPool): set_tunnel = conn.set_tunnel except AttributeError: # Platform-specific: Python 2.6 set_tunnel = conn._set_tunnel - set_tunnel(self.host, self.port, self.proxy_headers) + + if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older + set_tunnel(self.host, self.port) + else: + set_tunnel(self.host, self.port, self.proxy_headers) + # Establish tunnel connection early, because otherwise httplib # would improperly set Host: header to proxy's IP:port. 
conn.connect() @@ -645,20 +705,32 @@ class HTTPSConnectionPool(HTTPConnectionPool): actual_host = self.proxy.host actual_port = self.proxy.port - extra_params = {} - if not six.PY3: # Python 2 - extra_params['strict'] = self.strict - conn = self.ConnectionCls(host=actual_host, port=actual_port, timeout=self.timeout.connect_timeout, - **extra_params) - if self.proxy is not None: - # Enable Nagle's algorithm for proxies, to avoid packet - # fragmentation. - conn.tcp_nodelay = 0 + strict=self.strict, **self.conn_kw) return self._prepare_conn(conn) + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + super(HTTPSConnectionPool, self)._validate_conn(conn) + + # Force connect early to allow us to validate the connection. + if not getattr(conn, 'sock', None): # AppEngine might not have `.sock` + conn.connect() + + """ + if not conn.is_verified: + warnings.warn(( + 'Unverified HTTPS request is being made. ' + 'Adding certificate verification is strongly advised. See: ' + 'https://urllib3.readthedocs.org/en/latest/security.html ' + '(This warning will only appear once by default.)'), + InsecureRequestWarning) + """ + def connection_from_url(url, **kw): """ @@ -675,7 +747,7 @@ def connection_from_url(url, **kw): :class:`.ConnectionPool`. Useful for specifying things like timeout, maxsize, headers, etc. 
- Example: :: + Example:: >>> conn = connection_from_url('http://google.com/') >>> r = conn.request('GET', '/') diff --git a/lib/requests/packages/urllib3/contrib/ntlmpool.py b/lib/requests/packages/urllib3/contrib/ntlmpool.py index b8cd9330..c6b266f5 100644 --- a/lib/requests/packages/urllib3/contrib/ntlmpool.py +++ b/lib/requests/packages/urllib3/contrib/ntlmpool.py @@ -1,9 +1,3 @@ -# urllib3/contrib/ntlmpool.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - """ NTLM authenticating pool, contributed by erikcederstran diff --git a/lib/requests/packages/urllib3/contrib/pyopenssl.py b/lib/requests/packages/urllib3/contrib/pyopenssl.py index d9bda15a..374b544f 100644 --- a/lib/requests/packages/urllib3/contrib/pyopenssl.py +++ b/lib/requests/packages/urllib3/contrib/pyopenssl.py @@ -1,4 +1,7 @@ -'''SSL with SNI_-support for Python 2. +'''SSL with SNI_-support for Python 2. Follow these instructions if you would +like to verify SSL certificates in Python 2. Note, the default libraries do +*not* do certificate checking; you need to do additional work to validate +certificates yourself. This needs the following packages installed: @@ -6,9 +9,15 @@ This needs the following packages installed: * ndg-httpsclient (tested with 0.3.2) * pyasn1 (tested with 0.1.6) -To activate it call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3`. -This can be done in a ``sitecustomize`` module, or at any other time before -your application begins using ``urllib3``, like this:: +You can install them with the following command: + + pip install pyopenssl ndg-httpsclient pyasn1 + +To activate certificate checking, call +:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code +before you begin making HTTP requests. 
This can be done in a ``sitecustomize`` +module, or at any other time before your application begins using ``urllib3``, +like this:: try: import urllib3.contrib.pyopenssl @@ -29,24 +38,26 @@ Module Variables ---------------- :var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites. - Default: ``EECDH+ECDSA+AESGCM EECDH+aRSA+AESGCM EECDH+ECDSA+SHA256 - EECDH+aRSA+SHA256 EECDH+aRSA+RC4 EDH+aRSA EECDH RC4 !aNULL !eNULL !LOW !3DES - !MD5 !EXP !PSK !SRP !DSS'`` + Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES: + ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS`` .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) ''' -from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT -from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName +try: + from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT + from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName +except SyntaxError as e: + raise ImportError(e) + import OpenSSL.SSL from pyasn1.codec.der import decoder as der_decoder from pyasn1.type import univ, constraint -from socket import _fileobject +from socket import _fileobject, timeout import ssl import select -from cStringIO import StringIO from .. import connection from .. import util @@ -57,11 +68,18 @@ __all__ = ['inject_into_urllib3', 'extract_from_urllib3'] HAS_SNI = SUBJ_ALT_NAME_SUPPORT # Map from urllib3 to PyOpenSSL compatible parameter-values. 
-_openssl_versions = { - ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, - ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD, - ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, -} +try: + _openssl_versions = { + ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, + ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD, + ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, + } +except AttributeError: + _openssl_versions = { + ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, + ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, + } + _openssl_verify = { ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, @@ -69,12 +87,22 @@ _openssl_verify = { + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, } -# Default SSL/TLS cipher list. -# Recommendation by https://community.qualys.com/blogs/securitylabs/2013/08/05/ -# configuring-apache-nginx-and-openssl-for-forward-secrecy -DEFAULT_SSL_CIPHER_LIST = 'EECDH+ECDSA+AESGCM EECDH+aRSA+AESGCM ' + \ - 'EECDH+ECDSA+SHA256 EECDH+aRSA+SHA256 EECDH+aRSA+RC4 EDH+aRSA ' + \ - 'EECDH RC4 !aNULL !eNULL !LOW !3DES !MD5 !EXP !PSK !SRP !DSS' +# A secure default. +# Sources for more information on TLS ciphers: +# +# - https://wiki.mozilla.org/Security/Server_Side_TLS +# - https://www.ssllabs.com/projects/best-practices/index.html +# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ +# +# The general intent is: +# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), +# - prefer ECDHE over DHE for better performance, +# - prefer any AES-GCM over any AES-CBC for better performance and security, +# - use 3DES as fallback which is secure but slow, +# - disable NULL authentication, MD5 MACs and DSS for security reasons. 
+DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \ + "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \ + "!aNULL:!MD5:!DSS" orig_util_HAS_SNI = util.HAS_SNI @@ -137,184 +165,43 @@ def get_subj_alt_name(peer_cert): return dns_name -class fileobject(_fileobject): - - def read(self, size=-1): - # Use max, disallow tiny reads in a loop as they are very inefficient. - # We never leave read() with any leftover data from a new recv() call - # in our internal buffer. - rbufsize = max(self._rbufsize, self.default_bufsize) - # Our use of StringIO rather than lists of string objects returned by - # recv() minimizes memory usage and fragmentation that occurs when - # rbufsize is large compared to the typical return value of recv(). - buf = self._rbuf - buf.seek(0, 2) # seek end - if size < 0: - # Read until EOF - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - while True: - try: - data = self._sock.recv(rbufsize) - except OpenSSL.SSL.WantReadError: - continue - if not data: - break - buf.write(data) - return buf.getvalue() - else: - # Read until size bytes or EOF seen, whichever comes first - buf_len = buf.tell() - if buf_len >= size: - # Already have size bytes in our buffer? Extract and return. - buf.seek(0) - rv = buf.read(size) - self._rbuf = StringIO() - self._rbuf.write(buf.read()) - return rv - - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - while True: - left = size - buf_len - # recv() will malloc the amount of memory given as its - # parameter even though it often returns much less data - # than that. The returned data string is short lived - # as we copy it into a StringIO and free it. This avoids - # fragmentation issues on many platforms. - try: - data = self._sock.recv(left) - except OpenSSL.SSL.WantReadError: - continue - if not data: - break - n = len(data) - if n == size and not buf_len: - # Shortcut. Avoid buffer data copies when: - # - We have no data in our buffer. 
- # AND - # - Our call to recv returned exactly the - # number of bytes we were asked to read. - return data - if n == left: - buf.write(data) - del data # explicit free - break - assert n <= left, "recv(%d) returned %d bytes" % (left, n) - buf.write(data) - buf_len += n - del data # explicit free - #assert buf_len == buf.tell() - return buf.getvalue() - - def readline(self, size=-1): - buf = self._rbuf - buf.seek(0, 2) # seek end - if buf.tell() > 0: - # check if we already have it in our buffer - buf.seek(0) - bline = buf.readline(size) - if bline.endswith('\n') or len(bline) == size: - self._rbuf = StringIO() - self._rbuf.write(buf.read()) - return bline - del bline - if size < 0: - # Read until \n or EOF, whichever comes first - if self._rbufsize <= 1: - # Speed up unbuffered case - buf.seek(0) - buffers = [buf.read()] - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - data = None - recv = self._sock.recv - while True: - try: - while data != "\n": - data = recv(1) - if not data: - break - buffers.append(data) - except OpenSSL.SSL.WantReadError: - continue - break - return "".join(buffers) - - buf.seek(0, 2) # seek end - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - while True: - try: - data = self._sock.recv(self._rbufsize) - except OpenSSL.SSL.WantReadError: - continue - if not data: - break - nl = data.find('\n') - if nl >= 0: - nl += 1 - buf.write(data[:nl]) - self._rbuf.write(data[nl:]) - del data - break - buf.write(data) - return buf.getvalue() - else: - # Read until size bytes or \n or EOF seen, whichever comes first - buf.seek(0, 2) # seek end - buf_len = buf.tell() - if buf_len >= size: - buf.seek(0) - rv = buf.read(size) - self._rbuf = StringIO() - self._rbuf.write(buf.read()) - return rv - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. 
- while True: - try: - data = self._sock.recv(self._rbufsize) - except OpenSSL.SSL.WantReadError: - continue - if not data: - break - left = size - buf_len - # did we just receive a newline? - nl = data.find('\n', 0, left) - if nl >= 0: - nl += 1 - # save the excess data to _rbuf - self._rbuf.write(data[nl:]) - if buf_len: - buf.write(data[:nl]) - break - else: - # Shortcut. Avoid data copy through buf when returning - # a substring of our first recv(). - return data[:nl] - n = len(data) - if n == size and not buf_len: - # Shortcut. Avoid data copy through buf when - # returning exactly all of our first recv(). - return data - if n >= left: - buf.write(data[:left]) - self._rbuf.write(data[left:]) - break - buf.write(data) - buf_len += n - #assert buf_len == buf.tell() - return buf.getvalue() - - class WrappedSocket(object): - '''API-compatibility wrapper for Python OpenSSL's Connection-class.''' + '''API-compatibility wrapper for Python OpenSSL's Connection-class. - def __init__(self, connection, socket): + Note: _makefile_refs, _drop() and _reuse() are needed for the garbage + collector of pypy. 
+ ''' + + def __init__(self, connection, socket, suppress_ragged_eofs=True): self.connection = connection self.socket = socket + self.suppress_ragged_eofs = suppress_ragged_eofs + self._makefile_refs = 0 def fileno(self): return self.socket.fileno() def makefile(self, mode, bufsize=-1): - return fileobject(self.connection, mode, bufsize) + self._makefile_refs += 1 + return _fileobject(self, mode, bufsize, close=True) + + def recv(self, *args, **kwargs): + try: + data = self.connection.recv(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): + return b'' + else: + raise + except OpenSSL.SSL.WantReadError: + rd, wd, ed = select.select( + [self.socket], [], [], self.socket.gettimeout()) + if not rd: + raise timeout('The read operation timed out') + else: + return self.recv(*args, **kwargs) + else: + return data def settimeout(self, timeout): return self.socket.settimeout(timeout) @@ -323,7 +210,10 @@ class WrappedSocket(object): return self.connection.sendall(data) def close(self): - return self.connection.shutdown() + if self._makefile_refs < 1: + return self.connection.shutdown() + else: + self._makefile_refs -= 1 def getpeercert(self, binary_form=False): x509 = self.connection.get_peer_certificate() @@ -346,6 +236,15 @@ class WrappedSocket(object): ] } + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + def _verify_callback(cnx, x509, err_no, err_depth, return_code): return err_no == 0 @@ -366,6 +265,8 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ctx.load_verify_locations(ca_certs, None) except OpenSSL.SSL.Error as e: raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e) + else: + ctx.set_default_verify_paths() # Disable TLS compression to migitate CRIME attack (issue #309) OP_NO_COMPRESSION = 0x20000 diff --git a/lib/requests/packages/urllib3/exceptions.py 
b/lib/requests/packages/urllib3/exceptions.py index 98ef9abc..7519ba98 100644 --- a/lib/requests/packages/urllib3/exceptions.py +++ b/lib/requests/packages/urllib3/exceptions.py @@ -1,9 +1,3 @@ -# urllib3/exceptions.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - ## Base Exceptions @@ -11,6 +5,11 @@ class HTTPError(Exception): "Base exception used by this module." pass +class HTTPWarning(Warning): + "Base warning used by this module." + pass + + class PoolError(HTTPError): "Base exception for errors caused within a pool." @@ -49,17 +48,33 @@ class DecodeError(HTTPError): pass +class ProtocolError(HTTPError): + "Raised when something unexpected happens mid-request/response." + pass + + +#: Renamed to ProtocolError but aliased for backwards compatibility. +ConnectionError = ProtocolError + + ## Leaf Exceptions class MaxRetryError(RequestError): - "Raised when the maximum number of retries is exceeded." + """Raised when the maximum number of retries is exceeded. + + :param pool: The connection pool + :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` + :param string url: The requested Url + :param exceptions.Exception reason: The underlying error + + """ def __init__(self, pool, url, reason=None): self.reason = reason message = "Max retries exceeded with url: %s" % url if reason: - message += " (Caused by %s: %s)" % (type(reason), reason) + message += " (Caused by %r)" % reason else: message += " (Caused by redirect)" @@ -111,7 +126,12 @@ class ClosedPoolError(PoolError): pass -class LocationParseError(ValueError, HTTPError): +class LocationValueError(ValueError, HTTPError): + "Raised when there is something wrong with a given URL input." + pass + + +class LocationParseError(LocationValueError): "Raised when get_host or similar fails to parse the URL input." 
def __init__(self, location): @@ -119,3 +139,18 @@ class LocationParseError(ValueError, HTTPError): HTTPError.__init__(self, message) self.location = location + + +class SecurityWarning(HTTPWarning): + "Warned when perfoming security reducing actions" + pass + + +class InsecureRequestWarning(SecurityWarning): + "Warned when making an unverified HTTPS request." + pass + + +class SystemTimeWarning(SecurityWarning): + "Warned when system time is suspected to be wrong" + pass diff --git a/lib/requests/packages/urllib3/fields.py b/lib/requests/packages/urllib3/fields.py index ed017657..c853f8d5 100644 --- a/lib/requests/packages/urllib3/fields.py +++ b/lib/requests/packages/urllib3/fields.py @@ -1,9 +1,3 @@ -# urllib3/fields.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - import email.utils import mimetypes @@ -15,7 +9,7 @@ def guess_content_type(filename, default='application/octet-stream'): Guess the "Content-Type" of a file. :param filename: - The filename to guess the "Content-Type" of using :mod:`mimetimes`. + The filename to guess the "Content-Type" of using :mod:`mimetypes`. :param default: If no "Content-Type" can be guessed, default to `default`. """ @@ -78,9 +72,10 @@ class RequestField(object): """ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. - Supports constructing :class:`~urllib3.fields.RequestField` from parameter - of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) - tuple where the MIME type is optional. For example: :: + Supports constructing :class:`~urllib3.fields.RequestField` from + parameter of key/value strings AND key/filetuple. A filetuple is a + (filename, data, MIME type) tuple where the MIME type is optional. 
+ For example:: 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), @@ -125,8 +120,8 @@ class RequestField(object): 'Content-Disposition' fields. :param header_parts: - A sequence of (k, v) typles or a :class:`dict` of (k, v) to format as - `k1="v1"; k2="v2"; ...`. + A sequence of (k, v) typles or a :class:`dict` of (k, v) to format + as `k1="v1"; k2="v2"; ...`. """ parts = [] iterable = header_parts @@ -158,7 +153,8 @@ class RequestField(object): lines.append('\r\n') return '\r\n'.join(lines) - def make_multipart(self, content_disposition=None, content_type=None, content_location=None): + def make_multipart(self, content_disposition=None, content_type=None, + content_location=None): """ Makes this request field into a multipart request field. @@ -172,6 +168,10 @@ class RequestField(object): """ self.headers['Content-Disposition'] = content_disposition or 'form-data' - self.headers['Content-Disposition'] += '; '.join(['', self._render_parts((('name', self._name), ('filename', self._filename)))]) + self.headers['Content-Disposition'] += '; '.join([ + '', self._render_parts( + (('name', self._name), ('filename', self._filename)) + ) + ]) self.headers['Content-Type'] = content_type self.headers['Content-Location'] = content_location diff --git a/lib/requests/packages/urllib3/filepost.py b/lib/requests/packages/urllib3/filepost.py index e8b30bdd..0fbf488d 100644 --- a/lib/requests/packages/urllib3/filepost.py +++ b/lib/requests/packages/urllib3/filepost.py @@ -1,11 +1,4 @@ -# urllib3/filepost.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - import codecs -import mimetypes from uuid import uuid4 from io import BytesIO @@ -38,10 +31,10 @@ def iter_field_objects(fields): i = iter(fields) for field in i: - if isinstance(field, RequestField): - yield field - else: - yield 
RequestField.from_tuples(*field) + if isinstance(field, RequestField): + yield field + else: + yield RequestField.from_tuples(*field) def iter_fields(fields): diff --git a/lib/requests/packages/urllib3/packages/ordered_dict.py b/lib/requests/packages/urllib3/packages/ordered_dict.py index 7f8ee154..4479363c 100644 --- a/lib/requests/packages/urllib3/packages/ordered_dict.py +++ b/lib/requests/packages/urllib3/packages/ordered_dict.py @@ -2,7 +2,6 @@ # Passes Python2.7's test suite and incorporates all the latest updates. # Copyright 2009 Raymond Hettinger, released under the MIT License. # http://code.activestate.com/recipes/576693/ - try: from thread import get_ident as _get_ident except ImportError: diff --git a/lib/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py b/lib/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py index 3aa5b2e1..dd59a75f 100644 --- a/lib/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py +++ b/lib/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py @@ -7,7 +7,7 @@ except ImportError: from backports.ssl_match_hostname import CertificateError, match_hostname except ImportError: # Our vendored copy - from _implementation import CertificateError, match_hostname + from ._implementation import CertificateError, match_hostname # Not needed, but documenting what we provide. 
__all__ = ('CertificateError', 'match_hostname') diff --git a/lib/requests/packages/urllib3/poolmanager.py b/lib/requests/packages/urllib3/poolmanager.py index f18ff2bb..515dc962 100644 --- a/lib/requests/packages/urllib3/poolmanager.py +++ b/lib/requests/packages/urllib3/poolmanager.py @@ -1,9 +1,3 @@ -# urllib3/poolmanager.py -# Copyright 2008-2014 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - import logging try: # Python 3 @@ -14,8 +8,10 @@ except ImportError: from ._collections import RecentlyUsedContainer from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool from .connectionpool import port_by_scheme +from .exceptions import LocationValueError from .request import RequestMethods -from .util import parse_url +from .util.url import parse_url +from .util.retry import Retry __all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] @@ -49,7 +45,7 @@ class PoolManager(RequestMethods): Additional parameters are used to create fresh :class:`urllib3.connectionpool.ConnectionPool` instances. - Example: :: + Example:: >>> manager = PoolManager(num_pools=2) >>> r = manager.request('GET', 'http://google.com/') @@ -102,10 +98,11 @@ class PoolManager(RequestMethods): ``urllib3.connectionpool.port_by_scheme``. """ + if not host: + raise LocationValueError("No host specified.") + scheme = scheme or 'http' - port = port or port_by_scheme.get(scheme, 80) - pool_key = (scheme, host, port) with self.pools.lock: @@ -118,6 +115,7 @@ class PoolManager(RequestMethods): # Make a fresh ConnectionPool of the desired type pool = self._new_pool(scheme, host, port) self.pools[pool_key] = pool + return pool def connection_from_url(self, url): @@ -161,13 +159,18 @@ class PoolManager(RequestMethods): # Support relative URLs for redirecting. 
redirect_location = urljoin(url, redirect_location) - # RFC 2616, Section 10.3.4 + # RFC 7231, Section 6.4.4 if response.status == 303: method = 'GET' - log.info("Redirecting %s -> %s" % (url, redirect_location)) - kw['retries'] = kw.get('retries', 3) - 1 # Persist retries countdown + retries = kw.get('retries') + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect) + + kw['retries'] = retries.increment(method, redirect_location) kw['redirect'] = redirect + + log.info("Redirecting %s -> %s" % (url, redirect_location)) return self.urlopen(method, redirect_location, **kw) @@ -208,12 +211,16 @@ class ProxyManager(PoolManager): if not proxy.port: port = port_by_scheme.get(proxy.scheme, 80) proxy = proxy._replace(port=port) + + assert proxy.scheme in ("http", "https"), \ + 'Not supported proxy scheme %s' % proxy.scheme + self.proxy = proxy self.proxy_headers = proxy_headers or {} - assert self.proxy.scheme in ("http", "https"), \ - 'Not supported proxy scheme %s' % self.proxy.scheme + connection_pool_kw['_proxy'] = self.proxy connection_pool_kw['_proxy_headers'] = self.proxy_headers + super(ProxyManager, self).__init__( num_pools, headers, **connection_pool_kw) @@ -248,10 +255,10 @@ class ProxyManager(PoolManager): # For proxied HTTPS requests, httplib sets the necessary headers # on the CONNECT to the proxy. For HTTP, we'll definitely # need to set 'Host' at the very least. 
- kw['headers'] = self._set_proxy_headers(url, kw.get('headers', - self.headers)) + headers = kw.get('headers', self.headers) + kw['headers'] = self._set_proxy_headers(url, headers) - return super(ProxyManager, self).urlopen(method, url, redirect, **kw) + return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) def proxy_from_url(url, **kw): diff --git a/lib/requests/packages/urllib3/request.py b/lib/requests/packages/urllib3/request.py index 2a92cc20..51fe2386 100644 --- a/lib/requests/packages/urllib3/request.py +++ b/lib/requests/packages/urllib3/request.py @@ -1,9 +1,3 @@ -# urllib3/request.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - try: from urllib.parse import urlencode except ImportError: @@ -26,8 +20,8 @@ class RequestMethods(object): Specifically, - :meth:`.request_encode_url` is for sending requests whose fields are encoded - in the URL (such as GET, HEAD, DELETE). + :meth:`.request_encode_url` is for sending requests whose fields are + encoded in the URL (such as GET, HEAD, DELETE). :meth:`.request_encode_body` is for sending requests whose fields are encoded in the *body* of the request using multipart or www-form-urlencoded @@ -51,7 +45,7 @@ class RequestMethods(object): def urlopen(self, method, url, body=None, headers=None, encode_multipart=True, multipart_boundary=None, - **kw): # Abstract + **kw): # Abstract raise NotImplemented("Classes extending RequestMethods must implement " "their own ``urlopen`` method.") @@ -61,8 +55,8 @@ class RequestMethods(object): ``fields`` based on the ``method`` used. This is a convenience method that requires the least amount of manual - effort. It can be used in most situations, while still having the option - to drop down to more specific methods when necessary, such as + effort. 
It can be used in most situations, while still having the + option to drop down to more specific methods when necessary, such as :meth:`request_encode_url`, :meth:`request_encode_body`, or even the lowest level :meth:`urlopen`. """ @@ -70,12 +64,12 @@ class RequestMethods(object): if method in self._encode_url_methods: return self.request_encode_url(method, url, fields=fields, - headers=headers, - **urlopen_kw) + headers=headers, + **urlopen_kw) else: return self.request_encode_body(method, url, fields=fields, - headers=headers, - **urlopen_kw) + headers=headers, + **urlopen_kw) def request_encode_url(self, method, url, fields=None, **urlopen_kw): """ @@ -94,18 +88,18 @@ class RequestMethods(object): the body. This is useful for request methods like POST, PUT, PATCH, etc. When ``encode_multipart=True`` (default), then - :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the - payload with the appropriate content type. Otherwise + :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode + the payload with the appropriate content type. Otherwise :meth:`urllib.urlencode` is used with the 'application/x-www-form-urlencoded' content type. Multipart encoding must be used when posting files, and it's reasonably - safe to use it in other times too. However, it may break request signing, - such as with OAuth. + safe to use it in other times too. However, it may break request + signing, such as with OAuth. Supports an optional ``fields`` parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where - the MIME type is optional. For example: :: + the MIME type is optional. For example:: fields = { 'foo': 'bar', @@ -119,17 +113,17 @@ class RequestMethods(object): When uploading a file, providing a filename (the first parameter of the tuple) is optional but recommended to best mimick behavior of browsers. 
- Note that if ``headers`` are supplied, the 'Content-Type' header will be - overwritten because it depends on the dynamic random boundary string + Note that if ``headers`` are supplied, the 'Content-Type' header will + be overwritten because it depends on the dynamic random boundary string which is used to compose the body of the request. The random boundary string can be explicitly set with the ``multipart_boundary`` parameter. """ if encode_multipart: - body, content_type = encode_multipart_formdata(fields or {}, - boundary=multipart_boundary) + body, content_type = encode_multipart_formdata( + fields or {}, boundary=multipart_boundary) else: body, content_type = (urlencode(fields or {}), - 'application/x-www-form-urlencoded') + 'application/x-www-form-urlencoded') if headers is None: headers = self.headers diff --git a/lib/requests/packages/urllib3/response.py b/lib/requests/packages/urllib3/response.py index 6a1fe1a7..e69de957 100644 --- a/lib/requests/packages/urllib3/response.py +++ b/lib/requests/packages/urllib3/response.py @@ -1,21 +1,14 @@ -# urllib3/response.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - - -import logging import zlib import io +from socket import timeout as SocketTimeout -from .exceptions import DecodeError +from ._collections import HTTPHeaderDict +from .exceptions import ProtocolError, DecodeError, ReadTimeoutError from .packages.six import string_types as basestring, binary_type -from .util import is_fp_closed +from .connection import HTTPException, BaseSSLError +from .util.response import is_fp_closed -log = logging.getLogger(__name__) - class DeflateDecoder(object): @@ -55,7 +48,10 @@ class HTTPResponse(io.IOBase): HTTP Response container. 
Backwards-compatible to httplib's HTTPResponse but the response ``body`` is - loaded and decoded on-demand when the ``data`` property is accessed. + loaded and decoded on-demand when the ``data`` property is accessed. This + class is also compatible with the Python standard library's :mod:`io` + module, and can hence be treated as a readable object in the context of that + framework. Extra parameters for behaviour not present in httplib.HTTPResponse: @@ -79,7 +75,10 @@ class HTTPResponse(io.IOBase): def __init__(self, body='', headers=None, status=0, version=0, reason=None, strict=0, preload_content=True, decode_content=True, original_response=None, pool=None, connection=None): - self.headers = headers or {} + + self.headers = HTTPHeaderDict() + if headers: + self.headers.update(headers) self.status = status self.version = version self.reason = reason @@ -87,11 +86,14 @@ class HTTPResponse(io.IOBase): self.decode_content = decode_content self._decoder = None - self._body = body if body and isinstance(body, basestring) else None + self._body = None self._fp = None self._original_response = original_response self._fp_bytes_read = 0 + if body and isinstance(body, (basestring, binary_type)): + self._body = body + self._pool = pool self._connection = connection @@ -159,8 +161,8 @@ class HTTPResponse(io.IOBase): after having ``.read()`` the file object. (Overridden if ``amt`` is set.) 
""" - # Note: content-encoding value should be case-insensitive, per RFC 2616 - # Section 3.5 + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 content_encoding = self.headers.get('content-encoding', '').lower() if self._decoder is None: if content_encoding in self.CONTENT_DECODERS: @@ -174,23 +176,42 @@ class HTTPResponse(io.IOBase): flush_decoder = False try: - if amt is None: - # cStringIO doesn't like amt=None - data = self._fp.read() - flush_decoder = True - else: - cache_content = False - data = self._fp.read(amt) - if amt != 0 and not data: # Platform-specific: Buggy versions of Python. - # Close the connection when no data is returned - # - # This is redundant to what httplib/http.client _should_ - # already do. However, versions of python released before - # December 15, 2012 (http://bugs.python.org/issue16298) do not - # properly close the connection in all cases. There is no harm - # in redundantly calling close. - self._fp.close() + try: + if amt is None: + # cStringIO doesn't like amt=None + data = self._fp.read() flush_decoder = True + else: + cache_content = False + data = self._fp.read(amt) + if amt != 0 and not data: # Platform-specific: Buggy versions of Python. + # Close the connection when no data is returned + # + # This is redundant to what httplib/http.client _should_ + # already do. However, versions of python released before + # December 15, 2012 (http://bugs.python.org/issue16298) do + # not properly close the connection in all cases. There is + # no harm in redundantly calling close. + self._fp.close() + flush_decoder = True + + except SocketTimeout: + # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but + # there is yet no clean way to get at it from this context. + raise ReadTimeoutError(self._pool, None, 'Read timed out.') + + except BaseSSLError as e: + # FIXME: Is there a better way to differentiate between SSLErrors? 
+ if not 'read operation timed out' in str(e): # Defensive: + # This shouldn't happen but just in case we're missing an edge + # case, let's avoid swallowing SSL errors. + raise + + raise ReadTimeoutError(self._pool, None, 'Read timed out.') + + except HTTPException as e: + # This includes IncompleteRead. + raise ProtocolError('Connection broken: %r' % e, e) self._fp_bytes_read += len(data) @@ -200,8 +221,7 @@ class HTTPResponse(io.IOBase): except (IOError, zlib.error) as e: raise DecodeError( "Received response with content-encoding: %s, but " - "failed to decode it." % content_encoding, - e) + "failed to decode it." % content_encoding, e) if flush_decoder and decode_content and self._decoder: buf = self._decoder.decompress(binary_type()) @@ -238,7 +258,6 @@ class HTTPResponse(io.IOBase): if data: yield data - @classmethod def from_httplib(ResponseCls, r, **response_kw): """ @@ -249,17 +268,9 @@ class HTTPResponse(io.IOBase): with ``original_response=r``. """ - # Normalize headers between different versions of Python - headers = {} + headers = HTTPHeaderDict() for k, v in r.getheaders(): - # Python 3: Header keys are returned capitalised - k = k.lower() - - has_value = headers.get(k) - if has_value: # Python 3: Repeating header keys are unmerged. - v = ', '.join([has_value, v]) - - headers[k] = v + headers.add(k, v) # HTTPResponse objects in Python 3 don't have a .strict attribute strict = getattr(r, 'strict', 0) @@ -301,7 +312,7 @@ class HTTPResponse(io.IOBase): elif hasattr(self._fp, "fileno"): return self._fp.fileno() else: - raise IOError("The file-like object this HTTPResponse is wrapped " + raise IOError("The file-like object this HTTPResponse is wrapped " "around has no file descriptor") def flush(self): @@ -309,4 +320,14 @@ class HTTPResponse(io.IOBase): return self._fp.flush() def readable(self): + # This method is required for `io` module compatibility. return True + + def readinto(self, b): + # This method is required for `io` module compatibility. 
+ temp = self.read(len(b)) + if len(temp) == 0: + return 0 + else: + b[:len(temp)] = temp + return len(temp) diff --git a/lib/requests/packages/urllib3/util.py b/lib/requests/packages/urllib3/util.py deleted file mode 100644 index bd266317..00000000 --- a/lib/requests/packages/urllib3/util.py +++ /dev/null @@ -1,648 +0,0 @@ -# urllib3/util.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - - -from base64 import b64encode -from binascii import hexlify, unhexlify -from collections import namedtuple -from hashlib import md5, sha1 -from socket import error as SocketError, _GLOBAL_DEFAULT_TIMEOUT -import time - -try: - from select import poll, POLLIN -except ImportError: # `poll` doesn't exist on OSX and other platforms - poll = False - try: - from select import select - except ImportError: # `select` doesn't exist on AppEngine. - select = False - -try: # Test for SSL features - SSLContext = None - HAS_SNI = False - - import ssl - from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 - from ssl import SSLContext # Modern SSL? - from ssl import HAS_SNI # Has SNI? -except ImportError: - pass - -from .packages import six -from .exceptions import LocationParseError, SSLError, TimeoutStateError - - -_Default = object() -# The default timeout to use for socket connections. This is the attribute used -# by httplib to define the default timeout - - -def current_time(): - """ - Retrieve the current time, this function is mocked out in unit testing. - """ - return time.time() - - -class Timeout(object): - """ - Utility object for storing timeout values. - - Example usage: - - .. code-block:: python - - timeout = urllib3.util.Timeout(connect=2.0, read=7.0) - pool = HTTPConnectionPool('www.google.com', 80, timeout=timeout) - pool.request(...) 
# Etc, etc - - :param connect: - The maximum amount of time to wait for a connection attempt to a server - to succeed. Omitting the parameter will default the connect timeout to - the system default, probably `the global default timeout in socket.py - `_. - None will set an infinite timeout for connection attempts. - - :type connect: integer, float, or None - - :param read: - The maximum amount of time to wait between consecutive - read operations for a response from the server. Omitting - the parameter will default the read timeout to the system - default, probably `the global default timeout in socket.py - `_. - None will set an infinite timeout. - - :type read: integer, float, or None - - :param total: - This combines the connect and read timeouts into one; the read timeout - will be set to the time leftover from the connect attempt. In the - event that both a connect timeout and a total are specified, or a read - timeout and a total are specified, the shorter timeout will be applied. - - Defaults to None. - - :type total: integer, float, or None - - .. note:: - - Many factors can affect the total amount of time for urllib3 to return - an HTTP response. Specifically, Python's DNS resolver does not obey the - timeout specified on the socket. Other factors that can affect total - request time include high CPU load, high swap, the program running at a - low priority level, or other behaviors. The observed running time for - urllib3 to return a response may be greater than the value passed to - `total`. - - In addition, the read and total timeouts only measure the time between - read operations on the socket connecting the client and the server, - not the total amount of time for the request to return a complete - response. For most requests, the timeout is raised because the server - has not sent the first byte in the specified time. 
This is not always - the case; if a server streams one byte every fifteen seconds, a timeout - of 20 seconds will not ever trigger, even though the request will - take several minutes to complete. - - If your goal is to cut off any request after a set amount of wall clock - time, consider having a second "watcher" thread to cut off a slow - request. - """ - - #: A sentinel object representing the default timeout value - DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT - - def __init__(self, total=None, connect=_Default, read=_Default): - self._connect = self._validate_timeout(connect, 'connect') - self._read = self._validate_timeout(read, 'read') - self.total = self._validate_timeout(total, 'total') - self._start_connect = None - - def __str__(self): - return '%s(connect=%r, read=%r, total=%r)' % ( - type(self).__name__, self._connect, self._read, self.total) - - - @classmethod - def _validate_timeout(cls, value, name): - """ Check that a timeout attribute is valid - - :param value: The timeout value to validate - :param name: The name of the timeout attribute to validate. This is used - for clear error messages - :return: the value - :raises ValueError: if the type is not an integer or a float, or if it - is a numeric value less than zero - """ - if value is _Default: - return cls.DEFAULT_TIMEOUT - - if value is None or value is cls.DEFAULT_TIMEOUT: - return value - - try: - float(value) - except (TypeError, ValueError): - raise ValueError("Timeout value %s was %s, but it must be an " - "int or float." % (name, value)) - - try: - if value < 0: - raise ValueError("Attempted to set %s timeout to %s, but the " - "timeout cannot be set to a value less " - "than 0." % (name, value)) - except TypeError: # Python 3 - raise ValueError("Timeout value %s was %s, but it must be an " - "int or float." % (name, value)) - - return value - - @classmethod - def from_float(cls, timeout): - """ Create a new Timeout from a legacy timeout value. 
- - The timeout value used by httplib.py sets the same timeout on the - connect(), and recv() socket requests. This creates a :class:`Timeout` - object that sets the individual timeouts to the ``timeout`` value passed - to this function. - - :param timeout: The legacy timeout value - :type timeout: integer, float, sentinel default object, or None - :return: a Timeout object - :rtype: :class:`Timeout` - """ - return Timeout(read=timeout, connect=timeout) - - def clone(self): - """ Create a copy of the timeout object - - Timeout properties are stored per-pool but each request needs a fresh - Timeout object to ensure each one has its own start/stop configured. - - :return: a copy of the timeout object - :rtype: :class:`Timeout` - """ - # We can't use copy.deepcopy because that will also create a new object - # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to - # detect the user default. - return Timeout(connect=self._connect, read=self._read, - total=self.total) - - def start_connect(self): - """ Start the timeout clock, used during a connect() attempt - - :raises urllib3.exceptions.TimeoutStateError: if you attempt - to start a timer that has been started already. - """ - if self._start_connect is not None: - raise TimeoutStateError("Timeout timer has already been started.") - self._start_connect = current_time() - return self._start_connect - - def get_connect_duration(self): - """ Gets the time elapsed since the call to :meth:`start_connect`. - - :return: the elapsed time - :rtype: float - :raises urllib3.exceptions.TimeoutStateError: if you attempt - to get duration for a timer that hasn't been started. - """ - if self._start_connect is None: - raise TimeoutStateError("Can't get connect duration for timer " - "that has not started.") - return current_time() - self._start_connect - - @property - def connect_timeout(self): - """ Get the value to use when setting a connection timeout. 
- - This will be a positive float or integer, the value None - (never timeout), or the default system timeout. - - :return: the connect timeout - :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None - """ - if self.total is None: - return self._connect - - if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: - return self.total - - return min(self._connect, self.total) - - @property - def read_timeout(self): - """ Get the value for the read timeout. - - This assumes some time has elapsed in the connection timeout and - computes the read timeout appropriately. - - If self.total is set, the read timeout is dependent on the amount of - time taken by the connect timeout. If the connection time has not been - established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be - raised. - - :return: the value to use for the read timeout - :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None - :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` - has not yet been called on this object. - """ - if (self.total is not None and - self.total is not self.DEFAULT_TIMEOUT and - self._read is not None and - self._read is not self.DEFAULT_TIMEOUT): - # in case the connect timeout has not yet been established. - if self._start_connect is None: - return self._read - return max(0, min(self.total - self.get_connect_duration(), - self._read)) - elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: - return max(0, self.total - self.get_connect_duration()) - else: - return self._read - - -class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'])): - """ - Datastructure for representing an HTTP URL. Used as a return value for - :func:`parse_url`. 
- """ - slots = () - - def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None): - return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment) - - @property - def hostname(self): - """For backwards-compatibility with urlparse. We're nice like that.""" - return self.host - - @property - def request_uri(self): - """Absolute path including the query string.""" - uri = self.path or '/' - - if self.query is not None: - uri += '?' + self.query - - return uri - - @property - def netloc(self): - """Network location including host and port""" - if self.port: - return '%s:%d' % (self.host, self.port) - return self.host - - -def split_first(s, delims): - """ - Given a string and an iterable of delimiters, split on the first found - delimiter. Return two split parts and the matched delimiter. - - If not found, then the first part is the full input string. - - Example: :: - - >>> split_first('foo/bar?baz', '?/=') - ('foo', 'bar?baz', '/') - >>> split_first('foo/bar?baz', '123') - ('foo/bar?baz', '', None) - - Scales linearly with number of delims. Not ideal for large number of delims. - """ - min_idx = None - min_delim = None - for d in delims: - idx = s.find(d) - if idx < 0: - continue - - if min_idx is None or idx < min_idx: - min_idx = idx - min_delim = d - - if min_idx is None or min_idx < 0: - return s, '', None - - return s[:min_idx], s[min_idx+1:], min_delim - - -def parse_url(url): - """ - Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is - performed to parse incomplete urls. Fields not provided will be None. - - Partly backwards-compatible with :mod:`urlparse`. - - Example: :: - - >>> parse_url('http://google.com/mail/') - Url(scheme='http', host='google.com', port=None, path='/', ...) - >>> parse_url('google.com:80') - Url(scheme=None, host='google.com', port=80, path=None, ...) - >>> parse_url('/foo?bar') - Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) 
- """ - - # While this code has overlap with stdlib's urlparse, it is much - # simplified for our needs and less annoying. - # Additionally, this implementations does silly things to be optimal - # on CPython. - - scheme = None - auth = None - host = None - port = None - path = None - fragment = None - query = None - - # Scheme - if '://' in url: - scheme, url = url.split('://', 1) - - # Find the earliest Authority Terminator - # (http://tools.ietf.org/html/rfc3986#section-3.2) - url, path_, delim = split_first(url, ['/', '?', '#']) - - if delim: - # Reassemble the path - path = delim + path_ - - # Auth - if '@' in url: - # Last '@' denotes end of auth part - auth, url = url.rsplit('@', 1) - - # IPv6 - if url and url[0] == '[': - host, url = url.split(']', 1) - host += ']' - - # Port - if ':' in url: - _host, port = url.split(':', 1) - - if not host: - host = _host - - if port: - # If given, ports must be integers. - if not port.isdigit(): - raise LocationParseError("Failed to parse: %s" % url) - port = int(port) - else: - # Blank ports are cool, too. (rfc3986#section-3.2.3) - port = None - - elif not host and url: - host = url - - if not path: - return Url(scheme, auth, host, port, path, query, fragment) - - # Fragment - if '#' in path: - path, fragment = path.split('#', 1) - - # Query - if '?' in path: - path, query = path.split('?', 1) - - return Url(scheme, auth, host, port, path, query, fragment) - - -def get_host(url): - """ - Deprecated. Use :func:`.parse_url` instead. - """ - p = parse_url(url) - return p.scheme or 'http', p.hostname, p.port - - -def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, - basic_auth=None, proxy_basic_auth=None): - """ - Shortcuts for generating request headers. - - :param keep_alive: - If ``True``, adds 'connection: keep-alive' header. - - :param accept_encoding: - Can be a boolean, list, or string. - ``True`` translates to 'gzip,deflate'. - List will get joined by comma. - String will be used as provided. 
- - :param user_agent: - String representing the user-agent you want, such as - "python-urllib3/0.6" - - :param basic_auth: - Colon-separated username:password string for 'authorization: basic ...' - auth header. - - :param proxy_basic_auth: - Colon-separated username:password string for 'proxy-authorization: basic ...' - auth header. - - Example: :: - - >>> make_headers(keep_alive=True, user_agent="Batman/1.0") - {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} - >>> make_headers(accept_encoding=True) - {'accept-encoding': 'gzip,deflate'} - """ - headers = {} - if accept_encoding: - if isinstance(accept_encoding, str): - pass - elif isinstance(accept_encoding, list): - accept_encoding = ','.join(accept_encoding) - else: - accept_encoding = 'gzip,deflate' - headers['accept-encoding'] = accept_encoding - - if user_agent: - headers['user-agent'] = user_agent - - if keep_alive: - headers['connection'] = 'keep-alive' - - if basic_auth: - headers['authorization'] = 'Basic ' + \ - b64encode(six.b(basic_auth)).decode('utf-8') - - if proxy_basic_auth: - headers['proxy-authorization'] = 'Basic ' + \ - b64encode(six.b(proxy_basic_auth)).decode('utf-8') - - return headers - - -def is_connection_dropped(conn): # Platform-specific - """ - Returns True if the connection is dropped and should be closed. - - :param conn: - :class:`httplib.HTTPConnection` object. - - Note: For platforms like AppEngine, this will always return ``False`` to - let the platform handle connection recycling transparently for us. - """ - sock = getattr(conn, 'sock', False) - if not sock: # Platform-specific: AppEngine - return False - - if not poll: - if not select: # Platform-specific: AppEngine - return False - - try: - return select([sock], [], [], 0.0)[0] - except SocketError: - return True - - # This version is better on platforms that support it. 
- p = poll() - p.register(sock, POLLIN) - for (fno, ev) in p.poll(0.0): - if fno == sock.fileno(): - # Either data is buffered (bad), or the connection is dropped. - return True - - -def resolve_cert_reqs(candidate): - """ - Resolves the argument to a numeric constant, which can be passed to - the wrap_socket function/method from the ssl module. - Defaults to :data:`ssl.CERT_NONE`. - If given a string it is assumed to be the name of the constant in the - :mod:`ssl` module or its abbrevation. - (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. - If it's neither `None` nor a string we assume it is already the numeric - constant which can directly be passed to wrap_socket. - """ - if candidate is None: - return CERT_NONE - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'CERT_' + candidate) - return res - - return candidate - - -def resolve_ssl_version(candidate): - """ - like resolve_cert_reqs - """ - if candidate is None: - return PROTOCOL_SSLv23 - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'PROTOCOL_' + candidate) - return res - - return candidate - - -def assert_fingerprint(cert, fingerprint): - """ - Checks if given fingerprint matches the supplied certificate. - - :param cert: - Certificate as bytes object. - :param fingerprint: - Fingerprint as string of hexdigits, can be interspersed by colons. - """ - - # Maps the length of a digest to a possible hash function producing - # this digest. - hashfunc_map = { - 16: md5, - 20: sha1 - } - - fingerprint = fingerprint.replace(':', '').lower() - - digest_length, rest = divmod(len(fingerprint), 2) - - if rest or digest_length not in hashfunc_map: - raise SSLError('Fingerprint is of invalid length.') - - # We need encode() here for py32; works on py2 and p33. 
- fingerprint_bytes = unhexlify(fingerprint.encode()) - - hashfunc = hashfunc_map[digest_length] - - cert_digest = hashfunc(cert).digest() - - if not cert_digest == fingerprint_bytes: - raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' - .format(hexlify(fingerprint_bytes), - hexlify(cert_digest))) - -def is_fp_closed(obj): - """ - Checks whether a given file-like object is closed. - - :param obj: - The file-like object to check. - """ - if hasattr(obj, 'fp'): - # Object is a container for another file-like object that gets released - # on exhaustion (e.g. HTTPResponse) - return obj.fp is None - - return obj.closed - - -if SSLContext is not None: # Python 3.2+ - def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None): - """ - All arguments except `server_hostname` have the same meaning as for - :func:`ssl.wrap_socket` - - :param server_hostname: - Hostname of the expected certificate - """ - context = SSLContext(ssl_version) - context.verify_mode = cert_reqs - - # Disable TLS compression to migitate CRIME attack (issue #309) - OP_NO_COMPRESSION = 0x20000 - context.options |= OP_NO_COMPRESSION - - if ca_certs: - try: - context.load_verify_locations(ca_certs) - # Py32 raises IOError - # Py33 raises FileNotFoundError - except Exception as e: # Reraise as SSLError - raise SSLError(e) - if certfile: - # FIXME: This block needs a test. 
- context.load_cert_chain(certfile, keyfile) - if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI - return context.wrap_socket(sock, server_hostname=server_hostname) - return context.wrap_socket(sock) - -else: # Python 3.1 and earlier - def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None): - return wrap_socket(sock, keyfile=keyfile, certfile=certfile, - ca_certs=ca_certs, cert_reqs=cert_reqs, - ssl_version=ssl_version) diff --git a/lib/requests/packages/urllib3/util/__init__.py b/lib/requests/packages/urllib3/util/__init__.py new file mode 100644 index 00000000..8becc814 --- /dev/null +++ b/lib/requests/packages/urllib3/util/__init__.py @@ -0,0 +1,24 @@ +# For backwards compatibility, provide imports that used to be here. +from .connection import is_connection_dropped +from .request import make_headers +from .response import is_fp_closed +from .ssl_ import ( + SSLContext, + HAS_SNI, + assert_fingerprint, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, +) +from .timeout import ( + current_time, + Timeout, +) + +from .retry import Retry +from .url import ( + get_host, + parse_url, + split_first, + Url, +) diff --git a/lib/requests/packages/urllib3/util/connection.py b/lib/requests/packages/urllib3/util/connection.py new file mode 100644 index 00000000..2156993a --- /dev/null +++ b/lib/requests/packages/urllib3/util/connection.py @@ -0,0 +1,97 @@ +import socket +try: + from select import poll, POLLIN +except ImportError: # `poll` doesn't exist on OSX and other platforms + poll = False + try: + from select import select + except ImportError: # `select` doesn't exist on AppEngine. + select = False + + +def is_connection_dropped(conn): # Platform-specific + """ + Returns True if the connection is dropped and should be closed. + + :param conn: + :class:`httplib.HTTPConnection` object. 
+ + Note: For platforms like AppEngine, this will always return ``False`` to + let the platform handle connection recycling transparently for us. + """ + sock = getattr(conn, 'sock', False) + if sock is False: # Platform-specific: AppEngine + return False + if sock is None: # Connection already closed (such as by httplib). + return True + + if not poll: + if not select: # Platform-specific: AppEngine + return False + + try: + return select([sock], [], [], 0.0)[0] + except socket.error: + return True + + # This version is better on platforms that support it. + p = poll() + p.register(sock, POLLIN) + for (fno, ev) in p.poll(0.0): + if fno == sock.fileno(): + # Either data is buffered (bad), or the connection is dropped. + return True + + +# This function is copied from socket.py in the Python 2.7 standard +# library test suite. Added to its signature is only `socket_options`. +def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, socket_options=None): + """Connect to *address* and return the socket object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. + """ + + host, port = address + err = None + for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket.socket(af, socktype, proto) + + # If provided, set socket level options before connecting. + # This is the only addition urllib3 makes to this function. 
+ _set_socket_options(sock, socket_options) + + if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except socket.error as _: + err = _ + if sock is not None: + sock.close() + + if err is not None: + raise err + else: + raise socket.error("getaddrinfo returns an empty list") + + +def _set_socket_options(sock, options): + if options is None: + return + + for opt in options: + sock.setsockopt(*opt) diff --git a/lib/requests/packages/urllib3/util/request.py b/lib/requests/packages/urllib3/util/request.py new file mode 100644 index 00000000..bc64f6b1 --- /dev/null +++ b/lib/requests/packages/urllib3/util/request.py @@ -0,0 +1,71 @@ +from base64 import b64encode + +from ..packages.six import b + +ACCEPT_ENCODING = 'gzip,deflate' + + +def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, + basic_auth=None, proxy_basic_auth=None, disable_cache=None): + """ + Shortcuts for generating request headers. + + :param keep_alive: + If ``True``, adds 'connection: keep-alive' header. + + :param accept_encoding: + Can be a boolean, list, or string. + ``True`` translates to 'gzip,deflate'. + List will get joined by comma. + String will be used as provided. + + :param user_agent: + String representing the user-agent you want, such as + "python-urllib3/0.6" + + :param basic_auth: + Colon-separated username:password string for 'authorization: basic ...' + auth header. + + :param proxy_basic_auth: + Colon-separated username:password string for 'proxy-authorization: basic ...' + auth header. + + :param disable_cache: + If ``True``, adds 'cache-control: no-cache' header. 
+ + Example:: + + >>> make_headers(keep_alive=True, user_agent="Batman/1.0") + {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} + >>> make_headers(accept_encoding=True) + {'accept-encoding': 'gzip,deflate'} + """ + headers = {} + if accept_encoding: + if isinstance(accept_encoding, str): + pass + elif isinstance(accept_encoding, list): + accept_encoding = ','.join(accept_encoding) + else: + accept_encoding = ACCEPT_ENCODING + headers['accept-encoding'] = accept_encoding + + if user_agent: + headers['user-agent'] = user_agent + + if keep_alive: + headers['connection'] = 'keep-alive' + + if basic_auth: + headers['authorization'] = 'Basic ' + \ + b64encode(b(basic_auth)).decode('utf-8') + + if proxy_basic_auth: + headers['proxy-authorization'] = 'Basic ' + \ + b64encode(b(proxy_basic_auth)).decode('utf-8') + + if disable_cache: + headers['cache-control'] = 'no-cache' + + return headers diff --git a/lib/requests/packages/urllib3/util/response.py b/lib/requests/packages/urllib3/util/response.py new file mode 100644 index 00000000..45fff552 --- /dev/null +++ b/lib/requests/packages/urllib3/util/response.py @@ -0,0 +1,22 @@ +def is_fp_closed(obj): + """ + Checks whether a given file-like object is closed. + + :param obj: + The file-like object to check. + """ + + try: + # Check via the official file-like-object way. + return obj.closed + except AttributeError: + pass + + try: + # Check if the object is a container for another file-like object that + # gets released on exhaustion (e.g. HTTPResponse). 
+ return obj.fp is None + except AttributeError: + pass + + raise ValueError("Unable to determine whether fp is closed.") diff --git a/lib/requests/packages/urllib3/util/retry.py b/lib/requests/packages/urllib3/util/retry.py new file mode 100644 index 00000000..eb560dfc --- /dev/null +++ b/lib/requests/packages/urllib3/util/retry.py @@ -0,0 +1,279 @@ +import time +import logging + +from ..exceptions import ( + ProtocolError, + ConnectTimeoutError, + ReadTimeoutError, + MaxRetryError, +) +from ..packages import six + + +log = logging.getLogger(__name__) + + +class Retry(object): + """ Retry configuration. + + Each retry attempt will create a new Retry object with updated values, so + they can be safely reused. + + Retries can be defined as a default for a pool:: + + retries = Retry(connect=5, read=2, redirect=5) + http = PoolManager(retries=retries) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool):: + + response = http.request('GET', 'http://example.com/', retries=Retry(10)) + + Retries can be disabled by passing ``False``:: + + response = http.request('GET', 'http://example.com/', retries=False) + + Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless + retries are disabled, in which case the causing exception will be raised. + + + :param int total: + Total number of retries to allow. Takes precedence over other counts. + + Set to ``None`` to remove this constraint and fall back on other + counts. It's a good idea to set this to some sensibly-high value to + account for unexpected edge cases and avoid infinite retry loops. + + Set to ``0`` to fail on the first retry. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int connect: + How many connection-related errors to retry on. + + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. 
+ + Set to ``0`` to fail on the first retry of this type. + + :param int read: + How many times to retry on read errors. + + These errors are raised after the request was sent to the server, so the + request may have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + :param int redirect: + How many redirects to perform. Limit this to avoid infinite redirect + loops. + + A redirect is a HTTP response with a status code 301, 302, 303, 307 or + 308. + + Set to ``0`` to fail on the first retry of this type. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param iterable method_whitelist: + Set of uppercased HTTP method verbs that we should retry on. + + By default, we only retry on methods which are considered to be + indempotent (multiple requests with the same parameters end with the + same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`. + + :param iterable status_forcelist: + A set of HTTP status codes that we should force a retry on. + + By default, this is disabled with ``None``. + + :param float backoff_factor: + A backoff factor to apply between attempts. urllib3 will sleep for:: + + {backoff factor} * (2 ^ ({number of total retries} - 1)) + + seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep + for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer + than :attr:`Retry.MAX_BACKOFF`. + + By default, backoff is disabled (set to 0). + + :param bool raise_on_redirect: Whether, if the number of redirects is + exhausted, to raise a MaxRetryError, or to return a response with a + response code in the 3xx range. + """ + + DEFAULT_METHOD_WHITELIST = frozenset([ + 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) + + #: Maximum backoff time. 
+ BACKOFF_MAX = 120 + + def __init__(self, total=10, connect=None, read=None, redirect=None, + method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, + backoff_factor=0, raise_on_redirect=True, _observed_errors=0): + + self.total = total + self.connect = connect + self.read = read + + if redirect is False or total is False: + redirect = 0 + raise_on_redirect = False + + self.redirect = redirect + self.status_forcelist = status_forcelist or set() + self.method_whitelist = method_whitelist + self.backoff_factor = backoff_factor + self.raise_on_redirect = raise_on_redirect + self._observed_errors = _observed_errors # TODO: use .history instead? + + def new(self, **kw): + params = dict( + total=self.total, + connect=self.connect, read=self.read, redirect=self.redirect, + method_whitelist=self.method_whitelist, + status_forcelist=self.status_forcelist, + backoff_factor=self.backoff_factor, + raise_on_redirect=self.raise_on_redirect, + _observed_errors=self._observed_errors, + ) + params.update(kw) + return type(self)(**params) + + @classmethod + def from_int(cls, retries, redirect=True, default=None): + """ Backwards-compatibility for the old retries format.""" + if retries is None: + retries = default if default is not None else cls.DEFAULT + + if isinstance(retries, Retry): + return retries + + redirect = bool(redirect) and None + new_retries = cls(retries, redirect=redirect) + log.debug("Converted retries value: %r -> %r" % (retries, new_retries)) + return new_retries + + def get_backoff_time(self): + """ Formula for computing the current backoff + + :rtype: float + """ + if self._observed_errors <= 1: + return 0 + + backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1)) + return min(self.BACKOFF_MAX, backoff_value) + + def sleep(self): + """ Sleep between retry attempts using an exponential backoff. + + By default, the backoff factor is 0 and this method will return + immediately. 
+ """ + backoff = self.get_backoff_time() + if backoff <= 0: + return + time.sleep(backoff) + + def _is_connection_error(self, err): + """ Errors when we're fairly sure that the server did not receive the + request, so it should be safe to retry. + """ + return isinstance(err, ConnectTimeoutError) + + def _is_read_error(self, err): + """ Errors that occur after the request has been started, so we can't + assume that the server did not process any of it. + """ + return isinstance(err, (ReadTimeoutError, ProtocolError)) + + def is_forced_retry(self, method, status_code): + """ Is this method/response retryable? (Based on method/codes whitelists) + """ + if self.method_whitelist and method.upper() not in self.method_whitelist: + return False + + return self.status_forcelist and status_code in self.status_forcelist + + def is_exhausted(self): + """ Are we out of retries? + """ + retry_counts = (self.total, self.connect, self.read, self.redirect) + retry_counts = list(filter(None, retry_counts)) + if not retry_counts: + return False + + return min(retry_counts) < 0 + + def increment(self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None): + """ Return a new Retry object with incremented retry counters. + + :param response: A response object, or None, if the server did not + return a response. + :type response: :class:`~urllib3.response.HTTPResponse` + :param Exception error: An error encountered during the request, or + None if the response was received successfully. + + :return: A new ``Retry`` object. + """ + if self.total is False and error: + # Disabled, indicate to re-raise the error. + raise six.reraise(type(error), error, _stacktrace) + + total = self.total + if total is not None: + total -= 1 + + _observed_errors = self._observed_errors + connect = self.connect + read = self.read + redirect = self.redirect + + if error and self._is_connection_error(error): + # Connect retry? 
+ if connect is False: + raise six.reraise(type(error), error, _stacktrace) + elif connect is not None: + connect -= 1 + _observed_errors += 1 + + elif error and self._is_read_error(error): + # Read retry? + if read is False: + raise six.reraise(type(error), error, _stacktrace) + elif read is not None: + read -= 1 + _observed_errors += 1 + + elif response and response.get_redirect_location(): + # Redirect retry? + if redirect is not None: + redirect -= 1 + + else: + # FIXME: Nothing changed, scenario doesn't make sense. + _observed_errors += 1 + + new_retry = self.new( + total=total, + connect=connect, read=read, redirect=redirect, + _observed_errors=_observed_errors) + + if new_retry.is_exhausted(): + raise MaxRetryError(_pool, url, error) + + log.debug("Incremented Retry for (url='%s'): %r" % (url, new_retry)) + + return new_retry + + + def __repr__(self): + return ('{cls.__name__}(total={self.total}, connect={self.connect}, ' + 'read={self.read}, redirect={self.redirect})').format( + cls=type(self), self=self) + + +# For backwards compatibility (equivalent to pre-v1.9): +Retry.DEFAULT = Retry(3) diff --git a/lib/requests/packages/urllib3/util/ssl_.py b/lib/requests/packages/urllib3/util/ssl_.py new file mode 100644 index 00000000..9cfe2d2a --- /dev/null +++ b/lib/requests/packages/urllib3/util/ssl_.py @@ -0,0 +1,132 @@ +from binascii import hexlify, unhexlify +from hashlib import md5, sha1 + +from ..exceptions import SSLError + + +try: # Test for SSL features + SSLContext = None + HAS_SNI = False + + import ssl + from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 + from ssl import SSLContext # Modern SSL? + from ssl import HAS_SNI # Has SNI? +except ImportError: + pass + + +def assert_fingerprint(cert, fingerprint): + """ + Checks if given fingerprint matches the supplied certificate. + + :param cert: + Certificate as bytes object. + :param fingerprint: + Fingerprint as string of hexdigits, can be interspersed by colons. 
+ """ + + # Maps the length of a digest to a possible hash function producing + # this digest. + hashfunc_map = { + 16: md5, + 20: sha1 + } + + fingerprint = fingerprint.replace(':', '').lower() + digest_length, odd = divmod(len(fingerprint), 2) + + if odd or digest_length not in hashfunc_map: + raise SSLError('Fingerprint is of invalid length.') + + # We need encode() here for py32; works on py2 and p33. + fingerprint_bytes = unhexlify(fingerprint.encode()) + + hashfunc = hashfunc_map[digest_length] + + cert_digest = hashfunc(cert).digest() + + if not cert_digest == fingerprint_bytes: + raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' + .format(hexlify(fingerprint_bytes), + hexlify(cert_digest))) + + +def resolve_cert_reqs(candidate): + """ + Resolves the argument to a numeric constant, which can be passed to + the wrap_socket function/method from the ssl module. + Defaults to :data:`ssl.CERT_NONE`. + If given a string it is assumed to be the name of the constant in the + :mod:`ssl` module or its abbrevation. + (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. + If it's neither `None` nor a string we assume it is already the numeric + constant which can directly be passed to wrap_socket. 
+ """ + if candidate is None: + return CERT_NONE + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, 'CERT_' + candidate) + return res + + return candidate + + +def resolve_ssl_version(candidate): + """ + like resolve_cert_reqs + """ + if candidate is None: + return PROTOCOL_SSLv23 + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, 'PROTOCOL_' + candidate) + return res + + return candidate + + +if SSLContext is not None: # Python 3.2+ + def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, + ca_certs=None, server_hostname=None, + ssl_version=None): + """ + All arguments except `server_hostname` have the same meaning as for + :func:`ssl.wrap_socket` + + :param server_hostname: + Hostname of the expected certificate + """ + context = SSLContext(ssl_version) + context.verify_mode = cert_reqs + + # Disable TLS compression to migitate CRIME attack (issue #309) + OP_NO_COMPRESSION = 0x20000 + context.options |= OP_NO_COMPRESSION + + if ca_certs: + try: + context.load_verify_locations(ca_certs) + # Py32 raises IOError + # Py33 raises FileNotFoundError + except Exception as e: # Reraise as SSLError + raise SSLError(e) + if certfile: + # FIXME: This block needs a test. 
+ context.load_cert_chain(certfile, keyfile) + if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI + return context.wrap_socket(sock, server_hostname=server_hostname) + return context.wrap_socket(sock) + +else: # Python 3.1 and earlier + def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, + ca_certs=None, server_hostname=None, + ssl_version=None): + return wrap_socket(sock, keyfile=keyfile, certfile=certfile, + ca_certs=ca_certs, cert_reqs=cert_reqs, + ssl_version=ssl_version) diff --git a/lib/requests/packages/urllib3/util/timeout.py b/lib/requests/packages/urllib3/util/timeout.py new file mode 100644 index 00000000..ea7027f3 --- /dev/null +++ b/lib/requests/packages/urllib3/util/timeout.py @@ -0,0 +1,240 @@ +# The default socket timeout, used by httplib to indicate that no timeout was +# specified by the user +from socket import _GLOBAL_DEFAULT_TIMEOUT +import time + +from ..exceptions import TimeoutStateError + +# A sentinel value to indicate that no timeout was specified by the user in +# urllib3 +_Default = object() + +def current_time(): + """ + Retrieve the current time. This function is mocked out in unit testing. + """ + return time.time() + + +class Timeout(object): + """ Timeout configuration. + + Timeouts can be defined as a default for a pool:: + + timeout = Timeout(connect=2.0, read=7.0) + http = PoolManager(timeout=timeout) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool):: + + response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) + + Timeouts can be disabled by setting all the parameters to ``None``:: + + no_timeout = Timeout(connect=None, read=None) + response = http.request('GET', 'http://example.com/, timeout=no_timeout) + + + :param total: + This combines the connect and read timeouts into one; the read timeout + will be set to the time leftover from the connect attempt. 
In the + event that both a connect timeout and a total are specified, or a read + timeout and a total are specified, the shorter timeout will be applied. + + Defaults to None. + + :type total: integer, float, or None + + :param connect: + The maximum amount of time to wait for a connection attempt to a server + to succeed. Omitting the parameter will default the connect timeout to + the system default, probably `the global default timeout in socket.py + `_. + None will set an infinite timeout for connection attempts. + + :type connect: integer, float, or None + + :param read: + The maximum amount of time to wait between consecutive + read operations for a response from the server. Omitting + the parameter will default the read timeout to the system + default, probably `the global default timeout in socket.py + `_. + None will set an infinite timeout. + + :type read: integer, float, or None + + .. note:: + + Many factors can affect the total amount of time for urllib3 to return + an HTTP response. + + For example, Python's DNS resolver does not obey the timeout specified + on the socket. Other factors that can affect total request time include + high CPU load, high swap, the program running at a low priority level, + or other behaviors. + + In addition, the read and total timeouts only measure the time between + read operations on the socket connecting the client and the server, + not the total amount of time for the request to return a complete + response. For most requests, the timeout is raised because the server + has not sent the first byte in the specified time. This is not always + the case; if a server streams one byte every fifteen seconds, a timeout + of 20 seconds will not trigger, even though the request will take + several minutes to complete. + + If your goal is to cut off any request after a set amount of wall clock + time, consider having a second "watcher" thread to cut off a slow + request. 
+ """ + + #: A sentinel object representing the default timeout value + DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT + + def __init__(self, total=None, connect=_Default, read=_Default): + self._connect = self._validate_timeout(connect, 'connect') + self._read = self._validate_timeout(read, 'read') + self.total = self._validate_timeout(total, 'total') + self._start_connect = None + + def __str__(self): + return '%s(connect=%r, read=%r, total=%r)' % ( + type(self).__name__, self._connect, self._read, self.total) + + @classmethod + def _validate_timeout(cls, value, name): + """ Check that a timeout attribute is valid. + + :param value: The timeout value to validate + :param name: The name of the timeout attribute to validate. This is + used to specify in error messages. + :return: The validated and casted version of the given value. + :raises ValueError: If the type is not an integer or a float, or if it + is a numeric value less than zero. + """ + if value is _Default: + return cls.DEFAULT_TIMEOUT + + if value is None or value is cls.DEFAULT_TIMEOUT: + return value + + try: + float(value) + except (TypeError, ValueError): + raise ValueError("Timeout value %s was %s, but it must be an " + "int or float." % (name, value)) + + try: + if value < 0: + raise ValueError("Attempted to set %s timeout to %s, but the " + "timeout cannot be set to a value less " + "than 0." % (name, value)) + except TypeError: # Python 3 + raise ValueError("Timeout value %s was %s, but it must be an " + "int or float." % (name, value)) + + return value + + @classmethod + def from_float(cls, timeout): + """ Create a new Timeout from a legacy timeout value. + + The timeout value used by httplib.py sets the same timeout on the + connect(), and recv() socket requests. This creates a :class:`Timeout` + object that sets the individual timeouts to the ``timeout`` value + passed to this function. + + :param timeout: The legacy timeout value. 
+ :type timeout: integer, float, sentinel default object, or None + :return: Timeout object + :rtype: :class:`Timeout` + """ + return Timeout(read=timeout, connect=timeout) + + def clone(self): + """ Create a copy of the timeout object + + Timeout properties are stored per-pool but each request needs a fresh + Timeout object to ensure each one has its own start/stop configured. + + :return: a copy of the timeout object + :rtype: :class:`Timeout` + """ + # We can't use copy.deepcopy because that will also create a new object + # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to + # detect the user default. + return Timeout(connect=self._connect, read=self._read, + total=self.total) + + def start_connect(self): + """ Start the timeout clock, used during a connect() attempt + + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to start a timer that has been started already. + """ + if self._start_connect is not None: + raise TimeoutStateError("Timeout timer has already been started.") + self._start_connect = current_time() + return self._start_connect + + def get_connect_duration(self): + """ Gets the time elapsed since the call to :meth:`start_connect`. + + :return: Elapsed time. + :rtype: float + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to get duration for a timer that hasn't been started. + """ + if self._start_connect is None: + raise TimeoutStateError("Can't get connect duration for timer " + "that has not started.") + return current_time() - self._start_connect + + @property + def connect_timeout(self): + """ Get the value to use when setting a connection timeout. + + This will be a positive float or integer, the value None + (never timeout), or the default system timeout. + + :return: Connect timeout. 
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + """ + if self.total is None: + return self._connect + + if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: + return self.total + + return min(self._connect, self.total) + + @property + def read_timeout(self): + """ Get the value for the read timeout. + + This assumes some time has elapsed in the connection timeout and + computes the read timeout appropriately. + + If self.total is set, the read timeout is dependent on the amount of + time taken by the connect timeout. If the connection time has not been + established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be + raised. + + :return: Value to use for the read timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` + has not yet been called on this object. + """ + if (self.total is not None and + self.total is not self.DEFAULT_TIMEOUT and + self._read is not None and + self._read is not self.DEFAULT_TIMEOUT): + # In case the connect timeout has not yet been established. + if self._start_connect is None: + return self._read + return max(0, min(self.total - self.get_connect_duration(), + self._read)) + elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: + return max(0, self.total - self.get_connect_duration()) + else: + return self._read diff --git a/lib/requests/packages/urllib3/util/url.py b/lib/requests/packages/urllib3/util/url.py new file mode 100644 index 00000000..487d456c --- /dev/null +++ b/lib/requests/packages/urllib3/util/url.py @@ -0,0 +1,171 @@ +from collections import namedtuple + +from ..exceptions import LocationParseError + + +url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] + + +class Url(namedtuple('Url', url_attrs)): + """ + Datastructure for representing an HTTP URL. Used as a return value for + :func:`parse_url`. 
+ """ + slots = () + + def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, + query=None, fragment=None): + return super(Url, cls).__new__(cls, scheme, auth, host, port, path, + query, fragment) + + @property + def hostname(self): + """For backwards-compatibility with urlparse. We're nice like that.""" + return self.host + + @property + def request_uri(self): + """Absolute path including the query string.""" + uri = self.path or '/' + + if self.query is not None: + uri += '?' + self.query + + return uri + + @property + def netloc(self): + """Network location including host and port""" + if self.port: + return '%s:%d' % (self.host, self.port) + return self.host + + +def split_first(s, delims): + """ + Given a string and an iterable of delimiters, split on the first found + delimiter. Return two split parts and the matched delimiter. + + If not found, then the first part is the full input string. + + Example:: + + >>> split_first('foo/bar?baz', '?/=') + ('foo', 'bar?baz', '/') + >>> split_first('foo/bar?baz', '123') + ('foo/bar?baz', '', None) + + Scales linearly with number of delims. Not ideal for large number of delims. + """ + min_idx = None + min_delim = None + for d in delims: + idx = s.find(d) + if idx < 0: + continue + + if min_idx is None or idx < min_idx: + min_idx = idx + min_delim = d + + if min_idx is None or min_idx < 0: + return s, '', None + + return s[:min_idx], s[min_idx+1:], min_delim + + +def parse_url(url): + """ + Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is + performed to parse incomplete urls. Fields not provided will be None. + + Partly backwards-compatible with :mod:`urlparse`. + + Example:: + + >>> parse_url('http://google.com/mail/') + Url(scheme='http', host='google.com', port=None, path='/', ...) + >>> parse_url('google.com:80') + Url(scheme=None, host='google.com', port=80, path=None, ...) + >>> parse_url('/foo?bar') + Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) 
+ """ + + # While this code has overlap with stdlib's urlparse, it is much + # simplified for our needs and less annoying. + # Additionally, this implementations does silly things to be optimal + # on CPython. + + if not url: + # Empty + return Url() + + scheme = None + auth = None + host = None + port = None + path = None + fragment = None + query = None + + # Scheme + if '://' in url: + scheme, url = url.split('://', 1) + + # Find the earliest Authority Terminator + # (http://tools.ietf.org/html/rfc3986#section-3.2) + url, path_, delim = split_first(url, ['/', '?', '#']) + + if delim: + # Reassemble the path + path = delim + path_ + + # Auth + if '@' in url: + # Last '@' denotes end of auth part + auth, url = url.rsplit('@', 1) + + # IPv6 + if url and url[0] == '[': + host, url = url.split(']', 1) + host += ']' + + # Port + if ':' in url: + _host, port = url.split(':', 1) + + if not host: + host = _host + + if port: + # If given, ports must be integers. + if not port.isdigit(): + raise LocationParseError(url) + port = int(port) + else: + # Blank ports are cool, too. (rfc3986#section-3.2.3) + port = None + + elif not host and url: + host = url + + if not path: + return Url(scheme, auth, host, port, path, query, fragment) + + # Fragment + if '#' in path: + path, fragment = path.split('#', 1) + + # Query + if '?' in path: + path, query = path.split('?', 1) + + return Url(scheme, auth, host, port, path, query, fragment) + + +def get_host(url): + """ + Deprecated. Use :func:`.parse_url` instead. 
+ """ + p = parse_url(url) + return p.scheme or 'http', p.hostname, p.port diff --git a/lib/requests/sessions.py b/lib/requests/sessions.py index 425db22c..c2f42b14 100644 --- a/lib/requests/sessions.py +++ b/lib/requests/sessions.py @@ -12,18 +12,24 @@ import os from collections import Mapping from datetime import datetime -from .compat import cookielib, OrderedDict, urljoin, urlparse, builtin_str +from .auth import _basic_auth_str +from .compat import cookielib, OrderedDict, urljoin, urlparse from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT from .hooks import default_hooks, dispatch_hook from .utils import to_key_val_list, default_headers, to_native_string -from .exceptions import TooManyRedirects, InvalidSchema +from .exceptions import ( + TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) +from .packages.urllib3._collections import RecentlyUsedContainer from .structures import CaseInsensitiveDict from .adapters import HTTPAdapter -from .utils import requote_uri, get_environ_proxies, get_netrc_auth +from .utils import ( + requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, + get_auth_from_url +) from .status_codes import codes @@ -86,11 +92,21 @@ class SessionRedirectMixin(object): """Receives a Response. Returns a generator of Responses.""" i = 0 + hist = [] # keep track of history while resp.is_redirect: prepared_request = req.copy() - resp.content # Consume socket so it can be released + if i > 0: + # Update history and keep track of redirects. + hist.append(resp) + new_hist = list(hist) + resp.history = new_hist + + try: + resp.content # Consume socket so it can be released + except (ChunkedEncodingError, ContentDecodingError, RuntimeError): + resp.raw.read(decode_content=False) if i >= self.max_redirects: raise TooManyRedirects('Exceeded %s redirects.' 
% self.max_redirects) @@ -110,7 +126,7 @@ class SessionRedirectMixin(object): parsed = urlparse(url) url = parsed.geturl() - # Facilitate non-RFC2616-compliant 'location' headers + # Facilitate relative 'location' headers, as allowed by RFC 7231. # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') # Compliant with RFC3986, we percent encode the url. if not urlparse(url).netloc: @@ -119,8 +135,11 @@ class SessionRedirectMixin(object): url = requote_uri(url) prepared_request.url = to_native_string(url) + # Cache the url, unless it redirects to itself. + if resp.is_permanent_redirect and req.url != prepared_request.url: + self.redirect_cache[req.url] = prepared_request.url - # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4 + # http://tools.ietf.org/html/rfc7231#section-6.4.4 if (resp.status_code == codes.see_other and method != 'HEAD'): method = 'GET' @@ -138,7 +157,7 @@ class SessionRedirectMixin(object): prepared_request.method = method # https://github.com/kennethreitz/requests/issues/1084 - if resp.status_code not in (codes.temporary, codes.resume): + if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): if 'Content-Length' in prepared_request.headers: del prepared_request.headers['Content-Length'] @@ -154,22 +173,15 @@ class SessionRedirectMixin(object): prepared_request._cookies.update(self.cookies) prepared_request.prepare_cookies(prepared_request._cookies) - if 'Authorization' in headers: - # If we get redirected to a new host, we should strip out any - # authentication headers. - original_parsed = urlparse(resp.request.url) - redirect_parsed = urlparse(url) + # Rebuild auth and proxy information. + proxies = self.rebuild_proxies(prepared_request, proxies) + self.rebuild_auth(prepared_request, resp) - if (original_parsed.hostname != redirect_parsed.hostname): - del headers['Authorization'] - - # .netrc might have more auth for us. 
def rebuild_auth(self, prepared_request, response):
    """Strip or refresh authentication on a redirected request.

    Credentials must not leak across hosts, so the Authorization header is
    dropped whenever the redirect changes hostname; .netrc may then supply
    fresh credentials for the new host.

    :param prepared_request: the redirected :class:`PreparedRequest`.
    :param response: the response that triggered the redirect.
    """
    headers = prepared_request.headers
    url = prepared_request.url

    if 'Authorization' in headers:
        # Only keep the header when we stay on the same host.
        original_host = urlparse(response.request.url).hostname
        redirect_host = urlparse(url).hostname
        if original_host != redirect_host:
            del headers['Authorization']

    # .netrc might have more auth for us on our new host.
    new_auth = get_netrc_auth(url) if self.trust_env else None
    if new_auth is not None:
        prepared_request.prepare_auth(new_auth)


def rebuild_proxies(self, prepared_request, proxies):
    """Re-evaluate the proxy configuration for a redirected request.

    Environment variables are consulted again because a redirect may land
    on a URL covered by NO_PROXY (strip the proxies) or one whose proxy
    keys were stripped by a previous hop (restore them). The
    Proxy-Authorization header is rebuilt to match the selected proxy.

    :param prepared_request: the redirected :class:`PreparedRequest`.
    :param proxies: the proxy mapping carried so far; may be None.
    :return: the merged proxy dictionary for this hop.
    """
    headers = prepared_request.headers
    url = prepared_request.url
    scheme = urlparse(url).scheme
    new_proxies = {} if proxies is None else proxies.copy()

    if self.trust_env and not should_bypass_proxies(url):
        # Environment proxies only fill gaps; explicit entries win.
        proxy = get_environ_proxies(url).get(scheme)
        if proxy:
            new_proxies.setdefault(scheme, proxy)

    if 'Proxy-Authorization' in headers:
        del headers['Proxy-Authorization']

    try:
        username, password = get_auth_from_url(new_proxies[scheme])
    except KeyError:
        username, password = None, None

    if username and password:
        headers['Proxy-Authorization'] = _basic_auth_str(username, password)

    return new_proxies
Returns :class:`Response ` object. @@ -321,17 +401,22 @@ class Session(SessionRedirectMixin): string for the :class:`Request`. :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the + :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. - :param files: (optional) Dictionary of 'filename': file-like-objects + :param files: (optional) Dictionary of ``'filename': file-like-objects`` for multipart encoding upload. :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth. - :param timeout: (optional) Float describing the timeout of the - request in seconds. - :param allow_redirects: (optional) Boolean. Set to True by default. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a (`connect timeout, read + timeout `_) tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Set to True by default. + :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param stream: (optional) whether to immediately download the response @@ -342,7 +427,7 @@ class Session(SessionRedirectMixin): If Tuple, ('cert', 'key') pair. """ - method = builtin_str(method) + method = to_native_string(method) # Create the Request. req = Request( @@ -351,6 +436,7 @@ class Session(SessionRedirectMixin): headers = headers, files = files, data = data or {}, + json = json, params = params or {}, auth = auth, cookies = cookies, @@ -360,36 +446,16 @@ class Session(SessionRedirectMixin): proxies = proxies or {} - # Gather clues from the surrounding environment. - if self.trust_env: - # Set environment's proxies. 
- env_proxies = get_environ_proxies(url) or {} - for (k, v) in env_proxies.items(): - proxies.setdefault(k, v) - - # Look for configuration. - if not verify and verify is not False: - verify = os.environ.get('REQUESTS_CA_BUNDLE') - - # Curl compatibility. - if not verify and verify is not False: - verify = os.environ.get('CURL_CA_BUNDLE') - - # Merge all the kwargs. - proxies = merge_setting(proxies, self.proxies) - stream = merge_setting(stream, self.stream) - verify = merge_setting(verify, self.verify) - cert = merge_setting(cert, self.cert) + settings = self.merge_environment_settings( + prep.url, proxies, stream, verify, cert + ) # Send the request. send_kwargs = { - 'stream': stream, 'timeout': timeout, - 'verify': verify, - 'cert': cert, - 'proxies': proxies, 'allow_redirects': allow_redirects, } + send_kwargs.update(settings) resp = self.send(prep, **send_kwargs) return resp @@ -424,15 +490,16 @@ class Session(SessionRedirectMixin): kwargs.setdefault('allow_redirects', False) return self.request('HEAD', url, **kwargs) - def post(self, url, data=None, **kwargs): + def post(self, url, data=None, json=None, **kwargs): """Sends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. """ - return self.request('POST', url, data=data, **kwargs) + return self.request('POST', url, data=data, json=json, **kwargs) def put(self, url, data=None, **kwargs): """Sends a PUT request. Returns :class:`Response` object. 
@@ -477,6 +544,14 @@ class Session(SessionRedirectMixin): if not isinstance(request, PreparedRequest): raise ValueError('You can only send PreparedRequests.') + checked_urls = set() + while request.url in self.redirect_cache: + checked_urls.add(request.url) + new_url = self.redirect_cache.get(request.url) + if new_url in checked_urls: + break + request.url = new_url + # Set up variables needed for resolve_redirects and dispatching of hooks allow_redirects = kwargs.pop('allow_redirects', True) stream = kwargs.get('stream') @@ -527,10 +602,37 @@ class Session(SessionRedirectMixin): history.insert(0, r) # Get the last request made r = history.pop() - r.history = tuple(history) + r.history = history + + if not stream: + r.content return r + def merge_environment_settings(self, url, proxies, stream, verify, cert): + """Check the environment and merge it with some settings.""" + # Gather clues from the surrounding environment. + if self.trust_env: + # Set environment's proxies. + env_proxies = get_environ_proxies(url) or {} + for (k, v) in env_proxies.items(): + proxies.setdefault(k, v) + + # Look for requests environment configuration and be compatible + # with cURL. + if verify is True or verify is None: + verify = (os.environ.get('REQUESTS_CA_BUNDLE') or + os.environ.get('CURL_CA_BUNDLE')) + + # Merge all the kwargs. 
+ proxies = merge_setting(proxies, self.proxies) + stream = merge_setting(stream, self.stream) + verify = merge_setting(verify, self.verify) + cert = merge_setting(cert, self.cert) + + return {'verify': verify, 'proxies': proxies, 'stream': stream, + 'cert': cert} + def get_adapter(self, url): """Returns the appropriate connnection adapter for the given URL.""" for (prefix, adapter) in self.adapters.items(): diff --git a/lib/requests/status_codes.py b/lib/requests/status_codes.py index ed7a8660..e0887f21 100644 --- a/lib/requests/status_codes.py +++ b/lib/requests/status_codes.py @@ -30,7 +30,8 @@ _codes = { 305: ('use_proxy',), 306: ('switch_proxy',), 307: ('temporary_redirect', 'temporary_moved', 'temporary'), - 308: ('resume_incomplete', 'resume'), + 308: ('permanent_redirect', + 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 # Client Error. 400: ('bad_request', 'bad'), diff --git a/lib/requests/structures.py b/lib/requests/structures.py index a1759137..3e5f2faa 100644 --- a/lib/requests/structures.py +++ b/lib/requests/structures.py @@ -8,30 +8,7 @@ Data structures that power Requests. """ -import os import collections -from itertools import islice - - -class IteratorProxy(object): - """docstring for IteratorProxy""" - def __init__(self, i): - self.i = i - # self.i = chain.from_iterable(i) - - def __iter__(self): - return self.i - - def __len__(self): - if hasattr(self.i, '__len__'): - return len(self.i) - if hasattr(self.i, 'len'): - return self.i.len - if hasattr(self.i, 'fileno'): - return os.fstat(self.i.fileno()).st_size - - def read(self, n): - return "".join(islice(self.i, None, n)) class CaseInsensitiveDict(collections.MutableMapping): @@ -46,7 +23,7 @@ class CaseInsensitiveDict(collections.MutableMapping): case of the last key to be set, and ``iter(instance)``, ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` will contain case-sensitive keys. 
However, querying and contains - testing is case insensitive: + testing is case insensitive:: cid = CaseInsensitiveDict() cid['Accept'] = 'application/json' @@ -106,8 +83,7 @@ class CaseInsensitiveDict(collections.MutableMapping): return CaseInsensitiveDict(self._store.values()) def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, dict(self.items())) - + return str(dict(self.items())) class LookupDict(dict): """Dictionary lookup object.""" diff --git a/lib/requests/utils.py b/lib/requests/utils.py index 4d648bc5..aa5c140e 100644 --- a/lib/requests/utils.py +++ b/lib/requests/utils.py @@ -19,15 +19,16 @@ import re import sys import socket import struct +import warnings from . import __version__ from . import certs from .compat import parse_http_list as _parse_list_header from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2, - builtin_str, getproxies, proxy_bypass) + builtin_str, getproxies, proxy_bypass, urlunparse) from .cookies import RequestsCookieJar, cookiejar_from_dict from .structures import CaseInsensitiveDict -from .exceptions import MissingSchema, InvalidURL +from .exceptions import InvalidURL _hush_pyflakes = (RequestsCookieJar,) @@ -61,7 +62,7 @@ def super_len(o): return os.fstat(fileno).st_size if hasattr(o, 'getvalue'): - # e.g. BytesIO, cStringIO.StringI + # e.g. BytesIO, cStringIO.StringIO return len(o.getvalue()) @@ -287,6 +288,11 @@ def get_encodings_from_content(content): :param content: bytestring to extract encodings from. """ + warnings.warn(( + 'In requests 3.0, get_encodings_from_content will be removed. For ' + 'more information, please see the discussion on issue #2266. (This' + ' warning should only appear once.)'), + DeprecationWarning) charset_re = re.compile(r']', flags=re.I) pragma_re = re.compile(r']', flags=re.I) @@ -351,12 +357,14 @@ def get_unicode_from_response(r): Tried: 1. charset from content-type - - 2. every encodings from ```` - - 3. fall back and replace all unicode characters + 2. 
fall back and replace all unicode characters """ + warnings.warn(( + 'In requests 3.0, get_unicode_from_response will be removed. For ' + 'more information, please see the discussion on issue #2266. (This' + ' warning should only appear once.)'), + DeprecationWarning) tried_encodings = [] @@ -466,9 +474,10 @@ def is_valid_cidr(string_network): return True -def get_environ_proxies(url): - """Return a dict of environment proxies.""" - +def should_bypass_proxies(url): + """ + Returns whether we should bypass proxies or not. + """ get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) # First check whether no_proxy is defined. If it is, check that the URL @@ -486,13 +495,13 @@ def get_environ_proxies(url): for proxy_ip in no_proxy: if is_valid_cidr(proxy_ip): if address_in_network(ip, proxy_ip): - return {} + return True else: for host in no_proxy: if netloc.endswith(host) or netloc.split(':')[0].endswith(host): # The URL does match something in no_proxy, so we don't want # to apply the proxies on this URL. - return {} + return True # If the system proxy settings indicate that this URL should be bypassed, # don't proxy. @@ -506,12 +515,16 @@ def get_environ_proxies(url): bypass = False if bypass: - return {} + return True - # If we get here, we either didn't have no_proxy set or we're not going - # anywhere that no_proxy applies to, and the system settings don't require - # bypassing the proxy for the current URL. 
- return getproxies() + return False + +def get_environ_proxies(url): + """Return a dict of environment proxies.""" + if should_bypass_proxies(url): + return {} + else: + return getproxies() def default_user_agent(name="python-requests"): @@ -549,7 +562,8 @@ def default_headers(): return CaseInsensitiveDict({ 'User-Agent': default_user_agent(), 'Accept-Encoding': ', '.join(('gzip', 'deflate')), - 'Accept': '*/*' + 'Accept': '*/*', + 'Connection': 'keep-alive', }) @@ -564,7 +578,7 @@ def parse_header_links(value): replace_chars = " '\"" - for val in value.split(","): + for val in re.split(", *<", value): try: url, params = val.split(";", 1) except ValueError: @@ -622,13 +636,18 @@ def guess_json_utf(data): return None -def except_on_missing_scheme(url): - """Given a URL, raise a MissingSchema exception if the scheme is missing. - """ - scheme, netloc, path, params, query, fragment = urlparse(url) +def prepend_scheme_if_needed(url, new_scheme): + '''Given a URL that may or may not have a scheme, prepend the given scheme. + Does not replace a present scheme with the one provided as an argument.''' + scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) - if not scheme: - raise MissingSchema('Proxy URLs must have explicit schemes.') + # urlparse is a finicky beast, and sometimes decides that there isn't a + # netloc present. Assume that it's being over-cautious, and switch netloc + # and path if urlparse decided there was no netloc. 
+ if not netloc: + netloc, path = path, netloc + + return urlunparse((scheme, netloc, path, params, query, fragment)) def get_auth_from_url(url): @@ -661,3 +680,18 @@ def to_native_string(string, encoding='ascii'): out = string.decode(encoding) return out + + +def urldefragauth(url): + """ + Given a url remove the fragment and the authentication part + """ + scheme, netloc, path, params, query, fragment = urlparse(url) + + # see func:`prepend_scheme_if_needed` + if not netloc: + netloc, path = path, netloc + + netloc = netloc.rsplit('@', 1)[-1] + + return urlunparse((scheme, netloc, path, params, query, '')) diff --git a/lib/tvdb_api/tvdb_api.py b/lib/tvdb_api/tvdb_api.py index 00a3bf89..49cd6666 100644 --- a/lib/tvdb_api/tvdb_api.py +++ b/lib/tvdb_api/tvdb_api.py @@ -871,8 +871,10 @@ class Tvdb: url = self.config['url_epInfo'] % (sid, language) epsEt = self._getetsrc(url, language=language) + if 'episode' not in epsEt: + return False - episodes = epsEt["episode"] + episodes = epsEt['episode'] if not isinstance(episodes, list): episodes = [episodes] @@ -936,7 +938,7 @@ class Tvdb: # Item is integer, treat as show id if key not in self.shows: self._getShowData(key, self.config['language'], True) - return self.shows[key] + return (None, self.shows[key])[key in self.shows] key = str(key).lower() self.config['searchterm'] = key diff --git a/lib/tvrage_api/tvrage_api.py b/lib/tvrage_api/tvrage_api.py index ef291aa3..95a49811 100644 --- a/lib/tvrage_api/tvrage_api.py +++ b/lib/tvrage_api/tvrage_api.py @@ -29,7 +29,7 @@ except ImportError: import xml.etree.ElementTree as ElementTree from lib.dateutil.parser import parse -from cachecontrol import CacheControl, caches +from lib.cachecontrol import CacheControl, caches from tvrage_ui import BaseUI from tvrage_exceptions import (tvrage_error, tvrage_userabort, tvrage_shownotfound, @@ -97,15 +97,12 @@ class ShowContainer(dict): #keep only the 100th latest results if time.time() - self._lastgc > 20: - tbd = self._stack[:-100] - 
i = 0 - for o in tbd: + for o in self._stack[:-100]: del self[o] - del self._stack[i] - i += 1 + + self._stack = self._stack[-100:] - _lastgc = time.time() - del tbd + self._lastgc = time.time() super(ShowContainer, self).__setitem__(key, value) @@ -604,6 +601,8 @@ class TVRage: self.config['params_epInfo']['sid'] = sid epsEt = self._getetsrc(self.config['url_epInfo'], self.config['params_epInfo']) + if 'episodelist' not in epsEt and 'season' not in epsEt['episodelist']: + return False seasons = epsEt['episodelist']['season'] if not isinstance(seasons, list): @@ -658,7 +657,7 @@ class TVRage: # Item is integer, treat as show id if key not in self.shows: self._getShowData(key, True) - return self.shows[key] + return (None, self.shows[key])[key in self.shows] key = key.lower() self.config['searchterm'] = key diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 8015afa7..75170f5c 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -33,8 +33,8 @@ sys.path.append(os.path.abspath('../lib')) from sickbeard import providers, metadata, config, webserveInit from sickbeard.providers.generic import GenericProvider from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \ - omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, \ - freshontv, bitsoup, t411, tokyotoshokan + omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, \ + freshontv, bitsoup, tokyotoshokan from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \ naming_ep_type from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \ @@ -346,6 +346,7 @@ ANIDB_PASSWORD = None ANIDB_USE_MYLIST = False ADBA_CONNECTION = None ANIME_SPLIT_HOME = False +ANIME_TREAT_AS_HDTV = False USE_SYNOINDEX = False @@ -504,7 +505,7 @@ def 
initialize(consoleLogging=True): USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, PROXY_SETTING, PROXY_INDEXERS, \ AUTOPOSTPROCESSER_FREQUENCY, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY, \ ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \ - ANIME_SPLIT_HOME, SCENE_DEFAULT, BACKLOG_DAYS + ANIME_SPLIT_HOME, SCENE_DEFAULT, BACKLOG_DAYS, ANIME_TREAT_AS_HDTV if __INITIALIZED__: return False @@ -928,6 +929,7 @@ def initialize(consoleLogging=True): ANIDB_USE_MYLIST = bool(check_setting_int(CFG, 'ANIDB', 'anidb_use_mylist', 0)) ANIME_SPLIT_HOME = bool(check_setting_int(CFG, 'ANIME', 'anime_split_home', 0)) + ANIME_TREAT_AS_HDTV = bool(check_setting_int(CFG, 'ANIME', 'anime_treat_as_hdtv', 0)) METADATA_XBMC = check_setting_str(CFG, 'General', 'metadata_xbmc', '0|0|0|0|0|0|0|0|0|0') METADATA_XBMC_12PLUS = check_setting_str(CFG, 'General', 'metadata_xbmc_12plus', '0|0|0|0|0|0|0|0|0|0') @@ -1794,6 +1796,7 @@ def save_config(): new_config['ANIME'] = {} new_config['ANIME']['anime_split_home'] = int(ANIME_SPLIT_HOME) + new_config['ANIME']['anime_treat_as_hdtv'] = int(ANIME_TREAT_AS_HDTV) new_config.write() diff --git a/sickbeard/common.py b/sickbeard/common.py index 56d8c1c9..d51866cb 100644 --- a/sickbeard/common.py +++ b/sickbeard/common.py @@ -22,6 +22,9 @@ import platform import re import uuid +import logger +import sickbeard + INSTANCE_ID = str(uuid.uuid1()) USER_AGENT = ('SickGear/(' + platform.system() + '; ' + platform.release() + '; ' + INSTANCE_ID + ')') @@ -202,6 +205,10 @@ class Quality: return Quality.HDBLURAY elif blueRayOptions and fullHD and not hdOptions: return Quality.FULLHDBLURAY + elif sickbeard.ANIME_TREAT_AS_HDTV: + logger.log(u'Treating file: ' + name + ' with "unknown" quality as HDTV per user settings', + logger.DEBUG) + return Quality.HDTV else: return Quality.UNKNOWN diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index 
8388281c..1be5b37e 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -62,7 +62,7 @@ from sickbeard import encodingKludge as ek from sickbeard import notifiers from sickbeard import clients -from cachecontrol import CacheControl, caches +from lib.cachecontrol import CacheControl, caches from itertools import izip, cycle urllib._urlopener = classes.SickBeardURLopener() @@ -667,9 +667,9 @@ def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=Non def sanitizeSceneName(name, ezrss=False): """ Takes a show name and returns the "scenified" version of it. - + ezrss: If true the scenified version will follow EZRSS's cracksmoker rules as best as possible - + Returns: A string containing the scene version of the show name given. """ @@ -900,7 +900,7 @@ def md5_for_file(filename, block_size=2 ** 16): def get_lan_ip(): """ - Simple function to get LAN localhost_ip + Simple function to get LAN localhost_ip http://stackoverflow.com/questions/11735821/python-get-localhost-ip """ @@ -970,7 +970,7 @@ By Pedro Jose Pereira Vieito (@pvieito) * The keys should be unique for each device To add a new encryption_version: - 1) Code your new encryption_version + 1) Code your new encryption_version 2) Update the last encryption_version available in webserve.py 3) Remember to maintain old encryption versions and key generators for retrocompatibility """ @@ -1164,8 +1164,13 @@ def mapIndexersToShow(showObj): # for each mapped entry for curResult in sqlResults: - logger.log(u"Found indexer mapping in cache for show: " + showObj.name, logger.DEBUG) - mapped[int(curResult['mindexer'])] = int(curResult['mindexer_id']) + nlist = [i for i in curResult if None is not i] + # Check if its mapped with both tvdb and tvrage. 
+ if 4 <= len(nlist): + logger.log(u"Found indexer mapping in cache for show: " + showObj.name, logger.DEBUG) + mapped[int(curResult['mindexer'])] = int(curResult['mindexer_id']) + break + else: sql_l = [] for indexer in sickbeard.indexerApi().indexers: @@ -1423,3 +1428,6 @@ def get_size(start_path='.'): total_size += ek.ek(os.path.getsize, fp) return total_size + +def remove_article(text=''): + return re.sub(r'(?i)/^(?:(?:A(?!\s+to)n?)|The)\s(\w)', r'\1', text) \ No newline at end of file diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 10019eae..c4e752f3 100755 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -34,10 +34,8 @@ __all__ = ['ezrss', 'nyaatorrents', 'fanzub', 'torrentbytes', - 'animezb', 'freshontv', 'bitsoup', - 't411', 'tokyotoshokan', ] diff --git a/sickbeard/providers/animezb.py b/sickbeard/providers/animezb.py deleted file mode 100644 index 2260227b..00000000 --- a/sickbeard/providers/animezb.py +++ /dev/null @@ -1,155 +0,0 @@ -# Author: Nic Wolfe -# URL: http://code.google.com/p/sickbeard/ -# -# This file is part of Sick Beard. -# -# Sick Beard is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Sick Beard is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Sick Beard. If not, see . 
- -import urllib -import datetime - -import sickbeard -import generic - -from sickbeard import classes, show_name_helpers, helpers - -from sickbeard import exceptions, logger -from sickbeard.common import * -from sickbeard import tvcache -from lib.dateutil.parser import parse as parseDate - - -class Animezb(generic.NZBProvider): - - def __init__(self): - - generic.NZBProvider.__init__(self, "Animezb") - - self.supportsBacklog = False - self.supportsAbsoluteNumbering = True - self.anime_only = True - - self.enabled = False - - self.cache = AnimezbCache(self) - - self.url = 'https://animezb.com/' - - def isEnabled(self): - return self.enabled - - def imageName(self): - return 'animezb.png' - - def _get_season_search_strings(self, ep_obj): - return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)] - - def _get_episode_search_strings(self, ep_obj, add_string=''): - search_string = [] - for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): - ep_string = '+'.join( - [helpers.sanitizeSceneName(show_name).replace('.', '+'), str(ep_obj.scene_absolute_number).zfill(2)]) - search_string.append(ep_string) - return search_string - - def _doSearch(self, search_string, epcount=0, age=0): - if self.show and not self.show.is_anime: - logger.log(u"" + str(self.show.name) + " is not an anime skiping ...") - return [] - - params = { - "cat": "anime", - "q": search_string.encode('utf-8'), - "max": "100" - } - - search_url = self.url + "rss?" 
+ urllib.urlencode(params) - - logger.log(u"Search url: " + search_url, logger.DEBUG) - - data = self.cache.getRSSFeed(search_url) - if not data: - return [] - - if 'entries' in data: - - items = data.entries - results = [] - - for curItem in items: - (title, url) = self._get_title_and_url(curItem) - - if title and url: - results.append(curItem) - else: - logger.log( - u"The data returned from the " + self.name + " is incomplete, this result is unusable", - logger.DEBUG) - - return results - - return [] - - def findPropers(self, date=None): - - results = [] - - for item in self._doSearch("v2 OR v3 OR v4 OR v5"): - - (title, url) = self._get_title_and_url(item) - - if item.has_key('published_parsed') and item['published_parsed']: - result_date = item.published_parsed - if result_date: - result_date = datetime.datetime(*result_date[0:6]) - else: - logger.log(u"Unable to figure out the date for entry " + title + ", skipping it") - continue - - if not date or result_date > date: - search_result = classes.Proper(title, url, result_date, self.show) - results.append(search_result) - - return results - - -class AnimezbCache(tvcache.TVCache): - - def __init__(self, provider): - - tvcache.TVCache.__init__(self, provider) - - # only poll Animezb every 20 minutes max - self.minTime = 20 - - def _getRSSData(self): - - params = { - "cat": "anime".encode('utf-8'), - "max": "100".encode('utf-8') - } - - rss_url = self.provider.url + 'rss?' 
+ urllib.urlencode(params) - - logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG) - - data = self.getRSSFeed(rss_url) - - if data and 'entries' in data: - return data.entries - else: - return [] - - -provider = Animezb() diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py deleted file mode 100644 index 45f6e4ed..00000000 --- a/sickbeard/providers/t411.py +++ /dev/null @@ -1,295 +0,0 @@ -# -*- coding: latin-1 -*- -# Author: djoole -# URL: http://code.google.com/p/sickbeard/ -# -# This file is part of Sick Beard. -# -# Sick Beard is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Sick Beard is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Sick Beard. If not, see . 
- -import traceback -import time -import re -import datetime -import sickbeard -import generic -from lib import requests -from sickbeard.common import USER_AGENT, Quality, cpu_presets -from sickbeard import logger -from sickbeard import tvcache -from sickbeard import show_name_helpers -from sickbeard.bs4_parser import BS4Parser -from sickbeard import db - -class T411Provider(generic.TorrentProvider): - urls = {'base_url': 'http://www.t411.me/', - 'search': 'http://www.t411.me/torrents/search/?name=%s&cat=210&subcat=433&search=%s&submit=Recherche', - 'login_page': 'http://www.t411.me/users/login/', - 'download': 'http://www.t411.me/torrents/download/?id=%s', - } - - def __init__(self): - - generic.TorrentProvider.__init__(self, "T411") - - self.supportsBacklog = True - - self.enabled = False - self.username = None - self.password = None - self.ratio = None - - self.cache = T411Cache(self) - - self.url = self.urls['base_url'] - - self.last_login_check = None - - self.login_opener = None - - def isEnabled(self): - return self.enabled - - def imageName(self): - return 't411.png' - - def getQuality(self, item, anime=False): - - quality = Quality.sceneQuality(item[0], anime) - return quality - - def getLoginParams(self): - return { - 'login': self.username, - 'password': self.password, - 'remember': '1', - } - - def loginSuccess(self, output): - if "Ratio: 0: - for result in entries: - - try: - link = result.find('a', title=True) - torrentName = link['title'] - torrent_name = str(torrentName) - torrentId = result.find_all('td')[2].find_all('a')[0]['href'][1:].replace('torrents/nfo/?id=','') - torrent_download_url = (self.urls['download'] % torrentId).encode('utf8') - except (AttributeError, TypeError): - continue - - if not torrent_name or not torrent_download_url: - continue - - item = torrent_name, torrent_download_url - logger.log(u"Found result: " + torrent_name + " (" + torrent_download_url + ")", logger.DEBUG) - items[mode].append(item) - - else: - logger.log(u"The 
Data returned from " + self.name + " do not contains any torrent", - logger.WARNING) - continue - - except Exception, e: - logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), - logger.ERROR) - - results += items[mode] - - return results - - def _get_title_and_url(self, item): - - title, url = item - - if title: - title = u'' + title - title = title.replace(' ', '.') - - if url: - url = str(url).replace('&', '&') - - return title, url - - def findPropers(self, search_date=datetime.datetime.today()): - - results = [] - - myDB = db.DBConnection() - sqlResults = myDB.select( - 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + - ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + - ' WHERE e.airdate >= ' + str(search_date.toordinal()) + - ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + - ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' - ) - - if not sqlResults: - return [] - - for sqlshow in sqlResults: - self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"])) - if self.show: - curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"])) - searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK') - - for item in self._doSearch(searchString[0]): - title, url = self._get_title_and_url(item) - results.append(classes.Proper(title, url, datetime.datetime.today(), self.show)) - - return results - - def seedRatio(self): - return self.ratio - - -class T411Cache(tvcache.TVCache): - def __init__(self, provider): - - tvcache.TVCache.__init__(self, provider) - - # Only poll T411 every 10 minutes max - self.minTime = 10 - - def _getDailyData(self): - search_params = {'RSS': ['']} - return self.provider._doSearch(search_params) - - -provider = T411Provider() diff --git a/sickbeard/searchBacklog.py b/sickbeard/searchBacklog.py index 0159deb0..66eb489b 100644 --- 
a/sickbeard/searchBacklog.py +++ b/sickbeard/searchBacklog.py @@ -85,7 +85,7 @@ class BacklogSearcher: fromDate = datetime.date.fromordinal(1) if not which_shows and not curDate - self._lastBacklog >= self.cycleTime: - logger.log(u"Running limited backlog on missed episodes " + str(sickbeard.BACKLOG_DAYS) + " day(s) and older only") + logger.log(u'Running limited backlog for episodes missed during the last %s day(s)' % str(sickbeard.BACKLOG_DAYS)) fromDate = datetime.date.today() - datetime.timedelta(days=sickbeard.BACKLOG_DAYS) self.amActive = True diff --git a/sickbeard/tv.py b/sickbeard/tv.py index b690b5d0..54dbca54 100644 --- a/sickbeard/tv.py +++ b/sickbeard/tv.py @@ -2494,7 +2494,7 @@ class TVEpisode(object): if airs: hr = int(airs.group(1)) hr = (12 + hr, hr)[None is airs.group(3)] - hr = (hr, hr - 12)[0 == hr % 12] + hr = (hr, hr - 12)[0 == hr % 12 and 0 != hr] min = int((airs.group(2), min)[None is airs.group(2)]) airtime = datetime.time(hr, min) diff --git a/sickbeard/versionChecker.py b/sickbeard/versionChecker.py index dfab40b2..4c6d5a32 100644 --- a/sickbeard/versionChecker.py +++ b/sickbeard/versionChecker.py @@ -44,9 +44,7 @@ class CheckVersion(): def __init__(self): self.install_type = self.find_install_type() - if self.install_type == 'win': - self.updater = WindowsUpdateManager() - elif self.install_type == 'git': + if self.install_type == 'git': self.updater = GitUpdateManager() elif self.install_type == 'source': self.updater = SourceUpdateManager() @@ -71,15 +69,11 @@ class CheckVersion(): Determines how this copy of sr was installed. returns: type of installation. 
Possible values are: - 'win': any compiled windows build 'git': running from source using git 'source': running from source without git """ - # check if we're a windows build - if sickbeard.BRANCH.startswith('build '): - install_type = 'win' - elif os.path.isdir(ek.ek(os.path.join, sickbeard.PROG_DIR, u'.git')): + if os.path.isdir(ek.ek(os.path.join, sickbeard.PROG_DIR, u'.git')): install_type = 'git' else: install_type = 'source' @@ -139,158 +133,6 @@ class UpdateManager(): return sickbeard.WEB_ROOT + "/home/update/?pid=" + str(sickbeard.PID) -class WindowsUpdateManager(UpdateManager): - def __init__(self): - self.github_repo_user = self.get_github_repo_user() - self.github_repo = self.get_github_repo() - - self.branch = sickbeard.BRANCH - if sickbeard.BRANCH == '': - self.branch = self._find_installed_branch() - - self._cur_version = None - self._cur_commit_hash = None - self._newest_version = None - - self.gc_url = 'http://code.google.com/p/sickbeard/downloads/list' - self.version_url = 'https://raw.github.com/' + self.github_repo_user + '/' + self.github_repo + '/' + self.branch + '/updates.txt' - - def _find_installed_version(self): - version = '' - - try: - version = sickbeard.BRANCH - return int(version[6:]) - except ValueError: - logger.log(u"Unknown SickGear Windows binary release: " + version, logger.ERROR) - return None - - def _find_installed_branch(self): - return 'windows_binaries' - - def _find_newest_version(self, whole_link=False): - """ - Checks git for the newest Windows binary build. Returns either the - build number or the entire build URL depending on whole_link's value. - - whole_link: If True, returns the entire URL to the release. If False, it returns - only the build number. 
default: False - """ - - regex = ".*SickGear\-win32\-alpha\-build(\d+)(?:\.\d+)?\.zip" - - version_url_data = helpers.getURL(self.version_url) - if not version_url_data: - return - - for curLine in version_url_data.splitlines(): - logger.log(u"checking line " + curLine, logger.DEBUG) - match = re.match(regex, curLine) - if match: - logger.log(u"found a match", logger.DEBUG) - if whole_link: - return curLine.strip() - else: - return int(match.group(1)) - - def need_update(self): - if self.branch != self._find_installed_branch(): - logger.log(u"Branch checkout: " + self._find_installed_branch() + "->" + self.branch, logger.DEBUG) - return True - - self._cur_version = self._find_installed_version() - self._newest_version = self._find_newest_version() - - logger.log(u"newest version: " + repr(self._newest_version), logger.DEBUG) - if self._newest_version and self._newest_version > self._cur_version: - return True - - return False - - def set_newest_text(self): - - sickbeard.NEWEST_VERSION_STRING = None - - if not self._cur_version: - newest_text = "Unknown SickGear Windows binary version. Not updating with original version." 
- else: - newest_text = 'There is a newer version available (build ' + str( - self._newest_version) + ')' - newest_text += "— Update Now" - - sickbeard.NEWEST_VERSION_STRING = newest_text - - def update(self): - - zip_download_url = self._find_newest_version(True) - logger.log(u"new_link: " + repr(zip_download_url), logger.DEBUG) - - if not zip_download_url: - logger.log(u"Unable to find a new version link on google code, not updating") - return False - - try: - # prepare the update dir - sr_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sr-update') - - if os.path.isdir(sr_update_dir): - logger.log(u"Clearing out update folder " + sr_update_dir + " before extracting") - shutil.rmtree(sr_update_dir) - - logger.log(u"Creating update folder " + sr_update_dir + " before extracting") - os.makedirs(sr_update_dir) - - # retrieve file - logger.log(u"Downloading update from " + zip_download_url) - zip_download_path = os.path.join(sr_update_dir, u'sr-update.zip') - urllib.urlretrieve(zip_download_url, zip_download_path) - - if not ek.ek(os.path.isfile, zip_download_path): - logger.log(u"Unable to retrieve new version from " + zip_download_url + ", can't update", logger.ERROR) - return False - - if not ek.ek(zipfile.is_zipfile, zip_download_path): - logger.log(u"Retrieved version from " + zip_download_url + " is corrupt, can't update", logger.ERROR) - return False - - # extract to sr-update dir - logger.log(u"Unzipping from " + str(zip_download_path) + " to " + sr_update_dir) - update_zip = zipfile.ZipFile(zip_download_path, 'r') - update_zip.extractall(sr_update_dir) - update_zip.close() - - # delete the zip - logger.log(u"Deleting zip file from " + str(zip_download_path)) - os.remove(zip_download_path) - - # find update dir name - update_dir_contents = [x for x in os.listdir(sr_update_dir) if - os.path.isdir(os.path.join(sr_update_dir, x))] - - if len(update_dir_contents) != 1: - logger.log(u"Invalid update data, update failed. 
Maybe try deleting your sr-update folder?", - logger.ERROR) - return False - - content_dir = os.path.join(sr_update_dir, update_dir_contents[0]) - old_update_path = os.path.join(content_dir, u'updater.exe') - new_update_path = os.path.join(sickbeard.PROG_DIR, u'updater.exe') - logger.log(u"Copying new update.exe file from " + old_update_path + " to " + new_update_path) - shutil.move(old_update_path, new_update_path) - - # Notify update successful - notifiers.notify_git_update(sickbeard.NEWEST_VERSION_STRING) - - except Exception, e: - logger.log(u"Error while trying to update: " + ex(e), logger.ERROR) - return False - - return True - - def list_remote_branches(self): - return ['windows_binaries'] - - class GitUpdateManager(UpdateManager): def __init__(self): self._git_path = self._find_working_git() diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py index 1f7cedfc..0051d23c 100644 --- a/sickbeard/webapi.py +++ b/sickbeard/webapi.py @@ -37,6 +37,7 @@ from sickbeard import processTV from sickbeard import network_timezones, sbdatetime from sickbeard.exceptions import ex from sickbeard.common import SNATCHED, SNATCHED_PROPER, DOWNLOADED, SKIPPED, UNAIRED, IGNORED, ARCHIVED, WANTED, UNKNOWN +from sickbeard.helpers import remove_article from common import Quality, qualityPresetStrings, statusStrings try: @@ -125,15 +126,7 @@ class Api(webserve.MainHandler): t = webserve.PageTemplate(headers=self.request.headers, file="apiBuilder.tmpl") def titler(x): - if not x or sickbeard.SORT_ARTICLE: - return x - if x.lower().startswith('a '): - x = x[2:] - elif x.lower().startswith('an '): - x = x[3:] - elif x.lower().startswith('the '): - x = x[4:] - return x + return (remove_article(x), x)[not x or sickbeard.SORT_ARTICLE] t.sortedShowList = sorted(sickbeard.showList, lambda x, y: cmp(titler(x.name), titler(y.name))) @@ -725,17 +718,22 @@ class CMD_ComingEpisodes(ApiCall): def run(self): """ display the coming episodes """ - today = datetime.date.today().toordinal() - next_week 
= (datetime.date.today() + datetime.timedelta(days=7)).toordinal() - recently = (datetime.date.today() - datetime.timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal() + today_dt = datetime.date.today() + today = today_dt.toordinal() + yesterday_dt = today_dt - datetime.timedelta(days=1) + yesterday = yesterday_dt.toordinal() + tomorrow = (datetime.date.today() + datetime.timedelta(days=1)).toordinal() + next_week_dt = (datetime.date.today() + datetime.timedelta(days=7)) + next_week = (next_week_dt + datetime.timedelta(days=1)).toordinal() + recently = (yesterday_dt - datetime.timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal() done_show_list = [] qualList = Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED, IGNORED] - myDB = db.DBConnection(row_type="dict") + myDB = db.DBConnection() sql_results = myDB.select( - "SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'indexerid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.indexer_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join( - ['?'] * len(qualList)) + ")", [today, next_week] + qualList) + "SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'indexerid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate <= ? 
AND tv_shows.indexer_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join( + ['?'] * len(qualList)) + ")", [yesterday, next_week] + qualList) for cur_result in sql_results: done_show_list.append(int(cur_result["indexerid"])) @@ -748,17 +746,42 @@ class CMD_ComingEpisodes(ApiCall): sql_results += more_sql_results more_sql_results = myDB.select( - "SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'indexerid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.indexer_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join( - ['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList) + "SELECT airdate, airs, episode, name AS 'ep_name', description AS 'ep_plot', network, season, showid AS 'indexerid', show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.indexer_id = tv_episodes.showid AND airdate <= ? AND airdate >= ? AND tv_episodes.status = ? 
AND tv_episodes.status NOT IN (" + ','.join( + ['?'] * len(qualList)) + ")", [tomorrow, recently, WANTED] + qualList) sql_results += more_sql_results - # sort by air date + sql_results = list(set(sql_results)) + + # make a dict out of the sql results + sql_results = [dict(row) for row in sql_results] + + # multi dimension sort sorts = { - 'date': (lambda x, y: cmp(int(x["airdate"]), int(y["airdate"]))), - 'show': (lambda a, b: cmp(a["show_name"], b["show_name"])), - 'network': (lambda a, b: cmp(a["network"], b["network"])), + 'date': (lambda a, b: cmp( + (a['parsed_datetime'], + (a['show_name'], remove_article(a['show_name']))[not sickbeard.SORT_ARTICLE], + a['season'], a['episode']), + (b['parsed_datetime'], + (b['show_name'], remove_article(b['show_name']))[not sickbeard.SORT_ARTICLE], + b['season'], b['episode']))), + 'show': (lambda a, b: cmp( + ((a['show_name'], remove_article(a['show_name']))[not sickbeard.SORT_ARTICLE], + a['parsed_datetime'], a['season'], a['episode']), + ((b['show_name'], remove_article(b['show_name']))[not sickbeard.SORT_ARTICLE], + b['parsed_datetime'], b['season'], b['episode']))), + 'network': (lambda a, b: cmp( + (a['network'], a['parsed_datetime'], + (a['show_name'], remove_article(a['show_name']))[not sickbeard.SORT_ARTICLE], + a['season'], a['episode']), + (b['network'], b['parsed_datetime'], + (b['show_name'], remove_article(b['show_name']))[not sickbeard.SORT_ARTICLE], + b['season'], b['episode']))) } + # add parsed_datetime to the dict + for index, item in enumerate(sql_results): + sql_results[index]['parsed_datetime'] = network_timezones.parse_date_time(item['airdate'], item['airs'], item['network']) + sql_results.sort(sorts[self.sort]) finalEpResults = {} @@ -777,9 +800,7 @@ class CMD_ComingEpisodes(ApiCall): if ep["paused"] and not self.paused: continue - ep['airs'] = str(ep['airs']).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ') - dtEpisodeAirs = 
sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(int(ep['airdate']), ep['airs'], ep['network'])) - ep['airdate'] = dtEpisodeAirs.toordinal() + ep['airdate'] = int(ep["airdate"]) status = "soon" if ep["airdate"] < today: @@ -801,12 +822,13 @@ class CMD_ComingEpisodes(ApiCall): ep["quality"] = _get_quality_string(ep["quality"]) # clean up tvdb horrible airs field - ep['airs'] = sbdatetime.sbdatetime.sbftime(dtEpisodeAirs, t_preset=timeFormat).lstrip('0').replace(' 0', ' ') + ep['airs'] = str(ep['airs']).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ') # start day of the week on 1 (monday) - ep['weekday'] = 1 + datetime.date.fromordinal(dtEpisodeAirs.toordinal()).weekday() + ep['weekday'] = 1 + datetime.date.fromordinal(ep['airdate']).weekday() # Add tvdbid for backward compability ep["tvdbid"] = ep['indexerid'] - ep['airdate'] = sbdatetime.sbdatetime.sbfdate(dtEpisodeAirs, d_preset=dateFormat) + ep['airdate'] = sbdatetime.sbdatetime.sbfdate(datetime.date.fromordinal(ep['airdate']), d_preset=dateFormat) + ep['parsed_datetime'] = sbdatetime.sbdatetime.sbfdatetime(ep['parsed_datetime'], d_preset=dateFormat, t_preset='%H:%M %z') # TODO: check if this obsolete if not status in finalEpResults: @@ -985,7 +1007,7 @@ class CMD_EpisodeSetStatus(ApiCall): with epObj.lock: if self.status == WANTED: # figure out what episodes are wanted so we can backlog them - if epObj.season in ep_segment: + if epObj.season in segments: segments[epObj.season].append(epObj) else: segments[epObj.season] = [epObj] diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 9800cac0..b72c57cc 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -52,6 +52,7 @@ from sickbeard.common import Quality, Overview, statusStrings, qualityPresetStri from sickbeard.common import SNATCHED, UNAIRED, IGNORED, ARCHIVED, WANTED, FAILED from sickbeard.common import SD, HD720p, HD1080p from sickbeard.exceptions import ex +from sickbeard.helpers import 
remove_article from sickbeard.scene_exceptions import get_scene_exceptions from sickbeard.scene_numbering import get_scene_numbering, set_scene_numbering, get_scene_numbering_for_show, \ get_xem_numbering_for_show, get_scene_absolute_numbering_for_show, get_xem_absolute_numbering_for_show, \ @@ -350,42 +351,67 @@ class MainHandler(RequestHandler): redirect("/comingEpisodes/") def comingEpisodes(self, layout="None"): - - today1 = datetime.date.today() - today = today1.toordinal() - next_week1 = (datetime.date.today() + datetime.timedelta(days=7)) - next_week = next_week1.toordinal() - recently = (datetime.date.today() - datetime.timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal() + """ display the coming episodes """ + today_dt = datetime.date.today() + #today = today_dt.toordinal() + yesterday_dt = today_dt - datetime.timedelta(days=1) + yesterday = yesterday_dt.toordinal() + tomorrow = (datetime.date.today() + datetime.timedelta(days=1)).toordinal() + next_week_dt = (datetime.date.today() + datetime.timedelta(days=7)) + next_week = (next_week_dt + datetime.timedelta(days=1)).toordinal() + if not (layout and layout in ('calendar')) and not (sickbeard.COMING_EPS_LAYOUT and sickbeard.COMING_EPS_LAYOUT in ('calendar')): + recently = (yesterday_dt - datetime.timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal() + else: + recently = yesterday done_show_list = [] qualList = Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED, IGNORED] myDB = db.DBConnection() sql_results = myDB.select( - "SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.indexer_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join( - ['?'] * len(qualList)) + ")", [today, next_week] + qualList) + "SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate <= ? 
AND tv_shows.indexer_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join( + ['?'] * len(qualList)) + ")", [yesterday, next_week] + qualList) for cur_result in sql_results: done_show_list.append(int(cur_result["showid"])) - more_sql_results = myDB.select( - "SELECT *, tv_shows.status as show_status FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join( - ['?'] * len( - done_show_list)) + ") AND tv_shows.indexer_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.season != 0 AND inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join( - ['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")", - done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED) - sql_results += more_sql_results + if not (layout and layout in ('calendar')) and not (sickbeard.COMING_EPS_LAYOUT and sickbeard.COMING_EPS_LAYOUT in ('calendar')): + more_sql_results = myDB.select( + "SELECT *, tv_shows.status as show_status FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join( + ['?'] * len( + done_show_list)) + ") AND tv_shows.indexer_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.season != 0 AND inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join( + ['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")", + done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED) + sql_results += more_sql_results more_sql_results = myDB.select( - "SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.indexer_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? 
AND tv_episodes.status NOT IN (" + ','.join( - ['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList) + "SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.indexer_id = tv_episodes.showid AND airdate <= ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join( + ['?'] * len(qualList)) + ")", [tomorrow, recently, WANTED] + qualList) sql_results += more_sql_results - # sort by localtime + sql_results = list(set(sql_results)) + + # multi dimension sort sorts = { - 'date': (lambda x, y: cmp(x["localtime"], y["localtime"])), - 'show': (lambda a, b: cmp((a["show_name"], a["localtime"]), (b["show_name"], b["localtime"]))), - 'network': (lambda a, b: cmp((a["network"], a["localtime"]), (b["network"], b["localtime"]))), + 'date': (lambda a, b: cmp( + (a['localtime'], + (a['show_name'], remove_article(a['show_name']))[not sickbeard.SORT_ARTICLE], + a['season'], a['episode']), + (b['localtime'], + (b['show_name'], remove_article(b['show_name']))[not sickbeard.SORT_ARTICLE], + b['season'], b['episode']))), + 'show': (lambda a, b: cmp( + ((a['show_name'], remove_article(a['show_name']))[not sickbeard.SORT_ARTICLE], + a['localtime'], a['season'], a['episode']), + ((b['show_name'], remove_article(b['show_name']))[not sickbeard.SORT_ARTICLE], + b['localtime'], b['season'], b['episode']))), + 'network': (lambda a, b: cmp( + (a['network'], a['localtime'], + (a['show_name'], remove_article(a['show_name']))[not sickbeard.SORT_ARTICLE], + a['season'], a['episode']), + (b['network'], b['localtime'], + (b['show_name'], remove_article(b['show_name']))[not sickbeard.SORT_ARTICLE], + b['season'], b['episode']))) } # make a dict out of the sql results @@ -418,8 +444,8 @@ class MainHandler(RequestHandler): paused_item, ] - t.next_week = datetime.datetime.combine(next_week1, datetime.time(tzinfo=network_timezones.sb_timezone)) - t.today = 
datetime.datetime.now().replace(tzinfo=network_timezones.sb_timezone) + t.next_week = datetime.datetime.combine(next_week_dt, datetime.time(tzinfo=network_timezones.sb_timezone)) + t.today = datetime.datetime.now(network_timezones.sb_timezone) t.sql_results = sql_results # Allow local overriding of layout parameter @@ -2642,7 +2668,7 @@ class ConfigAnime(MainHandler): def saveAnime(self, use_anidb=None, anidb_username=None, anidb_password=None, anidb_use_mylist=None, - split_home=None): + split_home=None, anime_treat_as_hdtv=None): results = [] @@ -2651,6 +2677,7 @@ class ConfigAnime(MainHandler): sickbeard.ANIDB_PASSWORD = anidb_password sickbeard.ANIDB_USE_MYLIST = config.checkbox_to_value(anidb_use_mylist) sickbeard.ANIME_SPLIT_HOME = config.checkbox_to_value(split_home) + sickbeard.ANIME_TREAT_AS_HDTV = config.checkbox_to_value(anime_treat_as_hdtv) sickbeard.save_config() @@ -2885,8 +2912,7 @@ class NewHomeAddShows(MainHandler): return _munge(t) - - def newShow(self, show_to_add=None, other_shows=None): + def newShow(self, show_to_add=None, other_shows=None, use_show_name=None): """ Display the new show page which collects a tvdb id, folder, and extra options and posts them to addNewShow @@ -2905,7 +2931,9 @@ class NewHomeAddShows(MainHandler): t.use_provided_info = use_provided_info # use the given show_dir for the indexer search if available - if not show_dir: + if use_show_name: + t.default_show_name = show_name + elif not show_dir: t.default_show_name = '' elif not show_name: t.default_show_name = ek.ek(os.path.basename, ek.ek(os.path.normpath, show_dir)).replace('.', ' ') @@ -2988,11 +3016,21 @@ class NewHomeAddShows(MainHandler): t.submenu = HomeMenu() t.trending_shows = TraktCall("shows/trending.json/%API%", sickbeard.TRAKT_API_KEY) - + t.trending_inlibrary = 0 if None is not t.trending_shows: for item in t.trending_shows: - if helpers.findCertainShow(sickbeard.showList, int(item['tvdb_id'])): - item['tvdb_id'] = u'ExistsInLibrary' + tvdbs = ['tvdb_id', 
'tvrage_id'] + for index, tvdb in enumerate(tvdbs): + try: + item[u'show_id'] = item[tvdb] + tvshow = helpers.findCertainShow(sickbeard.showList, int(item[tvdb])) + except: + continue + # check tvshow indexer is not using the same id from another indexer + if tvshow and (index + 1) == tvshow.indexer: + item[u'show_id'] = u'%s:%s' % (tvshow.indexer, item[tvdb]) + t.trending_inlibrary += 1 + break return _munge(t) @@ -3008,37 +3046,7 @@ class NewHomeAddShows(MainHandler): def addTraktShow(self, indexer_id, showName): if helpers.findCertainShow(sickbeard.showList, int(indexer_id)): return - - if sickbeard.ROOT_DIRS: - root_dirs = sickbeard.ROOT_DIRS.split('|') - location = root_dirs[int(root_dirs[0]) + 1] - else: - location = None - - if location: - show_dir = ek.ek(os.path.join, location, helpers.sanitizeFileName(showName)) - dir_exists = helpers.makeDir(show_dir) - if not dir_exists: - logger.log(u"Unable to create the folder " + show_dir + ", can't add the show", logger.ERROR) - return - else: - helpers.chmodAsParent(show_dir) - - sickbeard.showQueueScheduler.action.addShow(1, int(indexer_id), show_dir, - default_status=sickbeard.STATUS_DEFAULT, - quality=sickbeard.QUALITY_DEFAULT, - flatten_folders=sickbeard.FLATTEN_FOLDERS_DEFAULT, - subtitles=sickbeard.SUBTITLES_DEFAULT, - anime=sickbeard.ANIME_DEFAULT, - scene=sickbeard.SCENE_DEFAULT) - - ui.notifications.message('Show added', 'Adding the specified show into ' + show_dir) - else: - logger.log(u"There was an error creating the show, no root directory setting found", logger.ERROR) - return - - # done adding show - redirect('/home/') + return self.newShow('|'.join(['', '', indexer_id, showName]), use_show_name=True) def addNewShow(self, whichSeries=None, indexerLang="en", rootDir=None, defaultStatus=None, anyQualities=None, bestQualities=None, flatten_folders=None, subtitles=None, @@ -3751,15 +3759,7 @@ class Home(MainHandler): epCounts[curEpCat] += 1 def titler(x): - if not x or sickbeard.SORT_ARTICLE: - return x 
- if x.lower().startswith('a '): - x = x[2:] - if x.lower().startswith('an '): - x = x[3:] - elif x.lower().startswith('the '): - x = x[4:] - return x + return (remove_article(x), x)[not x or sickbeard.SORT_ARTICLE] if sickbeard.ANIME_SPLIT_HOME: shows = []