Merge pull request #177 from JackDandy/feature/AddProxyUrlRequestParsing

Add proxy request url parsing to enforce netloc only matching which prevents false positives when url query parts contain FQDNs
This commit is contained in:
JackDandy 2015-02-11 00:35:05 +00:00
commit 9cefc6a35b
2 changed files with 5 additions and 1 deletion

View file

@ -24,6 +24,7 @@
* Change minor PEP8 tweaks in sab.py
* Add api disabled error code for newznab providers
* Add support for a proxy host PAC url on the General Config/Advanced Settings page
* Add proxy request url parsing to enforce netloc only matching which prevents false positives when url query parts contain FQDNs
[develop changelog]

View file

@ -1273,13 +1273,15 @@ def proxy_setting(proxy_setting, request_url, force=False):
proxy_address = None
request_url_match = False
parsed_url = urlparse.urlparse(request_url)
netloc = (parsed_url.path, parsed_url.netloc)['' != parsed_url.netloc]
for pac_data in re.finditer(r"""(?:[^'"]*['"])([^\.]+\.[^'"]*)(?:['"])""", resp, re.I):
data = re.search(r"""PROXY\s+([^'"]+)""", pac_data.group(1), re.I)
if data:
if force:
return data.group(1), True
proxy_address = (proxy_address, data.group(1))[None is proxy_address]
elif re.search(re.escape(pac_data.group(1)), request_url, re.I):
elif re.search(re.escape(pac_data.group(1)), netloc, re.I):
request_url_match = True
if None is not proxy_address:
break
@ -1360,6 +1362,7 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
return resp.content
def download_file(url, filename, session=None):
# create session
cache_dir = sickbeard.CACHE_DIR or _getTempDir()