Browse Source

Add proxy request URL parsing to enforce netloc-only matching, which prevents false positives when URL query parts contain FQDNs.

pull/177/head
JackDandy 10 years ago
parent
commit
acafdbb16a
  1. 1
      CHANGES.md
  2. 5
      sickbeard/helpers.py

1
CHANGES.md

@@ -24,6 +24,7 @@
* Change minor PEP8 tweaks in sab.py
* Add api disabled error code for newznab providers
* Add support for a proxy host PAC url on the General Config/Advanced Settings page
* Add proxy request URL parsing to enforce netloc-only matching, which prevents false positives when URL query parts contain FQDNs
[develop changelog]

5
sickbeard/helpers.py

@@ -1273,13 +1273,15 @@ def proxy_setting(proxy_setting, request_url, force=False):
proxy_address = None
request_url_match = False
parsed_url = urlparse.urlparse(request_url)
netloc = (parsed_url.path, parsed_url.netloc)['' != parsed_url.netloc]
for pac_data in re.finditer(r"""(?:[^'"]*['"])([^\.]+\.[^'"]*)(?:['"])""", resp, re.I):
data = re.search(r"""PROXY\s+([^'"]+)""", pac_data.group(1), re.I)
if data:
if force:
return data.group(1), True
proxy_address = (proxy_address, data.group(1))[None is proxy_address]
elif re.search(re.escape(pac_data.group(1)), request_url, re.I):
elif re.search(re.escape(pac_data.group(1)), netloc, re.I):
request_url_match = True
if None is not proxy_address:
break
@@ -1360,6 +1362,7 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
return resp.content
def download_file(url, filename, session=None):
# create session
cache_dir = sickbeard.CACHE_DIR or _getTempDir()

Loading…
Cancel
Save