
Change add exclude_no_data to get_url, in most cases `no_data` is not a failure.

Change prevent log sources containing `sg.` from clipping in log format.
Change rename var failure_handling to failure_monitor to differentiate it from the failed download handling feature.
pull/1289/head
Prinz23 authored 5 years ago, committed by JackDandy
commit 9fe8565d0c
8 changed files (changed line totals in parentheses):

1. CHANGES.md (3)
2. lib/plex/plex.py (4)
3. lib/sg_helpers.py (26)
4. sickbeard/logger.py (3)
5. sickbeard/nzbSplitter.py (2)
6. sickbeard/providers/generic.py (5)
7. sickbeard/search.py (2)
8. sickbeard/webserve.py (4)

CHANGES.md (3)

@@ -57,7 +57,8 @@
* Update urllib3 release 1.25.6 (4a6c288) to 1.25.7 (37ba61a)
* Fix import in Tornado py2
* Fix removed unneeded import
* Change add exclude_no_data to get_url, in most cases `no_data` is not a failure
* Change prevent log sources containing `sg.` from clipping in log format
### 0.21.30 (2020-04-30 10:20:00 UTC)

lib/plex/plex.py (4)

@@ -118,7 +118,7 @@ class Plex(object):
'X-Plex-Username': user
},
parse_json=True,
failure_handling=False,
failure_monitor=False,
post_data=urlencode({b'user[login]': user, b'user[password]': passw}).encode('utf-8')
)['user']['authentication_token']
except TypeError:
@@ -199,7 +199,7 @@ class Plex(object):
}
if self.username:
headers.update({'X-Plex-Username': self.username})
page = get_url(url, headers=headers, failure_handling=False, **kwargs)
page = get_url(url, headers=headers, failure_monitor=False, **kwargs)
if page:
parsed = parse_xml(page)
if None is not parsed and len(parsed):
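
The Plex client above is typical of the call sites updated in this commit: requests to a service the user runs themselves opt out of the domain failure monitor, presumably so errors there are not counted against the domain or used to skip later fetches. A minimal sketch of the renamed keyword in use; the endpoint, header value, and `sg_helpers` import path are illustrative, not taken from this diff:

```python
from sg_helpers import get_url  # helper updated in this commit; import path assumed

# Fetch without feeding errors into the per-domain failure monitor.
page = get_url(
    'https://plex.example/library/sections',  # placeholder endpoint, not from the hunk above
    headers={'X-Plex-Username': 'user'},
    parse_json=True,
    failure_monitor=False,                     # renamed from failure_handling in this commit
)
if page:
    print('got a response')
```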

lib/sg_helpers.py (26)

@@ -687,11 +687,12 @@ def get_url(url, # type: AnyStr
raise_exceptions=False, # type: bool
as_binary=False, # type: bool
encoding=None, # type: Optional[AnyStr]
failure_handling=True, # type: bool
failure_monitor=True, # type: bool
use_tmr_limit=True, # type: bool
raise_skip_exception=False, # type: bool
exclude_client_http_codes=True, # type: bool
exclude_http_codes=(404, 429), # type: Tuple[integer_types]
exclude_no_data=True, # type: bool
**kwargs):
# type: (...) -> Optional[Union[AnyStr, bool, bytes, Dict, Tuple[Union[Dict, List], requests.Session]]]
"""
@@ -715,17 +716,18 @@ def get_url(url, # type: AnyStr
:param raise_exceptions: raise exceptions
:param as_binary: return bytes instead of text
:param encoding: overwrite encoding return header if as_binary is False
:param failure_handling: if True, will enable failure handling for this request
:param failure_monitor: if True, will enable failure monitor for this request
:param use_tmr_limit: an API limit can be +ve before a fetch, but unwanted, set False to short should_skip
:param raise_skip_exception: if True, will raise ConnectionSkipException if this request should be skipped
:param exclude_client_http_codes: if True, exclude client http codes 4XX from failure handling
:param exclude_http_codes: http codes to exclude from failure handling, default: (404, 429)
:param exclude_client_http_codes: if True, exclude client http codes 4XX from failure monitor
:param exclude_http_codes: http codes to exclude from failure monitor, default: (404, 429)
:param exclude_no_data: exclude no data as failure
:param kwargs: keyword params to passthru to Requests
:return: None or data fetched from address
"""
domain = None
if failure_handling:
if failure_monitor:
domain = DOMAIN_FAILURES.get_domain(url)
if domain not in DOMAIN_FAILURES.domain_list:
DOMAIN_FAILURES.domain_list[domain] = ConnectionFailList(domain)
@@ -852,7 +854,7 @@ def get_url(url, # type: AnyStr
except requests.exceptions.HTTPError as e:
raised = e
is_client_error = 400 <= e.response.status_code < 500
if failure_handling and e.response.status_code not in exclude_http_codes and \
if failure_monitor and e.response.status_code not in exclude_http_codes and \
not (exclude_client_http_codes and is_client_error):
connection_fail_params = dict(fail_type=ConnectionFailTypes.http, code=e.response.status_code)
if not raise_status_code:
@@ -861,26 +863,26 @@ def get_url(url, # type: AnyStr
raised = e
if 'mute_connect_err' not in mute:
logger.warning(u'Connection error msg:%s while loading URL%s' % (ex(e), _maybe_request_url(e)))
if failure_handling:
if failure_monitor:
connection_fail_params = dict(fail_type=ConnectionFailTypes.connection)
except requests.exceptions.ReadTimeout as e:
raised = e
if 'mute_read_timeout' not in mute:
logger.warning(u'Read timed out msg:%s while loading URL%s' % (ex(e), _maybe_request_url(e)))
if failure_handling:
if failure_monitor:
connection_fail_params = dict(fail_type=ConnectionFailTypes.timeout)
except (requests.exceptions.Timeout, socket.timeout) as e:
raised = e
if 'mute_connect_timeout' not in mute:
logger.warning(u'Connection timed out msg:%s while loading URL %s' % (ex(e), _maybe_request_url(e, url)))
if failure_handling:
if failure_monitor:
connection_fail_params = dict(fail_type=ConnectionFailTypes.connection_timeout)
except (BaseException, Exception) as e:
raised = e
logger.warning((u'Exception caught while loading URL {0}\r\nDetail... %s\r\n{1}' % ex(e),
u'Unknown exception while loading URL {0}\r\nDetail... {1}')[not ex(e)]
.format(url, traceback.format_exc()))
if failure_handling:
if failure_monitor:
connection_fail_params = dict(fail_type=ConnectionFailTypes.other)
log_failure_url = True
finally:
@@ -919,7 +921,7 @@ def get_url(url, # type: AnyStr
if raise_exceptions and isinstance(raised, Exception):
raise raised
if failure_handling:
if failure_monitor:
if result and not isinstance(result, tuple) \
or isinstance(result, tuple) and result[0]:
domain = DOMAIN_FAILURES.get_domain(url)
@@ -928,7 +930,7 @@ def get_url(url, # type: AnyStr
DOMAIN_FAILURES.domain_list[domain].failure_count = 0
DOMAIN_FAILURES.domain_list[domain].failure_time = None
save_failure(url, domain, False, post_data, post_json)
else:
elif not exclude_no_data:
DOMAIN_FAILURES.inc_failure_count(url, ConnectionFail(fail_type=ConnectionFailTypes.nodata))
save_failure(url, domain, True, post_data, post_json)
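
The last hunk above is where the new `exclude_no_data` default takes effect: with the monitor enabled, an empty response only increments the domain's failure count when the caller passes `exclude_no_data=False`. A reduced standalone model of that bookkeeping, with illustrative function and label names:

```python
def record_result(result, failure_monitor=True, exclude_no_data=True):
    """Reduced model of get_url's post-request bookkeeping after this commit."""
    if not failure_monitor:
        return 'untracked'                # caller opted out of the monitor entirely
    got_data = bool(result[0]) if isinstance(result, tuple) else bool(result)
    if got_data:
        return 'reset failure counters'   # success clears the domain's failure count/time
    if exclude_no_data:
        return 'ignored'                  # new default: an empty response is not a failure
    return 'count no-data failure'        # previous behaviour, now opt-in

assert record_result('payload') == 'reset failure counters'
assert record_result(None) == 'ignored'
assert record_result(None, exclude_no_data=False) == 'count no-data failure'
```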

sickbeard/logger.py (3)

@@ -171,7 +171,8 @@ class SBRotatingLogHandler(object):
"""
fmt = {}
for logger_name in self.log_types + self.external_loggers:
source = (re.sub(r'(.*\.\w\w\w).*$', r'\1', logger_name).upper() + ' :: ', '')['sickbeard' == logger_name]
source = ((re.sub(r'(.*\.\w\w\w).*$', r'\1', logger_name), logger_name)['sg.' in logger_name]
.upper() + ' :: ', '')['sickbeard' == logger_name]
fmt.setdefault(logger_name, logging.Formatter(
'%(asctime)s %(levelname)' + ('-8', '')[log_simple] + 's ' + source
+ '%(message)s', ('%Y-%m-%d ', '')[log_simple] + '%H:%M:%S'))
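
The regex previously clipped every dotted logger name to three characters after the first dot, which mangled `sg.`-prefixed sources; names containing `sg.` are now used whole. The same expression unrolled into a small standalone function (the example logger names are assumptions, not taken from the diff):

```python
import re

def log_source(logger_name):
    # 'sickbeard' itself logs without a source prefix
    if 'sickbeard' == logger_name:
        return ''
    # names containing 'sg.' are kept whole; others are clipped to
    # three characters after the first dot, as before
    name = logger_name if 'sg.' in logger_name else re.sub(r'(.*\.\w\w\w).*$', r'\1', logger_name)
    return name.upper() + ' :: '

print(log_source('tornado.general'))  # TORNADO.GEN ::  (unchanged)
print(log_source('sg.helpers'))       # SG.HELPERS ::   (was clipped to SG.HEL :: before)
```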

sickbeard/nzbSplitter.py (2)

@@ -180,7 +180,7 @@ def splitResult(result):
:return: list of search results
:rtype: List[sickbeard.classes.SearchResult]
"""
resp = helpers.get_url(result.url, failure_handling=False)
resp = helpers.get_url(result.url, failure_monitor=False)
if None is resp:
logger.log(u'Unable to load url %s, can\'t download season NZB' % result.url, logger.ERROR)
return False

sickbeard/providers/generic.py (5)

@@ -558,7 +558,8 @@ class GenericProvider(object):
kwargs['raise_exceptions'] = True
kwargs['raise_status_code'] = True
kwargs['failure_handling'] = False
kwargs['failure_monitor'] = False
kwargs['exclude_no_data'] = False
for k, v in iteritems(dict(headers=self.headers, hooks=dict(response=self.cb_response))):
kwargs.setdefault(k, v)
if 'nzbs.in' not in url: # this provider returns 503's 3 out of 4 requests with the persistent session system
@@ -753,7 +754,7 @@ class GenericProvider(object):
self.session.headers['Referer'] = url
if cached or helpers.download_file(url, cache_file, session=self.session, allow_redirects='/it' not in url,
failure_handling=False):
failure_monitor=False):
if self._verify_download(cache_file):
logger.log(u'Downloaded %s result from %s' % (self.name, url))
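
Provider requests are the one place this commit turns the no-data exclusion back off while also disabling the monitor, which reads as letting the provider layer treat an empty search reply as meaningful on its own (an interpretation, not stated in the diff). A sketch of assembling such request options; the header and timeout values are placeholders:

```python
# Illustrative request-option assembly mirroring the provider hunk above.
kwargs = dict(raise_exceptions=True, raise_status_code=True)
kwargs['failure_monitor'] = False    # provider errors are not fed to the domain monitor
kwargs['exclude_no_data'] = False    # an empty search response still surfaces as no data
for k, v in dict(headers={'User-Agent': 'example/1.0'}, timeout=30).items():
    kwargs.setdefault(k, v)          # only fill values the caller has not already set
```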

sickbeard/search.py (2)

@@ -1007,7 +1007,7 @@ def search_providers(
cache_file = ek.ek(os.path.join, sickbeard.CACHE_DIR or helpers.get_system_temp_dir(),
'%s.torrent' % (helpers.sanitize_filename(best_result.name)))
if not helpers.download_file(best_result.url, cache_file, session=best_result.provider.session,
failure_handling=False):
failure_monitor=False):
continue
try:

sickbeard/webserve.py (4)

@@ -6142,7 +6142,7 @@ class History(MainHandler):
mapped = 0
mapping = None
maps = [x.split('=') for x in sickbeard.EMBY_PARENT_MAPS.split(',') if any(x)]
args = dict(params=dict(format='json'), timeout=10, parse_json=True, failure_handling=False)
args = dict(params=dict(format='json'), timeout=10, parse_json=True, failure_monitor=False)
for i, cur_host in enumerate(hosts):
base_url = 'http://%s/emby' % cur_host
headers.update({'X-MediaBrowser-Token': keys[i]})
@@ -6166,7 +6166,7 @@ class History(MainHandler):
if not folder or 'tvshows' != folder.get('CollectionType', ''):
continue
items = helpers.get_url('%s/Items' % user_url, failure_handling=False, headers=headers,
items = helpers.get_url('%s/Items' % user_url, failure_monitor=False, headers=headers,
params=dict(SortBy='DatePlayed,SeriesSortName,SortName',
SortOrder='Descending',
IncludeItemTypes='Episode',
