Browse Source

Merge branch 'release/0.14.0'

tags/release_0.14.0^0 release_0.14.0
JackDandy 7 years ago
parent
commit
f31332185a
  1. 20
      CHANGES.md
  2. 51
      SickBeard.py
  3. 57
      gui/slick/css/dark.css
  4. 64
      gui/slick/css/light.css
  5. 217
      gui/slick/css/style.css
  6. 11
      gui/slick/interfaces/default/config_general.tmpl
  7. 12
      gui/slick/interfaces/default/config_search.tmpl
  8. 6
      gui/slick/interfaces/default/displayShow.tmpl
  9. 8
      gui/slick/interfaces/default/editShow.tmpl
  10. 6
      gui/slick/interfaces/default/history.tmpl
  11. 1
      gui/slick/interfaces/default/home_addExistingShow.tmpl
  12. 8
      gui/slick/interfaces/default/inc_addShowOptions.tmpl
  13. 10
      gui/slick/interfaces/default/inc_blackwhitelist.tmpl
  14. 82
      gui/slick/interfaces/default/inc_bottom.tmpl
  15. 4
      gui/slick/interfaces/default/inc_displayShow.tmpl
  16. 93
      gui/slick/interfaces/default/inc_qualityChooser.tmpl
  17. 292
      gui/slick/interfaces/default/manage_manageSearches.tmpl
  18. 12
      gui/slick/interfaces/default/manage_massEdit.tmpl
  19. 8
      gui/slick/js/addShowOptions.js
  20. 5
      gui/slick/js/config.js
  21. 59
      gui/slick/js/manageSearches.js
  22. 2
      gui/slick/js/newShow.js
  23. 65
      gui/slick/js/qualityChooser.js
  24. 44
      sickbeard/__init__.py
  25. 6
      sickbeard/auto_post_processer.py
  26. 22
      sickbeard/common.py
  27. 28
      sickbeard/config.py
  28. 125
      sickbeard/databases/cache_db.py
  29. 1
      sickbeard/databases/failed_db.py
  30. 23
      sickbeard/databases/mainDB.py
  31. 24
      sickbeard/db.py
  32. 31
      sickbeard/event_queue.py
  33. 61
      sickbeard/helpers.py
  34. 9
      sickbeard/name_parser/parser.py
  35. 43
      sickbeard/name_parser/regexes.py
  36. 5
      sickbeard/network_timezones.py
  37. 240
      sickbeard/properFinder.py
  38. 2
      sickbeard/providers/alpharatio.py
  39. 2
      sickbeard/providers/anizb.py
  40. 2
      sickbeard/providers/beyondhd.py
  41. 2
      sickbeard/providers/bithdtv.py
  42. 2
      sickbeard/providers/bitmetv.py
  43. 2
      sickbeard/providers/blutopia.py
  44. 56
      sickbeard/providers/btn.py
  45. 4
      sickbeard/providers/btscene.py
  46. 2
      sickbeard/providers/dh.py
  47. 5
      sickbeard/providers/ettv.py
  48. 2
      sickbeard/providers/fano.py
  49. 2
      sickbeard/providers/filelist.py
  50. 2
      sickbeard/providers/funfile.py
  51. 525
      sickbeard/providers/generic.py
  52. 2
      sickbeard/providers/gftracker.py
  53. 2
      sickbeard/providers/grabtheinfo.py
  54. 2
      sickbeard/providers/hd4free.py
  55. 6
      sickbeard/providers/hdbits.py
  56. 2
      sickbeard/providers/hdspace.py
  57. 2
      sickbeard/providers/hdtorrents.py
  58. 2
      sickbeard/providers/iptorrents.py
  59. 2
      sickbeard/providers/limetorrents.py
  60. 2
      sickbeard/providers/magnetdl.py
  61. 3
      sickbeard/providers/morethan.py
  62. 2
      sickbeard/providers/ncore.py
  63. 6
      sickbeard/providers/nebulance.py
  64. 62
      sickbeard/providers/newznab.py
  65. 2
      sickbeard/providers/nyaa.py
  66. 19
      sickbeard/providers/omgwtfnzbs.py
  67. 2
      sickbeard/providers/pisexy.py
  68. 5
      sickbeard/providers/potuk.py
  69. 3
      sickbeard/providers/pretome.py
  70. 2
      sickbeard/providers/privatehd.py
  71. 5
      sickbeard/providers/ptf.py
  72. 6
      sickbeard/providers/rarbg.py
  73. 2
      sickbeard/providers/revtt.py
  74. 8
      sickbeard/providers/rsstorrent.py
  75. 2
      sickbeard/providers/scenehd.py
  76. 2
      sickbeard/providers/scenetime.py
  77. 11
      sickbeard/providers/shazbat.py
  78. 2
      sickbeard/providers/skytorrents.py
  79. 2
      sickbeard/providers/speedcd.py
  80. 4
      sickbeard/providers/thepiratebay.py
  81. 5
      sickbeard/providers/tokyotoshokan.py
  82. 2
      sickbeard/providers/torlock.py
  83. 4
      sickbeard/providers/torrentbytes.py
  84. 2
      sickbeard/providers/torrentday.py
  85. 2
      sickbeard/providers/torrenting.py
  86. 2
      sickbeard/providers/torrentleech.py
  87. 2
      sickbeard/providers/torrentz2.py
  88. 2
      sickbeard/providers/tvchaosuk.py
  89. 2
      sickbeard/providers/wop.py
  90. 2
      sickbeard/providers/zooqle.py
  91. 64
      sickbeard/rssfeeds.py
  92. 33
      sickbeard/scheduler.py
  93. 9
      sickbeard/search.py
  94. 8
      sickbeard/search_propers.py
  95. 72
      sickbeard/search_queue.py
  96. 8
      sickbeard/show_updater.py
  97. 6
      sickbeard/subtitles.py
  98. 2
      sickbeard/tvcache.py
  99. 92
      sickbeard/webserve.py
  100. 8
      tests/db_tests.py

20
CHANGES.md

@ -1,4 +1,22 @@
### 0.13.15 (2018-01-26 10:30:00 UTC) ### 0.14.0 (2018-02-01 02:30:00 UTC)
* Change improve core scheduler logic
* Change improve media process to parse anime format 'Show Name 123 - 001 - Ep 1 name'
* Add free space stat (if obtainable) of parent folder(s) to footer
* Add option "Display disk free" to general config/interface page (default enabled)
* Add a provider error table to page Manage/Media Search
* Add failure handling, skip provider for x hour(s) depending on count of failures
* Add detection of Too Many Requests (Supporting providers UC and BTN)
* Add footer icon button to switch time layouts
* Add performance gains for proper search by integrating it into recent search
* Add the once per day proper finder time to footer, this process catches any propers missed during recent searches
* Add ability to differentiate webdl/rip sources so overwriting propers is always done from the same source (e.g. AMZN)
* Change layout of quality custom to improve clarity
* Change tweak text of SD DVD to include BD/BR
* Change TBy prov add UHD cat
### 0.13.15 (2018-01-26 10:30:00 UTC)
* Fix save on config general * Fix save on config general

51
SickBeard.py

@ -76,6 +76,7 @@ from sickbeard.exceptions import ex
from lib.configobj import ConfigObj from lib.configobj import ConfigObj
throwaway = datetime.datetime.strptime('20110101', '%Y%m%d') throwaway = datetime.datetime.strptime('20110101', '%Y%m%d')
rollback_loaded = None
signal.signal(signal.SIGINT, sickbeard.sig_handler) signal.signal(signal.SIGINT, sickbeard.sig_handler)
signal.signal(signal.SIGTERM, sickbeard.sig_handler) signal.signal(signal.SIGTERM, sickbeard.sig_handler)
@ -153,6 +154,19 @@ class SickGear(object):
return '\n'.join(help_msg) return '\n'.join(help_msg)
@staticmethod
def execute_rollback(mo, max_v):
global rollback_loaded
try:
if None is rollback_loaded:
rollback_loaded = db.get_rollback_module()
if None is not rollback_loaded:
rollback_loaded.__dict__[mo]().run(max_v)
else:
print(u'ERROR: Could not download Rollback Module.')
except (StandardError, Exception):
pass
def start(self): def start(self):
# do some preliminary stuff # do some preliminary stuff
sickbeard.MY_FULLNAME = os.path.normpath(os.path.abspath(__file__)) sickbeard.MY_FULLNAME = os.path.normpath(os.path.abspath(__file__))
@ -324,14 +338,28 @@ class SickGear(object):
print('Stack Size %s not set: %s' % (stack_size, e.message)) print('Stack Size %s not set: %s' % (stack_size, e.message))
# check all db versions # check all db versions
for d, min_v, max_v, mo in [ for d, min_v, max_v, base_v, mo in [
('failed.db', sickbeard.failed_db.MIN_DB_VERSION, sickbeard.failed_db.MAX_DB_VERSION, 'FailedDb'), ('failed.db', sickbeard.failed_db.MIN_DB_VERSION, sickbeard.failed_db.MAX_DB_VERSION, sickbeard.failed_db.TEST_BASE_VERSION, 'FailedDb'),
('cache.db', sickbeard.cache_db.MIN_DB_VERSION, sickbeard.cache_db.MAX_DB_VERSION, 'CacheDb'), ('cache.db', sickbeard.cache_db.MIN_DB_VERSION, sickbeard.cache_db.MAX_DB_VERSION, sickbeard.cache_db.TEST_BASE_VERSION, 'CacheDb'),
('sickbeard.db', sickbeard.mainDB.MIN_DB_VERSION, sickbeard.mainDB.MAX_DB_VERSION, 'MainDb') ('sickbeard.db', sickbeard.mainDB.MIN_DB_VERSION, sickbeard.mainDB.MAX_DB_VERSION, sickbeard.mainDB.TEST_BASE_VERSION, 'MainDb')
]: ]:
cur_db_version = db.DBConnection(d).checkDBVersion() cur_db_version = db.DBConnection(d).checkDBVersion()
if cur_db_version > 0: # handling of standalone TEST db versions
if cur_db_version >= 100000 and cur_db_version != max_v:
print('Your [%s] database version (%s) is a test db version and doesn\'t match SickGear required '
'version (%s), downgrading to production db' % (d, cur_db_version, max_v))
self.execute_rollback(mo, max_v)
cur_db_version = db.DBConnection(d).checkDBVersion()
if cur_db_version >= 100000:
print(u'Rollback to production failed.')
sys.exit(u'If you have used other forks, your database may be unusable due to their changes')
if 100000 <= max_v and None is not base_v:
max_v = base_v # set max_v to the needed base production db for test_db
print(u'Rollback to production of [%s] successful.' % d)
# handling of production db versions
if 0 < cur_db_version < 100000:
if cur_db_version < min_v: if cur_db_version < min_v:
print(u'Your [%s] database version (%s) is too old to migrate from with this version of SickGear' print(u'Your [%s] database version (%s) is too old to migrate from with this version of SickGear'
% (d, cur_db_version)) % (d, cur_db_version))
@ -341,19 +369,16 @@ class SickGear(object):
print(u'Your [%s] database version (%s) has been incremented past' print(u'Your [%s] database version (%s) has been incremented past'
u' what this version of SickGear supports. Trying to rollback now. Please wait...' % u' what this version of SickGear supports. Trying to rollback now. Please wait...' %
(d, cur_db_version)) (d, cur_db_version))
try: self.execute_rollback(mo, max_v)
rollback_loaded = db.get_rollback_module()
if None is not rollback_loaded:
rollback_loaded.__dict__[mo]().run(max_v)
else:
print(u'ERROR: Could not download Rollback Module.')
except (StandardError, Exception):
pass
if db.DBConnection(d).checkDBVersion() > max_v: if db.DBConnection(d).checkDBVersion() > max_v:
print(u'Rollback failed.') print(u'Rollback failed.')
sys.exit(u'If you have used other forks, your database may be unusable due to their changes') sys.exit(u'If you have used other forks, your database may be unusable due to their changes')
print(u'Rollback of [%s] successful.' % d) print(u'Rollback of [%s] successful.' % d)
# free memory
global rollback_loaded
rollback_loaded = None
# Initialize the config and our threads # Initialize the config and our threads
sickbeard.initialize(console_logging=self.console_logging) sickbeard.initialize(console_logging=self.console_logging)

57
gui/slick/css/dark.css

@ -336,6 +336,7 @@ home_newShow.tmpl
color:#707070 color:#707070
} }
.btn-inverse.dark-bg,
#addRootDirTable td label .filepath, #addRootDirTable td label .filepath,
.grey-text{color:#999} .grey-text{color:#999}
.highlight-text{color:#fff} .highlight-text{color:#fff}
@ -762,6 +763,60 @@ a.whitelink{
} }
/* TABLE BACKGROUND color */
.provider-failures.hover-highlight td:before,
.provider-failures.focus-highlight td:before{
background:#222
}
/* ODD ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .odd td:before,
.provider-failures.hover-highlight .odd th:before,
.provider-failures.focus-highlight .odd td:before,
.provider-failures.focus-highlight .odd th:before{
background:#333
}
/* EVEN ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .even td:before,
.provider-failures.hover-highlight .even th:before,
.provider-failures.focus-highlight .even td:before,
.provider-failures.focus-highlight .even th:before{
background-color:#2e2e2e
}
/* HOVER ROW highlight colors */
.provider-failures.hover-highlight tbody > tr:hover > td, /* override tablesorter theme row hover */
.provider-failures.hover-highlight tbody > tr.odd:hover > td,
.provider-failures.hover-highlight tbody > tr.even:hover > td{
background-color:#282828
}
/* HOVER COLUMN highlight colors */
.provider-failures.hover-highlight tbody tr th:hover::after,
.provider-failures.hover-highlight tbody tr td:hover::after{
background-color:#282828
}
/* FOCUS ROW highlight color (touch devices) */
.provider-failures.focus-highlight td:focus::before,
.provider-failures.focus-highlight th:focus::before{
background-color:#181818
}
/* FOCUS COLUMN highlight color (touch devices) */
.provider-failures.focus-highlight td:focus::after,
.provider-failures.focus-highlight th:focus::after{
background-color:#181818
}
/* FOCUS CELL highlight color */
.provider-failures.focus-highlight th:focus,
.provider-failures.focus-highlight td:focus,
.provider-failures.focus-highlight .odd th:focus,
.provider-failures.focus-highlight .odd td:focus,
.provider-failures.focus-highlight .even th:focus,
.provider-failures.focus-highlight .even td:focus{
background-color:#181818;
color:#ddd
}
/* ======================================================================= /* =======================================================================
404.tmpl 404.tmpl
========================================================================== */ ========================================================================== */
@ -1374,7 +1429,7 @@ div.formpaginate .prev, div.formpaginate .next{
background:#2265a1 background:#2265a1
} }
#customQualityWrapper .tip-text p{ #custom-quality-wrapper .tip-text p{
color:#999 color:#999
} }

64
gui/slick/css/light.css

@ -29,6 +29,7 @@ pre .prelight-num{
background-image:url("../images/glyphicons-halflings-white.png") background-image:url("../images/glyphicons-halflings-white.png")
} }
.dark-bg .icon-glyph,
.icon-white{ .icon-white{
background-image:url("../images/glyphicons-halflings.png") background-image:url("../images/glyphicons-halflings.png")
} }
@ -351,6 +352,7 @@ home_newShow.tmpl
color:#909090 color:#909090
} }
.btn-inverse.dark-bg,
#addRootDirTable td label .filepath, #addRootDirTable td label .filepath,
.grey-text{color:#666} .grey-text{color:#666}
.highlight-text{color:#000} .highlight-text{color:#000}
@ -742,6 +744,60 @@ a.whitelink{
color:#000 color:#000
} }
/* TABLE BACKGROUND color */
.provider-failures.hover-highlight td:before,
.provider-failures.focus-highlight td:before{
background:#fff
}
/* ODD ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .odd th:before,
.provider-failures.hover-highlight .odd td:before,
.provider-failures.focus-highlight .odd th:before,
.provider-failures.focus-highlight .odd td:before{
background:#f5f1e4
}
/* EVEN ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .even th:before,
.provider-failures.hover-highlight .even td:before,
.provider-failures.focus-highlight .even th:before,
.provider-failures.focus-highlight .even td:before{
background-color:#dfdacf;
}
/* HOVER ROW highlight colors */
.provider-failures.hover-highlight tbody > tr:hover > td, /* override tablesorter theme row hover */
.provider-failures.hover-highlight tbody > tr.odd:hover > td,
.provider-failures.hover-highlight tbody > tr.even:hover > td{
background-color:#f4f3c2
}
/* HOVER COLUMN highlight colors */
.provider-failures.hover-highlight tbody tr th:hover::after,
.provider-failures.hover-highlight tbody tr td:hover::after{
background-color:#f4f3c2
}
/* FOCUS ROW highlight color (touch devices) */
.provider-failures.focus-highlight th:focus::before,
.provider-failures.focus-highlight td:focus::before{
background-color:#dfdead
}
/* FOCUS COLUMN highlight color (touch devices) */
.provider-failures.focus-highlight th:focus::after,
.provider-failures.focus-highlight td:focus::after{
background-color:#dfdead
}
/* FOCUS CELL highlight color */
.provider-failures.focus-highlight th:focus,
.provider-failures.focus-highlight td:focus,
.provider-failures.focus-highlight .odd th:focus,
.provider-failures.focus-highlight .odd td:focus,
.provider-failures.focus-highlight .even th:focus,
.provider-failures.focus-highlight .even td:focus{
background-color:#dfdead;
color:#222
}
/* ======================================================================= /* =======================================================================
404.tmpl 404.tmpl
========================================================================== */ ========================================================================== */
@ -1335,7 +1391,7 @@ div.formpaginate .prev, div.formpaginate .next{
background:#57442b background:#57442b
} }
#customQualityWrapper .tip-text p{ #custom-quality-wrapper .tip-text p{
color:#666 color:#666
} }
@ -1381,8 +1437,8 @@ tablesorter.css
} }
thead.tablesorter-stickyHeader{ thead.tablesorter-stickyHeader{
border-top:2px solid #fff; border-top:2px solid #ddd;
border-bottom:2px solid #fff border-bottom:2px solid #ddd
} }
/* Zebra Widget - row alternating colors */ /* Zebra Widget - row alternating colors */
@ -1404,7 +1460,7 @@ thead.tablesorter-stickyHeader{
} }
.tablesorter tfoot tr{ .tablesorter tfoot tr{
color:#fff; color:#ddd;
text-align:center; text-align:center;
text-shadow:-1px -1px 0 rgba(0, 0, 0, 0.3); text-shadow:-1px -1px 0 rgba(0, 0, 0, 0.3);
background-color:#333; background-color:#333;

217
gui/slick/css/style.css

@ -623,6 +623,20 @@ inc_top.tmpl
content:"\e900" content:"\e900"
} }
.searchadd.icon-glyph{
display:none
}
.active .searchadd.icon-glyph{
opacity:0.4;filter:alpha(opacity=40);
float:none;
display:inline-block;
margin:0 0 -2px 0;
height:14px
}
.searchadd.icon-glyph{
background-position:-337px 0
}
/* ======================================================================= /* =======================================================================
inc_bottom.tmpl inc_bottom.tmpl
========================================================================== */ ========================================================================== */
@ -639,6 +653,27 @@ inc_bottom.tmpl
display:inline display:inline
} }
.footer .icon-glyph{
opacity:0.4;filter:alpha(opacity=40);
float:none;
display:inline-block;
margin:0 0 -1px 2px;
height:12px;
width:14px
}
.footer .icon-glyph:hover{
opacity:0.6;filter:alpha(opacity=60);
cursor:pointer
}
.footer .icon-glyph.timeleft,
.footer .icon-glyph.time:hover{
background-position:-49px -25px
}
.footer .icon-glyph.time,
.footer .icon-glyph.timeleft:hover{
background-position:-193px -121px
}
/* ======================================================================= /* =======================================================================
inc_rootDirs.tmpl inc_rootDirs.tmpl
========================================================================== */ ========================================================================== */
@ -1103,14 +1138,14 @@ div.formpaginate{
margin-right:6px margin-right:6px
} }
#edit-show #customQualityWrapper .tip-text p, #edit-show #custom-quality-wrapper .tip-text p,
#addShowForm #customQualityWrapper .tip-text p, #addShowForm #custom-quality-wrapper .tip-text p,
#edit-show #customQualityWrapper .tip-text em, #edit-show #custom-quality-wrapper .tip-text em,
#addShowForm #customQualityWrapper .tip-text em{ #addShowForm #custom-quality-wrapper .tip-text em{
font-size:13px font-size:13px
} }
#addShowForm .stepDiv #customQuality.show-if-quality-custom span.component-desc p{ #addShowForm .stepDiv #custom-quality.show-if-quality-custom span.component-desc p{
font-size:12px font-size:12px
} }
@ -2711,7 +2746,7 @@ config*.tmpl
color:#666 color:#666
} }
.stepDiv #customQualityWrapper h4{ .stepDiv #custom-quality-wrapper h4{
margin-top:6px; margin-top:6px;
padding:0 0 padding:0 0
} }
@ -2743,7 +2778,7 @@ config*.tmpl
float:left float:left
} }
#config .nocheck, #config div #customQuality, .metadataDiv{ #config .nocheck, #config div #custom-quality, .metadataDiv{
padding-left:20px padding-left:20px
} }
@ -2826,19 +2861,46 @@ select .selected:before{
} }
#editShow .field-pair #SceneException h4, #editShow .field-pair #SceneException h4,
#editShow .field-pair #customQuality h4{ #editShow .field-pair #custom-quality h4{
font-size:13px !important; font-size:13px !important;
} }
#editShow .field-pair #SceneException h4, #editShow .field-pair #SceneException h4,
#editShow .field-pair #customQuality h4{ #editShow .field-pair #custom-quality h4{
margin-bottom:6px margin-bottom:6px
} }
#editShow .field-pair #customQuality h4{ #editShow .field-pair #custom-quality h4{
line-height:normal line-height:normal
} }
#custom-quality .btn,
#custom-quality .btn-placeholder{
width:13em
}
#custom-quality .btn-placeholder{
display:inline-block;
border:1px transparent
}
#addShowForm #add-white,
#addShowForm #add-black{
margin:0 0 10px 30px !important
}
#addShowForm #remove-white,
#addShowForm #remove-black{
margin:0 0 0 30px !important
}
#edit-show #add-white,
#edit-show #add-black{
margin:0 0 10px !important
}
#edit-show #remove-white,
#edit-show #remove-black{
margin:0 !important
}
.test-notification{ .test-notification{
padding:5px; padding:5px;
margin-bottom:10px; margin-bottom:10px;
@ -3191,6 +3253,85 @@ input.get_less_eps{
display:none display:none
} }
#media-search .section{
padding-bottom:10px
}
#media-search .btn{
margin:0 6px 0 0;
min-width:70px
}
#media-search .btn.shows-more,
#media-search .btn.shows-less{
margin:6px 6px 6px 0;
}
#media-search .btn.provider-retry{
margin:6px 0 6px 4px;
}
.tablesorter.provider-failures{width:auto;clear:both;margin-bottom:10px}
.tablesorter.provider-failures > tbody > tr.tablesorter-childRow td{display:none}
.tablesorter.provider-failures.tablesorter > tbody > tr{background-color:transparent}
.provider-failures.hover-highlight th:hover::after,
.provider-failures.hover-highlight td:hover::after,
.provider-failures.focus-highlight th:focus::after,
.provider-failures.focus-highlight td:focus::after{
content:'';
position:absolute;
width:100%;
height:999em;
left:0;
top:-555em;
z-index:-1
}
.provider-failures.focus-highlight th:focus::before,
.provider-failures.focus-highlight td:focus::before{
content:'';
position:absolute;
width:999em;
height:100%;
left:-555em;
top:0;
z-index:-2
}
/* required styles */
.provider-failures.hover-highlight,
.provider-failures.focus-highlight{
overflow:hidden
}
.provider-failures.hover-highlight th,
.provider-failures.hover-highlight td,
.provider-failures.focus-highlight th,
.provider-failures.focus-highlight td{
position:relative;
outline:0
}
/* override the tablesorter theme styling */
.provider-failures.hover-highlight,
.provider-failures.hover-highlight tbody > tr > td,
.provider-failures.focus-highlight,
.provider-failures.focus-highlight tbody > tr > td,
/* override zebra styling */
.provider-failures.hover-highlight tbody tr.even > th,
.provider-failures.hover-highlight tbody tr.even > td,
.provider-failures.hover-highlight tbody tr.odd > th,
.provider-failures.hover-highlight tbody tr.odd > td,
.provider-failures.focus-highlight tbody tr.even > th,
.provider-failures.focus-highlight tbody tr.even > td,
.provider-failures.focus-highlight tbody tr.odd > th,
.provider-failures.focus-highlight tbody tr.odd > td{
background:transparent
}
/* table background positioned under the highlight */
.provider-failures.hover-highlight td:before,
.provider-failures.focus-highlight td:before{
content:'';
position:absolute;
width:100%;
height:100%;
left:0;
top:0;
z-index:-3
}
/* ======================================================================= /* =======================================================================
404.tmpl 404.tmpl
========================================================================== */ ========================================================================== */
@ -3423,6 +3564,12 @@ img[src=""],img:not([src]){
left:0 left:0
} }
.bfr{
position:absolute;
left:-999px;
top:-999px
}
/* ======================================================================= /* =======================================================================
bootstrap Overrides bootstrap Overrides
========================================================================== */ ========================================================================== */
@ -4227,6 +4374,24 @@ div.formpaginate .prev, div.formpaginate .next{
padding:15px padding:15px
} }
#import-shows .stepDiv,
.step-three .stepDiv{
padding:15px 0
}
#import-shows #addShowForm{
width:861px
}
.step-three #custom-quality-wrapper{
width:831px
}
#import-shows #addShowForm .stepDiv span.component-desc,
#addShowForm .step-three .stepDiv span.component-desc{
width:639px
}
.stepDiv.parent-folder{ .stepDiv.parent-folder{
padding:15px 0 0; padding:15px 0 0;
width:430px; width:430px;
@ -4238,21 +4403,21 @@ div.formpaginate .prev, div.formpaginate .next{
} }
/* step 3 related */ /* step 3 related */
#edit-show #customQualityWrapper #customQuality, #edit-show #custom-quality-wrapper #custom-quality,
#customQuality{ #custom-quality{
display:block; display:block;
padding:0 0 10px 0; padding:0 0 10px 0;
overflow:hidden; overflow:hidden;
clear:both clear:both
} }
#customQualityWrapper div.component-group-desc{ #custom-quality-wrapper div.component-group-desc{
float:left; float:left;
width:172px; width:172px;
padding:0 padding:0
} }
#customQualityWrapper div.component-group-desc p{ #custom-quality-wrapper div.component-group-desc p{
margin:.8em 0; margin:.8em 0;
font-size:1.2em font-size:1.2em
} }
@ -4268,8 +4433,7 @@ tablesorter.css
margin-left:auto; margin-left:auto;
color:#000; color:#000;
/* text-align:left;*/ /* text-align:left;*/
background-color:#ddd/*; /* border-spacing:0*/
border-spacing:0*/
} }
#display-show .tablesorter{ #display-show .tablesorter{
@ -4317,20 +4481,6 @@ tablesorter.css
cursor:default cursor:default
} }
thead.tablesorter-stickyHeader{
border-top:2px solid #ddd;
border-bottom:2px solid #ddd
}
/* Zebra Widget - row alternating colors */
.tablesorter tr.odd, .sickbeardTable tr.odd{
background-color:#f5f1e4
}
.tablesorter tr.even, .sickbeardTable tr.even{
background-color:#dfdacf
}
/* filter widget */ /* filter widget */
.tablesorter .filtered{ .tablesorter .filtered{
display:none display:none
@ -4346,9 +4496,7 @@ thead.tablesorter-stickyHeader{
.tablesorter tr.tablesorter-filter-row, .tablesorter tr.tablesorter-filter-row,
.tablesorter tr.tablesorter-filter-row td{ .tablesorter tr.tablesorter-filter-row td{
text-align:center; text-align:center
background:#eee;
border-bottom:1px solid #ddd
} }
/* optional disabled input styling */ /* optional disabled input styling */
@ -4362,10 +4510,7 @@ thead.tablesorter-stickyHeader{
}*/ }*/
.tablesorter tfoot tr{ .tablesorter tfoot tr{
color:#ddd;
text-align:center; text-align:center;
text-shadow:-1px -1px 0 rgba(0, 0, 0, 0.3);
background-color:#333;
border-collapse:collapse border-collapse:collapse
} }

11
gui/slick/interfaces/default/config_general.tmpl

@ -335,6 +335,17 @@
</label> </label>
</div> </div>
#if not hasattr($sickbeard, 'DISPLAY_FREESPACE')#<span class="red-text">Restart SickGear to reveal new option here</span>#else#
<div class="field-pair">
<label for="display-freespace">
<span class="component-title">Display freespace</span>
<span class="component-desc">
<input type="checkbox" name="display_freespace" id="display-freespace"#echo ('', $checked)[$sg_var('DISPLAY_FREESPACE', True)]#>
<p>free space of parent locations is refreshed into any page footer request</p>
</span>
</label>
</div>
#end if
<div class="field-pair"> <div class="field-pair">
<label for="sort_article"> <label for="sort_article">
<span class="component-title">Sort with "The", "A", "An"</span> <span class="component-title">Sort with "The", "A", "An"</span>

12
gui/slick/interfaces/default/config_search.tmpl

@ -74,18 +74,6 @@
</span> </span>
</label> </label>
</div> </div>
<div class="field-pair">
<label for="check_propers_interval">
<span class="component-title">Check propers every:</span>
<span class="component-desc">
<select id="check_propers_interval" name="check_propers_interval" class="form-control input-sm">
#for $curKey, $curText, $void in $propers_intervals:
<option value="$curKey"#echo ('', $html_selected)[$sickbeard.CHECK_PROPERS_INTERVAL == $curKey]#>$curText</option>
#end for
</select>
</span>
</label>
</div>
</div> </div>
<div class="field-pair"> <div class="field-pair">

6
gui/slick/interfaces/default/displayShow.tmpl

@ -295,7 +295,7 @@
<div> <div>
<span class="details-title">Initial</span> <span class="details-title">Initial</span>
<span class="details-info"> <span class="details-info">
#echo ', '.join([$Quality.qualityStrings[$x] for $x in sorted($anyQualities)])# #echo ', '.join([$Quality.get_quality_ui($x) for $x in sorted($anyQualities)])#
</span> </span>
</div> </div>
#end if #end if
@ -303,7 +303,7 @@
<div> <div>
<span class="details-title">Upgrade to</span> <span class="details-title">Upgrade to</span>
<span class="details-info"> <span class="details-info">
#echo ', '.join([$Quality.qualityStrings[$x] for $x in sorted($bestQualities)])# #echo ', '.join([$Quality.get_quality_ui($x) for $x in sorted($bestQualities)])#
</span> </span>
</div> </div>
#end if #end if
@ -396,7 +396,7 @@
<optgroup label="Downloaded"> <optgroup label="Downloaded">
#for $curStatus in sorted($Quality.DOWNLOADED) #for $curStatus in sorted($Quality.DOWNLOADED)
#if $DOWNLOADED != $curStatus #if $DOWNLOADED != $curStatus
<option value="$curStatus">$re.sub('Downloaded\s*\(([^\)]+)\)', r'\1', $statusStrings[$curStatus])</option> <option value="$curStatus">$re.sub('Downloaded\s*\(([^\)]+)\)', r'\1', $statusStrings[$curStatus].replace('SD DVD', 'SD DVD/BR/BD'))</option>
#end if #end if
#end for #end for
<option value="$DOWNLOADED">with archived quality</option> <option value="$DOWNLOADED">with archived quality</option>

8
gui/slick/interfaces/default/editShow.tmpl

@ -146,17 +146,17 @@
<div class="field-pair"> <div class="field-pair">
#set $qualities = $common.Quality.splitQuality(int($show.quality)) #set $qualities = $common.Quality.splitQuality(int($show.quality))
#set global $anyQualities = $qualities[0] #set global $any_qualities = $qualities[0]
#set global $bestQualities = $qualities[1] #set global $best_qualities = $qualities[1]
#include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_qualityChooser.tmpl') #include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_qualityChooser.tmpl')
#if $anyQualities + $bestQualities #if $any_qualities + $best_qualities
<div class="field-pair show-if-quality-custom" style="display:none"> <div class="field-pair show-if-quality-custom" style="display:none">
<label for="upgrade-once"> <label for="upgrade-once">
<span class="component-title">Upgrade once</span> <span class="component-title">Upgrade once</span>
<span class="component-desc"> <span class="component-desc">
<input type="checkbox" name="upgrade_once" id="upgrade-once"#echo ('', $html_checked)[$show.upgrade_once]#> <input type="checkbox" name="upgrade_once" id="upgrade-once"#echo ('', $html_checked)[$show.upgrade_once]#>
<p>stop upgrading after matching the first best <em>Upgrade to</em> quality</p> <p>stop upgrading after matching the first best <em>upgrade</em> quality</p>
</span> </span>
</label> </label>
</div> </div>

6
gui/slick/interfaces/default/history.tmpl

@ -134,7 +134,7 @@
#if $SUBTITLED == $curStatus #if $SUBTITLED == $curStatus
<img width="16" height="11" src="$sbRoot/images/flags/<%= hItem["resource"][len(hItem["resource"])-6:len(hItem["resource"])-4] + '.png' %>"> <img width="16" height="11" src="$sbRoot/images/flags/<%= hItem["resource"][len(hItem["resource"])-6:len(hItem["resource"])-4] + '.png' %>">
#end if #end if
<span class="help" title="$os.path.basename($hItem["resource"])">$statusStrings[$curStatus]</span> <span class="help" title="$os.path.basename($hItem["resource"])">$statusStrings[$curStatus].replace('SD DVD', 'SD DVD/BR/BD')</span>
</td> </td>
<td class="provider"> <td class="provider">
#if $DOWNLOADED == $curStatus #if $DOWNLOADED == $curStatus
@ -156,7 +156,7 @@
#end if #end if
#end if #end if
</td> </td>
<td><span class="hide">$curQuality</span><span class="quality $Quality.get_quality_css($curQuality)">$Quality.qualityStrings[$curQuality]</span></td> <td><span class="hide">$curQuality</span><span class="quality $Quality.get_quality_css($curQuality)">$Quality.get_quality_ui($curQuality)</span></td>
</tr> </tr>
#end for #end for
@ -258,7 +258,7 @@
#end for #end for
</td> </td>
#end if #end if
<td quality="$curQuality"><span class="quality $Quality.get_quality_css($curQuality)">$Quality.qualityStrings[$curQuality]</span></td> <td quality="$curQuality"><span class="quality $Quality.get_quality_css($curQuality)">$Quality.get_quality_ui($curQuality)</span></td>
</tr> </tr>
#end for #end for

1
gui/slick/interfaces/default/home_addExistingShow.tmpl

@ -8,6 +8,7 @@
#set global $sbPath = '../..' #set global $sbPath = '../..'
#set global $statpath = '../..' #set global $statpath = '../..'
#set global $topmenu = 'home' #set global $topmenu = 'home'
#set global $page_body_attr = 'import-shows"'
## ##
#import os.path #import os.path
#include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_top.tmpl') #include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_top.tmpl')

8
gui/slick/interfaces/default/inc_addShowOptions.tmpl

@ -9,7 +9,7 @@
<% def sg_str(varname, default=''): return getattr(sickbeard, varname, default) %>#slurp# <% def sg_str(varname, default=''): return getattr(sickbeard, varname, default) %>#slurp#
<div class="stepDiv linefix"> <div class="stepDiv linefix">
<div style="float:right;margin:-30px -15px 20px 15px;font-size:12px;line-height:27px;"> <div style="float:right;margin:-30px 0px 20px 15px;font-size:12px;line-height:27px;">
<span class="grey-text">To reuse options below when adding more shows&nbsp;<input class="btn btn-inline" type="button" id="saveDefaultsButton" value="Save as defaults" disabled="disabled" /></span> <span class="grey-text">To reuse options below when adding more shows&nbsp;<input class="btn btn-inline" type="button" id="saveDefaultsButton" value="Save as defaults" disabled="disabled" /></span>
</div> </div>
@ -19,7 +19,7 @@
<span class="component-desc"> <span class="component-desc">
<select name="defaultStatus" id="statusSelect" class="form-control form-control-inline input-sm"> <select name="defaultStatus" id="statusSelect" class="form-control form-control-inline input-sm">
#for $curStatus in [$SKIPPED, $WANTED, $ARCHIVED, $IGNORED]: #for $curStatus in [$SKIPPED, $WANTED, $ARCHIVED, $IGNORED]:
<option value="$curStatus"#if $sg_var('STATUS_DEFAULT', SKIPPED) == $curStatus then $selected else ''#>$statusStrings[$curStatus]</option> <option value="$curStatus"#if $sg_var('STATUS_DEFAULT', SKIPPED) == $curStatus then $selected else ''#>$statusStrings[$curStatus].replace('SD DVD', 'SD DVD/BR/BD')</option>
#end for #end for
</select> </select>
<span>set the initial status of missing episodes</span> <span>set the initial status of missing episodes</span>
@ -33,8 +33,8 @@
<div class="field-pair"> <div class="field-pair">
#set $qualities = $Quality.splitQuality($sg_var('QUALITY_DEFAULT', SD)) #set $qualities = $Quality.splitQuality($sg_var('QUALITY_DEFAULT', SD))
#set global $anyQualities = $qualities[0] #set global $any_qualities = $qualities[0]
#set global $bestQualities = $qualities[1] #set global $best_qualities = $qualities[1]
#include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_qualityChooser.tmpl') #include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_qualityChooser.tmpl')
</div> </div>

10
gui/slick/interfaces/default/inc_blackwhitelist.tmpl

@ -19,8 +19,8 @@
#end for #end for
</select> </select>
<div style="position:relative; width:36px; height:64px; margin: -32px 0 0; top: 50%;" class="pull-right"> <div style="position:relative; width:36px; height:64px; margin: -32px 0 0; top: 50%;" class="pull-right">
<input style="margin:0 0 10px !important" class="btn" id="add-white" value="<<" type="button"> <input id="add-white" class="btn" value="<<" type="button">
<input style="margin:0 !important" class="btn clear:right" id="remove-white" value=">>" type="button"> <input id="remove-white" class="btn clear:right" value=">>" type="button">
</div> </div>
</div> </div>
</div> </div>
@ -34,8 +34,8 @@
#end for #end for
</select> </select>
<div style="position:relative; width:36px; height:64px; margin: -32px 0 0; top: 50%;" class="pull-right"> <div style="position:relative; width:36px; height:64px; margin: -32px 0 0; top: 50%;" class="pull-right">
<input style="margin:0 0 10px !important" class="btn" id="add-black" value="<<" type="button"> <input id="add-black" class="btn" value="<<" type="button">
<input style="margin:0 !important" class="btn clear:right" id="remove-black" value=">>" type="button"> <input id="remove-black" class="btn clear:right" value=">>" type="button">
</div> </div>
</div> </div>
</div> </div>
@ -65,4 +65,4 @@
</div> </div>
</div> </div>
</span> </span>
</div><!-- /blackwhitelist --> </div><!-- /blackwhitelist -->

82
gui/slick/interfaces/default/inc_bottom.tmpl

@ -3,6 +3,8 @@
#import re #import re
#from sickbeard import db, sbdatetime #from sickbeard import db, sbdatetime
#from sickbeard.common import * #from sickbeard.common import *
#from sickbeard.helpers import df
#from sickbeard.webserve import MainHandler
<% def sg_var(varname, default=False): return getattr(sickbeard, varname, default) %>#slurp# <% def sg_var(varname, default=False): return getattr(sickbeard, varname, default) %>#slurp#
<% def sg_str(varname, default=''): return getattr(sickbeard, varname, default) %>#slurp# <% def sg_str(varname, default=''): return getattr(sickbeard, varname, default) %>#slurp#
## ##
@ -53,15 +55,10 @@
#set $localheader = '' #set $localheader = ''
#end try #end try
<% <%
try: diskfree, min_output = df()
next_backlog_timeleft = str(sickbeard.backlogSearchScheduler.next_backlog_timeleft()).split('.')[0] if min_output:
except AttributeError: avail = ', '.join(['%s <span class="footerhighlight">%s</span>' % (drive, free) for (drive, free) in diskfree])
next_backlog_timeleft = 'soon' %>#slurp#
try:
recent_search_timeleft = str(sickbeard.recentSearchScheduler.timeLeft()).split('.')[0]
except AttributeError:
recent_search_timeleft = 'soon'
%>
## ##
<span class="footerhighlight">$shows_total</span> shows (<span class="footerhighlight">$shows_active</span> active) <span class="footerhighlight">$shows_total</span> shows (<span class="footerhighlight">$shows_active</span> active)
| <span class="footerhighlight">$ep_downloaded</span><%= | <span class="footerhighlight">$ep_downloaded</span><%=
@ -72,9 +69,72 @@ except AttributeError:
% (localRoot, str(ep_snatched)) % (localRoot, str(ep_snatched))
)[0 < ep_snatched] )[0 < ep_snatched]
%>&nbsp;/&nbsp;<span class="footerhighlight">$ep_total</span> episodes downloaded $ep_percentage %>&nbsp;/&nbsp;<span class="footerhighlight">$ep_total</span> episodes downloaded $ep_percentage
| recent search: <span class="footerhighlight">$recent_search_timeleft</span> #for i, event in enumerate($MainHandler.getFooterTime(change_layout=False, json_dump=False))
| backlog search: <span class="footerhighlight">$next_backlog_timeleft</span> #for k, v in event.items()
#set info = re.findall('(.*)_(timeleft|time)', k)[0]
#if not i
<br>next connect <i class="icon-glyph layout $info[1]" title="Change time layout"></i> for...
#end if
<span id="next-connect-$info[0]">| $info[0].replace('-', ' '): <span class="footerhighlight $info[0]">$v</span></span>
#end for
#end for
#if diskfree
#if min_output
<br>free space&nbsp;&nbsp;$avail
#else
<div class="table-responsive">
<style>
.stat-table{margin:0 auto}
.stat-table > tbody > tr > td{padding:0 5px}
</style>
<table class="stat-table" cellspacing="5" cellpadding="5">
<caption style="display:none">Free space stats for volume/path</caption>
<tbody>
#for i, drive in enumerate(diskfree)
<tr>
<td>#if not i#free space#end if#</td>
<td><span class="footerhighlight">$drive[1]</span></td>
<td style="text-align:left">$drive[0]</td>
</tr>
#end for
</tobdy>
</table>
</div>
#end if
#end if
</div> </div>
<script>
var footerTimeUrl = '$localRoot/getFooterTime';
#raw
function getFooterTime(params){
params = /undefined/.test(params) && {} || params;
$.getJSON(footerTimeUrl, params, function(data){
var info, footerIcon$ = $('.footer').find('.icon-glyph.layout'), enabledPropers = !1;
$.each(data, function(i, eventItems){
$.each(eventItems, function(k, v){
info = k.match(/(.*)_(timeleft|time)/);
$('.footer').find('.' + info[1]).html(v);
footerIcon$.removeClass('time').removeClass('timeleft').addClass(info[2]);
enabledPropers |= /propers/.test(info[1]); // enable only if key is found in response
});
});
var propers$ = $('#next-connect-propers');
if(enabledPropers){
propers$.show();
} else {
propers$.hide();
}
});
}
$(function(){
$('.footer').find('.layout').click(function(){
getFooterTime();
});
});
#end raw
</script>
</footer> </footer>
</body> </body>
</html> </html>

4
gui/slick/interfaces/default/inc_displayShow.tmpl

@ -105,9 +105,9 @@
#slurp #slurp
#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($ep['status'])) #set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($ep['status']))
#if Quality.NONE != $curQuality #if Quality.NONE != $curQuality
<td class="col-status">#if $SUBTITLED == $curStatus#<span class="addQTip" title="$statusStrings[$curStatus]"><i class="sgicon-subtitles" style="vertical-align:middle"></i></span>#else#$statusStrings[$curStatus].replace('Downloaded', '')#end if# <span class="quality $Quality.get_quality_css($curQuality)#if $downloaded# addQTip" title="$downloaded#end if#">$Quality.qualityStrings[$curQuality]</span></td> <td class="col-status">#if $SUBTITLED == $curStatus#<span class="addQTip" title="$statusStrings[$curStatus]"><i class="sgicon-subtitles" style="vertical-align:middle"></i></span>#else#$statusStrings[$curStatus].replace('Downloaded', '')#end if# <span class="quality $Quality.get_quality_css($curQuality)#if $downloaded# addQTip" title="$downloaded#end if#">$Quality.get_quality_ui($curQuality)</span></td>
#else #else
<td class="col-status">$statusStrings[$curStatus]</td> <td class="col-status">$statusStrings[$curStatus].replace('SD DVD', 'SD DVD/BR/BD')</td>
#end if #end if
<td class="col-search"> <td class="col-search">
#if 0 != int($ep['season']) #if 0 != int($ep['season'])

93
gui/slick/interfaces/default/inc_qualityChooser.tmpl

@ -3,15 +3,15 @@
#set $html_selected = ' selected="selected"' #set $html_selected = ' selected="selected"'
<div class="field-pair"> <div class="field-pair">
<label for="qualityPreset" class="clearfix"> <label for="quality-preset" class="clearfix">
#set $overall_quality = $Quality.combineQualities($anyQualities, $bestQualities) #set $overall_quality = $Quality.combineQualities($any_qualities, $best_qualities)
<span class="component-title input">Quality to download</span> <span class="component-title input">Quality to download</span>
<span class="component-desc"> <span class="component-desc">
#set $selected = None #set $selected = None
<select id="qualityPreset" name="quality_preset" class="form-control form-control-inline input-sm"> <select id="quality-preset" name="quality_preset" class="form-control form-control-inline input-sm">
<option value="0">Custom</option> <option value="0">Custom</option>
#for $curPreset in $qualityPresets: #for $cur_preset in $qualityPresets:
<option value="$curPreset"#echo ('', $html_selected)[$curPreset == $overall_quality]##echo ('', ' style="padding-left:15px"')[$qualityPresetStrings[$curPreset].endswith('0p') and 'UHD' not in $qualityPresetStrings[$curPreset]]#>$qualityPresetStrings[$curPreset]</option> <option value="$cur_preset"#echo ('', $html_selected)[$cur_preset == $overall_quality]##echo ('', ' style="padding-left:15px"')[$qualityPresetStrings[$cur_preset].endswith('0p') and 'UHD' not in $qualityPresetStrings[$cur_preset]]#>$qualityPresetStrings[$cur_preset]</option>
#end for #end for
</select> </select>
<span>tip: select a quality then "Custom" for a default selection</span> <span>tip: select a quality then "Custom" for a default selection</span>
@ -19,43 +19,76 @@
</label> </label>
</div> </div>
<div id="customQualityWrapper"> <div id="custom-quality-wrapper">
<div id="customQuality" class="show-if-quality-custom" style="display:none"> <div id="custom-quality" class="show-if-quality-custom" style="display:none">
<div class="component-group-desc tip-text"> <div class="field-pair">
<p>An <em class="highlight-text">Initial</em> quality downloads before any <em class="highlight-text">Upgrade to</em> selections are considered.</p> <div class="component-group-desc tip-text">
<p>Deselect all <em class="highlight-text">Upgrade to</em> qualities to keep the first best <em class="highlight-text">Initial</em> release found.</p> <p style="margin-bottom:25px">An <em class="highlight-text">initial</em> quality downloads before optional upgrades</p>
<p>All found <em class="highlight-text">Upgrade to</em> qualities download until the best.</p>
<p id="unknown-quality" style="display:none">
Temporarily use <em class="red-text">'Unknown'</em> to skip release qual checks.
Results in spam if left on
</p>
</div>
<span id="initial-quality" class="component-desc">
<p>Select one or more qualities; the best one found when searching will be snatched</p>
#set $any_quality_list = filter(lambda x: x > $Quality.NONE and x < $Quality.UNKNOWN, $Quality.qualityStrings)
#set $has_unknown = False
#for $cur_quality in sorted($any_quality_list):
##set $has_unknown |= ($Quality.UNKNOWN == $cur_quality and $cur_quality in $any_qualities)
<a href="#" data-quality="$cur_quality" class="btn btn-inverse dark-bg#echo ('', ' active')[$cur_quality in $any_qualities]#" role="button"><i class="icon-glyph searchadd"></i>$Quality.get_quality_ui($cur_quality)</a>
#if $Quality.UHD4KWEB == $cur_quality
<span class="btn-placeholder"></span>
<span class="btn-placeholder"></span>
<a href="#" data-quality="$Quality.UNKNOWN" class="btn btn-inverse dark-bg#echo ('', ' active')[$Quality.UNKNOWN in $any_qualities]#" role="button"><i class="icon-glyph searchadd"></i>$Quality.get_quality_ui($Quality.UNKNOWN)</a>
#end if
#if $cur_quality in [$Quality.SDDVD, $Quality.FULLHDTV, $Quality.FULLHDBLURAY, $Quality.UHD4KWEB]
<br>
#end if
#end for
</span>
</div> </div>
<span class="component-desc"> <div class="field-pair" style="clear:both">
<div class="component-group-desc tip-text">
<p style="margin-bottom:25px">All found <em class="highlight-text">upgrade</em> qualities download until the best</p>
<p id="no-upgrade" style="display:none">No <em class="highlight-text">upgrades</em> selected, an <em class="highlight-text">initial</em> snatch will complete any search</p>
<p id="upgrade-cond" style="display:none">An <em class="highlight-text">upgrade</em> will only search after an <em class="highlight-text">initial</em> has complete</p>
</div>
<span id="upgrade-quality" class="component-desc">
<p>Optional, upgrade a completed download to any selected quality</p>
#set $best_quality_list = filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings)
#for $cur_quality in sorted($best_quality_list):
<a href="#" data-quality="$cur_quality" class="btn btn-inverse dark-bg#echo ('', ' active')[$cur_quality in $best_qualities]#" role="button"><i class="icon-glyph searchadd"></i>$Quality.get_quality_ui($cur_quality)</a>
#if $cur_quality in [$Quality.SDDVD, $Quality.FULLHDTV, $Quality.FULLHDBLURAY]
<br>
#end if
#end for
</span>
</div>
<span class="component-desc bfr">
<div style="float:left;padding-right:28px"> <div style="float:left;padding-right:28px">
<h4 class="jumbo">Initial</h4> <h4 class="jumbo">Initial</h4>
#set $anyQualityList = filter(lambda x: x > $Quality.NONE, $Quality.qualityStrings) #set $any_quality_list = filter(lambda x: x > $Quality.NONE, $Quality.qualityStrings)
<select id="anyQualities" name="anyQualities" multiple="multiple" size="$len($anyQualityList)" class="form-control form-control-inline input-sm"> <select id="initial-qualities" name="anyQualities" multiple="multiple" size="$len($any_quality_list)" class="form-control form-control-inline input-sm">
#set $has_unknown = False #for $cur_quality in sorted($any_quality_list):
#for $curQuality in sorted($anyQualityList): <option value="$cur_quality"#echo ('', $html_selected)[$cur_quality in $any_qualities]#>$Quality.get_quality_ui($cur_quality)</option>
#set $has_unknown |= ($Quality.UNKNOWN == $curQuality and $curQuality in $anyQualities)
<option value="$curQuality"#echo ('', $html_selected)[$curQuality in $anyQualities]#>$Quality.qualityStrings[$curQuality]</option>
#end for #end for
</select> </select>
</div> </div>
<div style="float:left;padding-right:20px"> <div style="float:left;padding-right:20px">
<h4 class="jumbo">Upgrade to</h4> <h4 class="jumbo">Upgrade to</h4>
#set $bestQualityList = filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings) #set $best_quality_list = filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings)
<select id="bestQualities" name="bestQualities" multiple="multiple" size="$len($bestQualityList)" class="form-control form-control-inline input-sm"> <select id="upgrade-qualities" name="bestQualities" multiple="multiple" size="$len($best_quality_list)" class="form-control form-control-inline input-sm">
#for $curQuality in sorted($bestQualityList): #for $cur_quality in sorted($best_quality_list):
<option value="$curQuality"#echo ('', $html_selected)[$curQuality in $bestQualities]#>$Quality.qualityStrings[$curQuality]</option> <option value="$cur_quality"#echo ('', $html_selected)[$cur_quality in $best_qualities]#>$Quality.get_quality_ui($cur_quality)</option>
#end for #end for
</select><br /> </select><br />
<span>Ctrl + Click = toggle a quality</span>
</div>
<div style="line-height:normal;padding-top:50px" id="quality-notes" class="tip-text">
<p id="unknown"#if not $has_unknown# style="display:none"#end if#>
<em class="highlight-text">Note:</em> Temporarily use 'Unknown' for releases with no recognised quality.
Full-time use risks snatching bad releases and wastes API hits.
</p>
</div> </div>
</span> </span>
</div> </div>

292
gui/slick/interfaces/default/manage_manageSearches.tmpl

@ -1,4 +1,5 @@
#import sickbeard #import sickbeard
#from sickbeard import sbdatetime
## ##
#set global $title = 'Media Search' #set global $title = 'Media Search'
#set global $header = 'Media Search' #set global $header = 'Media Search'
@ -8,131 +9,252 @@
#import os.path #import os.path
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl') #include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')
<input type="hidden" id="sbRoot" value="$sbRoot">
<script type="text/javascript" src="$sbRoot/js/plotTooltip.js?v=$sbPID"></script> <script type="text/javascript" src="$sbRoot/js/plotTooltip.js?v=$sbPID"></script>
<script type="text/javascript" src="$sbRoot/js/manageSearches.js?v=$sbPID"></script> <script type="text/javascript" src="$sbRoot/js/manageSearches.js?v=$sbPID"></script>
<div id="content800">
<div id="media-search" class="align-left">
#if $varExists('header') #if $varExists('header')
<h1 class="header">$header</h1> <h1 class="header">$header</h1>
#else #else
<h1 class="title">$title</h1> <h1 class="title">$title</h1>
#end if #end if
<div id="summary2" class="align-left">
<div id="backlog-search" class="section">
<h3>Backlog Search:</h3> <h3>Backlog Search:</h3>
<a id="forcebacklog" class="btn#if $standard_backlog_running or $backlog_is_active# disabled#end if#" href="$sbRoot/manage/manageSearches/forceBacklog"><i class="sgicon-play"></i> Force</a> <a id="forcebacklog" class="btn#if $standard_backlog_running or $backlog_is_active# disabled#end if#" href="$sbRoot/manage/manageSearches/forceBacklog"><i class="sgicon-play"></i> Force</a>
<a id="pausebacklog" class="btn" href="$sbRoot/manage/manageSearches/pauseBacklog?paused=#if $backlog_paused then "0" else "1"#"><i class="#if $backlog_paused then "sgicon-play" else "sgicon-pause"#"></i> #if $backlog_paused then "Unpause" else "Pause"#</a> <a id="pausebacklog" class="btn" href="$sbRoot/manage/manageSearches/pauseBacklog?paused=#if $backlog_paused then "0" else "1"#"><i class="#if $backlog_paused then "sgicon-play" else "sgicon-pause"#"></i> #if $backlog_paused then "Unpause" else "Pause"#</a>
#if $backlog_paused then 'Paused: ' else ''# #if $backlog_paused
Paused:
#end if##slurp#
#if not $backlog_running and not $backlog_is_active: #if not $backlog_running and not $backlog_is_active:
Not in progress<br /> Not in progress
#else #else
Currently running#if $backlog_running_type != "None"# ($backlog_running_type)#end if#<br /> Currently running#if $backlog_running_type != "None"# ($backlog_running_type)#end if#
#end if #end if
<br /> </div>
<div id="recent-search" class="section">
<h3>Recent Search:</h3> <h3>Recent Search:</h3>
<a id="recentsearch" class="btn#if $recent_search_status# disabled#end if#" href="$sbRoot/manage/manageSearches/forceSearch"><i class="sgicon-play"></i> Force</a> <a id="recentsearch" class="btn#if $recent_search_status# disabled#end if#" href="$sbRoot/manage/manageSearches/forceSearch"><i class="sgicon-play"></i> Force</a>
#if not $recent_search_status #if not $recent_search_status
Not in progress<br /> Not in progress
#else #else
In Progress<br /> In Progress
#end if #end if
<br /> </div>
<div id="propers-search" class="section">
<h3>Find Propers Search:</h3> <h3>Find Propers Search:</h3>
<a id="propersearch" class="btn#if $find_propers_status# disabled#end if#" href="$sbRoot/manage/manageSearches/forceFindPropers"><i class="sgicon-play"></i> Force</a> <a id="propersearch" class="btn#if $find_propers_status# disabled#end if#" href="$sbRoot/manage/manageSearches/forceFindPropers"><i class="sgicon-play"></i> Force</a>
#if not $find_propers_status #if not $find_propers_status
Not in progress<br /> Not in progress
#else #else
In Progress<br /> In Progress
#end if #end if
<br /> </div>
<div id="provider-failures" class="section">
<h3>Provider Failures:</h3>
#if not $provider_fails
<p>No current failures. Failure stats display here when appropriate.</p>
#else
<p>Some providers can be often down over periods, SickGear will back off then retry connecting at a later time</p>
#for $prov in $provider_fail_stats
#if $len($prov['fails'])
<!-- $prov['name'] -->
<div>
<input type="button" class="shows-more btn" value="Expand" style="display:none"><input type="button" class="shows-less btn" value="Collapse"><img src="$sbRoot/images/providers/$prov['prov_img']" width="16" height="16" style="margin:0 6px 0 3px">$prov['name']
#if $prov['active']
#if $prov['next_try']
#set nt = $str($prov['next_try']).split('.', 2)
... is blocked until $sbdatetime.sbdatetime.sbftime($sbdatetime.sbdatetime.now() + $prov['next_try'], markup=True) (in $nt[0]) <input type="button" class="provider-retry btn" id="$prov['prov_id']-btn-retry" value="Ignore block on next search">
#end if
#else
... is not enabled
#end if
</div>
<table class="manageTable provider-failures tablesorter hover-highlight focus-highlight text-center" cellspacing="0" border="0" cellpadding="0">
<thead>
<tr>
<th class="text-center" style="width:13em;padding-right:20px">period of 1hr</th>
<th class="text-center" style="padding-right:20px">server/timeout</th>
<th class="text-center" style="padding-right:20px">network</th>
<th class="text-center" style="padding-right:20px">no data</th>
<th class="text-center" style="padding-right:20px">other</th>
#if $prov['has_limit']
<th class="text-center" style="padding-right:20px">hit limit</th>
#end if
</tr>
</thead>
<tbody>
#set $day = []
#for $fail in $prov['fails']
#set $child = True
#if $fail['date'] not in $day
#set $day += [$fail['date']]
#set $child = False
#end if
#slurp#
<tr#if $fail['multirow'] and $child# class="tablesorter-childRow"#end if#>
#if $fail['multirow']
#if not $child
<td><a href="#" class="provider-fail-parent-toggle" title="Totals (expand for detail)">$sbdatetime.sbdatetime.sbfdate($fail['date_time'])</a></td>
#else
<td>$sbdatetime.sbdatetime.sbftime($fail['date_time'], markup=True)</td>
#end if
#else
<td>$sbdatetime.sbdatetime.sbfdatetime($fail['date_time'], markup=True)</td>
#end if
#set $blank = '-'
#set $title=None
#if $fail['http']['count']
#set $title=$fail['http']['code']
#end if
<td>#if $fail['http']['count']#<span title="#if $child or not $fail['multirow']#$title#else#Expand for fail codes#end if#">$fail['http']['count']</span>#else#$blank#end if# / #echo $fail['timeout'].get('count', 0) or $blank#</td>
<td>#echo ($fail['connection'].get('count', 0) + $fail['connection_timeout'].get('count', 0)) or $blank#</td>
<td>#echo $fail['nodata'].get('count', 0) or $blank#</td>
<td>#echo $fail['other'].get('count', 0) or $blank#</td>
#if $prov['has_limit']
<td>#echo $fail.get('limit', {}).get('count', 0) or $blank#</td>
#end if
</tr>
#end for
</tbody>
</table>
<!-- /$prov['name'] -->
#end if
#end for
#end if
</div>
<br /><br /> <div id="search-queues" class="section">
<h3>Search Queues:</h3>
<h3>Search Queue:</h3>
#if $queue_length['backlog'] or $queue_length['manual'] or $queue_length['failed'] #if $queue_length['backlog'] or $queue_length['manual'] or $queue_length['failed']
<input type="button" class="show-all-more btn" id="all-btn-more" value="Expand All"><input type="button" class="show-all-less btn" id="all-btn-less" value="Collapse All"><br> <input type="button" class="show-all-more btn" id="all-btn-more" value="Expand All"><input type="button" class="show-all-less btn" id="all-btn-less" value="Collapse All"><br>
#end if #end if
<br> <div id="queue-recent" class="section">
Recent: <i>$queue_length['recent'] item$sickbeard.helpers.maybe_plural($queue_length['recent'])</i><br><br> Recent: <i>$queue_length['recent'] item$sickbeard.helpers.maybe_plural($queue_length['recent'])</i>
Proper: <i>$queue_length['proper'] item$sickbeard.helpers.maybe_plural($queue_length['proper'])</i><br><br> </div>
Backlog: <i>$len($queue_length['backlog']) item$sickbeard.helpers.maybe_plural($len($queue_length['backlog']))</i>
#if $queue_length['backlog']
<input type="button" class="shows-more btn" id="backlog-btn-more" value="Expand" #if not $queue_length['backlog']# style="display:none" #end if#><input type="button" class="shows-less btn" id="backlog-btn-less" value="Collapse" style="display:none"><br> <div id="queue-proper" class="section">
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none"> Proper: <i>$len($queue_length['proper']) item$sickbeard.helpers.maybe_plural($queue_length['proper'])</i>
<thead></thead> #if $queue_length['proper']
<tbody> <input type="button" class="shows-more btn" id="proper-btn-more" value="Expand" #if not $queue_length['proper']# style="display:none" #end if#><input type="button" class="shows-less btn" id="proper-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0 #set $row = 0
#for $cur_item in $queue_length['backlog']: #for $cur_item in $queue_length['proper']:
#set $search_type = 'On Demand' #if $cur_item['recent']:
#if $cur_item['standard_backlog']: #set $search_type = 'Recent'
#if $cur_item['forced']: #else
#set $search_type = 'Forced' #set $search_type = 'Scheduled'
#else
#set $search_type = 'Scheduled'
#end if
#if $cur_item['torrent_only']:
#set $search_type += ', Torrent Only'
#end if
#if $cur_item['limited_backlog']:
#set $search_type += ' (Limited)'
#else
#set $search_type += ' (Full)'
#end if
#end if #end if
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#"> <tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left;color:white"> <td style="width:20%;text-align:center;color:white">$search_type</td>
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment']) </tr>
</td>
<td style="width:20%;text-align:center;color:white">$search_type</td>
</tr>
#end for #end for
</tbody> </tbody>
</table> </table>
#else #end if
<br> </div>
<div id="queue-backlog" class="section">
Backlog: <i>$len($queue_length['backlog']) item$sickbeard.helpers.maybe_plural($len($queue_length['backlog']))</i>
#if $queue_length['backlog']
<input type="button" class="shows-more btn" id="backlog-btn-more" value="Expand" #if not $queue_length['backlog']# style="display:none" #end if#><input type="button" class="shows-less btn" id="backlog-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['backlog']:
#set $search_type = 'On Demand'
#if $cur_item['standard_backlog']:
#if $cur_item['forced']:
#set $search_type = 'Forced'
#else
#set $search_type = 'Scheduled'
#end if
#if $cur_item['torrent_only']:
#set $search_type += ', Torrent Only'
#end if
#if $cur_item['limited_backlog']:
#set $search_type += ' (Limited)'
#else
#set $search_type += ' (Full)'
#end if
#end if
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
<td style="width:20%;text-align:center;color:white">$search_type</td>
</tr>
#end for
</tbody>
</table>
#end if #end if
<br> </div>
Manual: <i>$len($queue_length['manual']) item$sickbeard.helpers.maybe_plural($len($queue_length['manual']))</i>
<div id="queue-manual" class="section">
Manual: <i>$len($queue_length['manual']) item$sickbeard.helpers.maybe_plural($len($queue_length['manual']))</i>
#if $queue_length['manual'] #if $queue_length['manual']
<input type="button" class="shows-more btn" id="manual-btn-more" value="Expand" #if not $queue_length['manual']# style="display:none" #end if#><input type="button" class="shows-less btn" id="manual-btn-less" value="Collapse" style="display:none"><br> <input type="button" class="shows-more btn" id="manual-btn-more" value="Expand" #if not $queue_length['manual']# style="display:none" #end if#><input type="button" class="shows-less btn" id="manual-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none"> <table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead> <thead></thead>
<tbody> <tbody>
#set $row = 0 #set $row = 0
#for $cur_item in $queue_length['manual']: #for $cur_item in $queue_length['manual']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#"> <tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white"> <td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment']) <a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td> </td>
</tr> </tr>
#end for #end for
</tbody> </tbody>
</table> </table>
#else
<br>
#end if #end if
<br> </div>
Failed: <i>$len($queue_length['failed']) item$sickbeard.helpers.maybe_plural($len($queue_length['failed']))</i>
<div id="queue-failed" class="section">
Failed: <i>$len($queue_length['failed']) item$sickbeard.helpers.maybe_plural($len($queue_length['failed']))</i>
#if $queue_length['failed'] #if $queue_length['failed']
<input type="button" class="shows-more btn" id="failed-btn-more" value="Expand" #if not $queue_length['failed']# style="display:none" #end if#><input type="button" class="shows-less btn" id="failed-btn-less" value="Collapse" style="display:none"><br> <input type="button" class="shows-more btn" id="failed-btn-more" value="Expand" #if not $queue_length['failed']# style="display:none" #end if#><input type="button" class="shows-less btn" id="failed-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none"> <table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead> <thead></thead>
<tbody> <tbody>
#set $row = 0 #set $row = 0
#for $cur_item in $queue_length['failed']: #for $cur_item in $queue_length['failed']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#"> <tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white"> <td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment']) <a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td> </td>
</tr> </tr>
#end for #end for
</tbody> </tbody>
</table> </table>
#else
<br>
#end if #end if
</div>
</div> </div>
</div> </div>
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl') #include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl')

12
gui/slick/interfaces/default/manage_massEdit.tmpl

@ -53,7 +53,7 @@
<div class="optionWrapper"> <div class="optionWrapper">
<span class="selectTitle">Quality</span> <span class="selectTitle">Quality</span>
<div class="selectChoices"> <div class="selectChoices">
<select id="qualityPreset" name="quality_preset" class="form-control form-control-inline input-sm"> <select id="quality-preset" name="quality_preset" class="form-control form-control-inline input-sm">
<option value="keep">&lt; keep &gt;</option> <option value="keep">&lt; keep &gt;</option>
<option value="0" #if None is not $quality_value and $quality_value not in $common.qualityPresets then $selected else ''#>Custom</option> <option value="0" #if None is not $quality_value and $quality_value not in $common.qualityPresets then $selected else ''#>Custom</option>
#for $curPreset in sorted($common.qualityPresets): #for $curPreset in sorted($common.qualityPresets):
@ -62,22 +62,22 @@
</select> </select>
</div><br /> </div><br />
<div id="customQuality" class="show-if-quality-custom"> <div id="custom-quality" class="show-if-quality-custom">
<div class="manageCustom pull-left"> <div class="manageCustom pull-left">
<h4 style="font-size:14px">Initial</h4> <h4 style="font-size:14px">Initial</h4>
#set $anyQualityList = filter(lambda x: x > $common.Quality.NONE, $common.Quality.qualityStrings) #set $anyQualityList = filter(lambda x: x > $common.Quality.NONE, $common.Quality.qualityStrings)
<select id="anyQualities" name="anyQualities" multiple="multiple" size="$len($anyQualityList)"> <select id="initial-qualities" name="anyQualities" multiple="multiple" size="$len($anyQualityList)">
#for $curQuality in sorted($anyQualityList): #for $curQuality in sorted($anyQualityList):
<option value="$curQuality" #if $curQuality in $anyQualities then $selected else ''#>$common.Quality.qualityStrings[$curQuality]</option> <option value="$curQuality" #if $curQuality in $anyQualities then $selected else ''#>$common.Quality.get_quality_ui($curQuality)</option>
#end for #end for
</select> </select>
</div> </div>
<div class="manageCustom pull-left"> <div class="manageCustom pull-left">
<h4 style="font-size:14px">Upgrade to</h4> <h4 style="font-size:14px">Upgrade to</h4>
#set $bestQualityList = filter(lambda x: x > $common.Quality.SDTV, $common.Quality.qualityStrings) #set $bestQualityList = filter(lambda x: x > $common.Quality.SDTV, $common.Quality.qualityStrings)
<select id="bestQualities" name="bestQualities" multiple="multiple" size="$len($bestQualityList)"> <select id="upgrade-qualities" name="bestQualities" multiple="multiple" size="$len($bestQualityList)">
#for $curQuality in sorted($bestQualityList): #for $curQuality in sorted($bestQualityList):
<option value="$curQuality" #if $curQuality in $bestQualities then $selected else ''#>$common.Quality.qualityStrings[$curQuality]</option> <option value="$curQuality" #if $curQuality in $bestQualities then $selected else ''#>$common.Quality.get_quality_ui($curQuality)</option>
#end for #end for
</select> </select>
</div><br /> </div><br />

8
gui/slick/js/addShowOptions.js

@ -3,10 +3,10 @@ $(document).ready(function(){
$('#saveDefaultsButton').click(function() { $('#saveDefaultsButton').click(function() {
var anyQualArray = [], bestQualArray = []; var anyQualArray = [], bestQualArray = [];
$('#anyQualities option:selected').each(function(i, d) { $('#initial-qualities option:selected').each(function(i, d) {
anyQualArray.push($(d).val()); anyQualArray.push($(d).val());
}); });
$('#bestQualities option:selected').each(function(i, d) { $('#upgrade-qualities option:selected').each(function(i, d) {
bestQualArray.push($(d).val()); bestQualArray.push($(d).val());
}); });
@ -32,9 +32,9 @@ $(document).ready(function(){
$(this).attr('disabled', true); $(this).attr('disabled', true);
}); });
$('#statusSelect, #qualityPreset, #anyQualities, #bestQualities, #wanted_begin, #wanted_latest,' $('#statusSelect, #quality-preset, #initial-qualities, #upgrade-qualities, #wanted_begin, #wanted_latest,'
+ ' #flatten_folders, #scene, #subtitles, #anime, #tag').change(function() { + ' #flatten_folders, #scene, #subtitles, #anime, #tag').change(function() {
$('#saveDefaultsButton').attr('disabled', false); $('#saveDefaultsButton').attr('disabled', false);
}); });
}); });

5
gui/slick/js/config.js

@ -262,6 +262,11 @@ function config_success(response) {
$(this).show(); $(this).show();
}); });
$('#email_show').trigger('notify'); $('#email_show').trigger('notify');
// update footer only on the config page for the propers option
if('saveSearch' == $('#configForm').attr('action')){
getFooterTime({'change_layout': 0});
}
} }
function fetch_pullrequests() { function fetch_pullrequests() {

59
gui/slick/js/manageSearches.js

@ -1,33 +1,70 @@
$(document).ready(function() { $(function(){
$('#recentsearch,#propersearch').click(function(){ $('#recentsearch,#propersearch').click(function(){
$(this).addClass('disabled'); $(this).addClass('disabled');
}) });
$('#forcebacklog,#forcefullbacklog').click(function(){ $('#forcebacklog,#forcefullbacklog').click(function(){
$('#forcebacklog,#forcefullbacklog').addClass('disabled'); $('#forcebacklog,#forcefullbacklog').addClass('disabled');
$('#pausebacklog').removeClass('disabled'); $('#pausebacklog').removeClass('disabled');
}) });
$('#pausebacklog').click(function(){ $('#pausebacklog').click(function(){
$(this).addClass('disabled'); $(this).addClass('disabled');
}) });
$('.show-all-less').click(function(){ $('.show-all-less').click(function(){
$(this).nextAll('table').hide(); $(this).nextAll('table').hide();
$(this).nextAll('input.shows-more').show(); $(this).nextAll('input.shows-more').show();
$(this).nextAll('input.shows-less').hide(); $(this).nextAll('input.shows-less').hide();
}) });
$('.show-all-more').click(function(){ $('.show-all-more').click(function(){
$(this).nextAll('table').show(); $(this).nextAll('table').show();
$(this).nextAll('input.shows-more').hide(); $(this).nextAll('input.shows-more').hide();
$(this).nextAll('input.shows-less').show(); $(this).nextAll('input.shows-less').show();
}) });
$('.shows-less').click(function(){ $('.shows-less').click(function(){
$(this).nextAll('table:first').hide(); var table$ = $(this).nextAll('table:first');
table$ = table$.length ? table$ : $(this).parent().nextAll('table:first');
table$.hide();
$(this).hide(); $(this).hide();
$(this).prevAll('input:first').show(); $(this).prevAll('input:first').show();
}) });
$('.shows-more').click(function(){ $('.shows-more').click(function(){
$(this).nextAll('table:first').show(); var table$ = $(this).nextAll('table:first');
table$ = table$.length ? table$ : $(this).parent().nextAll('table:first');
table$.show();
$(this).hide(); $(this).hide();
$(this).nextAll('input:first').show(); $(this).nextAll('input:first').show();
}) });
}); $('.provider-retry').click(function () {
$(this).addClass('disabled');
var match = $(this).attr('id').match(/^(.+)-btn-retry$/);
$.ajax({
url: sbRoot + '/manage/manageSearches/retryProvider?provider=' + match[1],
type: 'GET',
complete: function () {
window.location.reload(true);
}
});
});
$('.provider-failures').tablesorter({widgets : ['zebra'],
headers : { 0:{sorter:!1}, 1:{sorter:!1}, 2:{sorter:!1}, 3:{sorter:!1}, 4:{sorter:!1}, 5:{sorter:!1} }
});
$('.provider-fail-parent-toggle').click(function(){
$(this).closest('tr').nextUntil('tr:not(.tablesorter-childRow)').find('td').toggle();
return !1;
});
// Make table cell focusable
// http://css-tricks.com/simple-css-row-column-highlighting/
var focus$ = $('.focus-highlight');
if (focus$.length){
focus$.find('td, th')
.attr('tabindex', '1')
// add touch device support
.on('touchstart', function(){
$(this).focus();
});
}
});

2
gui/slick/js/newShow.js

@ -251,7 +251,7 @@ $(document).ready(function () {
$('#addShowForm').submit(); $('#addShowForm').submit();
}); });
$('#qualityPreset').change(function () { $('#quality-preset').change(function () {
myform.loadsection(2); myform.loadsection(2);
}); });

65
gui/slick/js/qualityChooser.js

@ -1,12 +1,12 @@
function setFromPresets (preset) { function setFromPresets (preset) {
var elCustomQuality = $('.show-if-quality-custom'), var elCustomQuality = $('.show-if-quality-custom'),
selected = 'selected'; selected = 'selected', quality, selectState, btn$, dev = !1;
if (preset = parseInt(preset)) { if (preset = parseInt(preset)) {
elCustomQuality.fadeOut('fast', 'linear'); !dev && elCustomQuality.fadeOut('fast', 'linear');
var upgrade = !0; var upgrade = !0;
$('#anyQualities, #bestQualities').find('option').each(function() { $('#initial-qualities, #upgrade-qualities').find('option').each(function() {
if (upgrade && 'bestQualities' === $(this).parent().attr('id')) { if (upgrade && 'upgrade-qualities' === $(this).parent().attr('id')) {
upgrade = !1; upgrade = !1;
switch (preset) { switch (preset) {
case 3: preset = 128 + 32 + 4; break; case 3: preset = 128 + 32 + 4; break;
@ -15,8 +15,22 @@ function setFromPresets (preset) {
default: preset = 0; default: preset = 0;
} }
} }
$(this).attr(selected, ((preset & parseInt($(this).val())) ? selected : false));
quality = $(this).val();
selectState = ((preset & parseInt(quality, 10)) ? selected : !1);
$(this).attr(selected, selectState);
var list = /initial/.test($(this).parent().attr('id')) ? '#initial-quality': '#upgrade-quality';
btn$ = $(/initial/.test($(this).parent().attr('id')) ? '#initial-quality': '#upgrade-quality').find('a.btn[data-quality="' + quality + '"]');
if(!selectState){
btn$.removeClass('active')
} else {
btn$.addClass('active')
}
dev && console.log(preset, list, 'this.val():', quality, 'selectState:', selectState, 'hasClass:', btn$.hasClass('active'))
}); });
dev && console.log('-----------------------');
} else } else
elCustomQuality.fadeIn('fast', 'linear'); elCustomQuality.fadeIn('fast', 'linear');
@ -24,16 +38,23 @@ function setFromPresets (preset) {
} }
function presentTips() { function presentTips() {
var tip$ = $('#unknown'); var tip$ = $('#unknown-quality');
if (/undefined/i.test($('#anyQualities').find('option[value="32768"]').attr('selected'))) { if ($('#initial-quality').find('a.btn[data-quality="32768"]').hasClass('active')) {
tip$.fadeIn('fast', 'linear');
} else {
tip$.fadeOut('fast', 'linear'); tip$.fadeOut('fast', 'linear');
}
var tip$ = $('#no-upgrade'), tip2$ = $('#upgrade-cond');
if ($('#upgrade-quality').find('a.btn').hasClass('active')) {
tip$.fadeOut('fast', 'linear', function(){tip2$.fadeIn('fast', 'linear');});
} else { } else {
tip$.fadeIn('fast', 'linear'); tip2$.fadeOut('fast', 'linear', function(){tip$.fadeIn('fast', 'linear');});
} }
} }
$(document).ready(function() { $(function() {
var elQualityPreset = $('#qualityPreset'), var elQualityPreset = $('#quality-preset'),
selected = ':selected'; selected = ':selected';
elQualityPreset.change(function() { elQualityPreset.change(function() {
@ -42,7 +63,29 @@ $(document).ready(function() {
setFromPresets(elQualityPreset.find(selected).val()); setFromPresets(elQualityPreset.find(selected).val());
$('#anyQualities').change(function() { $('#initial-qualities').change(function() {
presentTips();
});
$('#custom-quality').find('a[href="#"].btn').on('click', function(event){
event.stopPropagation();
$(this).toggleClass('active');
var select$ = $('initial-quality' === $(this).closest('.component-desc').attr('id') ? '#initial-qualities' : '#upgrade-qualities'),
quality = $(this).data('quality'), arrSelected = $.map(select$.val(), function(v){return parseInt(v, 10)}) || Array();
if($(this).hasClass('active')){
arrSelected.push(quality);
} else {
arrSelected = arrSelected.filter(function(elem){
return elem !== quality;
});
}
select$.val(arrSelected).change();
presentTips(); presentTips();
return !1;
}); });
}); });

44
sickbeard/__init__.py

@ -37,7 +37,7 @@ sys.path.insert(1, os.path.abspath('../lib'))
from sickbeard import helpers, encodingKludge as ek from sickbeard import helpers, encodingKludge as ek
from sickbeard import db, image_cache, logger, naming, metadata, providers, scene_exceptions, scene_numbering, \ from sickbeard import db, image_cache, logger, naming, metadata, providers, scene_exceptions, scene_numbering, \
scheduler, auto_post_processer, search_queue, search_propers, search_recent, search_backlog, \ scheduler, auto_post_processer, search_queue, search_propers, search_recent, search_backlog, \
show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper, classes show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper, classes, properFinder
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator, minimax from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator, minimax
from sickbeard.common import SD, SKIPPED from sickbeard.common import SD, SKIPPED
from sickbeard.databases import mainDB, cache_db, failed_db from sickbeard.databases import mainDB, cache_db, failed_db
@ -153,6 +153,7 @@ ROOT_DIRS = None
TRASH_REMOVE_SHOW = False TRASH_REMOVE_SHOW = False
TRASH_ROTATE_LOGS = False TRASH_ROTATE_LOGS = False
HOME_SEARCH_FOCUS = True HOME_SEARCH_FOCUS = True
DISPLAY_FREESPACE = True
SORT_ARTICLE = False SORT_ARTICLE = False
DEBUG = False DEBUG = False
SHOW_TAGS = [] SHOW_TAGS = []
@ -209,8 +210,8 @@ USENET_RETENTION = None
TORRENT_METHOD = None TORRENT_METHOD = None
TORRENT_DIR = None TORRENT_DIR = None
DOWNLOAD_PROPERS = False DOWNLOAD_PROPERS = False
CHECK_PROPERS_INTERVAL = None
PROPERS_WEBDL_ONEGRP = True PROPERS_WEBDL_ONEGRP = True
WEBDL_TYPES = []
ALLOW_HIGH_PRIORITY = False ALLOW_HIGH_PRIORITY = False
NEWZNAB_DATA = '' NEWZNAB_DATA = ''
@ -462,6 +463,7 @@ FANART_LIMIT = None
FANART_PANEL = None FANART_PANEL = None
FANART_RATINGS = {} FANART_RATINGS = {}
HOME_LAYOUT = None HOME_LAYOUT = None
FOOTER_TIME_LAYOUT = 0
POSTER_SORTBY = None POSTER_SORTBY = None
POSTER_SORTDIR = None POSTER_SORTDIR = None
DISPLAY_SHOW_VIEWMODE = 0 DISPLAY_SHOW_VIEWMODE = 0
@ -573,7 +575,7 @@ def initialize(console_logging=True):
# Post processing # Post processing
global KEEP_PROCESSED_DIR global KEEP_PROCESSED_DIR
# Views # Views
global GUI_NAME, HOME_LAYOUT, POSTER_SORTBY, POSTER_SORTDIR, DISPLAY_SHOW_SPECIALS, \ global GUI_NAME, HOME_LAYOUT, FOOTER_TIME_LAYOUT, POSTER_SORTBY, POSTER_SORTDIR, DISPLAY_SHOW_SPECIALS, \
EPISODE_VIEW_LAYOUT, EPISODE_VIEW_SORT, EPISODE_VIEW_DISPLAY_PAUSED, \ EPISODE_VIEW_LAYOUT, EPISODE_VIEW_SORT, EPISODE_VIEW_DISPLAY_PAUSED, \
EPISODE_VIEW_MISSED_RANGE, EPISODE_VIEW_POSTERS, FANART_PANEL, FANART_RATINGS, \ EPISODE_VIEW_MISSED_RANGE, EPISODE_VIEW_POSTERS, FANART_PANEL, FANART_RATINGS, \
EPISODE_VIEW_VIEWMODE, EPISODE_VIEW_BACKGROUND, EPISODE_VIEW_BACKGROUND_TRANSLUCENT, \ EPISODE_VIEW_VIEWMODE, EPISODE_VIEW_BACKGROUND, EPISODE_VIEW_BACKGROUND_TRANSLUCENT, \
@ -585,7 +587,7 @@ def initialize(console_logging=True):
VERSION_NOTIFY, AUTO_UPDATE, UPDATE_FREQUENCY, NOTIFY_ON_UPDATE VERSION_NOTIFY, AUTO_UPDATE, UPDATE_FREQUENCY, NOTIFY_ON_UPDATE
# Gen Config/Interface # Gen Config/Interface
global THEME_NAME, DEFAULT_HOME, FANART_LIMIT, SHOWLIST_TAGVIEW, SHOW_TAGS, \ global THEME_NAME, DEFAULT_HOME, FANART_LIMIT, SHOWLIST_TAGVIEW, SHOW_TAGS, \
HOME_SEARCH_FOCUS, USE_IMDB_INFO, IMDB_ACCOUNTS, SORT_ARTICLE, FUZZY_DATING, TRIM_ZERO, \ HOME_SEARCH_FOCUS, USE_IMDB_INFO, IMDB_ACCOUNTS, DISPLAY_FREESPACE, SORT_ARTICLE, FUZZY_DATING, TRIM_ZERO, \
DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, TIMEZONE_DISPLAY, \ DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, TIMEZONE_DISPLAY, \
WEB_USERNAME, WEB_PASSWORD, CALENDAR_UNPROTECTED, USE_API, API_KEY, WEB_PORT, WEB_LOG, \ WEB_USERNAME, WEB_PASSWORD, CALENDAR_UNPROTECTED, USE_API, API_KEY, WEB_PORT, WEB_LOG, \
ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, WEB_IPV6, WEB_IPV64, HANDLE_REVERSE_PROXY, SEND_SECURITY_HEADERS ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, WEB_IPV6, WEB_IPV64, HANDLE_REVERSE_PROXY, SEND_SECURITY_HEADERS
@ -593,7 +595,7 @@ def initialize(console_logging=True):
global BRANCH, CUR_COMMIT_BRANCH, GIT_REMOTE, CUR_COMMIT_HASH, GIT_PATH, CPU_PRESET, ANON_REDIRECT, \ global BRANCH, CUR_COMMIT_BRANCH, GIT_REMOTE, CUR_COMMIT_HASH, GIT_PATH, CPU_PRESET, ANON_REDIRECT, \
ENCRYPTION_VERSION, PROXY_SETTING, PROXY_INDEXERS, FILE_LOGGING_PRESET ENCRYPTION_VERSION, PROXY_SETTING, PROXY_INDEXERS, FILE_LOGGING_PRESET
# Search Settings/Episode # Search Settings/Episode
global DOWNLOAD_PROPERS, PROPERS_WEBDL_ONEGRP, CHECK_PROPERS_INTERVAL, RECENTSEARCH_FREQUENCY, \ global DOWNLOAD_PROPERS, PROPERS_WEBDL_ONEGRP, WEBDL_TYPES, RECENTSEARCH_FREQUENCY, \
BACKLOG_DAYS, BACKLOG_NOFULL, BACKLOG_FREQUENCY, USENET_RETENTION, IGNORE_WORDS, REQUIRE_WORDS, \ BACKLOG_DAYS, BACKLOG_NOFULL, BACKLOG_FREQUENCY, USENET_RETENTION, IGNORE_WORDS, REQUIRE_WORDS, \
ALLOW_HIGH_PRIORITY, SEARCH_UNAIRED, UNAIRED_RECENT_SEARCH_ONLY ALLOW_HIGH_PRIORITY, SEARCH_UNAIRED, UNAIRED_RECENT_SEARCH_ONLY
# Search Settings/NZB search # Search Settings/NZB search
@ -727,6 +729,7 @@ def initialize(console_logging=True):
USE_IMDB_INFO = bool(check_setting_int(CFG, 'GUI', 'use_imdb_info', 1)) USE_IMDB_INFO = bool(check_setting_int(CFG, 'GUI', 'use_imdb_info', 1))
IMDB_ACCOUNTS = CFG.get('GUI', []).get('imdb_accounts', [IMDB_DEFAULT_LIST_ID, IMDB_DEFAULT_LIST_NAME]) IMDB_ACCOUNTS = CFG.get('GUI', []).get('imdb_accounts', [IMDB_DEFAULT_LIST_ID, IMDB_DEFAULT_LIST_NAME])
HOME_SEARCH_FOCUS = bool(check_setting_int(CFG, 'General', 'home_search_focus', HOME_SEARCH_FOCUS)) HOME_SEARCH_FOCUS = bool(check_setting_int(CFG, 'General', 'home_search_focus', HOME_SEARCH_FOCUS))
DISPLAY_FREESPACE = bool(check_setting_int(CFG, 'General', 'display_freespace', 1))
SORT_ARTICLE = bool(check_setting_int(CFG, 'General', 'sort_article', 0)) SORT_ARTICLE = bool(check_setting_int(CFG, 'General', 'sort_article', 0))
FUZZY_DATING = bool(check_setting_int(CFG, 'GUI', 'fuzzy_dating', 0)) FUZZY_DATING = bool(check_setting_int(CFG, 'GUI', 'fuzzy_dating', 0))
TRIM_ZERO = bool(check_setting_int(CFG, 'GUI', 'trim_zero', 0)) TRIM_ZERO = bool(check_setting_int(CFG, 'GUI', 'trim_zero', 0))
@ -843,9 +846,6 @@ def initialize(console_logging=True):
DOWNLOAD_PROPERS = bool(check_setting_int(CFG, 'General', 'download_propers', 1)) DOWNLOAD_PROPERS = bool(check_setting_int(CFG, 'General', 'download_propers', 1))
PROPERS_WEBDL_ONEGRP = bool(check_setting_int(CFG, 'General', 'propers_webdl_onegrp', 1)) PROPERS_WEBDL_ONEGRP = bool(check_setting_int(CFG, 'General', 'propers_webdl_onegrp', 1))
CHECK_PROPERS_INTERVAL = check_setting_str(CFG, 'General', 'check_propers_interval', '')
if CHECK_PROPERS_INTERVAL not in ('15m', '45m', '90m', '4h', 'daily'):
CHECK_PROPERS_INTERVAL = 'daily'
ALLOW_HIGH_PRIORITY = bool(check_setting_int(CFG, 'General', 'allow_high_priority', 1)) ALLOW_HIGH_PRIORITY = bool(check_setting_int(CFG, 'General', 'allow_high_priority', 1))
@ -1151,6 +1151,7 @@ def initialize(console_logging=True):
METADATA_KODI = check_setting_str(CFG, 'General', 'metadata_kodi', '0|0|0|0|0|0|0|0|0|0') METADATA_KODI = check_setting_str(CFG, 'General', 'metadata_kodi', '0|0|0|0|0|0|0|0|0|0')
HOME_LAYOUT = check_setting_str(CFG, 'GUI', 'home_layout', 'poster') HOME_LAYOUT = check_setting_str(CFG, 'GUI', 'home_layout', 'poster')
FOOTER_TIME_LAYOUT = check_setting_int(CFG, 'GUI', 'footer_time_layout', 0)
POSTER_SORTBY = check_setting_str(CFG, 'GUI', 'poster_sortby', 'name') POSTER_SORTBY = check_setting_str(CFG, 'GUI', 'poster_sortby', 'name')
POSTER_SORTDIR = check_setting_int(CFG, 'GUI', 'poster_sortdir', 1) POSTER_SORTDIR = check_setting_int(CFG, 'GUI', 'poster_sortdir', 1)
DISPLAY_SHOW_VIEWMODE = check_setting_int(CFG, 'GUI', 'display_show_viewmode', 0) DISPLAY_SHOW_VIEWMODE = check_setting_int(CFG, 'GUI', 'display_show_viewmode', 0)
@ -1371,19 +1372,17 @@ def initialize(console_logging=True):
prevent_cycle_run=searchQueueScheduler.action.is_standard_backlog_in_progress) prevent_cycle_run=searchQueueScheduler.action.is_standard_backlog_in_progress)
propers_searcher = search_propers.ProperSearcher() propers_searcher = search_propers.ProperSearcher()
item = [(k, n, v) for (k, n, v) in propers_searcher.search_intervals if k == CHECK_PROPERS_INTERVAL] last_proper_search = datetime.datetime.fromtimestamp(properFinder.get_last_proper_search())
if item: time_diff = datetime.timedelta(days=1) - (datetime.datetime.now() - last_proper_search)
update_interval = datetime.timedelta(minutes=item[0][2]) if time_diff < datetime.timedelta(seconds=0):
run_at = None properdelay = 20
else: else:
update_interval = datetime.timedelta(hours=1) properdelay = helpers.tryInt((time_diff.total_seconds() / 60) + 5, 20)
run_at = datetime.time(hour=1) # 1 AM
properFinderScheduler = scheduler.Scheduler( properFinderScheduler = scheduler.Scheduler(
propers_searcher, propers_searcher,
cycleTime=update_interval, cycleTime=datetime.timedelta(days=1),
run_delay=update_interval, run_delay=datetime.timedelta(minutes=properdelay),
start_time=run_at,
threadName='FINDPROPERS', threadName='FINDPROPERS',
prevent_cycle_run=searchQueueScheduler.action.is_propersearch_in_progress) prevent_cycle_run=searchQueueScheduler.action.is_propersearch_in_progress)
@ -1416,10 +1415,8 @@ def enabled_schedulers(is_init=False):
# ([], [traktCheckerScheduler])[USE_TRAKT] + \ # ([], [traktCheckerScheduler])[USE_TRAKT] + \
for s in ([], [events])[is_init] + \ for s in ([], [events])[is_init] + \
[recentSearchScheduler, backlogSearchScheduler, showUpdateScheduler, [recentSearchScheduler, backlogSearchScheduler, showUpdateScheduler,
versionCheckScheduler, showQueueScheduler, searchQueueScheduler] + \ versionCheckScheduler, showQueueScheduler, searchQueueScheduler, properFinderScheduler,
([], [properFinderScheduler])[DOWNLOAD_PROPERS] + \ autoPostProcesserScheduler, subtitlesFinderScheduler] + \
([], [autoPostProcesserScheduler])[PROCESS_AUTOMATICALLY] + \
([], [subtitlesFinderScheduler])[USE_SUBTITLES] + \
([events], [])[is_init]: ([events], [])[is_init]:
yield s yield s
@ -1503,7 +1500,7 @@ def halt():
logger.log('Fail, thread %s did not exit' % ADBA_CONNECTION.name) logger.log('Fail, thread %s did not exit' % ADBA_CONNECTION.name)
for thread in enabled_schedulers(): for thread in enabled_schedulers():
thread.stop.set() thread.stop()
for thread in enabled_schedulers(): for thread in enabled_schedulers():
try: try:
@ -1577,7 +1574,6 @@ def save_config():
new_config['General']['update_frequency'] = int(UPDATE_FREQUENCY) new_config['General']['update_frequency'] = int(UPDATE_FREQUENCY)
new_config['General']['download_propers'] = int(DOWNLOAD_PROPERS) new_config['General']['download_propers'] = int(DOWNLOAD_PROPERS)
new_config['General']['propers_webdl_onegrp'] = int(PROPERS_WEBDL_ONEGRP) new_config['General']['propers_webdl_onegrp'] = int(PROPERS_WEBDL_ONEGRP)
new_config['General']['check_propers_interval'] = CHECK_PROPERS_INTERVAL
new_config['General']['allow_high_priority'] = int(ALLOW_HIGH_PRIORITY) new_config['General']['allow_high_priority'] = int(ALLOW_HIGH_PRIORITY)
new_config['General']['recentsearch_startup'] = int(RECENTSEARCH_STARTUP) new_config['General']['recentsearch_startup'] = int(RECENTSEARCH_STARTUP)
new_config['General']['backlog_nofull'] = int(BACKLOG_NOFULL) new_config['General']['backlog_nofull'] = int(BACKLOG_NOFULL)
@ -1615,6 +1611,7 @@ def save_config():
new_config['General']['trash_remove_show'] = int(TRASH_REMOVE_SHOW) new_config['General']['trash_remove_show'] = int(TRASH_REMOVE_SHOW)
new_config['General']['trash_rotate_logs'] = int(TRASH_ROTATE_LOGS) new_config['General']['trash_rotate_logs'] = int(TRASH_ROTATE_LOGS)
new_config['General']['home_search_focus'] = int(HOME_SEARCH_FOCUS) new_config['General']['home_search_focus'] = int(HOME_SEARCH_FOCUS)
new_config['General']['display_freespace'] = int(DISPLAY_FREESPACE)
new_config['General']['sort_article'] = int(SORT_ARTICLE) new_config['General']['sort_article'] = int(SORT_ARTICLE)
new_config['General']['proxy_setting'] = PROXY_SETTING new_config['General']['proxy_setting'] = PROXY_SETTING
new_config['General']['proxy_indexers'] = int(PROXY_INDEXERS) new_config['General']['proxy_indexers'] = int(PROXY_INDEXERS)
@ -1950,6 +1947,7 @@ def save_config():
new_config['GUI']['showlist_tagview'] = SHOWLIST_TAGVIEW new_config['GUI']['showlist_tagview'] = SHOWLIST_TAGVIEW
new_config['GUI']['home_layout'] = HOME_LAYOUT new_config['GUI']['home_layout'] = HOME_LAYOUT
new_config['GUI']['footer_time_layout'] = FOOTER_TIME_LAYOUT
new_config['GUI']['poster_sortby'] = POSTER_SORTBY new_config['GUI']['poster_sortby'] = POSTER_SORTBY
new_config['GUI']['poster_sortdir'] = POSTER_SORTDIR new_config['GUI']['poster_sortdir'] = POSTER_SORTDIR

6
sickbeard/auto_post_processer.py

@ -27,6 +27,12 @@ class PostProcesser():
def __init__(self): def __init__(self):
self.amActive = False self.amActive = False
@staticmethod
def check_paused():
if sickbeard.PROCESS_AUTOMATICALLY:
return False
return True
def run(self): def run(self):
if not sickbeard.PROCESS_AUTOMATICALLY: if not sickbeard.PROCESS_AUTOMATICALLY:
return return

22
sickbeard/common.py

@ -152,6 +152,10 @@ class Quality:
.replace('720p', 'HD720p').replace('HD TV', 'HD720p').replace('RawHD TV', 'RawHD')) .replace('720p', 'HD720p').replace('HD TV', 'HD720p').replace('RawHD TV', 'RawHD'))
@staticmethod @staticmethod
def get_quality_ui(quality):
return Quality.qualityStrings[quality].replace('SD DVD', 'SD DVD/BR/BD')
@staticmethod
def _getStatusStrings(status): def _getStatusStrings(status):
toReturn = {} toReturn = {}
for x in Quality.qualityStrings.keys(): for x in Quality.qualityStrings.keys():
@ -546,6 +550,24 @@ class neededQualities(object):
if isinstance(v, bool) and True is v: if isinstance(v, bool) and True is v:
self.need_sd = self.need_hd = self.need_uhd = self.need_webdl = True self.need_sd = self.need_hd = self.need_uhd = self.need_webdl = True
def all_show_qualities_needed(self, show):
from sickbeard.tv import TVShow
if isinstance(show, TVShow):
init, upgrade = Quality.splitQuality(show.quality)
all_qual = set(init + upgrade)
need_sd = need_hd = need_uhd = need_webdl = False
for wanted_qualities in all_qual:
if not need_sd and wanted_qualities <= neededQualities.max_sd:
need_sd = True
if not need_hd and wanted_qualities in neededQualities.hd_qualities:
need_hd = True
if not need_webdl and wanted_qualities in neededQualities.webdl_qualities:
need_webdl = True
if not need_uhd and wanted_qualities > neededQualities.max_hd:
need_uhd = True
return self.need_sd == need_sd and self.need_hd == need_hd and self.need_webdl == need_webdl and \
self.need_uhd == need_uhd
def check_needed_types(self, show): def check_needed_types(self, show):
if getattr(show, 'is_anime', False): if getattr(show, 'is_anime', False):
self.need_anime = True self.need_anime = True

28
sickbeard/config.py

@ -197,15 +197,7 @@ def change_DOWNLOAD_PROPERS(download_propers):
return return
sickbeard.DOWNLOAD_PROPERS = download_propers sickbeard.DOWNLOAD_PROPERS = download_propers
if sickbeard.DOWNLOAD_PROPERS: sickbeard.properFinderScheduler.check_paused()
sickbeard.properFinderScheduler.start()
else:
sickbeard.properFinderScheduler.stop.set()
logger.log(u'Waiting for the PROPERFINDER thread to exit')
try:
sickbeard.properFinderScheduler.join(10)
except:
pass
def change_USE_TRAKT(use_trakt): def change_USE_TRAKT(use_trakt):
@ -216,7 +208,7 @@ def change_USE_TRAKT(use_trakt):
# if sickbeard.USE_TRAKT: # if sickbeard.USE_TRAKT:
# sickbeard.traktCheckerScheduler.start() # sickbeard.traktCheckerScheduler.start()
# else: # else:
# sickbeard.traktCheckerScheduler.stop.set() # sickbeard.traktCheckerScheduler.stop()
# logger.log(u'Waiting for the TRAKTCHECKER thread to exit') # logger.log(u'Waiting for the TRAKTCHECKER thread to exit')
# try: # try:
# sickbeard.traktCheckerScheduler.join(10) # sickbeard.traktCheckerScheduler.join(10)
@ -229,21 +221,7 @@ def change_USE_SUBTITLES(use_subtitles):
return return
sickbeard.USE_SUBTITLES = use_subtitles sickbeard.USE_SUBTITLES = use_subtitles
if sickbeard.USE_SUBTITLES and not sickbeard.subtitlesFinderScheduler.isAlive(): sickbeard.subtitlesFinderScheduler.check_paused()
sickbeard.subtitlesFinderScheduler = sickbeard.scheduler.Scheduler(
sickbeard.subtitles.SubtitlesFinder(),
cycleTime=datetime.timedelta(hours=sickbeard.SUBTITLES_FINDER_FREQUENCY),
threadName='FINDSUBTITLES', silent=False)
sickbeard.subtitlesFinderScheduler.start()
else:
sickbeard.subtitlesFinderScheduler.stop.set()
sickbeard.subtitlesFinderScheduler.silent = True
threadname = sickbeard.subtitlesFinderScheduler.name
try:
sickbeard.subtitlesFinderScheduler.join(10)
logger.log('Thread %s has exit' % threadname)
except RuntimeError:
logger.log('Fail, thread %s did not exit' % threadname)
def CheckSection(CFG, sec): def CheckSection(CFG, sec):

125
sickbeard/databases/cache_db.py

@ -17,91 +17,90 @@
# along with SickGear. If not, see <http://www.gnu.org/licenses/>. # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from sickbeard import db from sickbeard import db
from collections import OrderedDict
import re
MIN_DB_VERSION = 1 MIN_DB_VERSION = 1
MAX_DB_VERSION = 3 MAX_DB_VERSION = 4
TEST_BASE_VERSION = None # the base production db version, only needed for TEST db versions (>=100000)
# Add new migrations at the bottom of the list; subclass the previous migration. # Add new migrations at the bottom of the list; subclass the previous migration.
class InitialSchema(db.SchemaUpgrade): class InitialSchema(db.SchemaUpgrade):
def test(self): def __init__(self, connection):
return self.hasTable('lastUpdate') super(InitialSchema, self).__init__(connection)
def execute(self): self.queries = OrderedDict([
queries = [ ('base', [
'CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)', 'CREATE TABLE lastUpdate(provider TEXT, time NUMERIC)',
'CREATE TABLE lastSearch (provider TEXT, time NUMERIC)', 'CREATE TABLE lastSearch(provider TEXT, time NUMERIC)',
'CREATE TABLE db_version (db_version INTEGER)', 'CREATE TABLE db_version(db_version INTEGER)',
'INSERT INTO db_version (db_version) VALUES (1)', 'INSERT INTO db_version(db_version) VALUES (1)',
'CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)', 'CREATE TABLE network_timezones(network_name TEXT PRIMARY KEY, timezone TEXT)'
'CREATE TABLE network_conversions (' ]),
('consolidate_providers', [
'CREATE TABLE provider_cache(provider TEXT, name TEXT, season NUMERIC, episodes TEXT,'
' indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, version NUMERIC)',
'CREATE TABLE network_conversions('
'tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)', 'tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)',
'CREATE INDEX tvrage_idx on network_conversions (tvrage_network, tvrage_country)', 'CREATE INDEX tvrage_idx ON network_conversions(tvrage_network, tvrage_country)'
'CREATE TABLE provider_cache (provider TEXT ,name TEXT, season NUMERIC, episodes TEXT,' ]),
' indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, ' ('add_backlogparts', [
'version NUMERIC)', 'CREATE TABLE backlogparts('
'CREATE TABLE IF NOT EXISTS "backlogparts" ("part" NUMERIC NOT NULL ,' 'part NUMERIC NOT NULL, indexer NUMERIC NOT NULL, indexerid NUMERIC NOT NULL)',
' "indexer" NUMERIC NOT NULL , "indexerid" NUMERIC NOT NULL )', 'CREATE TABLE lastrecentsearch(name TEXT PRIMARY KEY NOT NULL, datetime NUMERIC NOT NULL)'
'CREATE TABLE IF NOT EXISTS "lastrecentsearch" ("name" TEXT PRIMARY KEY NOT NULL' ]),
' , "datetime" NUMERIC NOT NULL )', ('add_provider_fails', [
] 'CREATE TABLE provider_fails(prov_name TEXT, fail_type INTEGER, fail_code INTEGER, fail_time NUMERIC)',
for query in queries: 'CREATE INDEX idx_prov_name_error ON provider_fails (prov_name)',
self.connection.action(query) 'CREATE UNIQUE INDEX idx_prov_errors ON provider_fails (prov_name, fail_time)',
self.setDBVersion(3) 'CREATE TABLE provider_fails_count(prov_name TEXT PRIMARY KEY,'
' failure_count NUMERIC, failure_time NUMERIC,'
' tmr_limit_count NUMERIC, tmr_limit_time NUMERIC, tmr_limit_wait NUMERIC)'
])
])
class ConsolidateProviders(InitialSchema):
def test(self): def test(self):
return self.checkDBVersion() > 1 return self.hasTable('lastUpdate')
def execute(self): def execute(self):
self.do_query(self.queries.values())
self.setDBVersion(MAX_DB_VERSION)
def backup(self):
db.backup_database('cache.db', self.checkDBVersion()) db.backup_database('cache.db', self.checkDBVersion())
if self.hasTable('provider_cache'):
self.connection.action('DROP TABLE provider_cache')
self.connection.action('CREATE TABLE provider_cache (provider TEXT, name TEXT, season NUMERIC, episodes TEXT, '
'indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, '
'version NUMERIC)')
if not self.hasTable('network_conversions'):
self.connection.action('CREATE TABLE network_conversions ' +
'(tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)')
self.connection.action('CREATE INDEX tvrage_idx ' +
'on network_conversions (tvrage_network, tvrage_country)')
keep_tables = set(['lastUpdate', 'lastSearch', 'db_version', class ConsolidateProviders(InitialSchema):
'network_timezones', 'network_conversions', 'provider_cache']) def test(self):
current_tables = set(self.listTables()) return 1 < self.checkDBVersion()
remove_tables = list(current_tables - keep_tables)
for table in remove_tables:
self.connection.action('DROP TABLE [%s]' % table)
self.incDBVersion() def execute(self):
self.backup()
keep_tables = {'lastUpdate', 'lastSearch', 'db_version',
'network_timezones', 'network_conversions', 'provider_cache'}
# old provider_cache is dropped before re-creation
self.do_query(['DROP TABLE [provider_cache]'] + self.queries['consolidate_providers'] +
['DROP TABLE [%s]' % t for t in (set(self.listTables()) - keep_tables)])
self.finish(True)
class AddBacklogParts(ConsolidateProviders): class AddBacklogParts(ConsolidateProviders):
def test(self): def test(self):
return self.checkDBVersion() > 2 return 2 < self.checkDBVersion()
def execute(self): def execute(self):
self.backup()
self.do_query(self.queries['add_backlogparts'] +
['DROP TABLE [%s]' % t for t in ('scene_names', 'scene_exceptions_refresh', 'scene_exceptions')])
self.finish(True)
db.backup_database('cache.db', self.checkDBVersion())
if self.hasTable('scene_names'):
self.connection.action('DROP TABLE scene_names')
if not self.hasTable('backlogparts'):
self.connection.action('CREATE TABLE IF NOT EXISTS "backlogparts" ("part" NUMERIC NOT NULL ,'
' "indexer" NUMERIC NOT NULL , "indexerid" NUMERIC NOT NULL )')
if not self.hasTable('lastrecentsearch'):
self.connection.action('CREATE TABLE IF NOT EXISTS "lastrecentsearch" ("name" TEXT PRIMARY KEY NOT NULL'
' , "datetime" NUMERIC NOT NULL )')
if self.hasTable('scene_exceptions_refresh'): class AddProviderFailureHandling(AddBacklogParts):
self.connection.action('DROP TABLE scene_exceptions_refresh') def test(self):
if self.hasTable('scene_exceptions'): return 3 < self.checkDBVersion()
self.connection.action('DROP TABLE scene_exceptions')
self.connection.action('VACUUM')
self.incDBVersion() def execute(self):
self.backup()
self.do_query(self.queries['add_provider_fails'])
self.finish()

1
sickbeard/databases/failed_db.py

@ -21,6 +21,7 @@ from sickbeard.common import Quality
MIN_DB_VERSION = 1 MIN_DB_VERSION = 1
MAX_DB_VERSION = 1 MAX_DB_VERSION = 1
TEST_BASE_VERSION = None # the base production db version, only needed for TEST db versions (>=100000)
# Add new migrations at the bottom of the list; subclass the previous migration. # Add new migrations at the bottom of the list; subclass the previous migration.
class InitialSchema(db.SchemaUpgrade): class InitialSchema(db.SchemaUpgrade):

23
sickbeard/databases/mainDB.py

@ -27,7 +27,8 @@ from sickbeard import encodingKludge as ek
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
MIN_DB_VERSION = 9 # oldest db version we support migrating from MIN_DB_VERSION = 9 # oldest db version we support migrating from
MAX_DB_VERSION = 20006 MAX_DB_VERSION = 20008
TEST_BASE_VERSION = None # the base production db version, only needed for TEST db versions (>=100000)
class MainSanityCheck(db.DBSanityCheck): class MainSanityCheck(db.DBSanityCheck):
@ -1258,3 +1259,23 @@ class AddFlagTable(db.SchemaUpgrade):
self.setDBVersion(20006) self.setDBVersion(20006)
return self.checkDBVersion() return self.checkDBVersion()
# 20006 -> 20007
class DBIncreaseTo20007(db.SchemaUpgrade):
def execute(self):
logger.log(u'Bumping database version')
self.setDBVersion(20007)
return self.checkDBVersion()
# 20007 -> 20008
class AddWebdlTypesTable(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
self.connection.action('CREATE TABLE webdl_types (dname TEXT NOT NULL , regex TEXT NOT NULL )')
self.setDBVersion(20008)
return self.checkDBVersion()

24
sickbeard/db.py

@ -432,6 +432,26 @@ class SchemaUpgrade(object):
tables.append(table[0]) tables.append(table[0])
return tables return tables
def do_query(self, queries):
if not isinstance(queries, list):
queries = list(queries)
elif isinstance(queries[0], list):
queries = [item for sublist in queries for item in sublist]
for query in queries:
tbl_name = re.findall('(?i)DROP.*?TABLE.*?\[?([^\s\]]+)', query)
if tbl_name and not self.hasTable(tbl_name[0]):
continue
tbl_name = re.findall('(?i)CREATE.*?TABLE.*?\s([^\s(]+)\s*\(', query)
if tbl_name and self.hasTable(tbl_name[0]):
continue
self.connection.action(query)
def finish(self, tbl_dropped=False):
if tbl_dropped:
self.connection.action('VACUUM')
self.incDBVersion()
def MigrationCode(myDB): def MigrationCode(myDB):
schema = { schema = {
@ -492,7 +512,9 @@ def MigrationCode(myDB):
20002: sickbeard.mainDB.AddTvShowTags, 20002: sickbeard.mainDB.AddTvShowTags,
20003: sickbeard.mainDB.ChangeMapIndexer, 20003: sickbeard.mainDB.ChangeMapIndexer,
20004: sickbeard.mainDB.AddShowNotFoundCounter, 20004: sickbeard.mainDB.AddShowNotFoundCounter,
20005: sickbeard.mainDB.AddFlagTable 20005: sickbeard.mainDB.AddFlagTable,
20006: sickbeard.mainDB.DBIncreaseTo20007,
20007: sickbeard.mainDB.AddWebdlTypesTable,
# 20002: sickbeard.mainDB.AddCoolSickGearFeature3, # 20002: sickbeard.mainDB.AddCoolSickGearFeature3,
} }

31
sickbeard/event_queue.py

@ -2,44 +2,49 @@ from lib.six import moves
import threading import threading
class Event: class Event:
def __init__(self, type): def __init__(self, etype):
self._type = type self._type = etype
@property @property
def type(self): def type(self):
return self._type return self._type
class Events(threading.Thread): class Events(threading.Thread):
def __init__(self, callback): def __init__(self, callback):
super(Events, self).__init__() super(Events, self).__init__()
self.queue = moves.queue.Queue() self.queue = moves.queue.Queue()
self.daemon = True self.daemon = True
self.callback = callback self.callback = callback
self.name = "EVENT-QUEUE" self.name = 'EVENT-QUEUE'
self.stop = threading.Event() self._stop = threading.Event()
def put(self, etype):
self.queue.put(etype)
def put(self, type): def stop(self):
self.queue.put(type) self._stop.set()
def run(self): def run(self):
while (not self.stop.is_set()): while not self._stop.is_set():
try: try:
# get event type # get event type
type = self.queue.get(True, 1) etype = self.queue.get(True, 1)
# perform callback if we got a event type # perform callback if we got a event type
self.callback(type) self.callback(etype)
# event completed # event completed
self.queue.task_done() self.queue.task_done()
except moves.queue.Empty: except moves.queue.Empty:
type = None pass
# exiting thread # exiting thread
self.stop.clear() self._stop.clear()
# System Events # System Events
class SystemEvent(Event): class SystemEvent(Event):
RESTART = "RESTART" RESTART = 'RESTART'
SHUTDOWN = "SHUTDOWN" SHUTDOWN = 'SHUTDOWN'

61
sickbeard/helpers.py

@ -275,7 +275,7 @@ def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
def sizeof_fmt(num): def sizeof_fmt(num):
for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: for x in ['bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']:
if num < 1024.0: if num < 1024.0:
return "%3.1f %s" % (num, x) return "%3.1f %s" % (num, x)
num /= 1024.0 num /= 1024.0
@ -1556,3 +1556,62 @@ def datetime_to_epoch(dt):
dt = dt.replace(tzinfo=sb_timezone) dt = dt.replace(tzinfo=sb_timezone)
utc_naive = dt.replace(tzinfo=None) - dt.utcoffset() utc_naive = dt.replace(tzinfo=None) - dt.utcoffset()
return int((utc_naive - datetime.datetime(1970, 1, 1)).total_seconds()) return int((utc_naive - datetime.datetime(1970, 1, 1)).total_seconds())
def df():
"""
Return disk free space at known parent locations
:return: string path, string value that is formatted size
:rtype: list of tuples
"""
result = []
min_output = True
if sickbeard.ROOT_DIRS and sickbeard.DISPLAY_FREESPACE:
targets = []
for path in sickbeard.ROOT_DIRS.split('|')[1:]:
location_parts = os.path.splitdrive(path)
target = location_parts[0]
if 'win32' == sys.platform:
if not re.match('(?i)[a-z]:(?:\\\\)?$', target):
# simple drive letter not found, fallback to full path
target = path
min_output = False
elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform:
target = path
min_output = False
if target and target not in targets:
targets += [target]
free = freespace(path)
if None is not free:
result += [(target, sizeof_fmt(free).replace(' ', ''))]
return result, min_output
def freespace(path=None):
"""
Return free space available at path location
:param path: Example paths (Windows) = '\\\\192.168.0.1\\sharename\\existing_path', 'd:\\existing_path'
Untested with mount points under linux
:type path: basestring
:return: Size in bytes
:rtype: long
"""
result = None
if 'win32' == sys.platform:
try:
import ctypes
if None is not ctypes:
max_val = (2 ** 64) - 1
storage = ctypes.c_ulonglong(max_val)
ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(path), None, None, ctypes.pointer(storage))
result = (storage.value, None)[max_val == storage.value]
except(StandardError, Exception):
pass
elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform:
storage = os.statvfs(path)
result = storage.f_bavail * storage.f_frsize
return result

9
sickbeard/name_parser/parser.py

@ -113,7 +113,7 @@ class NameParser(object):
return return
matches = [] matches = []
initial_best_result = None
for reg_ex in self.compiled_regexes: for reg_ex in self.compiled_regexes:
for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes[reg_ex]: for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes[reg_ex]:
new_name = helpers.remove_non_release_groups(name, 'anime' in cur_regex_name) new_name = helpers.remove_non_release_groups(name, 'anime' in cur_regex_name)
@ -266,6 +266,13 @@ class NameParser(object):
# if this is a naming pattern test then return best result # if this is a naming pattern test then return best result
if not show or self.naming_pattern: if not show or self.naming_pattern:
if not show and not self.naming_pattern and not self.testing:
# ensure anime regex test but use initial best if show still not found
if 0 == reg_ex:
initial_best_result = best_result
matches = [] # clear non-anime match scores
continue
return initial_best_result
return best_result return best_result
# get quality # get quality

43
sickbeard/name_parser/regexes.py

@ -302,15 +302,15 @@ anime_regexes = [
# Bleach s16e03e04 313-314 # Bleach s16e03e04 313-314
''' '''
^(\[(?P<release_group>.+?)\][ ._-]*)? ^(\[(?P<release_group>.+?)\][ ._-]*)?
(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optinal separator (?P<series_name>.+?)[ ._-]+ # start of string and series name and non optional separator
[sS](?P<season_num>\d+)[. _-]* # S01 and optional separator [sS](?P<season_num>\d+)[. _-]* # S01 and optional separator
[eE](?P<ep_num>\d+) # epipisode E02 [eE](?P<ep_num>\d+) # episode E02
(([. _-]*e|-) # linking e/- char (([. _-]*e|-) # linking e/- char
(?P<extra_ep_num>\d+))* # additional E03/etc (?P<extra_ep_num>\d+))* # additional E03/etc
([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be ([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be
# there(->{2,}) "s16e03-04-313-314" would make sens any way # there(->{2,}) "s16e03-04-313-314" would make sens any way
(?P<ep_ab_num>\d{1,3}) # absolute number (?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p) # absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and anditional absolute number, all optinal (-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and additional absolute number, all optional
(v(?P<version>[0-9]))? # the version e.g. "v2" (v(?P<version>[0-9]))? # the version e.g. "v2"
.*? .*?
''' '''
@ -321,15 +321,15 @@ anime_regexes = [
# Bleach.s16e03-04.313-314 # Bleach.s16e03-04.313-314
# Bleach s16e03e04 313-314 # Bleach s16e03e04 313-314
''' '''
^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optinal separator ^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optional separator
(?P<season_num>\d+)[. _-]* # S01 and optional separator (?P<season_num>\d+)[. _-]* # S01 and optional separator
[xX](?P<ep_num>\d+) # epipisode E02 [xX](?P<ep_num>\d+) # episode E02
(([. _-]*e|-) # linking e/- char (([. _-]*e|-) # linking e/- char
(?P<extra_ep_num>\d+))* # additional E03/etc (?P<extra_ep_num>\d+))* # additional E03/etc
([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be ([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be
# there(->{2,}) "s16e03-04-313-314" would make sens any way # there(->{2,}) "s16e03-04-313-314" would make sens any way
(?P<ep_ab_num>\d{1,3}) # absolute number (?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p) # absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and anditional absolute number, all optinal (-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and additional absolute number, all optional
(v(?P<version>[0-9]))? # the version e.g. "v2" (v(?P<version>[0-9]))? # the version e.g. "v2"
.*? .*?
''' '''
@ -338,14 +338,14 @@ anime_regexes = [
('anime_and_normal_reverse', ('anime_and_normal_reverse',
# Bleach - 313-314 - s16e03-04 # Bleach - 313-314 - s16e03-04
''' '''
^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optinal separator ^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optional separator
(?P<ep_ab_num>\d{1,3}) # absolute number (?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p) # absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and anditional absolute number, all optinal (-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and additional absolute number, all optional
(v(?P<version>[0-9]))? # the version e.g. "v2" (v(?P<version>[0-9]))? # the version e.g. "v2"
([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be ([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be
# there(->{2,}) "s16e03-04-313-314" would make sens any way # there(->{2,}) "s16e03-04-313-314" would make sens any way
[sS](?P<season_num>\d+)[. _-]* # S01 and optional separator [sS](?P<season_num>\d+)[. _-]* # S01 and optional separator
[eE](?P<ep_num>\d+) # epipisode E02 [eE](?P<ep_num>\d+) # episode E02
(([. _-]*e|-) # linking e/- char (([. _-]*e|-) # linking e/- char
(?P<extra_ep_num>\d+))* # additional E03/etc (?P<extra_ep_num>\d+))* # additional E03/etc
.*? .*?
@ -355,8 +355,8 @@ anime_regexes = [
('anime_and_normal_front', ('anime_and_normal_front',
# 165.Naruto Shippuuden.s08e014 # 165.Naruto Shippuuden.s08e014
''' '''
^(?P<ep_ab_num>\d{1,3}) # start of string and absolute number ^(?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p) # start of string and absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and anditional absolute number, all optinal (-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and additional absolute number, all optional
(v(?P<version>[0-9]))?[ ._-]+ # the version e.g. "v2" (v(?P<version>[0-9]))?[ ._-]+ # the version e.g. "v2"
(?P<series_name>.+?)[ ._-]+ (?P<series_name>.+?)[ ._-]+
[sS](?P<season_num>\d+)[. _-]* # S01 and optional separator [sS](?P<season_num>\d+)[. _-]* # S01 and optional separator
@ -371,7 +371,7 @@ anime_regexes = [
''' '''
^(?:\[(?P<release_group>.+?)\][ ._-]*) ^(?:\[(?P<release_group>.+?)\][ ._-]*)
(?P<series_name>.+?)[ ._-]+ (?P<series_name>.+?)[ ._-]+
(?P<ep_ab_num>\d{1,3}) (?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p)
(-(?P<extra_ab_ep_num>\d{1,3}))*[ ._-]*? (-(?P<extra_ab_ep_num>\d{1,3}))*[ ._-]*?
(?:v(?P<version>[0-9])[ ._-]+?)? (?:v(?P<version>[0-9])[ ._-]+?)?
(?:.+?[ ._-]+?)? (?:.+?[ ._-]+?)?
@ -381,8 +381,19 @@ anime_regexes = [
''' '''
), ),
('anime_bare', ('anime_bare_ep',
# One Piece - 102 # One Piece - 102
# Show Name 123 - 001
'''
^(?:\[(?P<release_group>.+?)\][ ._-]*)?
(?P<series_name>.+?)[ ._-]+[ ._-]{2,} # Show_Name and min 2 char separator
(?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p) # 1/001, while avoiding H.264 and 1080p from being matched
(-(?P<extra_ab_ep_num>\d{1,3}))*[ ._-]* # 2/002
(?:v(?P<version>[0-9]))? # v2
'''
),
('anime_bare',
# [ACX]_Wolf's_Spirit_001.mkv # [ACX]_Wolf's_Spirit_001.mkv
''' '''
^(\[(?P<release_group>.+?)\][ ._-]*)? ^(\[(?P<release_group>.+?)\][ ._-]*)?

5
sickbeard/network_timezones.py

@ -226,7 +226,10 @@ def update_network_dict():
try: try:
for line in url_data.splitlines(): for line in url_data.splitlines():
(key, val) = line.decode('utf-8').strip().rsplit(u':', 1) try:
(key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
except (StandardError, Exception):
continue
if key is None or val is None: if key is None or val is None:
continue continue
d[key] = val d[key] = val

240
sickbeard/properFinder.py

@ -27,27 +27,28 @@ import sickbeard
from sickbeard import db, exceptions, helpers, history, logger, search, show_name_helpers from sickbeard import db, exceptions, helpers, history, logger, search, show_name_helpers
from sickbeard import encodingKludge as ek from sickbeard import encodingKludge as ek
from sickbeard.common import DOWNLOADED, SNATCHED_ANY, SNATCHED_PROPER, Quality, ARCHIVED, FAILED from sickbeard.common import DOWNLOADED, SNATCHED_ANY, SNATCHED_PROPER, Quality, ARCHIVED, FAILED, neededQualities
from sickbeard.exceptions import ex, MultipleShowObjectsException from sickbeard.exceptions import ex, MultipleShowObjectsException
from sickbeard import failed_history from sickbeard import failed_history
from sickbeard.history import dateFormat from sickbeard.history import dateFormat
from sickbeard.sbdatetime import sbdatetime
from name_parser.parser import NameParser, InvalidNameException, InvalidShowException from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
def search_propers(): def search_propers(proper_list=None):
if not sickbeard.DOWNLOAD_PROPERS: if not sickbeard.DOWNLOAD_PROPERS:
return return
logger.log(u'Beginning search for new propers') logger.log(('Checking propers from recent search', 'Beginning search for new propers')[None is proper_list])
age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14 age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14
aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows) aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime) aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime)
recent_shows, recent_anime = _recent_history(aired_since_shows, aired_since_anime) recent_shows, recent_anime = _recent_history(aired_since_shows, aired_since_anime)
if recent_shows or recent_anime: if recent_shows or recent_anime:
propers = _get_proper_list(aired_since_shows, recent_shows, recent_anime) propers = _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=proper_list)
if propers: if propers:
_download_propers(propers) _download_propers(propers)
@ -55,52 +56,59 @@ def search_propers():
logger.log(u'No downloads or snatches found for the last %s%s days to use for a propers search' % logger.log(u'No downloads or snatches found for the last %s%s days to use for a propers search' %
(age_shows, ('', ' (%s for anime)' % age_anime)[helpers.has_anime()])) (age_shows, ('', ' (%s for anime)' % age_anime)[helpers.has_anime()]))
_set_last_proper_search(datetime.datetime.today().toordinal())
run_at = '' run_at = ''
proper_sch = sickbeard.properFinderScheduler if None is proper_list:
if None is proper_sch.start_time: _set_last_proper_search(datetime.datetime.now())
run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
run_at = u', next check ' proper_sch = sickbeard.properFinderScheduler
if datetime.timedelta() > run_in: if None is proper_sch.start_time:
run_at += u'imminent' run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
else: run_at = u', next check '
hours, remainder = divmod(run_in.seconds, 3600) if datetime.timedelta() > run_in:
minutes, seconds = divmod(remainder, 60) run_at += u'imminent'
run_at += u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else '%dm, %ds' % (minutes, seconds)) else:
hours, remainder = divmod(run_in.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
run_at += u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else
'%dm, %ds' % (minutes, seconds))
logger.log(u'Completed the search for new propers%s' % run_at) logger.log(u'Completed search for new propers%s' % run_at)
else:
logger.log(u'Completed checking propers from recent search')
def get_old_proper_level(showObj, indexer, indexerid, season, episodes, old_status, new_quality, def get_old_proper_level(show_obj, indexer, indexerid, season, episodes, old_status, new_quality,
extra_no_name, version, is_anime=False): extra_no_name, version, is_anime=False):
level = 0 level = 0
is_internal = False is_internal = False
codec = '' codec = ''
rel_name = None
if old_status not in SNATCHED_ANY: if old_status not in SNATCHED_ANY:
level = Quality.get_proper_level(extra_no_name, version, is_anime) level = Quality.get_proper_level(extra_no_name, version, is_anime)
elif showObj: elif show_obj:
myDB = db.DBConnection() my_db = db.DBConnection()
np = NameParser(False, showObj=showObj) np = NameParser(False, showObj=show_obj)
for episode in episodes: for episode in episodes:
result = myDB.select('SELECT resource FROM history WHERE showid = ? AND season = ? AND episode = ? AND ' result = my_db.select('SELECT resource FROM history WHERE showid = ? AND season = ? AND episode = ? AND '
'(' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY) + ') ' '(' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY) + ') '
'ORDER BY date DESC LIMIT 1', 'ORDER BY date DESC LIMIT 1',
[indexerid, season, episode]) [indexerid, season, episode])
if not result or not isinstance(result[0]['resource'], basestring) or not result[0]['resource']: if not result or not isinstance(result[0]['resource'], basestring) or not result[0]['resource']:
continue continue
nq = Quality.sceneQuality(result[0]['resource'], showObj.is_anime) nq = Quality.sceneQuality(result[0]['resource'], show_obj.is_anime)
if nq != new_quality: if nq != new_quality:
continue continue
try: try:
p = np.parse(result[0]['resource']) p = np.parse(result[0]['resource'])
except (StandardError, Exception): except (StandardError, Exception):
continue continue
level = Quality.get_proper_level(p.extra_info_no_name(), p.version, showObj.is_anime) level = Quality.get_proper_level(p.extra_info_no_name(), p.version, show_obj.is_anime)
extra_no_name = p.extra_info_no_name()
rel_name = result[0]['resource']
is_internal = p.extra_info_no_name() and re.search(r'\binternal\b', p.extra_info_no_name(), flags=re.I) is_internal = p.extra_info_no_name() and re.search(r'\binternal\b', p.extra_info_no_name(), flags=re.I)
codec = _get_codec(p.extra_info_no_name()) codec = _get_codec(p.extra_info_no_name())
break break
return level, is_internal, codec return level, is_internal, codec, extra_no_name, rel_name
def _get_codec(extra_info_no_name): def _get_codec(extra_info_no_name):
@ -110,12 +118,66 @@ def _get_codec(extra_info_no_name):
return '264' return '264'
elif re.search(r'\bxvid\b', extra_info_no_name, flags=re.I): elif re.search(r'\bxvid\b', extra_info_no_name, flags=re.I):
return 'xvid' return 'xvid'
elif re.search(r'\b[xh]265|hevc\b', extra_info_no_name, flags=re.I): elif re.search(r'\b[xh]\W?265|hevc\b', extra_info_no_name, flags=re.I):
return 'hevc' return 'hevc'
return '' return ''
def _get_proper_list(aired_since_shows, recent_shows, recent_anime): def get_webdl_type(extra_info_no_name, rel_name):
if not sickbeard.WEBDL_TYPES:
load_webdl_types()
for t in sickbeard.WEBDL_TYPES:
try:
if re.search(r'\b%s\b' % t[1], extra_info_no_name, flags=re.I):
return t[0]
except (StandardError, Exception):
continue
return ('webdl', 'webrip')[None is re.search(r'\bweb.?dl\b', rel_name, flags=re.I)]
def load_webdl_types():
new_types = []
default_types = [('Amazon', r'AMZN|AMAZON'), ('Netflix', r'NETFLIX|NF'), ('Hulu', r'HULU')]
url = 'https://raw.githubusercontent.com/SickGear/sickgear.extdata/master/SickGear/webdl_types.txt'
url_data = helpers.getURL(url)
my_db = db.DBConnection()
sql_results = my_db.select('SELECT * FROM webdl_types')
old_types = [(r['dname'], r['regex']) for r in sql_results]
if isinstance(url_data, basestring) and url_data.strip():
try:
for line in url_data.splitlines():
try:
(key, val) = line.decode('utf-8').strip().split(u'::', 1)
except (StandardError, Exception):
continue
if key is None or val is None:
continue
new_types.append((key, val))
except (IOError, OSError):
pass
cl = []
for nt in new_types:
if nt not in old_types:
cl.append(['REPLACE INTO webdl_types (dname, regex) VALUES (?,?)', [nt[0], nt[1]]])
for ot in old_types:
if ot not in new_types:
cl.append(['DELETE FROM webdl_types WHERE dname = ? AND regex = ?', [ot[0], ot[1]]])
if cl:
my_db.mass_action(cl)
else:
new_types = old_types
sickbeard.WEBDL_TYPES = new_types + default_types
def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=None):
propers = {} propers = {}
# for each provider get a list of the # for each provider get a list of the
@ -124,22 +186,28 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
for cur_provider in providers: for cur_provider in providers:
if not recent_anime and cur_provider.anime_only: if not recent_anime and cur_provider.anime_only:
continue continue
threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'
logger.log(u'Searching for new PROPER releases') if None is not proper_list:
found_propers = proper_list.get(cur_provider.get_id(), [])
if not found_propers:
continue
else:
threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'
try: logger.log(u'Searching for new PROPER releases')
found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
anime=recent_anime) try:
except exceptions.AuthException as e: found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
logger.log(u'Authentication error: ' + ex(e), logger.ERROR) anime=recent_anime)
continue except exceptions.AuthException as e:
except Exception as e: logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR) continue
logger.log(traceback.format_exc(), logger.ERROR) except Exception as e:
continue logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
finally: logger.log(traceback.format_exc(), logger.ERROR)
threading.currentThread().name = orig_thread_name continue
finally:
threading.currentThread().name = orig_thread_name
# if they haven't been added by a different provider than add the proper to the list # if they haven't been added by a different provider than add the proper to the list
count = 0 count = 0
@ -162,7 +230,7 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
parse_result.is_anime, parse_result.is_anime,
check_is_repack=True) check_is_repack=True)
x.is_internal = parse_result.extra_info_no_name() and \ x.is_internal = parse_result.extra_info_no_name() and \
re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I) re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I)
x.codec = _get_codec(parse_result.extra_info_no_name()) x.codec = _get_codec(parse_result.extra_info_no_name())
propers[name] = x propers[name] = x
count += 1 count += 1
@ -255,11 +323,12 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
except (StandardError, Exception): except (StandardError, Exception):
extra_info = None extra_info = None
old_proper_level, old_is_internal, old_codec = get_old_proper_level(parse_result.show, cur_proper.indexer, old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
cur_proper.indexerid, cur_proper.season, get_old_proper_level(parse_result.show, cur_proper.indexer, cur_proper.indexerid, cur_proper.season,
parse_result.episode_numbers, old_status, parse_result.episode_numbers, old_status, cur_proper.quality, extra_info,
cur_proper.quality, extra_info, cur_proper.version, cur_proper.is_anime)
cur_proper.version, cur_proper.is_anime)
old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
if cur_proper.proper_level < old_proper_level: if cur_proper.proper_level < old_proper_level:
continue continue
elif cur_proper.proper_level == old_proper_level: elif cur_proper.proper_level == old_proper_level:
@ -273,11 +342,20 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]'\ log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]'\
% (cur_proper.release_group, old_release_group, cur_proper.name) % (cur_proper.release_group, old_release_group, cur_proper.name)
is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
(old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W',
str(sql_results[0]['release_name']), re.I)))
if is_web:
old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
new_webdl_type = get_webdl_type(cur_proper.extra_info_no_name(), cur_proper.name)
if old_webdl_type != new_webdl_type:
logger.log('Skipping proper webdl source: [%s], does not match existing webdl source: [%s] for [%s]'
% (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
continue
# for webldls, prevent propers from different groups # for webldls, prevent propers from different groups
if sickbeard.PROPERS_WEBDL_ONEGRP and \ if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and cur_proper.release_group != old_release_group:
(old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
(old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W', str(sql_results[0]['release_name']), re.I))) and \
cur_proper.release_group != old_release_group:
logger.log(log_same_grp, logger.DEBUG) logger.log(log_same_grp, logger.DEBUG)
continue continue
@ -375,6 +453,46 @@ def _download_propers(proper_list):
search.snatch_episode(result, SNATCHED_PROPER) search.snatch_episode(result, SNATCHED_PROPER)
def get_needed_qualites(needed=None):
if not isinstance(needed, neededQualities):
needed = neededQualities()
if not sickbeard.DOWNLOAD_PROPERS or needed.all_needed:
return needed
age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14
aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime)
my_db = db.DBConnection()
sql_results = my_db.select(
'SELECT DISTINCT s.indexer, s.indexer_id, e.season, e.episode FROM history as h' +
' INNER JOIN tv_episodes AS e ON (h.showid == e.showid AND h.season == e.season AND h.episode == e.episode)' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE h.date >= %s' % min(aired_since_shows, aired_since_anime).strftime(dateFormat) +
' AND (%s)' % ' OR '.join(['h.action LIKE "%%%02d"' % x for x in SNATCHED_ANY + [DOWNLOADED, FAILED]])
)
for sql_episode in sql_results:
if needed.all_needed:
break
try:
show = helpers.find_show_by_id(
sickbeard.showList, {int(sql_episode['indexer']): int(sql_episode['indexer_id'])})
except MultipleShowObjectsException:
continue
if show:
needed.check_needed_types(show)
if needed.all_show_qualities_needed(show) or needed.all_qualities_needed:
continue
ep_obj = show.getEpisode(season=sql_episode['season'], episode=sql_episode['episode'])
if ep_obj:
ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
if ep_status in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]:
needed.check_needed_qualities([ep_quality])
return needed
def _recent_history(aired_since_shows, aired_since_anime): def _recent_history(aired_since_shows, aired_since_anime):
recent_shows, recent_anime = [], [] recent_shows, recent_anime = [], []
@ -418,19 +536,23 @@ def _set_last_proper_search(when):
if 0 == len(sql_results): if 0 == len(sql_results):
my_db.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)', my_db.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)',
[0, 0, str(when)]) [0, 0, sbdatetime.totimestamp(when)])
else: else:
my_db.action('UPDATE info SET last_proper_search=%s' % when) my_db.action('UPDATE info SET last_proper_search=%s' % sbdatetime.totimestamp(when))
def next_proper_timeleft():
return sickbeard.properFinderScheduler.timeLeft()
def _get_last_proper_search(): def get_last_proper_search():
my_db = db.DBConnection() my_db = db.DBConnection()
sql_results = my_db.select('SELECT * FROM info') sql_results = my_db.select('SELECT * FROM info')
try: try:
last_proper_search = datetime.date.fromordinal(int(sql_results[0]['last_proper_search'])) last_proper_search = int(sql_results[0]['last_proper_search'])
except (StandardError, Exception): except (StandardError, Exception):
return datetime.date.fromordinal(1) return 1
return last_proper_search return last_proper_search

2
sickbeard/providers/alpharatio.py

@ -65,6 +65,8 @@ class AlphaRatioProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, ('&freetorrent=1', '')[not self.freeleech]) search_url = self.urls['search'] % (search_string, ('&freetorrent=1', '')[not self.freeleech])
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/anizb.py

@ -38,7 +38,7 @@ class AnizbProvider(generic.NZBProvider):
for params in search_params[mode]: for params in search_params[mode]:
search_url = '%sapi/%s' % (self.url, params and (('?q=%s', '?q=%(q)s')['q' in params] % params) or '') search_url = '%sapi/%s' % (self.url, params and (('?q=%s', '?q=%(q)s')['q' in params] % params) or '')
data = self.cache.getRSSFeed(search_url) data = self.cache.get_rss(search_url)
time.sleep(1.1) time.sleep(1.1)
cnt = len(results) cnt = len(results)

2
sickbeard/providers/beyondhd.py

@ -73,6 +73,8 @@ class BeyondHDProvider(generic.TorrentProvider):
search_url += self.urls['search'] % re.sub('[.\s]+', ' ', search_string) search_url += self.urls['search'] % re.sub('[.\s]+', ' ', search_string)
data_json = self.get_url(search_url, json=True) data_json = self.get_url(search_url, json=True)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
if data_json and 'results' in data_json and self._check_auth_from_data(data_json): if data_json and 'results' in data_json and self._check_auth_from_data(data_json):

2
sickbeard/providers/bithdtv.py

@ -71,6 +71,8 @@ class BitHDTVProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode)) search_url = self.urls['search'] % (search_string, self._categories_string(mode))
html = self.get_url(search_url, timeout=90) html = self.get_url(search_url, timeout=90)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/bitmetv.py

@ -64,6 +64,8 @@ class BitmetvProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (self._categories_string(mode, 'cat=%s'), search_string) search_url = self.urls['search'] % (self._categories_string(mode, 'cat=%s'), search_string)
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/blutopia.py

@ -105,6 +105,8 @@ class BlutopiaProvider(generic.TorrentProvider):
self.token, '+'.join(search_string.split()), self._categories_string(mode, ''), '', '', '') self.token, '+'.join(search_string.split()), self._categories_string(mode, ''), '', '', '')
resp = self.get_url(search_url, json=True) resp = self.get_url(search_url, json=True)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

56
sickbeard/providers/btn.py

@ -56,6 +56,7 @@ class BTNProvider(generic.TorrentProvider):
self.ua = self.session.headers['User-Agent'] self.ua = self.session.headers['User-Agent']
self.reject_m2ts = False self.reject_m2ts = False
self.cache = BTNCache(self) self.cache = BTNCache(self)
self.has_limit = True
def _authorised(self, **kwargs): def _authorised(self, **kwargs):
@ -67,6 +68,15 @@ class BTNProvider(generic.TorrentProvider):
raise AuthException('Must set Api key or Username/Password for %s in config provider options' % self.name) raise AuthException('Must set Api key or Username/Password for %s in config provider options' % self.name)
return True return True
def _check_response(self, data, url, post_data=None, post_json=None):
if not self.should_skip(log_warning=False):
if data and 'Call Limit' in data:
self.tmr_limit_update('1', 'h', '150/hr %s' % data)
self.log_failure_url(url, post_data, post_json)
else:
logger.log(u'Action prematurely ended. %(prov)s server error response = %(desc)s' %
{'prov': self.name, 'desc': data}, logger.WARNING)
def _search_provider(self, search_params, age=0, **kwargs): def _search_provider(self, search_params, age=0, **kwargs):
self._authorised() self._authorised()
@ -93,21 +103,19 @@ class BTNProvider(generic.TorrentProvider):
self.api_key, json.dumps(param_dct), items_per_page, offset)) self.api_key, json.dumps(param_dct), items_per_page, offset))
try: try:
response = None response, error_text = None, None
if api_up and self.api_key: if api_up and self.api_key:
self.session.headers['Content-Type'] = 'application/json-rpc' self.session.headers['Content-Type'] = 'application/json-rpc'
response = helpers.getURL( response = self.get_url(self.url_api, post_data=json_rpc(params), json=True)
self.url_api, post_data=json_rpc(params), session=self.session, json=True) # response = {'error': {'message': 'Call Limit Exceeded Test'}}
if not response: error_text = response['error']['message']
api_up = False api_up = False
results = self.html(mode, search_string, results) if 'Propers' == mode:
error_text = response['error']['message'] return results
logger.log( results = self.html(mode, search_string, results)
('Call Limit' in error_text if not results:
and u'Action aborted because the %(prov)s 150 calls/hr limit was reached' self._check_response(error_text, self.url_api, post_data=json_rpc(params))
or u'Action prematurely ended. %(prov)s server error response = %(desc)s') % return results
{'prov': self.name, 'desc': error_text}, logger.WARNING)
return results
except AuthException: except AuthException:
logger.log('API looks to be down, add un/pw config detail to be used as a fallback', logger.WARNING) logger.log('API looks to be down, add un/pw config detail to be used as a fallback', logger.WARNING)
except (KeyError, Exception): except (KeyError, Exception):
@ -115,7 +123,7 @@ class BTNProvider(generic.TorrentProvider):
data_json = response and 'result' in response and response['result'] or {} data_json = response and 'result' in response and response['result'] or {}
if data_json: if data_json:
self.tmr_limit_count = 0
found_torrents = 'torrents' in data_json and data_json['torrents'] or {} found_torrents = 'torrents' in data_json and data_json['torrents'] or {}
# We got something, we know the API sends max 1000 results at a time. # We got something, we know the API sends max 1000 results at a time.
@ -134,15 +142,10 @@ class BTNProvider(generic.TorrentProvider):
for page in range(1, pages_needed + 1): for page in range(1, pages_needed + 1):
try: try:
response = helpers.getURL( post_data = json_rpc(params, results_per_page, page * results_per_page)
self.url_api, json=True, session=self.session, response = self.get_url(self.url_api, json=True, post_data=post_data)
post_data=json_rpc(params, results_per_page, page * results_per_page))
error_text = response['error']['message'] error_text = response['error']['message']
logger.log( self._check_response(error_text, self.url_api, post_data=post_data)
('Call Limit' in error_text
and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached'
or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
{'prov': self.name, 'desc': error_text}, logger.WARNING)
return results return results
except (KeyError, Exception): except (KeyError, Exception):
data_json = response and 'result' in response and response['result'] or {} data_json = response and 'result' in response and response['result'] or {}
@ -150,6 +153,7 @@ class BTNProvider(generic.TorrentProvider):
# Note that this these are individual requests and might time out individually. # Note that this these are individual requests and might time out individually.
# This would result in 'gaps' in the results. There is no way to fix this though. # This would result in 'gaps' in the results. There is no way to fix this though.
if 'torrents' in data_json: if 'torrents' in data_json:
self.tmr_limit_count = 0
found_torrents.update(data_json['torrents']) found_torrents.update(data_json['torrents'])
cnt = len(results) cnt = len(results)
@ -176,7 +180,8 @@ class BTNProvider(generic.TorrentProvider):
if self.username and self.password: if self.username and self.password:
return super(BTNProvider, self)._authorised( return super(BTNProvider, self)._authorised(
post_params={'login': 'Log In!'}, logged_in=(lambda y='': 'casThe' in y[0:4096])) post_params={'login': 'Log In!'},
logged_in=(lambda y='': 'casThe' in y[0:512] and '<title>Index' in y[0:512]))
raise AuthException('Password or Username for %s is empty in config provider options' % self.name) raise AuthException('Password or Username for %s is empty in config provider options' % self.name)
def html(self, mode, search_string, results): def html(self, mode, search_string, results):
@ -197,7 +202,10 @@ class BTNProvider(generic.TorrentProvider):
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'filter_cat[%s]=1')) search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'filter_cat[%s]=1'))
html = helpers.getURL(search_url, session=self.session) html = self.get_url(search_url, use_tmr_limit=False)
if self.should_skip(log_warning=False, use_tmr_limit=False):
return results
cnt = len(results) cnt = len(results)
try: try:
if not html or self._has_no_results(html): if not html or self._has_no_results(html):

4
sickbeard/providers/btscene.py

@ -64,7 +64,7 @@ class BTSceneProvider(generic.TorrentProvider):
url = self.url url = self.url
response = self.get_url(url) response = self.get_url(url)
if not response: if self.should_skip():
return results return results
form = re.findall('(?is)(<form[^>]+)', response) form = re.findall('(?is)(<form[^>]+)', response)
@ -84,6 +84,8 @@ class BTSceneProvider(generic.TorrentProvider):
else url + self.urls['search'] % (urllib.quote_plus(search_string)) else url + self.urls['search'] % (urllib.quote_plus(search_string))
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/dh.py

@ -65,6 +65,8 @@ class DHProvider(generic.TorrentProvider):
html = self.get_url(self.urls['search'] % ( html = self.get_url(self.urls['search'] % (
'+'.join(search_string.split()), self._categories_string(mode), ('3', '0')[not self.freeleech])) '+'.join(search_string.split()), self._categories_string(mode), ('3', '0')[not self.freeleech]))
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

5
sickbeard/providers/ettv.py

@ -62,6 +62,8 @@ class ETTVProvider(generic.TorrentProvider):
self._categories_string(mode), ('%2B ', '')['Cache' == mode] + '.'.join(search_string.split())) self._categories_string(mode), ('%2B ', '')['Cache' == mode] + '.'.join(search_string.split()))
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -110,6 +112,9 @@ class ETTVProvider(generic.TorrentProvider):
def get_data(self, url): def get_data(self, url):
result = None result = None
html = self.get_url(url, timeout=90) html = self.get_url(url, timeout=90)
if self.should_skip():
return result
try: try:
result = re.findall('(?i)"(magnet:[^"]+?)">', html)[0] result = re.findall('(?i)"(magnet:[^"]+?)">', html)[0]
except IndexError: except IndexError:

2
sickbeard/providers/fano.py

@ -83,6 +83,8 @@ class FanoProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode)) search_url = self.urls['search'] % (search_string, self._categories_string(mode))
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/filelist.py

@ -62,6 +62,8 @@ class FLProvider(generic.TorrentProvider):
html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()), html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()),
self._categories_string(mode, template='cats[]=%s'))) self._categories_string(mode, template='cats[]=%s')))
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/funfile.py

@ -66,6 +66,8 @@ class FunFileProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (self._categories_string(mode), search_string) search_url = self.urls['search'] % (self._categories_string(mode), search_string)
html = self.get_url(search_url, timeout=self.url_timeout) html = self.get_url(search_url, timeout=self.url_timeout)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

525
sickbeard/providers/generic.py

@ -27,6 +27,7 @@ import re
import time import time
import urlparse import urlparse
import threading import threading
import socket
from urllib import quote_plus from urllib import quote_plus
import zlib import zlib
from base64 import b16encode, b32decode from base64 import b16encode, b32decode
@ -45,13 +46,157 @@ from sickbeard.exceptions import SickBeardException, AuthException, ex
from sickbeard.helpers import maybe_plural, remove_file_failed from sickbeard.helpers import maybe_plural, remove_file_failed
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.show_name_helpers import get_show_names_all_possible from sickbeard.show_name_helpers import get_show_names_all_possible
from sickbeard.sbdatetime import sbdatetime
class HaltParseException(SickBeardException): class HaltParseException(SickBeardException):
"""Something requires the current processing to abort""" """Something requires the current processing to abort"""
class GenericProvider: class ProviderFailTypes:
http = 1
connection = 2
connection_timeout = 3
timeout = 4
other = 5
limit = 6
nodata = 7
names = {http: 'http', timeout: 'timeout',
connection: 'connection', connection_timeout: 'connection_timeout',
nodata: 'nodata', other: 'other', limit: 'limit'}
def __init__(self):
pass
class ProviderFail(object):
def __init__(self, fail_type=ProviderFailTypes.other, code=None, fail_time=None):
self.code = code
self.fail_type = fail_type
self.fail_time = (datetime.datetime.now(), fail_time)[isinstance(fail_time, datetime.datetime)]
class ProviderFailList(object):
def __init__(self, provider_name):
self.provider_name = provider_name
self._fails = []
self.lock = threading.Lock()
self.clear_old()
self.load_list()
self.last_save = datetime.datetime.now()
self.dirty = False
@property
def fails(self):
return self._fails
@property
def fails_sorted(self):
fail_dict = {}
b_d = {'count': 0}
for e in self._fails:
fail_date = e.fail_time.date()
fail_hour = e.fail_time.time().hour
date_time = datetime.datetime.combine(fail_date, datetime.time(hour=fail_hour))
if ProviderFailTypes.names[e.fail_type] not in fail_dict.get(date_time, {}):
default = {'date': str(fail_date), 'date_time': date_time, 'multirow': False}
for et in ProviderFailTypes.names.itervalues():
default[et] = b_d.copy()
fail_dict.setdefault(date_time, default)[ProviderFailTypes.names[e.fail_type]]['count'] = 1
else:
fail_dict[date_time][ProviderFailTypes.names[e.fail_type]]['count'] += 1
if ProviderFailTypes.http == e.fail_type:
if e.code in fail_dict[date_time].get(ProviderFailTypes.names[e.fail_type],
{'code': {}}).get('code', {}):
fail_dict[date_time][ProviderFailTypes.names[e.fail_type]]['code'][e.code] += 1
else:
fail_dict[date_time][ProviderFailTypes.names[e.fail_type]].setdefault('code', {})[e.code] = 1
row_count = {}
for (k, v) in fail_dict.iteritems():
row_count.setdefault(v.get('date'), 0)
if v.get('date') in row_count:
row_count[v.get('date')] += 1
for (k, v) in fail_dict.iteritems():
if 1 < row_count.get(v.get('date')):
fail_dict[k]['multirow'] = True
fail_list = sorted([fail_dict[k] for k in fail_dict.iterkeys()], key=lambda y: y.get('date_time'), reverse=True)
totals = {}
for fail_date in set([fail.get('date') for fail in fail_list]):
daytotals = {}
for et in ProviderFailTypes.names.itervalues():
daytotals.update({et: sum([x.get(et).get('count') for x in fail_list if fail_date == x.get('date')])})
totals.update({fail_date: daytotals})
for (fail_date, total) in totals.iteritems():
for i, item in enumerate(fail_list):
if fail_date == item.get('date'):
if item.get('multirow'):
fail_list[i:i] = [item.copy()]
for et in ProviderFailTypes.names.itervalues():
fail_list[i][et] = {'count': total[et]}
if et == ProviderFailTypes.names[ProviderFailTypes.http]:
fail_list[i][et]['code'] = {}
break
return fail_list
def add_fail(self, fail):
if isinstance(fail, ProviderFail):
with self.lock:
self.dirty = True
self._fails.append(fail)
logger.log('Adding fail.%s for %s' % (ProviderFailTypes.names.get(
fail.fail_type, ProviderFailTypes.names[ProviderFailTypes.other]), self.provider_name()),
logger.DEBUG)
self.save_list()
def save_list(self):
if self.dirty:
self.clear_old()
with self.lock:
my_db = db.DBConnection('cache.db')
cl = []
for f in self._fails:
cl.append(['INSERT OR IGNORE INTO provider_fails (prov_name, fail_type, fail_code, fail_time) '
'VALUES (?,?,?,?)', [self.provider_name(), f.fail_type, f.code,
sbdatetime.totimestamp(f.fail_time)]])
self.dirty = False
if cl:
my_db.mass_action(cl)
self.last_save = datetime.datetime.now()
def load_list(self):
with self.lock:
try:
my_db = db.DBConnection('cache.db')
if my_db.hasTable('provider_fails'):
results = my_db.select('SELECT * FROM provider_fails WHERE prov_name = ?', [self.provider_name()])
self._fails = []
for r in results:
try:
self._fails.append(ProviderFail(
fail_type=helpers.tryInt(r['fail_type']), code=helpers.tryInt(r['fail_code']),
fail_time=datetime.datetime.fromtimestamp(helpers.tryInt(r['fail_time']))))
except (StandardError, Exception):
continue
except (StandardError, Exception):
pass
def clear_old(self):
with self.lock:
try:
my_db = db.DBConnection('cache.db')
if my_db.hasTable('provider_fails'):
time_limit = sbdatetime.totimestamp(datetime.datetime.now() - datetime.timedelta(days=28))
my_db.action('DELETE FROM provider_fails WHERE fail_time < ?', [time_limit])
except (StandardError, Exception):
pass
class GenericProvider(object):
NZB = 'nzb' NZB = 'nzb'
TORRENT = 'torrent' TORRENT = 'torrent'
@ -86,6 +231,321 @@ class GenericProvider:
# 'Chrome/32.0.1700.107 Safari/537.36'} # 'Chrome/32.0.1700.107 Safari/537.36'}
'User-Agent': USER_AGENT} 'User-Agent': USER_AGENT}
self._failure_count = 0
self._failure_time = None
self.fails = ProviderFailList(self.get_id)
self._tmr_limit_count = 0
self._tmr_limit_time = None
self._tmr_limit_wait = None
self._last_fail_type = None
self.has_limit = False
self.fail_times = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0), 5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)}
self._load_fail_values()
def _load_fail_values(self):
if hasattr(sickbeard, 'DATA_DIR'):
my_db = db.DBConnection('cache.db')
if my_db.hasTable('provider_fails_count'):
r = my_db.select('SELECT * FROM provider_fails_count WHERE prov_name = ?', [self.get_id()])
if r:
self._failure_count = helpers.tryInt(r[0]['failure_count'], 0)
if r[0]['failure_time']:
self._failure_time = datetime.datetime.fromtimestamp(r[0]['failure_time'])
else:
self._failure_time = None
self._tmr_limit_count = helpers.tryInt(r[0]['tmr_limit_count'], 0)
if r[0]['tmr_limit_time']:
self._tmr_limit_time = datetime.datetime.fromtimestamp(r[0]['tmr_limit_time'])
else:
self._tmr_limit_time = None
if r[0]['tmr_limit_wait']:
self._tmr_limit_wait = datetime.timedelta(seconds=helpers.tryInt(r[0]['tmr_limit_wait'], 0))
else:
self._tmr_limit_wait = None
self._last_fail_type = self.last_fail
def _save_fail_value(self, field, value):
my_db = db.DBConnection('cache.db')
if my_db.hasTable('provider_fails_count'):
r = my_db.action('UPDATE provider_fails_count SET %s = ? WHERE prov_name = ?' % field,
[value, self.get_id()])
if 0 == r.rowcount:
my_db.action('REPLACE INTO provider_fails_count (prov_name, %s) VALUES (?,?)' % field,
[self.get_id(), value])
@property
def last_fail(self):
try:
return sorted(self.fails.fails, key=lambda x: x.fail_time, reverse=True)[0].fail_type
except (StandardError, Exception):
return None
@property
def failure_count(self):
return self._failure_count
@failure_count.setter
def failure_count(self, value):
changed_val = self._failure_count != value
self._failure_count = value
if changed_val:
self._save_fail_value('failure_count', value)
@property
def failure_time(self):
return self._failure_time
@failure_time.setter
def failure_time(self, value):
if None is value or isinstance(value, datetime.datetime):
changed_val = self._failure_time != value
self._failure_time = value
if changed_val:
self._save_fail_value('failure_time', (sbdatetime.totimestamp(value), value)[None is value])
@property
def tmr_limit_count(self):
return self._tmr_limit_count
@tmr_limit_count.setter
def tmr_limit_count(self, value):
changed_val = self._tmr_limit_count != value
self._tmr_limit_count = value
if changed_val:
self._save_fail_value('tmr_limit_count', value)
@property
def tmr_limit_time(self):
return self._tmr_limit_time
@tmr_limit_time.setter
def tmr_limit_time(self, value):
if None is value or isinstance(value, datetime.datetime):
changed_val = self._tmr_limit_time != value
self._tmr_limit_time = value
if changed_val:
self._save_fail_value('tmr_limit_time', (sbdatetime.totimestamp(value), value)[None is value])
@property
def max_index(self):
return len(self.fail_times)
@property
def tmr_limit_wait(self):
return self._tmr_limit_wait
@tmr_limit_wait.setter
def tmr_limit_wait(self, value):
if isinstance(getattr(self, 'fails', None), ProviderFailList) and isinstance(value, datetime.timedelta):
self.fails.add_fail(ProviderFail(fail_type=ProviderFailTypes.limit))
changed_val = self._tmr_limit_wait != value
self._tmr_limit_wait = value
if changed_val:
if None is value:
self._save_fail_value('tmr_limit_wait', value)
elif isinstance(value, datetime.timedelta):
self._save_fail_value('tmr_limit_wait', value.total_seconds())
def fail_time_index(self, base_limit=2):
i = self.failure_count - base_limit
return (i, self.max_index)[i >= self.max_index]
def tmr_limit_update(self, period, unit, desc):
self.tmr_limit_time = datetime.datetime.now()
self.tmr_limit_count += 1
limit_set = False
if None not in (period, unit):
limit_set = True
if unit in ('s', 'sec', 'secs', 'seconds', 'second'):
self.tmr_limit_wait = datetime.timedelta(seconds=helpers.tryInt(period))
elif unit in ('m', 'min', 'mins', 'minutes', 'minute'):
self.tmr_limit_wait = datetime.timedelta(minutes=helpers.tryInt(period))
elif unit in ('h', 'hr', 'hrs', 'hours', 'hour'):
self.tmr_limit_wait = datetime.timedelta(hours=helpers.tryInt(period))
elif unit in ('d', 'days', 'day'):
self.tmr_limit_wait = datetime.timedelta(days=helpers.tryInt(period))
else:
limit_set = False
if not limit_set:
time_index = self.fail_time_index(base_limit=0)
self.tmr_limit_wait = self.wait_time(time_index)
logger.log('Request limit reached. Waiting for %s until next retry. Message: %s' %
(self.tmr_limit_wait, desc or 'none found'), logger.WARNING)
def wait_time(self, time_index=None):
"""
Return a suitable wait time, selected by parameter, or based on the current failure count
:param time_index: A key value index into the fail_times dict, or selects using failure count if None
:type time_index: Integer
:return: Time
:rtype: Timedelta
"""
if None is time_index:
time_index = self.fail_time_index()
return datetime.timedelta(hours=self.fail_times[time_index][0], minutes=self.fail_times[time_index][1])
def fail_newest_delta(self):
"""
Return how long since most recent failure
:return: Period since most recent failure on record
:rtype: timedelta
"""
return datetime.datetime.now() - self.failure_time
def is_waiting(self):
return self.fail_newest_delta() < self.wait_time()
def valid_tmr_time(self):
return isinstance(self.tmr_limit_wait, datetime.timedelta) and \
isinstance(self.tmr_limit_time, datetime.datetime)
@property
def get_next_try_time(self):
n = None
h = datetime.timedelta(seconds=0)
f = datetime.timedelta(seconds=0)
if self.valid_tmr_time():
h = self.tmr_limit_time + self.tmr_limit_wait - datetime.datetime.now()
if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime) and self.is_waiting():
h = self.failure_time + self.wait_time() - datetime.datetime.now()
if datetime.timedelta(seconds=0) < max((h, f)):
n = max((h, f))
return n
def retry_next(self):
if self.valid_tmr_time():
self.tmr_limit_time = datetime.datetime.now() - self.tmr_limit_wait
if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime) and self.is_waiting():
self.failure_time = datetime.datetime.now() - self.wait_time()
@staticmethod
def fmt_delta(delta):
return str(delta).rsplit('.')[0]
def should_skip(self, log_warning=True, use_tmr_limit=True):
"""
Determine if a subsequent server request should be skipped. The result of this logic is based on most recent
server connection activity including, exhausted request limits, and counting connect failures to determine a
"cool down" period before recommending reconnection attempts; by returning False.
:param log_warning: Output to log if True (default) otherwise set False for no output.
:type log_warning: Boolean
:param use_tmr_limit: Setting this to False will ignore a tmr limit being reached and will instead return False.
:type use_tmr_limit: Boolean
:return: True for any known issue that would prevent a subsequent server connection, otherwise False.
:rtype: Boolean
"""
if self.valid_tmr_time():
time_left = self.tmr_limit_time + self.tmr_limit_wait - datetime.datetime.now()
if time_left > datetime.timedelta(seconds=0):
if log_warning:
# Ensure provider name output (e.g. when displaying config/provs) instead of e.g. thread "Tornado"
prepend = ('[%s] :: ' % self.name, '')[any([x.name in threading.currentThread().getName()
for x in sickbeard.providers.sortedProviderList()])]
logger.log('%sToo many requests reached at %s, waiting for %s' % (
prepend, self.fmt_delta(self.tmr_limit_time), self.fmt_delta(time_left)), logger.WARNING)
return use_tmr_limit
else:
self.tmr_limit_time = None
self.tmr_limit_wait = None
if 3 <= self.failure_count:
if None is self.failure_time:
self.failure_time = datetime.datetime.now()
if self.is_waiting():
if log_warning:
time_left = self.wait_time() - self.fail_newest_delta()
logger.log('Failed %s times, skipping provider for %s, last failure at %s with fail type: %s' % (
self.failure_count, self.fmt_delta(time_left), self.fmt_delta(self.failure_time),
ProviderFailTypes.names.get(
self.last_fail, ProviderFailTypes.names[ProviderFailTypes.other])), logger.WARNING)
return True
return False
def inc_failure_count(self, *args, **kwargs):
fail_type = ('fail_type' in kwargs and kwargs['fail_type'].fail_type) or \
(isinstance(args, tuple) and isinstance(args[0], ProviderFail) and args[0].fail_type)
if not isinstance(self.failure_time, datetime.datetime) or \
fail_type != self._last_fail_type or \
self.fail_newest_delta() > datetime.timedelta(seconds=3):
self.failure_count += 1
self.failure_time = datetime.datetime.now()
self._last_fail_type = fail_type
self.fails.add_fail(*args, **kwargs)
else:
logger.log('%s: Not logging same failure within 3 seconds' % self.name, logger.DEBUG)
    def get_url(self, url, skip_auth=False, use_tmr_limit=True, *args, **kwargs):
        """
        Return data from a URI with a possible check for authentication prior to the data fetch.
        Raised errors and no data in responses are tracked for making future logic decisions.

        :param url: Address where to fetch data from
        :type url: String
        :param skip_auth: Skip authentication check of provider if True
        :type skip_auth: Boolean
        :param use_tmr_limit: An API limit can be +ve before a fetch, but unwanted, set False to short should_skip
        :type use_tmr_limit: Boolean
        :param args: params to pass-through to getURL
        :type args:
        :param kwargs: keyword params to pass-through to getURL
        :type kwargs:
        :return: None or data fetched from URL
        :rtype: String or Nonetype
        """
        data = None

        # check for auth
        # Public-access providers (except TorrentRssProvider) skip the _authorised() call;
        # should_skip() then bails out if the provider is blocked or rate (tmr) limited.
        if (not skip_auth and not (self.is_public_access()
                                   and type(self).__name__ not in ['TorrentRssProvider']) and not self._authorised()) \
                or self.should_skip(use_tmr_limit=use_tmr_limit):
            return

        # Force getURL to raise instead of returning None so each failure kind
        # can be classified by the except clauses below.
        kwargs['raise_exceptions'] = True
        kwargs['raise_status_code'] = True
        # Provider defaults, only applied when the caller did not override them.
        for k, v in dict(headers=self.headers, hooks=dict(response=self.cb_response), session=self.session).items():
            kwargs.setdefault(k, v)

        # Remember request payloads for optional failure-URL logging below.
        post_data = kwargs.get('post_data')
        post_json = kwargs.get('post_json')

        # noinspection PyUnusedLocal
        log_failure_url = False
        try:
            data = helpers.getURL(url, *args, **kwargs)
            if data:
                # A successful fetch clears any accumulated failure state.
                if 0 != self.failure_count:
                    logger.log('Unblocking provider: %s' % self.get_id(), logger.DEBUG)
                self.failure_count = 0
                self.failure_time = None
            else:
                # Connected but empty response body: counted as a 'nodata' failure.
                self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.nodata))
                log_failure_url = True
        except requests.exceptions.HTTPError as e:
            # Non-2xx status; keep the code for later skip/backoff decisions.
            self.inc_failure_count(ProviderFail(
                fail_type=ProviderFailTypes.http, code=e.response.status_code))
        except requests.exceptions.ConnectionError:
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.connection))
        except requests.exceptions.ReadTimeout:
            # NOTE: must be caught before requests.exceptions.Timeout (its parent class)
            # so read timeouts are classified separately from connect timeouts.
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.timeout))
        except (requests.exceptions.Timeout, socket.timeout):
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.connection_timeout))
        except (StandardError, Exception) as e:
            # Any other error (StandardError is Python 2) is a generic 'other' failure.
            log_failure_url = True
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.other))

        # Persist the (possibly updated) failure history regardless of outcome.
        self.fails.save_list()
        if log_failure_url:
            self.log_failure_url(url, post_data, post_json)
        return data
def log_failure_url(self, url, post_data=None, post_json=None):
if self.should_skip(log_warning=False):
post = []
if post_data:
post += [' .. Post params: [%s]' % '&'.join([post_data])]
if post_json:
post += [' .. Json params: [%s]' % '&'.join([post_json])]
logger.log('Failure URL: %s%s' % (url, ''.join(post)), logger.WARNING)
def get_id(self): def get_id(self):
return GenericProvider.make_id(self.name) return GenericProvider.make_id(self.name)
@ -152,19 +612,6 @@ class GenericProvider:
self.session.response = dict(url=r.url, status_code=r.status_code, elapsed=r.elapsed, from_cache=r.from_cache) self.session.response = dict(url=r.url, status_code=r.status_code, elapsed=r.elapsed, from_cache=r.from_cache)
return r return r
def get_url(self, url, post_data=None, params=None, timeout=30, json=False):
"""
By default this is just a simple urlopen call but this method should be overridden
for providers with special URL requirements (like cookies)
"""
# check for auth
if not self._authorised():
return
return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
session=self.session, json=json, hooks=dict(response=self.cb_response))
def download_result(self, result): def download_result(self, result):
""" """
Save the result to disk. Save the result to disk.
@ -428,9 +875,13 @@ class GenericProvider:
results = {} results = {}
item_list = [] item_list = []
if self.should_skip():
return results
searched_scene_season = None searched_scene_season = None
for ep_obj in episodes: for ep_obj in episodes:
if self.should_skip(log_warning=False):
break
# search cache for episode result # search cache for episode result
cache_result = self.cache.searchCache(ep_obj, manual_search) cache_result = self.cache.searchCache(ep_obj, manual_search)
if cache_result: if cache_result:
@ -457,6 +908,8 @@ class GenericProvider:
for cur_param in search_params: for cur_param in search_params:
item_list += self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes)) item_list += self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes))
if self.should_skip():
break
return self.finish_find_search_results(show, episodes, search_mode, manual_search, results, item_list) return self.finish_find_search_results(show, episodes, search_mode, manual_search, results, item_list)
@ -649,10 +1102,11 @@ class GenericProvider:
:param count: count of successfully processed items :param count: count of successfully processed items
:param url: source url of item(s) :param url: source url of item(s)
""" """
str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode] if not self.should_skip():
logger.log(u'%s %s in response from %s' % (('No' + str1, count)[0 < count], ( str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode]
'%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)), logger.log(u'%s %s in response from %s' % (('No' + str1, count)[0 < count], (
re.sub('(\s)\s+', r'\1', url))) '%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)),
re.sub('(\s)\s+', r'\1', url)))
def check_auth_cookie(self): def check_auth_cookie(self):
@ -723,12 +1177,13 @@ class GenericProvider:
return return
class NZBProvider(object, GenericProvider): class NZBProvider(GenericProvider):
def __init__(self, name, supports_backlog=True, anime_only=False): def __init__(self, name, supports_backlog=True, anime_only=False):
GenericProvider.__init__(self, name, supports_backlog, anime_only) GenericProvider.__init__(self, name, supports_backlog, anime_only)
self.providerType = GenericProvider.NZB self.providerType = GenericProvider.NZB
self.has_limit = True
def image_name(self): def image_name(self):
@ -757,6 +1212,9 @@ class NZBProvider(object, GenericProvider):
results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
cache_results] cache_results]
if self.should_skip():
return results
index = 0 index = 0
alt_search = ('nzbs_org' == self.get_id()) alt_search = ('nzbs_org' == self.get_id())
do_search_alt = False do_search_alt = False
@ -775,6 +1233,9 @@ class NZBProvider(object, GenericProvider):
urls = [] urls = []
while index < len(search_terms): while index < len(search_terms):
if self.should_skip(log_warning=False):
break
search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2} search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2}
if alt_search: if alt_search:
@ -817,7 +1278,7 @@ class NZBProvider(object, GenericProvider):
return self._search_provider(search_params=search_params, **kwargs) return self._search_provider(search_params=search_params, **kwargs)
class TorrentProvider(object, GenericProvider): class TorrentProvider(GenericProvider):
def __init__(self, name, supports_backlog=True, anime_only=False, cache_update_freq=None, update_freq=None): def __init__(self, name, supports_backlog=True, anime_only=False, cache_update_freq=None, update_freq=None):
GenericProvider.__init__(self, name, supports_backlog, anime_only) GenericProvider.__init__(self, name, supports_backlog, anime_only)
@ -995,8 +1456,9 @@ class TorrentProvider(object, GenericProvider):
return None return None
if 10 < len(cur_url) and ((expire and (expire > int(time.time()))) or if 10 < len(cur_url) and ((expire and (expire > int(time.time()))) or
self._has_signature(helpers.getURL(cur_url, session=self.session))): self._has_signature(self.get_url(cur_url, skip_auth=True))):
if self.should_skip():
return None
for k, v in getattr(self, 'url_tmpl', {}).items(): for k, v in getattr(self, 'url_tmpl', {}).items():
self.urls[k] = v % {'home': cur_url, 'vars': getattr(self, 'url_vars', {}).get(k, '')} self.urls[k] = v % {'home': cur_url, 'vars': getattr(self, 'url_vars', {}).get(k, '')}
@ -1056,15 +1518,17 @@ class TorrentProvider(object, GenericProvider):
if isinstance(url, type([])): if isinstance(url, type([])):
for i in range(0, len(url)): for i in range(0, len(url)):
helpers.getURL(url.pop(), session=self.session) self.get_url(url.pop(), skip_auth=True)
if self.should_skip():
return False
passfield, userfield = None, None passfield, userfield = None, None
if not url: if not url:
if hasattr(self, 'urls'): if hasattr(self, 'urls'):
url = self.urls.get('login_action') url = self.urls.get('login_action')
if url: if url:
response = helpers.getURL(url, session=self.session) response = self.get_url(url, skip_auth=True)
if None is response: if self.should_skip() or None is response:
return False return False
try: try:
post_params = isinstance(post_params, type({})) and post_params or {} post_params = isinstance(post_params, type({})) and post_params or {}
@ -1104,8 +1568,8 @@ class TorrentProvider(object, GenericProvider):
if self.password not in post_params.values(): if self.password not in post_params.values():
post_params[(passfield, 'password')[not passfield]] = self.password post_params[(passfield, 'password')[not passfield]] = self.password
response = helpers.getURL(url, post_data=post_params, session=self.session, timeout=timeout) response = self.get_url(url, skip_auth=True, post_data=post_params, timeout=timeout)
if response: if not self.should_skip() and response:
if logged_in(response): if logged_in(response):
return True return True
@ -1153,6 +1617,8 @@ class TorrentProvider(object, GenericProvider):
:return: list of Proper objects :return: list of Proper objects
""" """
results = [] results = []
if self.should_skip():
return results
search_terms = getattr(self, 'proper_search_terms', ['proper', 'repack', 'real']) search_terms = getattr(self, 'proper_search_terms', ['proper', 'repack', 'real'])
if not isinstance(search_terms, list): if not isinstance(search_terms, list):
@ -1164,9 +1630,14 @@ class TorrentProvider(object, GenericProvider):
clean_term = re.compile(r'(?i)[^a-z1-9|.]+') clean_term = re.compile(r'(?i)[^a-z1-9|.]+')
for proper_term in search_terms: for proper_term in search_terms:
if self.should_skip(log_warning=False):
break
proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term)) proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term))
for item in items: for item in items:
if self.should_skip(log_warning=False):
break
title, url = self._title_and_url(item) title, url = self._title_and_url(item)
if proper_check.search(title): if proper_check.search(title):
results.append(classes.Proper(title, url, datetime.datetime.today(), results.append(classes.Proper(title, url, datetime.datetime.today(),

2
sickbeard/providers/gftracker.py

@ -66,6 +66,8 @@ class GFTrackerProvider(generic.TorrentProvider):
(self.urls['search'] % search_string, '')['Cache' == mode]) (self.urls['search'] % search_string, '')['Cache' == mode])
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/grabtheinfo.py

@ -70,6 +70,8 @@ class GrabTheInfoProvider(generic.TorrentProvider):
(self.urls['search'] % search_string, '')['Cache' == mode]) (self.urls['search'] % search_string, '')['Cache' == mode])
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/hd4free.py

@ -105,6 +105,8 @@ class HD4FreeProvider(generic.TorrentProvider):
self.token, '+'.join(search_string.split()), self._categories_string(mode, ''), '', '', '') self.token, '+'.join(search_string.split()), self._categories_string(mode, ''), '', '', '')
resp = self.get_url(search_url, json=True) resp = self.get_url(search_url, json=True)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

6
sickbeard/providers/hdbits.py

@ -48,7 +48,7 @@ class HDBitsProvider(generic.TorrentProvider):
self.username, self.passkey, self.freeleech, self.minseed, self.minleech = 5 * [None] self.username, self.passkey, self.freeleech, self.minseed, self.minleech = 5 * [None]
def check_auth_from_data(self, parsed_json): def _check_auth_from_data(self, parsed_json):
if 'status' in parsed_json and 5 == parsed_json.get('status') and 'message' in parsed_json: if 'status' in parsed_json and 5 == parsed_json.get('status') and 'message' in parsed_json:
logger.log(u'Incorrect username or password for %s: %s' % (self.name, parsed_json['message']), logger.DEBUG) logger.log(u'Incorrect username or password for %s: %s' % (self.name, parsed_json['message']), logger.DEBUG)
@ -112,9 +112,11 @@ class HDBitsProvider(generic.TorrentProvider):
search_url = self.urls['search'] search_url = self.urls['search']
json_resp = self.get_url(search_url, post_data=post_data, json=True) json_resp = self.get_url(search_url, post_data=post_data, json=True)
if self.should_skip():
return results
try: try:
if not (json_resp and self.check_auth_from_data(json_resp) and 'data' in json_resp): if not (json_resp and self._check_auth_from_data(json_resp) and 'data' in json_resp):
logger.log(u'Response from %s does not contain any json data, abort' % self.name, logger.ERROR) logger.log(u'Response from %s does not contain any json data, abort' % self.name, logger.ERROR)
return results return results
except AuthException as e: except AuthException as e:

2
sickbeard/providers/hdspace.py

@ -83,6 +83,8 @@ class HDSpaceProvider(generic.TorrentProvider):
search_url += self.urls['search'] % rc['nodots'].sub(' ', search_string) search_url += self.urls['search'] % rc['nodots'].sub(' ', search_string)
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/hdtorrents.py

@ -86,6 +86,8 @@ class HDTorrentsProvider(generic.TorrentProvider):
self._categories_string(mode, template='category[]=%s') self._categories_string(mode, template='category[]=%s')
.replace('&category[]=Animation', ('&genre[]=Animation', '')[mode in ['Cache', 'Propers']])) .replace('&category[]=Animation', ('&genre[]=Animation', '')[mode in ['Cache', 'Propers']]))
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/iptorrents.py

@ -88,6 +88,8 @@ class IPTorrentsProvider(generic.TorrentProvider):
(';free', '')[not self.freeleech], (';o=seeders', '')['Cache' == mode]) (';free', '')[not self.freeleech], (';o=seeders', '')['Cache' == mode])
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/limetorrents.py

@ -67,6 +67,8 @@ class LimeTorrentsProvider(generic.TorrentProvider):
else self.urls['search'] % (urllib.quote_plus(search_string)) else self.urls['search'] % (urllib.quote_plus(search_string))
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/magnetdl.py

@ -54,6 +54,8 @@ class MagnetDLProvider(generic.TorrentProvider):
search_url = self.urls['search'] % re.sub('[.\s]+', ' ', search_string) search_url = self.urls['search'] % re.sub('[.\s]+', ' ', search_string)
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

3
sickbeard/providers/morethan.py

@ -65,6 +65,9 @@ class MoreThanProvider(generic.TorrentProvider):
# fetches 15 results by default, and up to 100 if allowed in user profile # fetches 15 results by default, and up to 100 if allowed in user profile
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
if not html or self._has_no_results(html): if not html or self._has_no_results(html):

2
sickbeard/providers/ncore.py

@ -68,6 +68,8 @@ class NcoreProvider(generic.TorrentProvider):
# fetches 15 results by default, and up to 100 if allowed in user profile # fetches 15 results by default, and up to 100 if allowed in user profile
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

6
sickbeard/providers/nebulance.py

@ -51,7 +51,9 @@ class NebulanceProvider(generic.TorrentProvider):
post_params={'keeplogged': '1', 'form_tmpl': True}): post_params={'keeplogged': '1', 'form_tmpl': True}):
return False return False
if not self.user_authkey: if not self.user_authkey:
response = helpers.getURL(self.urls['user'], session=self.session, json=True) response = self.get_url(self.urls['user'], skip_auth=True, json=True)
if self.should_skip():
return False
if 'response' in response: if 'response' in response:
self.user_authkey, self.user_passkey = [response['response'].get(v) for v in 'authkey', 'passkey'] self.user_authkey, self.user_passkey = [response['response'].get(v) for v in 'authkey', 'passkey']
return self.user_authkey return self.user_authkey
@ -74,6 +76,8 @@ class NebulanceProvider(generic.TorrentProvider):
search_url += self.urls['search'] % rc['nodots'].sub('+', search_string) search_url += self.urls['search'] % rc['nodots'].sub('+', search_string)
data_json = self.get_url(search_url, json=True) data_json = self.get_url(search_url, json=True)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

62
sickbeard/providers/newznab.py

@ -28,7 +28,7 @@ from math import ceil
from sickbeard.sbdatetime import sbdatetime from sickbeard.sbdatetime import sbdatetime
from . import generic from . import generic
from sickbeard import helpers, logger, tvcache, classes, db from sickbeard import helpers, logger, tvcache, classes, db
from sickbeard.common import neededQualities, Quality from sickbeard.common import neededQualities, Quality, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED
from sickbeard.exceptions import AuthException, MultipleShowObjectsException from sickbeard.exceptions import AuthException, MultipleShowObjectsException
from sickbeard.indexers.indexer_config import * from sickbeard.indexers.indexer_config import *
from io import BytesIO from io import BytesIO
@ -291,7 +291,12 @@ class NewznabProvider(generic.NZBProvider):
return [x for x in cats if x['id'] not in self.excludes] return [x for x in cats if x['id'] not in self.excludes]
return ','.join(set(cats.split(',')) - self.excludes) return ','.join(set(cats.split(',')) - self.excludes)
def check_auth_from_data(self, data): def _check_auth(self, is_required=None):
if self.should_skip():
return False
return super(NewznabProvider, self)._check_auth(is_required)
def _check_auth_from_data(self, data, url):
if data is None or not hasattr(data, 'tag'): if data is None or not hasattr(data, 'tag'):
return False return False
@ -306,6 +311,13 @@ class NewznabProvider(generic.NZBProvider):
raise AuthException('Your account on %s has been suspended, contact the admin.' % self.name) raise AuthException('Your account on %s has been suspended, contact the admin.' % self.name)
elif '102' == code: elif '102' == code:
raise AuthException('Your account isn\'t allowed to use the API on %s, contact the admin.' % self.name) raise AuthException('Your account isn\'t allowed to use the API on %s, contact the admin.' % self.name)
elif '500' == code:
try:
retry_time, unit = re.findall(r'Retry in (\d+)\W+([a-z]+)', description, flags=re.I)[0]
except IndexError:
retry_time, unit = None, None
self.tmr_limit_update(retry_time, unit, description)
self.log_failure_url(url)
elif '910' == code: elif '910' == code:
logger.log( logger.log(
'%s %s, please check with provider.' % '%s %s, please check with provider.' %
@ -316,6 +328,7 @@ class NewznabProvider(generic.NZBProvider):
logger.WARNING) logger.WARNING)
return False return False
self.tmr_limit_count = 0
return True return True
def config_str(self): def config_str(self):
@ -530,15 +543,20 @@ class NewznabProvider(generic.NZBProvider):
(hits_per_page * 100 // hits_per_page * 2, hits_per_page * int(ceil(rel_limit * 1.5)))[season_search]) (hits_per_page * 100 // hits_per_page * 2, hits_per_page * int(ceil(rel_limit * 1.5)))[season_search])
def find_search_results(self, show, episodes, search_mode, manual_search=False, try_other_searches=False, **kwargs): def find_search_results(self, show, episodes, search_mode, manual_search=False, try_other_searches=False, **kwargs):
self._check_auth() check = self._check_auth()
results = {}
if (isinstance(check, bool) and not check) or self.should_skip():
return results
self.show = show self.show = show
results = {}
item_list = [] item_list = []
name_space = {} name_space = {}
searched_scene_season = s_mode = None searched_scene_season = s_mode = None
for ep_obj in episodes: for ep_obj in episodes:
if self.should_skip(log_warning=False):
break
# skip if season already searched # skip if season already searched
if (s_mode or 'sponly' == search_mode) and 1 < len(episodes) \ if (s_mode or 'sponly' == search_mode) and 1 < len(episodes) \
and searched_scene_season == ep_obj.scene_season: and searched_scene_season == ep_obj.scene_season:
@ -577,6 +595,8 @@ class NewznabProvider(generic.NZBProvider):
try_all_searches=try_other_searches) try_all_searches=try_other_searches)
item_list += items item_list += items
name_space.update(n_space) name_space.update(n_space)
if self.should_skip():
break
return self.finish_find_search_results( return self.finish_find_search_results(
show, episodes, search_mode, manual_search, results, item_list, name_space=name_space) show, episodes, search_mode, manual_search, results, item_list, name_space=name_space)
@ -617,7 +637,13 @@ class NewznabProvider(generic.NZBProvider):
def _search_provider(self, search_params, needed=neededQualities(need_all=True), max_items=400, def _search_provider(self, search_params, needed=neededQualities(need_all=True), max_items=400,
try_all_searches=False, **kwargs): try_all_searches=False, **kwargs):
results, n_spaces = [], {}
if self.should_skip():
return results, n_spaces
api_key = self._check_auth() api_key = self._check_auth()
if isinstance(api_key, bool) and not api_key:
return results, n_spaces
base_params = {'t': 'tvsearch', base_params = {'t': 'tvsearch',
'maxage': sickbeard.USENET_RETENTION or 0, 'maxage': sickbeard.USENET_RETENTION or 0,
@ -644,8 +670,13 @@ class NewznabProvider(generic.NZBProvider):
cat_webdl = self.cats.get(NewznabConstants.CAT_WEBDL) cat_webdl = self.cats.get(NewznabConstants.CAT_WEBDL)
for mode in search_params.keys(): for mode in search_params.keys():
if self.should_skip(log_warning=False):
break
for i, params in enumerate(search_params[mode]): for i, params in enumerate(search_params[mode]):
if self.should_skip(log_warning=False):
break
# category ids # category ids
cat = [] cat = []
if 'Episode' == mode or 'Season' == mode: if 'Episode' == mode or 'Season' == mode:
@ -697,14 +728,13 @@ class NewznabProvider(generic.NZBProvider):
search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params)) search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
i and time.sleep(2.1) i and time.sleep(2.1)
data = helpers.getURL(search_url) data = self.get_url(search_url)
if not data: if self.should_skip() or not data:
logger.log('No Data returned from %s' % self.name, logger.WARNING)
break break
# hack this in until it's fixed server side # hack this in until it's fixed server side
if data and not data.startswith('<?xml'): if not data.startswith('<?xml'):
data = '<?xml version="1.0" encoding="ISO-8859-1" ?>%s' % data data = '<?xml version="1.0" encoding="ISO-8859-1" ?>%s' % data
try: try:
@ -714,7 +744,7 @@ class NewznabProvider(generic.NZBProvider):
logger.log('Error trying to load %s RSS feed' % self.name, logger.WARNING) logger.log('Error trying to load %s RSS feed' % self.name, logger.WARNING)
break break
if not self.check_auth_from_data(parsed_xml): if not self._check_auth_from_data(parsed_xml, search_url):
break break
if 'rss' != parsed_xml.tag: if 'rss' != parsed_xml.tag:
@ -794,6 +824,10 @@ class NewznabProvider(generic.NZBProvider):
results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
cache_results] cache_results]
check = self._check_auth()
if isinstance(check, bool) and not check:
return results
index = 0 index = 0
alt_search = ('nzbs_org' == self.get_id()) alt_search = ('nzbs_org' == self.get_id())
do_search_alt = False do_search_alt = False
@ -812,6 +846,9 @@ class NewznabProvider(generic.NZBProvider):
urls = [] urls = []
while index < len(search_terms): while index < len(search_terms):
if self.should_skip(log_warning=False):
break
search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2} search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2}
if alt_search: if alt_search:
@ -885,8 +922,11 @@ class NewznabCache(tvcache.TVCache):
if 4489 != sickbeard.RECENTSEARCH_FREQUENCY or self.should_update(): if 4489 != sickbeard.RECENTSEARCH_FREQUENCY or self.should_update():
n_spaces = {} n_spaces = {}
try: try:
self._checkAuth() check = self._checkAuth()
(items, n_spaces) = self.provider.cache_data(needed=needed) if isinstance(check, bool) and not check:
items = None
else:
(items, n_spaces) = self.provider.cache_data(needed=needed)
except (StandardError, Exception): except (StandardError, Exception):
items = None items = None

2
sickbeard/providers/nyaa.py

@ -53,6 +53,8 @@ class NyaaProvider(generic.TorrentProvider):
search_url = self.urls['search'] % ((0, 2)[self.confirmed], search_string) search_url = self.urls['search'] % ((0, 2)[self.confirmed], search_string)
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

19
sickbeard/providers/omgwtfnzbs.py

@ -100,9 +100,12 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
result = None result = None
if url and False is self._init_api(): if url and False is self._init_api():
data = self.get_url(url, timeout=90) data = self.get_url(url, timeout=90)
if self.should_skip():
return result
if data: if data:
if re.search('(?i)limit.*?reached', data): if re.search('(?i)limit.*?reached', data):
logger.log('Daily Nzb Download limit reached', logger.DEBUG) self.tmr_limit_update('1', 'h', 'Your 24 hour limit of 10 NZBs has been reached')
self.log_failure_url(url)
elif '</nzb>' not in data or 'seem to be logged in' in data: elif '</nzb>' not in data or 'seem to be logged in' in data:
logger.log('Failed nzb data response: %s' % data, logger.DEBUG) logger.log('Failed nzb data response: %s' % data, logger.DEBUG)
else: else:
@ -138,6 +141,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
def cache_data(self, needed=neededQualities(need_all=True), **kwargs): def cache_data(self, needed=neededQualities(need_all=True), **kwargs):
if self.should_skip():
return []
api_key = self._init_api() api_key = self._init_api()
if False is api_key: if False is api_key:
return self.search_html(needed=needed, **kwargs) return self.search_html(needed=needed, **kwargs)
@ -153,6 +159,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
url = self.urls['cache'] % urllib.urlencode(params) url = self.urls['cache'] % urllib.urlencode(params)
response = self.get_url(url) response = self.get_url(url)
if self.should_skip():
return results
data = feedparser.parse(response.replace('<xml', '<?xml').replace('>\n<info>', '?>\n<feed>\n<info>') data = feedparser.parse(response.replace('<xml', '<?xml').replace('>\n<info>', '?>\n<feed>\n<info>')
.replace('<search_req>\n', '').replace('</search_req>\n', '') .replace('<search_req>\n', '').replace('</search_req>\n', '')
@ -183,6 +191,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
search_url = self.urls['search'] % urllib.urlencode(params) search_url = self.urls['search'] % urllib.urlencode(params)
data_json = self.get_url(search_url, json=True) data_json = self.get_url(search_url, json=True)
if self.should_skip():
return results
if data_json and self._check_auth_from_data(data_json, is_xml=False): if data_json and self._check_auth_from_data(data_json, is_xml=False):
for item in data_json: for item in data_json:
if 'release' in item and 'getnzb' in item: if 'release' in item and 'getnzb' in item:
@ -211,6 +221,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
mode = ('search', 'cache')['' == search] mode = ('search', 'cache')['' == search]
search_url = self.urls[mode + '_html'] % search search_url = self.urls[mode + '_html'] % search
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(results) cnt = len(results)
try: try:
if not html: if not html:
@ -254,6 +266,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
search_terms = ['.PROPER.', '.REPACK.', '.REAL.'] search_terms = ['.PROPER.', '.REPACK.', '.REAL.']
results = [] results = []
if self.should_skip():
return results
for term in search_terms: for term in search_terms:
for item in self._search_provider(term, search_mode='Propers', retention=4): for item in self._search_provider(term, search_mode='Propers', retention=4):
@ -272,6 +286,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
def _init_api(self): def _init_api(self):
if self.should_skip():
return None
try: try:
api_key = self._check_auth() api_key = self._check_auth()
if not api_key.startswith('cookie:'): if not api_key.startswith('cookie:'):

2
sickbeard/providers/pisexy.py

@ -59,6 +59,8 @@ class PiSexyProvider(generic.TorrentProvider):
search_url = self.urls['search'] % search_string search_url = self.urls['search'] % search_string
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

5
sickbeard/providers/potuk.py

@ -94,6 +94,8 @@ class PotUKProvider(generic.TorrentProvider):
params.setdefault(name, value) params.setdefault(name, value)
del params['doprefs'] del params['doprefs']
html = self.get_url(search_url, post_data=params) html = self.get_url(search_url, post_data=params)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:
@ -135,6 +137,9 @@ class PotUKProvider(generic.TorrentProvider):
def get_data(self, url): def get_data(self, url):
result = None result = None
html = self.get_url(url, timeout=90) html = self.get_url(url, timeout=90)
if self.should_skip():
return result
try: try:
result = self._link(re.findall('(?i)"(attachment\.php[^"]+?)"', html)[0]) result = self._link(re.findall('(?i)"(attachment\.php[^"]+?)"', html)[0])
except IndexError: except IndexError:

3
sickbeard/providers/pretome.py

@ -16,7 +16,6 @@
# along with SickGear. If not, see <http://www.gnu.org/licenses/>. # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from . import generic from . import generic
from sickbeard.rssfeeds import RSSFeeds
from lib.unidecode import unidecode from lib.unidecode import unidecode
@ -52,7 +51,7 @@ class PreToMeProvider(generic.TorrentProvider):
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
search_url = url + (self.urls['search'] % search_string, '')['Cache' == mode] search_url = url + (self.urls['search'] % search_string, '')['Cache' == mode]
xml_data = RSSFeeds(self).get_feed(search_url) xml_data = self.cache.get_rss(search_url)
cnt = len(items[mode]) cnt = len(items[mode])
if xml_data and 'entries' in xml_data: if xml_data and 'entries' in xml_data:

2
sickbeard/providers/privatehd.py

@ -97,6 +97,8 @@ class PrivateHDProvider(generic.TorrentProvider):
'+'.join(search_string.split()), self._categories_string(mode, '')) '+'.join(search_string.split()), self._categories_string(mode, ''))
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

5
sickbeard/providers/ptf.py

@ -85,11 +85,16 @@ class PTFProvider(generic.TorrentProvider):
search_url = self.urls['search'] % ('+'.join(search_string.split()), self._categories_string(mode)) search_url = self.urls['search'] % ('+'.join(search_string.split()), self._categories_string(mode))
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
time.sleep(2) time.sleep(2)
if not self.has_all_cookies(['session_key']): if not self.has_all_cookies(['session_key']):
if not self._authorised(): if not self._authorised():
return results return results
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

6
sickbeard/providers/rarbg.py

@ -58,8 +58,8 @@ class RarbgProvider(generic.TorrentProvider):
return True return True
for r in range(0, 3): for r in range(0, 3):
response = helpers.getURL(self.urls['api_token'], session=self.session, json=True) response = self.get_url(self.urls['api_token'], json=True)
if response and 'token' in response: if not self.should_skip() and response and 'token' in response:
self.token = response['token'] self.token = response['token']
self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14) self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
return True return True
@ -125,6 +125,8 @@ class RarbgProvider(generic.TorrentProvider):
searched_url = search_url % {'r': int(self.confirmed), 't': self.token} searched_url = search_url % {'r': int(self.confirmed), 't': self.token}
data_json = self.get_url(searched_url, json=True) data_json = self.get_url(searched_url, json=True)
if self.should_skip():
return results
self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14) self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
self.request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3) self.request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3)

2
sickbeard/providers/revtt.py

@ -63,6 +63,8 @@ class RevTTProvider(generic.TorrentProvider):
html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()), html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()),
self._categories_string(mode))) self._categories_string(mode)))
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

8
sickbeard/providers/rsstorrent.py

@ -21,7 +21,6 @@ from . import generic
from sickbeard import logger, tvcache from sickbeard import logger, tvcache
from sickbeard.helpers import tryInt from sickbeard.helpers import tryInt
from sickbeard.exceptions import ex from sickbeard.exceptions import ex
from sickbeard.rssfeeds import RSSFeeds
from lib.bencode import bdecode from lib.bencode import bdecode
@ -41,8 +40,6 @@ class TorrentRssProvider(generic.TorrentProvider):
self.search_mode = search_mode self.search_mode = search_mode
self.search_fallback = bool(tryInt(search_fallback)) self.search_fallback = bool(tryInt(search_fallback))
self.feeder = RSSFeeds(self)
def image_name(self): def image_name(self):
return generic.GenericProvider.image_name(self, 'torrentrss') return generic.GenericProvider.image_name(self, 'torrentrss')
@ -102,6 +99,9 @@ class TorrentRssProvider(generic.TorrentProvider):
break break
else: else:
torrent_file = self.get_url(url) torrent_file = self.get_url(url)
if self.should_skip():
break
try: try:
bdecode(torrent_file) bdecode(torrent_file)
break break
@ -120,7 +120,7 @@ class TorrentRssProvider(generic.TorrentProvider):
result = [] result = []
for mode in search_params.keys(): for mode in search_params.keys():
data = self.feeder.get_feed(self.url) data = self.cache.get_rss(self.url)
result += (data and 'entries' in data) and data.entries or [] result += (data and 'entries' in data) and data.entries or []

2
sickbeard/providers/scenehd.py

@ -61,6 +61,8 @@ class SceneHDProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ',')) search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ','))
html = self.get_url(search_url, timeout=90) html = self.get_url(search_url, timeout=90)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/scenetime.py

@ -80,6 +80,8 @@ class SceneTimeProvider(generic.TorrentProvider):
self.session.headers.update({'Referer': self.url + 'browse.php', 'X-Requested-With': 'XMLHttpRequest'}) self.session.headers.update({'Referer': self.url + 'browse.php', 'X-Requested-With': 'XMLHttpRequest'})
html = self.get_url(self.urls['browse'], post_data=post_data) html = self.get_url(self.urls['browse'], post_data=post_data)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

11
sickbeard/providers/shazbat.py

@ -49,8 +49,8 @@ class ShazbatProvider(generic.TorrentProvider):
def _authorised(self, **kwargs): def _authorised(self, **kwargs):
return super(ShazbatProvider, self)._authorised( return super(ShazbatProvider, self)._authorised(
logged_in=(lambda y=None: '<input type="password"' not in helpers.getURL( logged_in=(lambda y=None: '<input type="password"' not in self.get_url(self.urls['feeds'], skip_auth=True)),
self.urls['feeds'], session=self.session)), post_params={'tv_login': self.username, 'form_tmpl': True}) post_params={'tv_login': self.username, 'form_tmpl': True})
def _search_provider(self, search_params, **kwargs): def _search_provider(self, search_params, **kwargs):
@ -70,11 +70,16 @@ class ShazbatProvider(generic.TorrentProvider):
if 'Cache' == mode: if 'Cache' == mode:
search_url = self.urls['browse'] search_url = self.urls['browse']
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
else: else:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
search_string = search_string.replace(show_detail, '').strip() search_string = search_string.replace(show_detail, '').strip()
search_url = self.urls['search'] % search_string search_url = self.urls['search'] % search_string
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
shows = rc['show_id'].findall(html) shows = rc['show_id'].findall(html)
if not any(shows): if not any(shows):
continue continue
@ -85,6 +90,8 @@ class ShazbatProvider(generic.TorrentProvider):
continue continue
html and time.sleep(1.1) html and time.sleep(1.1)
html += self.get_url(self.urls['show'] % sid) html += self.get_url(self.urls['show'] % sid)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/skytorrents.py

@ -56,6 +56,8 @@ class SkytorrentsProvider(generic.TorrentProvider):
search_url = self.urls['search'] % search_string search_url = self.urls['search'] % search_string
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/speedcd.py

@ -67,6 +67,8 @@ class SpeedCDProvider(generic.TorrentProvider):
jxt=2, jxw='b', freeleech=('on', None)[not self.freeleech]) jxt=2, jxw='b', freeleech=('on', None)[not self.freeleech])
data_json = self.get_url(self.urls['search'], post_data=post_data, json=True) data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

4
sickbeard/providers/thepiratebay.py

@ -106,7 +106,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
quality = Quality.UNKNOWN quality = Quality.UNKNOWN
file_name = None file_name = None
data = self.get_url('%sajax_details_filelist.php?id=%s' % (self.url, torrent_id)) data = self.get_url('%sajax_details_filelist.php?id=%s' % (self.url, torrent_id))
if not data: if self.should_skip() or not data:
return None return None
files_list = re.findall('<td.+>(.*?)</td>', data) files_list = re.findall('<td.+>(.*?)</td>', data)
@ -193,6 +193,8 @@ class ThePirateBayProvider(generic.TorrentProvider):
search_url = self.urls['browse'] if 'Cache' == mode \ search_url = self.urls['browse'] if 'Cache' == mode \
else self.urls['search'] % (urllib.quote(search_string)) else self.urls['search'] % (urllib.quote(search_string))
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

5
sickbeard/providers/tokyotoshokan.py

@ -49,6 +49,9 @@ class TokyoToshokanProvider(generic.TorrentProvider):
'stats': 'S:\s*?(\d)+\s*L:\s*(\d+)', 'size': 'size:\s*(\d+[.,]\d+\w+)'}.iteritems()) 'stats': 'S:\s*?(\d)+\s*L:\s*(\d+)', 'size': 'size:\s*(\d+[.,]\d+\w+)'}.iteritems())
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return self._sort_seeding(mode, results)
if html: if html:
try: try:
with BS4Parser(html, features=['html5lib', 'permissive']) as soup: with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
@ -103,7 +106,7 @@ class TokyoToshokanCache(tvcache.TVCache):
mode = 'Cache' mode = 'Cache'
search_url = '%srss.php?%s' % (self.provider.url, urllib.urlencode({'filter': '1'})) search_url = '%srss.php?%s' % (self.provider.url, urllib.urlencode({'filter': '1'}))
data = self.getRSSFeed(search_url) data = self.get_rss(search_url)
results = [] results = []
if data and 'entries' in data: if data and 'entries' in data:

2
sickbeard/providers/torlock.py

@ -74,6 +74,8 @@ class TorLockProvider(generic.TorrentProvider):
else self.urls['search'] % (urllib.quote_plus(search_string).replace('+', '-')) else self.urls['search'] % (urllib.quote_plus(search_string).replace('+', '-'))
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

4
sickbeard/providers/torrentbytes.py

@ -36,7 +36,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s', self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s',
'search': '%(home)s%(vars)s'} 'search': '%(home)s%(vars)s'}
self.categories = {'Season': [41, 32], 'Episode': [33, 37, 38]} self.categories = {'Season': [41], 'Episode': [32, 33, 37, 38]}
self.categories['Cache'] = self.categories['Season'] + self.categories['Episode'] self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']
self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None] self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
@ -61,6 +61,8 @@ class TorrentBytesProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode)) search_url = self.urls['search'] % (search_string, self._categories_string(mode))
html = self.get_url(search_url, timeout=90) html = self.get_url(search_url, timeout=90)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/torrentday.py

@ -86,6 +86,8 @@ class TorrentDayProvider(generic.TorrentProvider):
search_string, ('&sort=7&type=desc', '')['Cache' == mode]) search_string, ('&sort=7&type=desc', '')['Cache' == mode])
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/torrenting.py

@ -69,6 +69,8 @@ class TorrentingProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (self._categories_string(), search_string) search_url = self.urls['search'] % (self._categories_string(), search_string)
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/torrentleech.py

@ -62,6 +62,8 @@ class TorrentLeechProvider(generic.TorrentProvider):
'query': isinstance(search_string, unicode) and unidecode(search_string) or search_string} 'query': isinstance(search_string, unicode) and unidecode(search_string) or search_string}
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/torrentz2.py

@ -93,6 +93,8 @@ class Torrentz2Provider(generic.TorrentProvider):
'tv%s' % ('+' + quote_plus(search_string), '')['Cache' == mode]) 'tv%s' % ('+' + quote_plus(search_string), '')['Cache' == mode])
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/tvchaosuk.py

@ -66,6 +66,8 @@ class TVChaosUKProvider(generic.TorrentProvider):
'order': 'desc', 'daysprune': '-1'}) 'order': 'desc', 'daysprune': '-1'})
html = self.get_url(self.urls['search'], **kwargs) html = self.get_url(self.urls['search'], **kwargs)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/wop.py

@ -70,6 +70,8 @@ class WOPProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'cats2[]=%s')) search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'cats2[]=%s'))
html = self.get_url(search_url, timeout=90) html = self.get_url(search_url, timeout=90)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

2
sickbeard/providers/zooqle.py

@ -58,6 +58,8 @@ class ZooqleProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode, '', ',')) search_url = self.urls['search'] % (search_string, self._categories_string(mode, '', ','))
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode]) cnt = len(items[mode])
try: try:

64
sickbeard/rssfeeds.py

@ -5,54 +5,32 @@
import feedparser import feedparser
from sickbeard import helpers, logger from sickbeard import logger
from sickbeard.exceptions import ex from sickbeard.exceptions import ex
class RSSFeeds: class RSSFeeds:
def __init__(self, provider=None): def __init__(self, provider=None):
self.provider = provider self.provider = provider
self.response = None
def _check_auth_cookie(self):
if self.provider:
return self.provider.check_auth_cookie()
return True
# noinspection PyUnusedLocal
def cb_response(self, r, *args, **kwargs):
self.response = dict(url=r.url, elapsed=r.elapsed, from_cache=r.from_cache)
return r
def get_feed(self, url, request_headers=None, **kwargs):
if not self._check_auth_cookie():
return
session = None
if self.provider and hasattr(self.provider, 'session'):
session = self.provider.session
response = helpers.getURL(url, headers=request_headers, session=session,
hooks=dict(response=self.cb_response), **kwargs)
if not response:
return
try:
feed = feedparser.parse(response)
feed['rq_response'] = self.response
if feed and 'entries' in feed:
return feed
if feed and 'error' in feed.feed:
err_code = feed.feed['error']['code']
err_desc = feed.feed['error']['description']
logger.log(u'RSS ERROR:[%s] CODE:[%s]' % (err_desc, err_code), logger.DEBUG)
else:
logger.log(u'RSS error loading url: ' + url, logger.DEBUG)
except Exception as e: def get_feed(self, url, **kwargs):
logger.log(u'RSS error: ' + ex(e), logger.DEBUG)
if self.provider and self.provider.check_auth_cookie():
response = self.provider.get_url(url, **kwargs)
if not self.provider.should_skip() and response:
try:
data = feedparser.parse(response)
data['rq_response'] = self.provider.session.response
if data and 'entries' in data:
return data
if data and 'error' in data.feed:
err_code = data.feed['error']['code']
err_desc = data.feed['error']['description']
logger.log(u'RSS error:[%s] code:[%s]' % (err_desc, err_code), logger.DEBUG)
else:
logger.log(u'RSS error loading url: ' + url, logger.DEBUG)
except Exception as e:
logger.log(u'RSS error: ' + ex(e), logger.DEBUG)

33
sickbeard/scheduler.py

@ -27,7 +27,7 @@ from sickbeard.exceptions import ex
class Scheduler(threading.Thread): class Scheduler(threading.Thread):
def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0), def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0),
start_time=None, threadName="ScheduledThread", silent=True, prevent_cycle_run=None): start_time=None, threadName="ScheduledThread", silent=True, prevent_cycle_run=None, paused=False):
super(Scheduler, self).__init__() super(Scheduler, self).__init__()
self.lastRun = datetime.datetime.now() + run_delay - cycleTime self.lastRun = datetime.datetime.now() + run_delay - cycleTime
@ -38,10 +38,32 @@ class Scheduler(threading.Thread):
self.name = threadName self.name = threadName
self.silent = silent self.silent = silent
self.stop = threading.Event() self._stop = threading.Event()
self._unpause = threading.Event()
if not paused:
self._unpause.set()
self.lock = threading.Lock() self.lock = threading.Lock()
self.force = False self.force = False
def pause(self):
self._unpause.clear()
def unpause(self):
self._unpause.set()
def stop(self):
self._stop.set()
self.unpause()
def check_paused(self):
if hasattr(self.action, 'check_paused'):
if self.action.check_paused():
self.pause()
self.silent = True
else:
self.unpause()
self.silent = False
def timeLeft(self): def timeLeft(self):
return self.cycleTime - (datetime.datetime.now() - self.lastRun) return self.cycleTime - (datetime.datetime.now() - self.lastRun)
@ -52,8 +74,10 @@ class Scheduler(threading.Thread):
return False return False
def run(self): def run(self):
self.check_paused()
while not self.stop.is_set(): # if self._unpause Event() is NOT set the loop pauses
while self._unpause.wait() and not self._stop.is_set():
try: try:
current_time = datetime.datetime.now() current_time = datetime.datetime.now()
@ -100,4 +124,5 @@ class Scheduler(threading.Thread):
time.sleep(1) time.sleep(1)
# exiting thread # exiting thread
self.stop.clear() self._stop.clear()
self._unpause.clear()

9
sickbeard/search.py

@ -143,7 +143,7 @@ def snatch_episode(result, end_status=SNATCHED):
# make sure we have the torrent file content # make sure we have the torrent file content
if not result.content and not result.url.startswith('magnet'): if not result.content and not result.url.startswith('magnet'):
result.content = result.provider.get_url(result.url) result.content = result.provider.get_url(result.url)
if not result.content: if result.provider.should_skip() or not result.content:
logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR) logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR)
return False return False
# Snatches torrent with client # Snatches torrent with client
@ -465,11 +465,18 @@ def search_for_needed_episodes(episodes):
best_result.content = None best_result.content = None
if not best_result.url.startswith('magnet'): if not best_result.url.startswith('magnet'):
best_result.content = best_result.provider.get_url(best_result.url) best_result.content = best_result.provider.get_url(best_result.url)
if best_result.provider.should_skip():
break
if not best_result.content: if not best_result.content:
continue continue
found_results[cur_ep] = best_result found_results[cur_ep] = best_result
try:
cur_provider.save_list()
except (StandardError, Exception):
pass
threading.currentThread().name = orig_thread_name threading.currentThread().name = orig_thread_name
if not len(providers): if not len(providers):

8
sickbeard/search_propers.py

@ -26,8 +26,12 @@ class ProperSearcher:
def __init__(self): def __init__(self):
self.lock = threading.Lock() self.lock = threading.Lock()
self.amActive = False self.amActive = False
self.search_intervals = [('daily', '24 hours', 24 * 60), ('4h', '4 hours', 4 * 60),
('90m', '90 mins', 90), ('45m', '45 mins', 45), ('15m', '15 mins', 15)] @staticmethod
def check_paused():
if sickbeard.DOWNLOAD_PROPERS:
return False
return True
def run(self): def run(self):

72
sickbeard/search_queue.py

@ -21,11 +21,14 @@ from __future__ import with_statement
import traceback import traceback
import threading import threading
import datetime import datetime
import re
import sickbeard import sickbeard
from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \ from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
failed_history, history, ui, properFinder failed_history, history, ui, properFinder
from sickbeard.search import wanted_episodes, get_aired_in_season, set_wanted_aired from sickbeard.search import wanted_episodes, get_aired_in_season, set_wanted_aired
from sickbeard.classes import Proper
from sickbeard.indexers.indexer_config import INDEXER_TVDB
search_queue_lock = threading.Lock() search_queue_lock = threading.Lock()
@ -109,7 +112,11 @@ class SearchQueue(generic_queue.GenericQueue):
return self._is_in_progress(RecentSearchQueueItem) return self._is_in_progress(RecentSearchQueueItem)
def is_propersearch_in_progress(self): def is_propersearch_in_progress(self):
return self._is_in_progress(ProperSearchQueueItem) with self.lock:
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, ProperSearchQueueItem) and None is cur_item.propers:
return True
return False
def is_standard_backlog_in_progress(self): def is_standard_backlog_in_progress(self):
with self.lock: with self.lock:
@ -141,25 +148,25 @@ class SearchQueue(generic_queue.GenericQueue):
return message return message
def queue_length(self): def queue_length(self):
length = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': 0} length = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': []}
with self.lock: with self.lock:
for cur_item in [self.currentItem] + self.queue: for cur_item in [self.currentItem] + self.queue:
if isinstance(cur_item, RecentSearchQueueItem): if isinstance(cur_item, RecentSearchQueueItem):
length['recent'] += 1 length['recent'] += 1
elif isinstance(cur_item, BacklogQueueItem): elif isinstance(cur_item, BacklogQueueItem):
length['backlog'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer, length['backlog'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer,
'name': cur_item.show.name, 'segment': cur_item.segment, name=cur_item.show.name, segment=cur_item.segment,
'standard_backlog': cur_item.standard_backlog, standard_backlog=cur_item.standard_backlog,
'limited_backlog': cur_item.limited_backlog, 'forced': cur_item.forced, limited_backlog=cur_item.limited_backlog, forced=cur_item.forced,
'torrent_only': cur_item.torrent_only}) torrent_only=cur_item.torrent_only)]
elif isinstance(cur_item, ProperSearchQueueItem): elif isinstance(cur_item, ProperSearchQueueItem):
length['proper'] += 1 length['proper'] += [dict(recent=None is not cur_item.propers)]
elif isinstance(cur_item, ManualSearchQueueItem): elif isinstance(cur_item, ManualSearchQueueItem):
length['manual'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer, length['manual'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer,
'name': cur_item.show.name, 'segment': cur_item.segment}) name=cur_item.show.name, segment=cur_item.segment)]
elif isinstance(cur_item, FailedQueueItem): elif isinstance(cur_item, FailedQueueItem):
length['failed'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer, length['failed'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer,
'name': cur_item.show.name, 'segment': cur_item.segment}) name=cur_item.show.name, segment=cur_item.segment)]
return length return length
def add_item(self, item): def add_item(self, item):
@ -210,7 +217,11 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
self.episodes.extend(wanted_eps) self.episodes.extend(wanted_eps)
if sickbeard.DOWNLOAD_PROPERS:
properFinder.get_needed_qualites(needed)
self.update_providers(needed=needed) self.update_providers(needed=needed)
self._check_for_propers(needed)
if not self.episodes: if not self.episodes:
logger.log(u'No search of cache for episodes required') logger.log(u'No search of cache for episodes required')
@ -245,6 +256,33 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
self.finish() self.finish()
@staticmethod @staticmethod
def _check_for_propers(needed):
if not sickbeard.DOWNLOAD_PROPERS:
return
propers = {}
my_db = db.DBConnection('cache.db')
sql_results = my_db.select('SELECT * FROM provider_cache')
re_p = (r'\brepack|proper|real\b', r'\brepack|proper|real|v[1-5]\b')[needed.need_anime]
proper_regex = re.compile(re_p, flags=re.I)
for s in sql_results:
if proper_regex.search(s['name']):
try:
show = helpers.find_show_by_id(sickbeard.showList, {INDEXER_TVDB: int(s['indexerid'])})
except (StandardError, Exception):
continue
if show:
propers.setdefault(s['provider'], []).append(
Proper(s['name'], s['url'], datetime.datetime.fromtimestamp(s['time']), show, parsed_show=show))
if propers:
logger.log('Found Proper/Repack/Real in recent search, sending data to properfinder')
propersearch_queue_item = sickbeard.search_queue.ProperSearchQueueItem(propers=propers)
sickbeard.searchQueueScheduler.action.add_item(propersearch_queue_item)
@staticmethod
def _change_missing_episodes(): def _change_missing_episodes():
if not network_timezones.network_dict: if not network_timezones.network_dict:
network_timezones.update_network_dict() network_timezones.update_network_dict()
@ -326,7 +364,8 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
threads[-1].start() threads[-1].start()
if not len(providers): if not len(providers):
logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes', logger.WARNING) logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes',
logger.WARNING)
if threads: if threads:
# wait for all threads to finish # wait for all threads to finish
@ -337,16 +376,17 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
class ProperSearchQueueItem(generic_queue.QueueItem): class ProperSearchQueueItem(generic_queue.QueueItem):
def __init__(self): def __init__(self, propers=None):
generic_queue.QueueItem.__init__(self, 'Proper Search', PROPER_SEARCH) generic_queue.QueueItem.__init__(self, 'Proper Search', PROPER_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH self.priority = (generic_queue.QueuePriorities.VERYHIGH, generic_queue.QueuePriorities.HIGH)[None is propers]
self.propers = propers
self.success = None self.success = None
def run(self): def run(self):
generic_queue.QueueItem.run(self) generic_queue.QueueItem.run(self)
try: try:
properFinder.search_propers() properFinder.search_propers(self.propers)
finally: finally:
self.finish() self.finish()

8
sickbeard/show_updater.py

@ -20,7 +20,7 @@ import datetime
import traceback import traceback
import sickbeard import sickbeard
from sickbeard import logger, exceptions, ui, db, network_timezones, failed_history from sickbeard import logger, exceptions, ui, db, network_timezones, failed_history, properFinder
from sickbeard.exceptions import ex from sickbeard.exceptions import ex
@ -43,6 +43,12 @@ class ShowUpdater:
logger.log('network timezone update error', logger.ERROR) logger.log('network timezone update error', logger.ERROR)
logger.log(traceback.format_exc(), logger.ERROR) logger.log(traceback.format_exc(), logger.ERROR)
# refresh webdl types
try:
properFinder.load_webdl_types()
except (StandardError, Exception):
logger.log('error loading webdl_types', logger.DEBUG)
# update xem id lists # update xem id lists
try: try:
sickbeard.scene_exceptions.get_xem_ids() sickbeard.scene_exceptions.get_xem_ids()

6
sickbeard/subtitles.py

@ -86,6 +86,12 @@ class SubtitlesFinder():
The SubtitlesFinder will be executed every hour but will not necessarly search The SubtitlesFinder will be executed every hour but will not necessarly search
and download subtitles. Only if the defined rule is true and download subtitles. Only if the defined rule is true
""" """
@staticmethod
def check_paused():
if sickbeard.USE_SUBTITLES:
return False
return True
def run(self, force=False): def run(self, force=False):
if len(sickbeard.subtitles.getEnabledServiceList()) < 1: if len(sickbeard.subtitles.getEnabledServiceList()) < 1:
logger.log(u'Not enough services selected. At least 1 service is required to search subtitles in the background', logger.ERROR) logger.log(u'Not enough services selected. At least 1 service is required to search subtitles in the background', logger.ERROR)

2
sickbeard/tvcache.py

@ -107,7 +107,7 @@ class TVCache:
return [] return []
def getRSSFeed(self, url, **kwargs): def get_rss(self, url, **kwargs):
return RSSFeeds(self.provider).get_feed(url, **kwargs) return RSSFeeds(self.provider).get_feed(url, **kwargs)
def _translateTitle(self, title): def _translateTitle(self, title):

92
sickbeard/webserve.py

@ -602,6 +602,40 @@ class MainHandler(WebHandler):
sickbeard.save_config() sickbeard.save_config()
@staticmethod
def getFooterTime(change_layout=True, json_dump=True, *args, **kwargs):
now = datetime.datetime.now()
events = [
('recent', sickbeard.recentSearchScheduler.timeLeft),
('backlog', sickbeard.backlogSearchScheduler.next_backlog_timeleft),
]
if sickbeard.DOWNLOAD_PROPERS:
events += [('propers', sickbeard.properFinder.next_proper_timeleft)]
if change_layout not in (False, 0, '0', '', None):
sickbeard.FOOTER_TIME_LAYOUT += 1
if sickbeard.FOOTER_TIME_LAYOUT == 2: # 2 layouts = time + delta
sickbeard.FOOTER_TIME_LAYOUT = 0
sickbeard.save_config()
next_event = []
for k, v in events:
try:
t = v()
except AttributeError:
t = None
if 0 == sickbeard.FOOTER_TIME_LAYOUT:
next_event += [{k + '_time': t and sbdatetime.sbdatetime.sbftime(now + t, markup=True) or 'soon'}]
else:
next_event += [{k + '_timeleft': t and str(t).split('.')[0] or 'soon'}]
if json_dump not in (False, 0, '0', '', None):
next_event = json.dumps(next_event)
return next_event
def toggleDisplayShowSpecials(self, show): def toggleDisplayShowSpecials(self, show):
sickbeard.DISPLAY_SHOW_SPECIALS = not sickbeard.DISPLAY_SHOW_SPECIALS sickbeard.DISPLAY_SHOW_SPECIALS = not sickbeard.DISPLAY_SHOW_SPECIALS
@ -1446,7 +1480,7 @@ class Home(MainHandler):
indexerid = int(showObj.indexerid) indexerid = int(showObj.indexerid)
indexer = int(showObj.indexer) indexer = int(showObj.indexer)
t.min_initial = Quality.qualityStrings[min(Quality.splitQuality(showObj.quality)[0])] t.min_initial = Quality.get_quality_ui(min(Quality.splitQuality(showObj.quality)[0]))
t.all_scene_exceptions = showObj.exceptions t.all_scene_exceptions = showObj.exceptions
t.scene_numbering = get_scene_numbering_for_show(indexerid, indexer) t.scene_numbering = get_scene_numbering_for_show(indexerid, indexer)
t.scene_absolute_numbering = get_scene_absolute_numbering_for_show(indexerid, indexer) t.scene_absolute_numbering = get_scene_absolute_numbering_for_show(indexerid, indexer)
@ -4531,11 +4565,27 @@ class ManageSearches(Manage):
t.recent_search_status = sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress() t.recent_search_status = sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress()
t.find_propers_status = sickbeard.searchQueueScheduler.action.is_propersearch_in_progress() t.find_propers_status = sickbeard.searchQueueScheduler.action.is_propersearch_in_progress()
t.queue_length = sickbeard.searchQueueScheduler.action.queue_length() t.queue_length = sickbeard.searchQueueScheduler.action.queue_length()
t.provider_fail_stats = filter(lambda stat: len(stat['fails']), [{
'active': p.is_active(), 'name': p.name, 'prov_id': p.get_id(), 'prov_img': p.image_name(),
'fails': p.fails.fails_sorted, 'tmr_limit_time': p.tmr_limit_time,
'next_try': p.get_next_try_time, 'has_limit': getattr(p, 'has_limit', False)}
for p in sickbeard.providerList + sickbeard.newznabProviderList])
t.provider_fails = 0 < len([p for p in t.provider_fail_stats if len(p['fails'])])
t.submenu = self.ManageMenu('Search') t.submenu = self.ManageMenu('Search')
return t.respond() return t.respond()
def retryProvider(self, provider=None, *args, **kwargs):
    """Reset the failure/backoff state of one search provider.

    :param provider: provider id string as returned by ``p.get_id()``;
        no-op when empty or unknown
    :return: None in all cases
    """
    if not provider:
        return
    matches = [p for p in sickbeard.providerList + sickbeard.newznabProviderList
               if provider == p.get_id()]
    if matches:
        matches[0].retry_next()
        # brief pause so the cleared state is visible when the UI reloads
        time.sleep(3)
def forceVersionCheck(self, *args, **kwargs): def forceVersionCheck(self, *args, **kwargs):
# force a check to see if there is a new version # force a check to see if there is a new version
if sickbeard.versionCheckScheduler.action.check_for_new_version(force=True): if sickbeard.versionCheckScheduler.action.check_for_new_version(force=True):
@ -4826,7 +4876,7 @@ class ConfigGeneral(Config):
trash_remove_show=None, trash_rotate_logs=None, update_frequency=None, launch_browser=None, web_username=None, trash_remove_show=None, trash_rotate_logs=None, update_frequency=None, launch_browser=None, web_username=None,
use_api=None, api_key=None, indexer_default=None, timezone_display=None, cpu_preset=None, file_logging_preset=None, use_api=None, api_key=None, indexer_default=None, timezone_display=None, cpu_preset=None, file_logging_preset=None,
web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None, web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None,
handle_reverse_proxy=None, send_security_headers=None, home_search_focus=None, sort_article=None, auto_update=None, notify_on_update=None, handle_reverse_proxy=None, send_security_headers=None, home_search_focus=None, display_freespace=None, sort_article=None, auto_update=None, notify_on_update=None,
proxy_setting=None, proxy_indexers=None, anon_redirect=None, git_path=None, git_remote=None, calendar_unprotected=None, proxy_setting=None, proxy_indexers=None, anon_redirect=None, git_path=None, git_remote=None, calendar_unprotected=None,
fuzzy_dating=None, trim_zero=None, date_preset=None, date_preset_na=None, time_preset=None, fuzzy_dating=None, trim_zero=None, date_preset=None, date_preset_na=None, time_preset=None,
indexer_timeout=None, rootDir=None, theme_name=None, default_home=None, use_imdb_info=None, indexer_timeout=None, rootDir=None, theme_name=None, default_home=None, use_imdb_info=None,
@ -4882,6 +4932,7 @@ class ConfigGeneral(Config):
sickbeard.HOME_SEARCH_FOCUS = config.checkbox_to_value(home_search_focus) sickbeard.HOME_SEARCH_FOCUS = config.checkbox_to_value(home_search_focus)
sickbeard.USE_IMDB_INFO = config.checkbox_to_value(use_imdb_info) sickbeard.USE_IMDB_INFO = config.checkbox_to_value(use_imdb_info)
sickbeard.DISPLAY_FREESPACE = config.checkbox_to_value(display_freespace)
sickbeard.SORT_ARTICLE = config.checkbox_to_value(sort_article) sickbeard.SORT_ARTICLE = config.checkbox_to_value(sort_article)
sickbeard.FUZZY_DATING = config.checkbox_to_value(fuzzy_dating) sickbeard.FUZZY_DATING = config.checkbox_to_value(fuzzy_dating)
sickbeard.TRIM_ZERO = config.checkbox_to_value(trim_zero) sickbeard.TRIM_ZERO = config.checkbox_to_value(trim_zero)
@ -4982,7 +5033,6 @@ class ConfigSearch(Config):
for show in sickbeard.showList if show.rls_require_words and for show in sickbeard.showList if show.rls_require_words and
show.rls_require_words.strip()] show.rls_require_words.strip()]
t.using_rls_require_words.sort(lambda x, y: cmp(x[1], y[1]), reverse=False) t.using_rls_require_words.sort(lambda x, y: cmp(x[1], y[1]), reverse=False)
t.propers_intervals = search_propers.ProperSearcher().search_intervals
t.using_regex = False t.using_regex = False
try: try:
from sickbeard.name_parser.parser import regex from sickbeard.name_parser.parser import regex
@ -4996,7 +5046,7 @@ class ConfigSearch(Config):
nzbget_category=None, nzbget_priority=None, nzbget_host=None, nzbget_use_https=None, nzbget_category=None, nzbget_priority=None, nzbget_host=None, nzbget_use_https=None,
backlog_days=None, backlog_frequency=None, search_unaired=None, unaired_recent_search_only=None, backlog_days=None, backlog_frequency=None, search_unaired=None, unaired_recent_search_only=None,
recentsearch_frequency=None, nzb_method=None, torrent_method=None, usenet_retention=None, recentsearch_frequency=None, nzb_method=None, torrent_method=None, usenet_retention=None,
download_propers=None, propers_webdl_onegrp=None, check_propers_interval=None, download_propers=None, propers_webdl_onegrp=None,
allow_high_priority=None, allow_high_priority=None,
torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None, torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None,
torrent_label=None, torrent_path=None, torrent_verify_cert=None, torrent_label=None, torrent_path=None, torrent_verify_cert=None,
@ -5033,26 +5083,8 @@ class ConfigSearch(Config):
sickbeard.IGNORE_WORDS = ignore_words if ignore_words else '' sickbeard.IGNORE_WORDS = ignore_words if ignore_words else ''
sickbeard.REQUIRE_WORDS = require_words if require_words else '' sickbeard.REQUIRE_WORDS = require_words if require_words else ''
sickbeard.DOWNLOAD_PROPERS = config.checkbox_to_value(download_propers) config.change_DOWNLOAD_PROPERS(config.checkbox_to_value(download_propers))
sickbeard.PROPERS_WEBDL_ONEGRP = config.checkbox_to_value(propers_webdl_onegrp) sickbeard.PROPERS_WEBDL_ONEGRP = config.checkbox_to_value(propers_webdl_onegrp)
if sickbeard.CHECK_PROPERS_INTERVAL != check_propers_interval:
sickbeard.CHECK_PROPERS_INTERVAL = check_propers_interval
if sickbeard.DOWNLOAD_PROPERS:
proper_sch = sickbeard.properFinderScheduler
item = [(k, n, v) for (k, n, v) in proper_sch.action.search_intervals if k == check_propers_interval]
if item and None is proper_sch.start_time:
interval = datetime.timedelta(minutes=item[0][2])
run_in = proper_sch.lastRun + interval - datetime.datetime.now()
proper_sch.cycleTime = interval
run_at = 'imminent'
if datetime.timedelta() < run_in:
hours, remainder = divmod(run_in.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
run_at = u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else
'%dm, %ds' % (minutes, seconds))
logger.log(u'Change search PROPERS interval, next check %s' % run_at)
sickbeard.SEARCH_UNAIRED = bool(config.checkbox_to_value(search_unaired)) sickbeard.SEARCH_UNAIRED = bool(config.checkbox_to_value(search_unaired))
sickbeard.UNAIRED_RECENT_SEARCH_ONLY = bool(config.checkbox_to_value(unaired_recent_search_only, value_off=1, value_on=0)) sickbeard.UNAIRED_RECENT_SEARCH_ONLY = bool(config.checkbox_to_value(unaired_recent_search_only, value_off=1, value_on=0))
@ -5126,21 +5158,11 @@ class ConfigPostProcessing(Config):
results += ['Unable to create directory ' + os.path.normpath(tv_download_dir) + ', dir not changed.'] results += ['Unable to create directory ' + os.path.normpath(tv_download_dir) + ', dir not changed.']
new_val = config.checkbox_to_value(process_automatically) new_val = config.checkbox_to_value(process_automatically)
if new_val != sickbeard.PROCESS_AUTOMATICALLY: sickbeard.PROCESS_AUTOMATICALLY = new_val
if not sickbeard.PROCESS_AUTOMATICALLY and not sickbeard.autoPostProcesserScheduler.ident: sickbeard.autoPostProcesserScheduler.check_paused()
try:
sickbeard.autoPostProcesserScheduler.start()
except:
pass
sickbeard.PROCESS_AUTOMATICALLY = new_val
config.change_AUTOPOSTPROCESSER_FREQUENCY(autopostprocesser_frequency) config.change_AUTOPOSTPROCESSER_FREQUENCY(autopostprocesser_frequency)
if sickbeard.PROCESS_AUTOMATICALLY:
sickbeard.autoPostProcesserScheduler.silent = False
else:
sickbeard.autoPostProcesserScheduler.silent = True
if unpack: if unpack:
if self.isRarSupported() != 'not supported': if self.isRarSupported() != 'not supported':
sickbeard.UNPACK = config.checkbox_to_value(unpack) sickbeard.UNPACK = config.checkbox_to_value(unpack)

8
tests/db_tests.py

@ -20,6 +20,7 @@
from __future__ import print_function from __future__ import print_function
import unittest import unittest
import test_lib as test import test_lib as test
from sickbeard import cache_db, mainDB, failed_db
class DBBasicTests(test.SickbeardTestDBCase): class DBBasicTests(test.SickbeardTestDBCase):
@ -28,9 +29,16 @@ class DBBasicTests(test.SickbeardTestDBCase):
super(DBBasicTests, self).setUp() super(DBBasicTests, self).setUp()
self.db = test.db.DBConnection() self.db = test.db.DBConnection()
def is_testdb(self, version):
    """Return whether a db schema version number is in the test-db range.

    Test databases offset their schema versions by 100000 to keep them
    apart from production versions.

    :param version: schema version number to classify
    :return: True/False for integral input, None for any other type
    """
    # numbers.Integral matches both int and the py2-only `long` builtin;
    # naming `long` directly raises NameError on python 3
    import numbers
    if isinstance(version, numbers.Integral):
        return 100000 <= version
def test_select(self): def test_select(self):
self.db.select('SELECT * FROM tv_episodes WHERE showid = ? AND location != ""', [0000]) self.db.select('SELECT * FROM tv_episodes WHERE showid = ? AND location != ""', [0000])
self.db.close() self.db.close()
self.assertEqual(cache_db.TEST_BASE_VERSION is not None, self.is_testdb(cache_db.MAX_DB_VERSION))
self.assertEqual(mainDB.TEST_BASE_VERSION is not None, self.is_testdb(mainDB.MAX_DB_VERSION))
self.assertEqual(failed_db.TEST_BASE_VERSION is not None, self.is_testdb(failed_db.MAX_DB_VERSION))
if __name__ == '__main__': if __name__ == '__main__':
print('==================') print('==================')

Some files were not shown because too many files changed in this diff

Loading…
Cancel
Save