
Merge branch 'develop' into py3

pull/1219/head
Safihre, 7 years ago
commit 579e07adc3
83 changed files:

1. .gitignore (4)
2. ABOUT.txt (2)
3. INSTALL.txt (2)
4. ISSUES.txt (4)
5. README.md (1)
6. README.mkd (33)
7. SABHelper.py (1)
8. SABnzbd.py (47)
9. interfaces/Config/templates/config_notify.tmpl (2)
10. interfaces/Config/templates/config_rss.tmpl (24)
11. interfaces/Config/templates/staticcfg/css/style.css (27)
12. interfaces/Glitter/templates/static/stylesheets/colorschemes/Night.css (2)
13. interfaces/Glitter/templates/static/stylesheets/glitter.css (24)
14. interfaces/wizard/static/style.css (14)
15. interfaces/wizard/two.html (4)
16. osx/unrar/unrar (BIN)
17. po/main/SABnzbd.pot (340)
18. po/main/da.po (344)
19. po/main/de.po (360)
20. po/main/es.po (344)
21. po/main/fi.po (344)
22. po/main/fr.po (346)
23. po/main/he.po (448)
24. po/main/nb.po (344)
25. po/main/nl.po (344)
26. po/main/pl.po (344)
27. po/main/pt_BR.po (344)
28. po/main/ro.po (344)
29. po/main/ru.po (348)
30. po/main/sr.po (344)
31. po/main/sv.po (344)
32. po/main/zh_CN.po (344)
33. sabnzbd/__init__.py (21)
34. sabnzbd/api.py (77)
35. sabnzbd/assembler.py (15)
36. sabnzbd/config.py (4)
37. sabnzbd/constants.py (2)
38. sabnzbd/database.py (11)
39. sabnzbd/decoder.py (6)
40. sabnzbd/directunpacker.py (12)
41. sabnzbd/dirscanner.py (4)
42. sabnzbd/downloader.py (84)
43. sabnzbd/interface.py (55)
44. sabnzbd/misc.py (11)
45. sabnzbd/newsunpack.py (109)
46. sabnzbd/newswrapper.py (14)
47. sabnzbd/notifier.py (5)
48. sabnzbd/nzbqueue.py (25)
49. sabnzbd/nzbstuff.py (55)
50. sabnzbd/osxmenu.py (8)
51. sabnzbd/par2file.py (16)
52. sabnzbd/postproc.py (4)
53. sabnzbd/rss.py (44)
54. sabnzbd/sabtray.py (8)
55. sabnzbd/sabtraylinux.py (1)
56. sabnzbd/skintext.py (8)
57. sabnzbd/sorting.py (2)
58. sabnzbd/urlgrabber.py (4)
59. sabnzbd/utils/certgen.py (5)
60. sabnzbd/utils/checkdir.py (2)
61. sabnzbd/utils/diskspeed.py (9)
62. sabnzbd/utils/getperformance.py (13)
63. sabnzbd/utils/happyeyeballs.py (6)
64. sabnzbd/utils/kronos.py (1)
65. sabnzbd/utils/pystone.py (2)
66. sabnzbd/utils/servertests.py (4)
67. sabnzbd/utils/upload.py (1)
68. sabnzbd/zconfig.py (6)
69. scripts/Deobfuscate.py (125)
70. tests/conftest.py (71)
71. tests/requirements.txt (4)
72. tests/sabnzbd.basic.ini (7)
73. tests/test_api_pages.py (61)
74. tests/test_functional.py (295)
75. tests/test_nzb.py (58)
76. tests/testhelper.py (63)
77. tools/extract_pot.py (11)
78. util/apireg.py (38)
79. util/mailslot.py (1)
80. win/par2/multipar/par2j.exe (BIN)
81. win/par2/multipar/par2j64.exe (BIN)
82. win/unrar/UnRAR.exe (BIN)
83. win/unrar/x64/UnRAR.exe (BIN)

4
.gitignore

@ -16,12 +16,14 @@ SABnzbd*.exe
SABnzbd*.gz SABnzbd*.gz
SABnzbd*.dmg SABnzbd*.dmg
# WingIDE project files # WingIDE/PyCharm project files
*.wp[ru] *.wp[ru]
.idea
# Testing folders # Testing folders
.cache .cache
.xprocess .xprocess
tests/cache
# General junk # General junk
*.keep *.keep

2
ABOUT.txt

@ -1,5 +1,5 @@
******************************************* *******************************************
*** This is SABnzbd 2.3.4 *** *** This is SABnzbd 2.3.5 ***
******************************************* *******************************************
SABnzbd is an open-source cross-platform binary newsreader. SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically, It simplifies the process of downloading from Usenet dramatically,

2
INSTALL.txt

@ -1,4 +1,4 @@
SABnzbd 2.3.4 SABnzbd 2.3.5
------------------------------------------------------------------------------- -------------------------------------------------------------------------------
0) LICENSE 0) LICENSE

4
ISSUES.txt

@ -66,3 +66,7 @@
Config->Special->wait_for_dfolder to 1. Config->Special->wait_for_dfolder to 1.
SABnzbd will appear to hang until the drive is mounted. SABnzbd will appear to hang until the drive is mounted.
- If you experience speed-drops to KB/s when using a VPN, try setting the number of connections
to your servers to a total of 7. There is a CPU-usage reduction feature in SABnzbd that
gets confused by the way some VPN's handle the state of a connection. Below 8 connections
this feature is not active.

1
README.md

@ -21,7 +21,6 @@ Optional:
- `python-cryptography` (enables certificate generation and detection of encrypted RAR-files during download) - `python-cryptography` (enables certificate generation and detection of encrypted RAR-files during download)
- `python-dbus` (enable option to Shutdown/Restart/Standby PC on queue finish) - `python-dbus` (enable option to Shutdown/Restart/Standby PC on queue finish)
- `7zip` - `7zip`
- `unzip`
Your package manager should supply these. If not, we've got links in our more in-depth [installation guide](https://github.com/sabnzbd/sabnzbd/blob/master/INSTALL.txt). Your package manager should supply these. If not, we've got links in our more in-depth [installation guide](https://github.com/sabnzbd/sabnzbd/blob/master/INSTALL.txt).

33
README.mkd

@@ -1,18 +1,25 @@
-Release Notes - SABnzbd 2.3.4
+Release Notes - SABnzbd 2.3.5
 =========================================================
 
-## Changes since 2.3.3
-- Device hostname in hostname-verification always lowercased
-- Hostnames ending in ".local" are always accepted
-- URLGrabber would not always detect correct filename
-- URLGrabber would ignore some successful downloads
-- Always send NNTP QUIT after server-test
-- Added option "--disable-file-log" to disable file-based logging
-- Added CORS-header to API
-- Windows: Service compatibility with Windows 10 April update
-- Windows: Update Python to 2.7.15
-- Windows: Update 7zip to 18.05
-- macOS: Restore compatibility with El Capitan (10.11)
+## Bug fixes since 2.3.4
+- Reworked Deobfuscate.py script for much faster renaming
+- All scripts can now receive input through environment variables
+- Unable to set only one Indexer Category per category
+- Could falsely report not enough blocks are available for repair
+- Failures in un-(7)zip or file-joining would not fail the job
+- Direct Unpack could abort unnecessarily
+- Rare crash during file assembly
+- Server hostname is now used in warnings and logs
+- Improved disk performance measurement
+- Overall improvements in stability and reliability
+- Windows: MultiPar repair of joinable files could fail
+- Windows: Tray icon also shows remaining size when paused
+- Windows: Wizard would not default to installer language
+- Windows: Update MultiPar to 1.3.0.1
+- Windows and macOS: Update UnRar to 5.60
+
+Looking for help with SABnzbd development:
+https://www.reddit.com/r/usenet/918nxv/
 
 ## Upgrading from 2.2.x and older
 - Finish queue
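One item in the new notes above, "All scripts can now receive input through environment variables", is easiest to picture with a small user script. Below is a minimal sketch of a post-processing script that prefers the environment variables and falls back to the classic positional arguments; the SAB_* names and the argv positions are assumptions for illustration, not taken from this diff, so consult the SABnzbd user-script documentation for the authoritative list.

```python
#!/usr/bin/env python
import os
import sys

# Assumed variable names for illustration only.
complete_dir = os.environ.get('SAB_COMPLETE_DIR') or (sys.argv[1] if len(sys.argv) > 1 else '')
job_name = os.environ.get('SAB_FINAL_NAME') or (sys.argv[3] if len(sys.argv) > 3 else '')
category = os.environ.get('SAB_CAT', '')
pp_status = os.environ.get('SAB_PP_STATUS', '')

print('Job "%s" (category %s) finished with status %s in %s' % (job_name, category, pp_status, complete_dir))
sys.exit(0)
```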

1
SABHelper.py

@ -20,7 +20,6 @@ if sys.version_info[:2] < (2, 6) or sys.version_info[:2] >= (3, 0):
print("Sorry, requires Python 2.6 or 2.7.") print("Sorry, requires Python 2.6 or 2.7.")
sys.exit(1) sys.exit(1)
import os
import time import time
import subprocess import subprocess

47
SABnzbd.py

@ -476,7 +476,7 @@ def all_localhosts():
def check_resolve(host): def check_resolve(host):
""" Return True if 'host' resolves """ """ Return True if 'host' resolves """
try: try:
dummy = socket.getaddrinfo(host, None) socket.getaddrinfo(host, None)
except: except:
# Does not resolve # Does not resolve
return False return False
@ -572,7 +572,7 @@ def get_webhost(cherryhost, cherryport, https_port):
cherryhost = cherryhost.strip('[]') cherryhost = cherryhost.strip('[]')
else: else:
try: try:
info = socket.getaddrinfo(cherryhost, None) socket.getaddrinfo(cherryhost, None)
except: except:
cherryhost = cherryhost.strip('[]') cherryhost = cherryhost.strip('[]')
@ -633,12 +633,12 @@ def attach_server(host, port, cert=None, key=None, chain=None):
def is_sabnzbd_running(url): def is_sabnzbd_running(url):
""" Return True when there's already a SABnzbd instance running. """ """ Return True when there's already a SABnzbd instance running. """
try: try:
url = '%s&mode=version' % (url) url = '%s&mode=version' % url
# Do this without certificate verification, few installations will have that # Do this without certificate verification, few installations will have that
prev = sabnzbd.set_https_verification(False) prev = sabnzbd.set_https_verification(False)
ver = get_from_url(url) ver = get_from_url(url)
sabnzbd.set_https_verification(prev) sabnzbd.set_https_verification(prev)
return (ver and (re.search(r'\d+\.\d+\.', ver) or ver.strip() == sabnzbd.__version__)) return ver and (re.search(r'\d+\.\d+\.', ver) or ver.strip() == sabnzbd.__version__)
except: except:
return False return False
@ -702,7 +702,7 @@ def evaluate_inipath(path):
return path return path
def commandline_handler(frozen=True): def commandline_handler():
""" Split win32-service commands are true parameters """ Split win32-service commands are true parameters
Returns: Returns:
service, sab_opts, serv_opts, upload_nzbs service, sab_opts, serv_opts, upload_nzbs
@ -803,7 +803,6 @@ def main():
vista_plus = False vista_plus = False
win64 = False win64 = False
repair = 0 repair = 0
api_url = None
no_login = False no_login = False
sabnzbd.RESTART_ARGS = [sys.argv[0]] sabnzbd.RESTART_ARGS = [sys.argv[0]]
pid_path = None pid_path = None
@ -839,9 +838,9 @@ def main():
elif opt in ('-b', '--browser'): elif opt in ('-b', '--browser'):
try: try:
autobrowser = bool(int(arg)) autobrowser = bool(int(arg))
except: except ValueError:
autobrowser = True autobrowser = True
elif opt in ('--autorestarted', ): elif opt == '--autorestarted':
autorestarted = True autorestarted = True
elif opt in ('-c', '--clean'): elif opt in ('-c', '--clean'):
clean_up = True clean_up = True
@ -860,36 +859,36 @@ def main():
exit_sab(0) exit_sab(0)
elif opt in ('-p', '--pause'): elif opt in ('-p', '--pause'):
pause = True pause = True
elif opt in ('--https',): elif opt == '--https':
https_port = int(arg) https_port = int(arg)
sabnzbd.RESTART_ARGS.append(opt) sabnzbd.RESTART_ARGS.append(opt)
sabnzbd.RESTART_ARGS.append(arg) sabnzbd.RESTART_ARGS.append(arg)
elif opt in ('--repair',): elif opt == '--repair':
repair = 1 repair = 1
pause = True pause = True
elif opt in ('--repair-all',): elif opt == '--repair-all':
repair = 2 repair = 2
pause = True pause = True
elif opt in ('--log-all',): elif opt == '--log-all':
sabnzbd.LOG_ALL = True sabnzbd.LOG_ALL = True
elif opt in ('--disable-file-log'): elif opt == '--disable-file-log':
no_file_log = True no_file_log = True
elif opt in ('--no-login',): elif opt == '--no-login':
no_login = True no_login = True
elif opt in ('--pid',): elif opt == '--pid':
pid_path = arg pid_path = arg
sabnzbd.RESTART_ARGS.append(opt) sabnzbd.RESTART_ARGS.append(opt)
sabnzbd.RESTART_ARGS.append(arg) sabnzbd.RESTART_ARGS.append(arg)
elif opt in ('--pidfile',): elif opt == '--pidfile':
pid_file = arg pid_file = arg
sabnzbd.RESTART_ARGS.append(opt) sabnzbd.RESTART_ARGS.append(opt)
sabnzbd.RESTART_ARGS.append(arg) sabnzbd.RESTART_ARGS.append(arg)
elif opt in ('--new',): elif opt == '--new':
new_instance = True new_instance = True
elif opt in ('--console',): elif opt == '--console':
sabnzbd.RESTART_ARGS.append(opt) sabnzbd.RESTART_ARGS.append(opt)
osx_console = True osx_console = True
elif opt in ('--ipv6_hosting',): elif opt == '--ipv6_hosting':
ipv6_hosting = arg ipv6_hosting = arg
sabnzbd.MY_FULLNAME = os.path.normpath(os.path.abspath(sabnzbd.MY_FULLNAME)) sabnzbd.MY_FULLNAME = os.path.normpath(os.path.abspath(sabnzbd.MY_FULLNAME))
@ -983,13 +982,13 @@ def main():
if enable_https and https_port: if enable_https and https_port:
try: try:
portend.free(cherryhost, https_port, timeout=0.05) portend.free(cherryhost, https_port, timeout=0.05)
except IOError as error: except IOError:
Bail_Out(browserhost, cherryport) Bail_Out(browserhost, cherryport)
except: except:
Bail_Out(browserhost, cherryport, '49') Bail_Out(browserhost, cherryport, '49')
try: try:
portend.free(cherryhost, cherryport, timeout=0.05) portend.free(cherryhost, cherryport, timeout=0.05)
except IOError as error: except IOError:
Bail_Out(browserhost, cherryport) Bail_Out(browserhost, cherryport)
except: except:
Bail_Out(browserhost, cherryport, '49') Bail_Out(browserhost, cherryport, '49')
@ -1026,7 +1025,7 @@ def main():
else: else:
# In case HTTPS == HTTP port # In case HTTPS == HTTP port
cherryport = newport cherryport = newport
sabnzbd.cfg.port.set(newport) sabnzbd.cfg.cherryport.set(newport)
except: except:
# Something else wrong, probably badly specified host # Something else wrong, probably badly specified host
Bail_Out(browserhost, cherryport, '49') Bail_Out(browserhost, cherryport, '49')
@ -1208,8 +1207,6 @@ def main():
if autobrowser is not None: if autobrowser is not None:
sabnzbd.cfg.autobrowser.set(autobrowser) sabnzbd.cfg.autobrowser.set(autobrowser)
else:
autobrowser = sabnzbd.cfg.autobrowser()
if not sabnzbd.WIN_SERVICE and not getattr(sys, 'frozen', None) == 'macosx_app': if not sabnzbd.WIN_SERVICE and not getattr(sys, 'frozen', None) == 'macosx_app':
signal.signal(signal.SIGINT, sabnzbd.sig_handler) signal.signal(signal.SIGINT, sabnzbd.sig_handler)
@ -1574,7 +1571,7 @@ if sabnzbd.WIN32:
win32serviceutil.ServiceFramework.__init__(self, args) win32serviceutil.ServiceFramework.__init__(self, args)
self.hWaitStop = win32event.CreateEvent(None, 0, 0, None) self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
self.overlapped = pywintypes.OVERLAPPED() # @UndefinedVariable self.overlapped = pywintypes.OVERLAPPED()
self.overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None) self.overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None)
sabnzbd.WIN_SERVICE = self sabnzbd.WIN_SERVICE = self
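A side note on the long run of `opt in ('--xyz',)` to `opt == '--xyz'` rewrites above: besides reading better, the `==` form removes a classic pitfall that the old `elif opt in ('--disable-file-log')` line actually contained, because without the trailing comma the parentheses do not create a tuple and `in` silently degrades to substring matching. A quick stand-alone illustration (plain Python, not SABnzbd code):

```python
# Without the trailing comma this is just a parenthesized string,
# so "in" does substring matching instead of membership testing.
print('-d' in ('--disable-file-log'))    # True: '-d' is a substring of the string
print('-d' in ('--disable-file-log',))   # False: '-d' is not an element of the 1-tuple

# The rewritten form avoids the ambiguity altogether.
opt = '--disable-file-log'
print(opt == '--disable-file-log')       # True
```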

2
interfaces/Config/templates/config_notify.tmpl

@ -194,7 +194,7 @@
<fieldset> <fieldset>
<div class="field-pair"> <div class="field-pair">
<label class="config" for="nscript_script">$T('opt-nscript_script')</label> <label class="config" for="nscript_script">$T('opt-nscript_script')</label>
<select name="nscript_script"> <select name="nscript_script" id="nscript_script">
<!--#for $sc in $scripts#--> <!--#for $sc in $scripts#-->
<option value="$sc" <!--#if $nscript_script == $sc then 'selected="selected"' else ""#-->>$Tspec($sc)</option> <option value="$sc" <!--#if $nscript_script == $sc then 'selected="selected"' else ""#-->>$Tspec($sc)</option>
<!--#end for#--> <!--#end for#-->

24
interfaces/Config/templates/config_rss.tmpl

@ -390,9 +390,10 @@
<th class="no-sort">$T('link-download')</th> <th class="no-sort">$T('link-download')</th>
<th>$T('rss-filter')</th> <th>$T('rss-filter')</th>
<th>$T('size')</th> <th>$T('size')</th>
<th width="65%">$T('sort-title')</th> <th width="60%">$T('sort-title')</th>
<th>$T('category')</th> <th>$T('category')</th>
<th class="default-sort">$T('nzo-age')</th> <th class="default-sort">$T('nzo-age')</th>
<th>$T('source')</th>
</tr> </tr>
</thead> </thead>
<!--#for $job in $matched#--> <!--#for $job in $matched#-->
@ -411,6 +412,13 @@
<td>$job['title']</td> <td>$job['title']</td>
<td>$job['cat']</td> <td>$job['cat']</td>
<td data-sort-value="$job['age_ms']">$job['age']</td> <td data-sort-value="$job['age_ms']">$job['age']</td>
<td data-sort-value="$job['baselink']" title="$job['baselink']">
<!--#if not $job['infourl']#-->
<div class="favicon source-icon" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></div>
<!--#else#-->
<a class="favicon source-icon" href="$job['infourl']" target="_blank" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></a>
<!--#end if#-->
</td>
</tr> </tr>
<!--#end for#--> <!--#end for#-->
</table> </table>
@ -426,9 +434,10 @@
<th class="no-sort">$T('link-download')</th> <th class="no-sort">$T('link-download')</th>
<th>$T('rss-filter')</th> <th>$T('rss-filter')</th>
<th>$T('size')</th> <th>$T('size')</th>
<th width="65%">$T('sort-title')</th> <th width="60%">$T('sort-title')</th>
<th>$T('category')</th> <th>$T('category')</th>
<th class="default-sort">$T('nzo-age')</th> <th class="default-sort">$T('nzo-age')</th>
<th>$T('source')</th>
</tr> </tr>
</thead> </thead>
<!--#for $job in $unmatched#--> <!--#for $job in $unmatched#-->
@ -447,6 +456,13 @@
<td>$job['title']</td> <td>$job['title']</td>
<td>$job['cat']</td> <td>$job['cat']</td>
<td data-sort-value="$job['age_ms']">$job['age']</td> <td data-sort-value="$job['age_ms']">$job['age']</td>
<td data-sort-value="$job['baselink']" title="$job['baselink']">
<!--#if not $job['infourl']#-->
<div class="favicon source-icon" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></div>
<!--#else#-->
<a class="favicon source-icon" href="$job['infourl']" target="_blank" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></a>
<!--#end if#-->
</td>
</tr> </tr>
<!--#end for#--> <!--#end for#-->
</table> </table>
@ -476,8 +492,10 @@
<td>$job['title']</td> <td>$job['title']</td>
<td>$job['cat']</td> <td>$job['cat']</td>
<td data-sort-value="$job['baselink']" title="$job['baselink']"> <td data-sort-value="$job['baselink']" title="$job['baselink']">
<!--#if $job['baselink']#--> <!--#if not $job['infourl']#-->
<div class="favicon source-icon" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></div> <div class="favicon source-icon" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></div>
<!--#else#-->
<a class="favicon source-icon" href="$job['infourl']" target="_blank" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></a>
<!--#end if#--> <!--#end if#-->
</td> </td>
</tr> </tr>

27
interfaces/Config/templates/staticcfg/css/style.css

@ -4,16 +4,13 @@ body {
} }
#logo { #logo {
display: block; display: block;
margin: auto; margin: 3px auto auto;
margin-top: 3px;
} }
#content { #content {
color: #000; color: #000;
padding: 15px 20px 20px; padding: 65px 20px 20px;
font-size: 13px; font-size: 13px;
padding-top: 65px;
padding-bottom: 20px;
} }
.colmask { .colmask {
z-index: 20; z-index: 20;
@ -529,7 +526,7 @@ tr.separator {
} }
#filebrowser_modal .checkbox { #filebrowser_modal .checkbox {
float: left; float: left;
margin: 8px 5px 0x; margin: 8px 5px 0px;
} }
#filebrowser_modal .checkbox input { #filebrowser_modal .checkbox input {
margin-top: 1px; margin-top: 1px;
@ -576,6 +573,7 @@ h2.activeRSS {
float: left; float: left;
margin: 0 6px 0 2px; margin: 0 6px 0 2px;
text-align: center; text-align: center;
color: black !important;
} }
.source-icon span { .source-icon span {
top: -3px; top: -3px;
@ -600,8 +598,7 @@ h2.activeRSS {
padding-top: .4em; padding-top: .4em;
} }
#subscriptions .chk { #subscriptions .chk {
padding: 5px; padding: 8px 5px 5px;
padding-top: 8px;
vertical-align: middle; vertical-align: middle;
} }
#subscriptions .title { #subscriptions .title {
@ -773,7 +770,6 @@ input[type=radio] {
input[type="button"], input[type="button"],
input[type="submit"] { input[type="submit"] {
color: #333; color: #333;
background-color: #fff;
display:inline-block; display:inline-block;
padding:6px 12px; padding:6px 12px;
margin-bottom: 0; margin-bottom: 0;
@ -784,7 +780,7 @@ input[type="submit"] {
white-space:nowrap; white-space:nowrap;
vertical-align:middle; vertical-align:middle;
cursor:pointer; cursor:pointer;
background-image:none; background: #fff none;
border:1px solid #ccc; border:1px solid #ccc;
height: 34px; height: 34px;
} }
@ -1002,7 +998,7 @@ input[type="checkbox"] {
} }
.Servers .col2.server-disabled .label { .Servers .col2.server-disabled .label {
color: ##777 !important; color: #777 !important;
} }
.Servers .col2 .label:nth-child(2) { .Servers .col2 .label:nth-child(2) {
@ -1063,9 +1059,7 @@ input[type="checkbox"] {
.Servers .col2 label, .Servers .col2 label,
.Email .col2 label { .Email .col2 label {
margin: 0; margin: 2px 0 0 4px;
margin-left: 4px;
margin-top: 2px;
cursor: pointer; cursor: pointer;
} }
@ -1141,6 +1135,7 @@ input[type="checkbox"] {
} }
.value-and-select select { .value-and-select select {
min-width: 30px; min-width: 30px;
margin-top: 1px;
} }
.dotOne, .dotTwo, .dotThree { .dotOne, .dotTwo, .dotThree {
@ -1341,9 +1336,7 @@ input[type="checkbox"] {
} }
.desc { .desc {
margin: 0; margin: 2px 0 0 3px;
margin-left: 3px;
margin-top: 2px;
padding: 0 !important; padding: 0 !important;
} }

2
interfaces/Glitter/templates/static/stylesheets/colorschemes/Night.css

@ -76,7 +76,7 @@ legend,
background-color: #666; background-color: #666;
} }
.navbar-collapse.in .dropdown-menu, { .navbar-collapse.in .dropdown-menu {
border: none; border: none;
} }

24
interfaces/Glitter/templates/static/stylesheets/glitter.css

@ -105,10 +105,7 @@ h2 {
.navbar-logo { .navbar-logo {
vertical-align: middle; vertical-align: middle;
display: inline-block; display: inline-block;
margin-right: 12px; margin: 4px 12px -1px 15px;
margin-left: 15px;
margin-top: 4px;
margin-bottom: -1px;
} }
.navbar-logo svg { .navbar-logo svg {
@ -288,8 +285,7 @@ li.dropdown {
opacity: 0.9; opacity: 0.9;
color: black; color: black;
z-index: 2000; z-index: 2000;
padding: 1em; padding: 15% 1em 1em;
padding-top: 15%;
} }
.main-filedrop.in span { .main-filedrop.in span {
@ -721,8 +717,7 @@ td.delete .dropdown>a {
td.delete input[type="checkbox"], td.delete input[type="checkbox"],
.add-nzb-inputbox-options input[type="checkbox"]{ .add-nzb-inputbox-options input[type="checkbox"]{
margin: 0; margin: 0 0 -2px;
margin-bottom: -2px;
display: block; display: block;
} }
@ -1155,8 +1150,7 @@ tr.queue-item>td:first-child>a {
#history-options { #history-options {
margin-top: 0; margin-top: 0;
margin-left: 10px; margin-left: 10px;
padding: 0; padding: 0 0 0 4px;
padding-left: 4px;
} }
#history-options .hover-button { #history-options .hover-button {
@ -1536,8 +1530,7 @@ tr.queue-item>td:first-child>a {
.add-nzb-inputbox span { .add-nzb-inputbox span {
display: inline-block; display: inline-block;
margin: 8px 2px 0px 5px; margin: 8px 2px 0px -20px;
margin-left: -20px;
} }
.btn-file { .btn-file {
@ -1630,11 +1623,9 @@ input[name="nzbURL"] {
#modal-item-files .multioperations-selector { #modal-item-files .multioperations-selector {
clear: left; clear: left;
margin: 0;
float: left; float: left;
padding: 5px 8px; padding: 5px 8px;
margin-bottom: 5px; margin: 0 10px 5px 0;
margin-right: 10px;
border: 1px solid #cccccc; border: 1px solid #cccccc;
} }
@ -2045,9 +2036,8 @@ a:focus {
right: 17px; right: 17px;
display: inline-block; display: inline-block;
border-right: 6px solid transparent; border-right: 6px solid transparent;
border-bottom: 6px solid #ccc; border-bottom: 6px solid rgba(0, 0, 0, 0.2);
border-left: 6px solid transparent; border-left: 6px solid transparent;
border-bottom-color: rgba(0, 0, 0, 0.2);
content: ''; content: '';
} }

14
interfaces/wizard/static/style.css

@ -88,19 +88,12 @@ label {
float: right; float: right;
margin: 0; margin: 0;
} }
.sup {
vertical-align: sup !important;
}
.align-right { .align-right {
text-align: right; text-align: right;
} }
.align-center { .align-center {
text-align: center; text-align: center;
} }
.float-center {
float: center;
}
.unselected, .unselected,
.selected { .selected {
display: inline-block; display: inline-block;
@ -123,9 +116,6 @@ label {
.bigger { .bigger {
font-size: 14px; font-size: 14px;
} }
.padded {
padding: 12px;
}
.bigger input { .bigger input {
font-size: 16px; font-size: 16px;
} }
@ -135,9 +125,6 @@ label {
.full-width { .full-width {
width: 100%; width: 100%;
} }
.bigbutton {
font-size: 18px !important;
}
.correct { .correct {
border: 2px solid #00cc22; border: 2px solid #00cc22;
} }
@ -153,7 +140,6 @@ label {
.text-input-wide { .text-input-wide {
width: 230px; width: 230px;
} }
.text-input-thin,
#server-hidden-settings input[type="number"] { #server-hidden-settings input[type="number"] {
width: 100px; width: 100px;
} }

4
interfaces/wizard/two.html

@ -22,13 +22,13 @@
<p><strong>$T('opt-complete_dir')</strong></p> <p><strong>$T('opt-complete_dir')</strong></p>
<div class="quoteBlock"> <div class="quoteBlock">
$complete_dir $complete_dir
<a href="${access_url}config/folders" class="indented"><span class="glyphicon glyphicon-cog"></span></a> <a href="${access_url}/config/folders#complete_dir" class="indented"><span class="glyphicon glyphicon-cog"></span></a>
</div> </div>
<p><strong>$T('opt-download_dir')</strong></p> <p><strong>$T('opt-download_dir')</strong></p>
<div class="quoteBlock"> <div class="quoteBlock">
$download_dir $download_dir
<a href="${access_url}config/folders" class="indented"><span class="glyphicon glyphicon-cog"></span></a> <a href="${access_url}/config/folders#complete_dir" class="indented"><span class="glyphicon glyphicon-cog"></span></a>
</div> </div>
<hr/> <hr/>

BIN
osx/unrar/unrar

Binary file not shown.

340
po/main/SABnzbd.pot

File diff suppressed because it is too large

344
po/main/da.po

File diff suppressed because it is too large

360
po/main/de.po

File diff suppressed because it is too large

344
po/main/es.po

File diff suppressed because it is too large

344
po/main/fi.po

File diff suppressed because it is too large

346
po/main/fr.po

File diff suppressed because it is too large

448
po/main/he.po

File diff suppressed because it is too large

344
po/main/nb.po

File diff suppressed because it is too large

344
po/main/nl.po

File diff suppressed because it is too large

344
po/main/pl.po

File diff suppressed because it is too large

344
po/main/pt_BR.po

File diff suppressed because it is too large

344
po/main/ro.po

File diff suppressed because it is too large

348
po/main/ru.po

File diff suppressed because it is too large

344
po/main/sr.po

File diff suppressed because it is too large

344
po/main/sv.po

File diff suppressed because it is too large

344
po/main/zh_CN.po

File diff suppressed because it is too large

21
sabnzbd/__init__.py

@ -196,7 +196,7 @@ def sig_handler(signum=None, frame=None):
INIT_LOCK = Lock() INIT_LOCK = Lock()
def connect_db(thread_index=0): def get_db_connection(thread_index=0):
# Create a connection and store it in the current thread # Create a connection and store it in the current thread
if not (hasattr(cherrypy.thread_data, 'history_db') and cherrypy.thread_data.history_db): if not (hasattr(cherrypy.thread_data, 'history_db') and cherrypy.thread_data.history_db):
cherrypy.thread_data.history_db = sabnzbd.database.HistoryDB() cherrypy.thread_data.history_db = sabnzbd.database.HistoryDB()
@ -217,7 +217,7 @@ def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0
__SHUTTING_DOWN__ = False __SHUTTING_DOWN__ = False
# Set global database connection for Web-UI threads # Set global database connection for Web-UI threads
cherrypy.engine.subscribe('start_thread', connect_db) cherrypy.engine.subscribe('start_thread', get_db_connection)
# Paused? # Paused?
pause_downloader = pause_downloader or cfg.start_paused() pause_downloader = pause_downloader or cfg.start_paused()
@ -659,7 +659,7 @@ def add_nzbfile(nzbfile, pp=None, script=None, cat=None, priority=NORMAL_PRIORIT
keep = False keep = False
if not sabnzbd.WIN32: if not sabnzbd.WIN32:
# If windows client sends file to Unix server backslashed may # If windows client sends file to Unix server backslashes may
# be included, so convert these # be included, so convert these
filename = filename.replace('\\', '/') filename = filename.replace('\\', '/')
@ -988,12 +988,12 @@ def pp_to_opts(pp):
# Convert the pp to an int # Convert the pp to an int
pp = sabnzbd.interface.int_conv(pp) pp = sabnzbd.interface.int_conv(pp)
if pp == 0: if pp == 0:
return (False, False, False) return False, False, False
if pp == 1: if pp == 1:
return (True, False, False) return True, False, False
if pp == 2: if pp == 2:
return (True, True, False) return True, True, False
return (True, True, True) return True, True, True
def opts_to_pp(repair, unpack, delete): def opts_to_pp(repair, unpack, delete):
@ -1156,6 +1156,11 @@ def test_cert_checking():
""" Test quality of certificate validation """ Test quality of certificate validation
On systems with at least Python > 2.7.9 On systems with at least Python > 2.7.9
""" """
if sabnzbd.HAVE_SSL_CONTEXT:
# User disabled the test, assume proper SSL certificates
if not cfg.selftest_host():
return True
# Try a connection to our test-host
try: try:
import ssl import ssl
ctx = ssl.create_default_context() ctx = ssl.create_default_context()
@ -1165,7 +1170,7 @@ def test_cert_checking():
ssl_sock.connect((cfg.selftest_host(), 443)) ssl_sock.connect((cfg.selftest_host(), 443))
ssl_sock.close() ssl_sock.close()
return True return True
except (socket.gaierror, socket.timeout) as e: except (socket.gaierror, socket.timeout):
# Non-SSL related error. # Non-SSL related error.
# We now assume that certificates work instead of forcing # We now assume that certificates work instead of forcing
# lower quality just because some (temporary) internet problem # lower quality just because some (temporary) internet problem

77
sabnzbd/api.py

@ -29,6 +29,7 @@ import cherrypy
import locale import locale
from threading import Thread from threading import Thread
try: try:
import win32api import win32api
import win32file import win32file
@ -67,7 +68,6 @@ import sabnzbd.rss
import sabnzbd.emailer import sabnzbd.emailer
import sabnzbd.getipaddress as getipaddress import sabnzbd.getipaddress as getipaddress
############################################################################## ##############################################################################
# API error messages # API error messages
############################################################################## ##############################################################################
@ -82,7 +82,6 @@ _MSG_OUTPUT_FORMAT = 'Format not supported'
_MSG_NO_SUCH_CONFIG = 'Config item does not exist' _MSG_NO_SUCH_CONFIG = 'Config item does not exist'
_MSG_BAD_SERVER_PARMS = 'Incorrect server settings' _MSG_BAD_SERVER_PARMS = 'Incorrect server settings'
# For Windows: determine executable extensions # For Windows: determine executable extensions
if os.name == 'nt': if os.name == 'nt':
PATHEXT = os.environ.get('PATHEXT', '').lower().split(';') PATHEXT = os.environ.get('PATHEXT', '').lower().split(';')
@ -211,6 +210,8 @@ def _api_queue_pause(output, value, kwargs):
if value: if value:
items = value.split(',') items = value.split(',')
handled = NzbQueue.do.pause_multiple_nzo(items) handled = NzbQueue.do.pause_multiple_nzo(items)
else:
handled = False
return report(output, keyword='', data={'status': bool(handled), 'nzo_ids': handled}) return report(output, keyword='', data={'status': bool(handled), 'nzo_ids': handled})
@ -219,6 +220,8 @@ def _api_queue_resume(output, value, kwargs):
if value: if value:
items = value.split(',') items = value.split(',')
handled = NzbQueue.do.resume_multiple_nzo(items) handled = NzbQueue.do.resume_multiple_nzo(items)
else:
handled = False
return report(output, keyword='', data={'status': bool(handled), 'nzo_ids': handled}) return report(output, keyword='', data={'status': bool(handled), 'nzo_ids': handled})
@ -453,6 +456,7 @@ def _api_change_opts(name, output, kwargs):
""" API: accepts output, value(=nzo_id), value2(=pp) """ """ API: accepts output, value(=nzo_id), value2(=pp) """
value = kwargs.get('value') value = kwargs.get('value')
value2 = kwargs.get('value2') value2 = kwargs.get('value2')
result = 0
if value and value2 and value2.isdigit(): if value and value2 and value2.isdigit():
result = NzbQueue.do.change_opts(value, int(value2)) result = NzbQueue.do.change_opts(value, int(value2))
return report(output, keyword='status', data=bool(result > 0)) return report(output, keyword='status', data=bool(result > 0))
@ -474,7 +478,6 @@ def _api_history(name, output, kwargs):
failed_only = kwargs.get('failed_only') failed_only = kwargs.get('failed_only')
categories = kwargs.get('category') categories = kwargs.get('category')
# Do we need to send anything? # Do we need to send anything?
if last_history_update == sabnzbd.LAST_HISTORY_UPDATE: if last_history_update == sabnzbd.LAST_HISTORY_UPDATE:
return report(output, keyword='history', data=False) return report(output, keyword='history', data=False)
@ -489,7 +492,7 @@ def _api_history(name, output, kwargs):
special = value.lower() special = value.lower()
del_files = bool(int_conv(kwargs.get('del_files'))) del_files = bool(int_conv(kwargs.get('del_files')))
if special in ('all', 'failed', 'completed'): if special in ('all', 'failed', 'completed'):
history_db = sabnzbd.connect_db() history_db = sabnzbd.get_db_connection()
if special in ('all', 'failed'): if special in ('all', 'failed'):
if del_files: if del_files:
del_job_files(history_db.get_failed_paths(search)) del_job_files(history_db.get_failed_paths(search))
@ -715,12 +718,10 @@ def _api_reset_quota(name, output, kwargs):
def _api_test_email(name, output, kwargs): def _api_test_email(name, output, kwargs):
""" API: send a test email, return result """ """ API: send a test email, return result """
logging.info("Sending test email") logging.info("Sending test email")
pack = {} pack = {'download': ['action 1', 'action 2'], 'unpack': ['action 1', 'action 2']}
pack['download'] = ['action 1', 'action 2'] res = sabnzbd.emailer.endjob(u'I had a d\xe8ja vu', 'unknown', True,
pack['unpack'] = ['action 1', 'action 2'] os.path.normpath(os.path.join(cfg.complete_dir.get_path(), u'/unknown/I had a d\xe8ja vu')),
res = sabnzbd.emailer.endjob('I had a d\xe8ja vu', 'unknown', True, 123 * MEBI, None, pack, 'my_script', u'Line 1\nLine 2\nLine 3\nd\xe8ja vu\n', 0,
os.path.normpath(os.path.join(cfg.complete_dir.get_path(), '/unknown/I had a d\xe8ja vu')),
123 * MEBI, None, pack, 'my_script', 'Line 1\nLine 2\nLine 3\nd\xe8ja vu\n', 0,
test=kwargs) test=kwargs)
if res == 'Email succeeded': if res == 'Email succeeded':
res = None res = None
@ -793,7 +794,6 @@ def _api_browse(name, output, kwargs):
compact = kwargs.get('compact') compact = kwargs.get('compact')
if compact and compact == '1': if compact and compact == '1':
paths = []
name = platform_encode(kwargs.get('term', '')) name = platform_encode(kwargs.get('term', ''))
paths = [entry['path'] for entry in folders_at_path(os.path.dirname(name)) if 'path' in entry] paths = [entry['path'] for entry in folders_at_path(os.path.dirname(name)) if 'path' in entry]
return report(output, keyword='', data=paths) return report(output, keyword='', data=paths)
@ -883,9 +883,8 @@ def _api_config_undefined(output, kwargs):
def _api_server_stats(name, output, kwargs): def _api_server_stats(name, output, kwargs):
""" API: accepts output """ """ API: accepts output """
sum_t, sum_m, sum_w, sum_d = BPSMeter.do.get_sums() sum_t, sum_m, sum_w, sum_d = BPSMeter.do.get_sums()
stats = {'total': sum_t, 'month': sum_m, 'week': sum_w, 'day': sum_d} stats = {'total': sum_t, 'month': sum_m, 'week': sum_w, 'day': sum_d, 'servers': {}}
stats['servers'] = {}
for svr in config.get_servers(): for svr in config.get_servers():
t, m, w, d, daily = BPSMeter.do.amounts(svr) t, m, w, d, daily = BPSMeter.do.amounts(svr)
stats['servers'][svr] = {'total': t or 0, 'month': m or 0, 'week': w or 0, 'day': d or 0, 'daily': daily or {}} stats['servers'][svr] = {'total': t or 0, 'month': m or 0, 'week': w or 0, 'day': d or 0, 'daily': daily or {}}
@ -1131,6 +1130,24 @@ def handle_rss_api(output, kwargs):
feed.set_dict(kwargs) feed.set_dict(kwargs)
else: else:
config.ConfigRSS(name, kwargs) config.ConfigRSS(name, kwargs)
action = kwargs.get('filter_action')
if action in ('add', 'update'):
# Use the general function, but catch the redirect-raise
try:
kwargs['feed'] = name
sabnzbd.interface.ConfigRss('/').internal_upd_rss_filter(**kwargs)
except cherrypy.HTTPRedirect:
pass
elif action == 'delete':
# Use the general function, but catch the redirect-raise
try:
kwargs['feed'] = name
sabnzbd.interface.ConfigRss('/').internal_del_rss_filter(**kwargs)
except cherrypy.HTTPRedirect:
pass
return name return name
@ -1326,9 +1343,9 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None):
# Ensure compatibility of API status # Ensure compatibility of API status
if status == Status.DELETED or priority == TOP_PRIORITY: if status == Status.DELETED or priority == TOP_PRIORITY:
status = Status.DOWNLOADING status = Status.DOWNLOADING
slot['status'] = "%s" % (status) slot['status'] = "%s" % status
if (Downloader.do.paused or Downloader.do.postproc or is_propagating or \ if (Downloader.do.paused or Downloader.do.postproc or is_propagating or
status not in (Status.DOWNLOADING, Status.FETCHING, Status.QUEUED)) and priority != TOP_PRIORITY: status not in (Status.DOWNLOADING, Status.FETCHING, Status.QUEUED)) and priority != TOP_PRIORITY:
slot['timeleft'] = '0:00:00' slot['timeleft'] = '0:00:00'
slot['eta'] = 'unknown' slot['eta'] = 'unknown'
@ -1490,16 +1507,17 @@ def options_list(output):
}) })
def retry_job(job, new_nzb, password): def retry_job(job, new_nzb=None, password=None):
""" Re enter failed job in the download queue """ """ Re enter failed job in the download queue """
if job: if job:
history_db = sabnzbd.connect_db() history_db = sabnzbd.get_db_connection()
futuretype, url, pp, script, cat = history_db.get_other(job) futuretype, url, pp, script, cat = history_db.get_other(job)
if futuretype: if futuretype:
if pp == 'X': if pp == 'X':
pp = None pp = None
sabnzbd.add_url(url, pp, script, cat) nzo_id = sabnzbd.add_url(url, pp, script, cat)
history_db.remove_history(job) history_db.remove_history(job)
return nzo_id
else: else:
path = history_db.get_path(job) path = history_db.get_path(job)
if path: if path:
@ -1511,8 +1529,13 @@ def retry_job(job, new_nzb, password):
def retry_all_jobs(): def retry_all_jobs():
""" Re enter all failed jobs in the download queue """ """ Re enter all failed jobs in the download queue """
history_db = sabnzbd.connect_db() # Fetch all retryable folders from History
return NzbQueue.do.retry_all_jobs(history_db) items = sabnzbd.api.build_history()[0]
nzo_ids = []
for item in items:
if item['retry']:
nzo_ids.append(retry_job(item['nzo_id']))
return nzo_ids
def del_job_files(job_paths): def del_job_files(job_paths):
@ -1529,7 +1552,7 @@ def del_hist_job(job, del_files):
if path: if path:
PostProcessor.do.delete(job, del_files=del_files) PostProcessor.do.delete(job, del_files=del_files)
else: else:
history_db = sabnzbd.connect_db() history_db = sabnzbd.get_db_connection()
path = history_db.get_path(job) path = history_db.get_path(job)
history_db.remove_history(job) history_db.remove_history(job)
@ -1549,6 +1572,8 @@ def Tspec(txt):
_SKIN_CACHE = {} # Stores pre-translated acronyms _SKIN_CACHE = {} # Stores pre-translated acronyms
# This special is to be used in interface.py for template processing # This special is to be used in interface.py for template processing
# to be passed for the $T function: so { ..., 'T' : Ttemplate, ...} # to be passed for the $T function: so { ..., 'T' : Ttemplate, ...}
def Ttemplate(txt): def Ttemplate(txt):
@ -1668,7 +1693,6 @@ def build_queue_header(search=None, start=0, limit=0, output=None):
header['size'] = format_bytes(bytes) header['size'] = format_bytes(bytes)
header['noofslots_total'] = qnfo.q_fullsize header['noofslots_total'] = qnfo.q_fullsize
status = ''
if Downloader.do.paused or Downloader.do.postproc: if Downloader.do.paused or Downloader.do.postproc:
status = Status.PAUSED status = Status.PAUSED
elif bytespersec > 0: elif bytespersec > 0:
@ -1683,15 +1707,13 @@ def build_queue_header(search=None, start=0, limit=0, output=None):
# new eta format: 16:00 Fri 07 Feb # new eta format: 16:00 Fri 07 Feb
header['eta'] = datestart.strftime(time_format('%H:%M %a %d %b')) header['eta'] = datestart.strftime(time_format('%H:%M %a %d %b'))
except: except:
datestart = datetime.datetime.now()
header['eta'] = T('unknown') header['eta'] = T('unknown')
return (header, qnfo.list, bytespersec, qnfo.q_fullsize, qnfo.bytes_left_previous_page) return header, qnfo.list, bytespersec, qnfo.q_fullsize, qnfo.bytes_left_previous_page
def build_history(start=None, limit=None, verbose=False, verbose_list=None, search=None, failed_only=0, def build_history(start=None, limit=None, verbose=False, verbose_list=None, search=None, failed_only=0,
categories=None, output=None): categories=None, output=None):
if output: if output:
converter = unicoder converter = unicoder
else: else:
@ -1744,7 +1766,7 @@ def build_history(start=None, limit=None, verbose=False, verbose_list=None, sear
# Aquire the db instance # Aquire the db instance
try: try:
history_db = sabnzbd.connect_db() history_db = sabnzbd.get_db_connection()
close_db = False close_db = False
except: except:
# Required for repairs at startup because Cherrypy isn't active yet # Required for repairs at startup because Cherrypy isn't active yet
@ -1755,7 +1777,6 @@ def build_history(start=None, limit=None, verbose=False, verbose_list=None, sear
if not h_limit: if not h_limit:
items, fetched_items, total_items = history_db.fetch_history(h_start, 1, search, failed_only, categories) items, fetched_items, total_items = history_db.fetch_history(h_start, 1, search, failed_only, categories)
items = [] items = []
fetched_items = 0
else: else:
items, fetched_items, total_items = history_db.fetch_history(h_start, h_limit, search, failed_only, categories) items, fetched_items, total_items = history_db.fetch_history(h_start, h_limit, search, failed_only, categories)
@ -1840,7 +1861,7 @@ def build_history(start=None, limit=None, verbose=False, verbose_list=None, sear
if close_db: if close_db:
history_db.close() history_db.close()
return (items, fetched_items, total_items) return items, fetched_items, total_items
def get_active_history(queue=None, items=None): def get_active_history(queue=None, items=None):

15
sabnzbd/assembler.py

@ -78,11 +78,6 @@ class Assembler(Thread):
# Abort all direct unpackers, just to be sure # Abort all direct unpackers, just to be sure
sabnzbd.directunpacker.abort_all() sabnzbd.directunpacker.abort_all()
# Place job back in queue and wait 30 seconds to hope it gets resolved
self.process(job)
sleep(30)
continue
# Prepare filename # Prepare filename
nzo.verify_nzf_filename(nzf) nzo.verify_nzf_filename(nzf)
nzf.filename = sanitize_filename(nzf.filename) nzf.filename = sanitize_filename(nzf.filename)
@ -114,7 +109,7 @@ class Assembler(Thread):
nzf.remove_admin() nzf.remove_admin()
# Do rar-related processing # Do rar-related processing
if rarfile.is_rarfile(filepath): if is_rarfile(filepath):
# Encryption and unwanted extension detection # Encryption and unwanted extension detection
rar_encrypted, unwanted_file = check_encrypted_and_unwanted_files(nzo, filepath) rar_encrypted, unwanted_file = check_encrypted_and_unwanted_files(nzo, filepath)
if rar_encrypted: if rar_encrypted:
@ -243,7 +238,7 @@ def check_encrypted_and_unwanted_files(nzo, filepath):
return encrypted, unwanted return encrypted, unwanted
# Is it even a rarfile? # Is it even a rarfile?
if rarfile.is_rarfile(filepath): if is_rarfile(filepath):
# Open the rar # Open the rar
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
zf = rarfile.RarFile(filepath, all_names=True) zf = rarfile.RarFile(filepath, all_names=True)
@ -331,11 +326,11 @@ def nzo_filtered_by_rating(nzo):
nzo.rating_filtered = 1 nzo.rating_filtered = 1
reason = rating_filtered(rating, nzo.filename.lower(), True) reason = rating_filtered(rating, nzo.filename.lower(), True)
if reason is not None: if reason is not None:
return (2, reason) return 2, reason
reason = rating_filtered(rating, nzo.filename.lower(), False) reason = rating_filtered(rating, nzo.filename.lower(), False)
if reason is not None: if reason is not None:
return (1, reason) return 1, reason
return (0, "") return 0, ""
def rating_filtered(rating, filename, abort): def rating_filtered(rating, filename, abort):

4
sabnzbd/config.py

@ -897,7 +897,7 @@ def get_servers():
return {} return {}
def define_categories(force=False): def define_categories():
""" Define categories listed in the Setup file """ Define categories listed in the Setup file
return a list of ConfigCat instances return a list of ConfigCat instances
""" """
@ -991,7 +991,7 @@ def get_rss():
for feed_uri in feed.uri(): for feed_uri in feed.uri():
if new_feed_uris and not urlparse(feed_uri).scheme and urlparse(new_feed_uris[-1]).scheme: if new_feed_uris and not urlparse(feed_uri).scheme and urlparse(new_feed_uris[-1]).scheme:
# Current one has no scheme but previous one does, append to previous # Current one has no scheme but previous one does, append to previous
new_feed_uris[-1] += '%2C' + feed_uri new_feed_uris[-1] += ',' + feed_uri
have_new_uri = True have_new_uri = True
continue continue
# Add full working URL # Add full working URL

2
sabnzbd/constants.py

@ -123,7 +123,7 @@ year_match = r'[\W]([1|2]\d{3})([^\w]|$)' # Something '(YYYY)' or '.YYYY.' or '
sample_match = r'((^|[\W_])(sample|proof))' # something-sample or something-proof sample_match = r'((^|[\W_])(sample|proof))' # something-sample or something-proof
class Status(): class Status:
COMPLETED = 'Completed' # PP: Job is finished COMPLETED = 'Completed' # PP: Job is finished
CHECKING = 'Checking' # Q: Pre-check is running CHECKING = 'Checking' # Q: Pre-check is running
DOWNLOADING = 'Downloading' # Q: Normal downloading DOWNLOADING = 'Downloading' # Q: Normal downloading

11
sabnzbd/database.py

@ -315,7 +315,7 @@ class HistoryDB(object):
# Stage Name is separated by ::: stage lines by ; and stages by \r\n # Stage Name is separated by ::: stage lines by ; and stages by \r\n
items = [unpack_history_info(item) for item in items] items = [unpack_history_info(item) for item in items]
return (items, fetched_items, total_items) return items, fetched_items, total_items
def have_episode(self, series, season, episode): def have_episode(self, series, season, episode):
""" Check whether History contains this series episode """ """ Check whether History contains this series episode """
@ -376,7 +376,7 @@ class HistoryDB(object):
except AttributeError: except AttributeError:
pass pass
return (total, month, week) return total, month, week
def get_script_log(self, nzo_id): def get_script_log(self, nzo_id):
""" Return decompressed log file """ """ Return decompressed log file """
@ -401,7 +401,7 @@ class HistoryDB(object):
return name return name
def get_path(self, nzo_id): def get_path(self, nzo_id):
""" Return the `incomplete` path of the job `nzo_id` """ """ Return the `incomplete` path of the job `nzo_id` if it is still there """
t = (nzo_id,) t = (nzo_id,)
path = '' path = ''
if self.execute('SELECT path FROM history WHERE nzo_id=?', t): if self.execute('SELECT path FROM history WHERE nzo_id=?', t):
@ -409,7 +409,9 @@ class HistoryDB(object):
path = self.c.fetchone().get('path') path = self.c.fetchone().get('path')
except AttributeError: except AttributeError:
pass pass
if os.path.exists(path):
return path return path
return None
def get_other(self, nzo_id): def get_other(self, nzo_id):
""" Return additional data for job `nzo_id` """ """ Return additional data for job `nzo_id` """
@ -422,9 +424,10 @@ class HistoryDB(object):
pp = items.get('pp') pp = items.get('pp')
script = items.get('script') script = items.get('script')
cat = items.get('category') cat = items.get('category')
return dtype, url, pp, script, cat
except (AttributeError, IndexError): except (AttributeError, IndexError):
pass
return '', '', '', '', '' return '', '', '', '', ''
return dtype, url, pp, script, cat
def dict_factory(cursor, row): def dict_factory(cursor, row):

6
sabnzbd/decoder.py

@ -118,7 +118,7 @@ class Decoder(Thread):
nzf.article_count += 1 nzf.article_count += 1
found = True found = True
except IOError as e: except IOError:
logme = T('Decoding %s failed') % art_id logme = T('Decoding %s failed') % art_id
logging.warning(logme) logging.warning(logme)
logging.info("Traceback: ", exc_info=True) logging.info("Traceback: ", exc_info=True)
@ -127,7 +127,7 @@ class Decoder(Thread):
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article) sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article)
register = False register = False
except MemoryError as e: except MemoryError:
logme = T('Decoder failure: Out of memory') logme = T('Decoder failure: Out of memory')
logging.warning(logme) logging.warning(logme)
anfo = sabnzbd.articlecache.ArticleCache.do.cache_info() anfo = sabnzbd.articlecache.ArticleCache.do.cache_info()
@ -295,7 +295,7 @@ def yCheck(data):
except IndexError: except IndexError:
break break
return ((ybegin, ypart, yend), data) return (ybegin, ypart, yend), data
# Example: =ybegin part=1 line=128 size=123 name=-=DUMMY=- abc.par # Example: =ybegin part=1 line=128 size=123 name=-=DUMMY=- abc.par
YSPLIT_RE = re.compile(r'([a-zA-Z0-9]+)=') YSPLIT_RE = re.compile(r'([a-zA-Z0-9]+)=')

12
sabnzbd/directunpacker.py

@ -33,6 +33,7 @@ from sabnzbd.misc import int_conv, format_time_string
from sabnzbd.filesystem import clip_path, long_path, remove_all, globber, \ from sabnzbd.filesystem import clip_path, long_path, remove_all, globber, \
has_win_device, real_path has_win_device, real_path
from sabnzbd.encoding import TRANS, unicoder from sabnzbd.encoding import TRANS, unicoder
from sabnzbd.decorators import synchronized
from sabnzbd.newsunpack import build_command, EXTRACTFROM_RE, EXTRACTED_RE, rar_volumelist from sabnzbd.newsunpack import build_command, EXTRACTFROM_RE, EXTRACTED_RE, rar_volumelist
from sabnzbd.postproc import prepare_extraction_path from sabnzbd.postproc import prepare_extraction_path
from sabnzbd.utils.rarfile import RarFile from sabnzbd.utils.rarfile import RarFile
@ -102,6 +103,7 @@ class DirectUnpacker(threading.Thread):
if none_counter > found_counter: if none_counter > found_counter:
self.total_volumes = {} self.total_volumes = {}
@synchronized(START_STOP_LOCK)
def add(self, nzf): def add(self, nzf):
""" Add jobs and start instance of DirectUnpack """ """ Add jobs and start instance of DirectUnpack """
if not cfg.direct_unpack_tested(): if not cfg.direct_unpack_tested():
@ -162,9 +164,9 @@ class DirectUnpacker(threading.Thread):
break break
# Error? Let PP-handle it # Error? Let PP-handle it
if linebuf.endswith(('ERROR: ', 'Cannot create', 'in the encrypted file', 'CRC failed', \ if linebuf.endswith(('ERROR: ', 'Cannot create', 'in the encrypted file', 'CRC failed',
'checksum failed', 'You need to start extraction from a previous volume', \ 'checksum failed', 'You need to start extraction from a previous volume',
'password is incorrect', 'Write error', 'checksum error', \ 'password is incorrect', 'Write error', 'checksum error',
'start extraction from a previous volume')): 'start extraction from a previous volume')):
logging.info('Error in DirectUnpack of %s', self.cur_setname) logging.info('Error in DirectUnpack of %s', self.cur_setname)
self.abort() self.abort()
@ -301,6 +303,7 @@ class DirectUnpacker(threading.Thread):
with self.next_file_lock: with self.next_file_lock:
self.next_file_lock.wait() self.next_file_lock.wait()
@synchronized(START_STOP_LOCK)
def create_unrar_instance(self): def create_unrar_instance(self):
""" Start the unrar instance using the user's options """ """ Start the unrar instance using the user's options """
# Generate extraction path and save for post-proc # Generate extraction path and save for post-proc
@ -358,9 +361,10 @@ class DirectUnpacker(threading.Thread):
# Doing the first # Doing the first
logging.info('DirectUnpacked volume %s for %s', self.cur_volume, self.cur_setname) logging.info('DirectUnpacked volume %s for %s', self.cur_volume, self.cur_setname)
@synchronized(START_STOP_LOCK)
def abort(self): def abort(self):
""" Abort running instance and delete generated files """ """ Abort running instance and delete generated files """
if not self.killed: if not self.killed and self.cur_setname:
logging.info('Aborting DirectUnpack for %s', self.cur_setname) logging.info('Aborting DirectUnpack for %s', self.cur_setname)
self.killed = True self.killed = True
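The three `@synchronized(START_STOP_LOCK)` decorators added above make `add()`, `create_unrar_instance()` and `abort()` mutually exclusive, so an unrar instance cannot be started while another thread is tearing one down. For readers unfamiliar with the idiom, here is a generic sketch of such a decorator; it shows the general pattern, not necessarily the exact code in `sabnzbd.decorators`.

```python
import threading
from functools import wraps

def synchronized(lock):
    """Decorator factory: run the wrapped callable while holding `lock`."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            with lock:
                return func(*args, **kwargs)
        return wrapper
    return decorator

# RLock so a synchronized method may call another synchronized method
# on the same thread without deadlocking.
START_STOP_LOCK = threading.RLock()

@synchronized(START_STOP_LOCK)
def abort_instance():
    print("only one thread can be starting or aborting at a time")
```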

4
sabnzbd/dirscanner.py

@ -76,7 +76,7 @@ def is_archive(path):
except: except:
logging.info(T('Cannot read %s'), path, exc_info=True) logging.info(T('Cannot read %s'), path, exc_info=True)
return -1, None, '' return -1, None, ''
elif rarfile.is_rarfile(path): elif misc.is_rarfile(path):
try: try:
# Set path to tool to open it # Set path to tool to open it
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
@ -233,7 +233,7 @@ def ProcessSingleFile(filename, path, pp=None, script=None, cat=None, catdir=Non
# # Empty, but correct file # # Empty, but correct file
# return -1, nzo_ids # return -1, nzo_ids
except: except:
if data.find("<nzb") >= 0 and data.find("</nzb") < 0: if data.find("<nzb") >= 0 > data.find("</nzb"):
# Looks like an incomplete file, retry # Looks like an incomplete file, retry
return -2, nzo_ids return -2, nzo_ids
else: else:
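The rewritten check in ProcessSingleFile above leans on Python's comparison chaining: `data.find("<nzb") >= 0 > data.find("</nzb")` evaluates as two comparisons joined by `and`, i.e. "<nzb was found and </nzb was not". A quick stand-alone confirmation of the equivalence:

```python
data = '<nzb ... an incomplete upload without a closing tag'

# Chained form from the diff: a >= 0 > b  means  (a >= 0) and (0 > b)
chained = data.find('<nzb') >= 0 > data.find('</nzb')
explicit = data.find('<nzb') >= 0 and data.find('</nzb') < 0
print(chained, explicit)  # True True
```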

84
sabnzbd/downloader.py

@ -305,13 +305,13 @@ class Downloader(Thread):
self.force_disconnect = True self.force_disconnect = True
def limit_speed(self, value): def limit_speed(self, value):
''' Set the actual download speed in Bytes/sec """ Set the actual download speed in Bytes/sec
When 'value' ends with a '%' sign or is within 1-100, it is interpreted as a pecentage of the maximum bandwidth When 'value' ends with a '%' sign or is within 1-100, it is interpreted as a pecentage of the maximum bandwidth
When no '%' is found, it is interpreted as an absolute speed (including KMGT notation). When no '%' is found, it is interpreted as an absolute speed (including KMGT notation).
''' """
if value: if value:
mx = cfg.bandwidth_max.get_int() mx = cfg.bandwidth_max.get_int()
if '%' in str(value) or (from_units(value) > 0 and from_units(value) < 101): if '%' in str(value) or (0 < from_units(value) < 101):
limit = value.strip(' %') limit = value.strip(' %')
self.bandwidth_perc = from_units(limit) self.bandwidth_perc = from_units(limit)
if mx: if mx:
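The `limit_speed()` docstring above describes two input formats. As a rough, self-contained restatement of that rule (a toy helper for illustration only; the real `from_units()` converter handles more cases):

```python
def interpret_speed_limit(value, bandwidth_max):
    """Toy version of the rule: '%' or a bare 1-100 is a share of
    bandwidth_max; anything else is an absolute speed with K/M/G/T units."""
    units = {'K': 1024, 'M': 1024 ** 2, 'G': 1024 ** 3, 'T': 1024 ** 4}
    value = str(value).strip()
    if value.endswith('%') or (value.isdigit() and 0 < int(value) < 101):
        return bandwidth_max * int(value.rstrip('%')) // 100
    if value and value[-1].upper() in units:
        return int(float(value[:-1]) * units[value[-1].upper()])
    return int(float(value or 0))

print(interpret_speed_limit('25%', 100 * 1024 ** 2))  # a quarter of the configured maximum
print(interpret_speed_limit('2M', 100 * 1024 ** 2))   # 2 MiB/s, absolute
```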
@ -369,24 +369,24 @@ class Downloader(Thread):
# Was it resolving problem? # Was it resolving problem?
if server.info is False: if server.info is False:
# Warn about resolving issues # Warn about resolving issues
errormsg = T('Cannot connect to server %s [%s]') % (server.id, T('Server name does not resolve')) errormsg = T('Cannot connect to server %s [%s]') % (server.host, T('Server name does not resolve'))
if server.errormsg != errormsg: if server.errormsg != errormsg:
server.errormsg = errormsg server.errormsg = errormsg
logging.warning(errormsg) logging.warning(errormsg)
logging.warning(T('Server %s will be ignored for %s minutes'), server.id, _PENALTY_TIMEOUT) logging.warning(T('Server %s will be ignored for %s minutes'), server.host, _PENALTY_TIMEOUT)
# Not fully the same as the code below for optional servers # Not fully the same as the code below for optional servers
server.bad_cons = 0 server.bad_cons = 0
server.active = False server.active = False
self.plan_server(server.id, _PENALTY_TIMEOUT) self.plan_server(server, _PENALTY_TIMEOUT)
# Optional and active server had too many problems. # Optional and active server had too many problems.
# Disable it now and send a re-enable plan to the scheduler # Disable it now and send a re-enable plan to the scheduler
if server.optional and server.active and (server.bad_cons / server.threads) > 3: if server.optional and server.active and (server.bad_cons / server.threads) > 3:
server.bad_cons = 0 server.bad_cons = 0
server.active = False server.active = False
logging.warning(T('Server %s will be ignored for %s minutes'), server.id, _PENALTY_TIMEOUT) logging.warning(T('Server %s will be ignored for %s minutes'), server.host, _PENALTY_TIMEOUT)
self.plan_server(server.id, _PENALTY_TIMEOUT) self.plan_server(server, _PENALTY_TIMEOUT)
# Remove all connections to server # Remove all connections to server
for nw in server.idle_threads + server.busy_threads: for nw in server.idle_threads + server.busy_threads:
@ -472,7 +472,7 @@ class Downloader(Thread):
if server.retention and article.nzf.nzo.avg_stamp < time.time() - server.retention: if server.retention and article.nzf.nzo.avg_stamp < time.time() - server.retention:
# Let's get rid of all the articles for this server at once # Let's get rid of all the articles for this server at once
logging.info('Job %s too old for %s, moving on', article.nzf.nzo.work_name, server.id) logging.info('Job %s too old for %s, moving on', article.nzf.nzo.work_name, server.host)
while article: while article:
self.decode(article, None, None) self.decode(article, None, None)
article = article.nzf.nzo.get_article(server, self.servers) article = article.nzf.nzo.get_article(server, self.servers)
@ -487,10 +487,10 @@ class Downloader(Thread):
self.__request_article(nw) self.__request_article(nw)
else: else:
try: try:
logging.info("%s@%s: Initiating connection", nw.thrdnum, server.id) logging.info("%s@%s: Initiating connection", nw.thrdnum, server.host)
nw.init_connect(self.write_fds) nw.init_connect(self.write_fds)
except: except:
logging.error(T('Failed to initialize %s@%s with reason: %s'), nw.thrdnum, server.id, sys.exc_info()[1]) logging.error(T('Failed to initialize %s@%s with reason: %s'), nw.thrdnum, server.host, sys.exc_info()[1])
self.__reset_nw(nw, "failed to initialize") self.__reset_nw(nw, "failed to initialize")
# Exit-point # Exit-point
@ -618,7 +618,7 @@ class Downloader(Thread):
try: try:
nw.finish_connect(nw.status_code) nw.finish_connect(nw.status_code)
if sabnzbd.LOG_ALL: if sabnzbd.LOG_ALL:
logging.debug("%s@%s last message -> %s", nw.thrdnum, nw.server.id, nntp_to_msg(nw.data)) logging.debug("%s@%s last message -> %s", nw.thrdnum, nw.server.host, nntp_to_msg(nw.data))
nw.clear_data() nw.clear_data()
except NNTPPermanentError as error: except NNTPPermanentError as error:
# Handle login problems # Handle login problems
@ -635,9 +635,9 @@ class Downloader(Thread):
errormsg = T('Too many connections to server %s') % display_msg errormsg = T('Too many connections to server %s') % display_msg
if server.errormsg != errormsg: if server.errormsg != errormsg:
server.errormsg = errormsg server.errormsg = errormsg
logging.warning(T('Too many connections to server %s'), server.id) logging.warning(T('Too many connections to server %s'), server.host)
self.__reset_nw(nw, None, warn=False, destroy=True, quit=True) self.__reset_nw(nw, None, warn=False, destroy=True, quit=True)
self.plan_server(server.id, _PENALTY_TOOMANY) self.plan_server(server, _PENALTY_TOOMANY)
server.threads -= 1 server.threads -= 1
elif ecode in (502, 481, 482) and clues_too_many_ip(msg): elif ecode in (502, 481, 482) and clues_too_many_ip(msg):
# Account sharing? # Account sharing?
@ -645,7 +645,7 @@ class Downloader(Thread):
errormsg = T('Probable account sharing') + display_msg errormsg = T('Probable account sharing') + display_msg
if server.errormsg != errormsg: if server.errormsg != errormsg:
server.errormsg = errormsg server.errormsg = errormsg
name = ' (%s)' % server.id name = ' (%s)' % server.host
logging.warning(T('Probable account sharing') + name) logging.warning(T('Probable account sharing') + name)
penalty = _PENALTY_SHARE penalty = _PENALTY_SHARE
block = True block = True
@ -655,7 +655,7 @@ class Downloader(Thread):
errormsg = T('Failed login for server %s') % display_msg errormsg = T('Failed login for server %s') % display_msg
if server.errormsg != errormsg: if server.errormsg != errormsg:
server.errormsg = errormsg server.errormsg = errormsg
logging.error(T('Failed login for server %s'), server.id) logging.error(T('Failed login for server %s'), server.host)
penalty = _PENALTY_PERM penalty = _PENALTY_PERM
block = True block = True
elif ecode in (502, 482): elif ecode in (502, 482):
@ -664,7 +664,7 @@ class Downloader(Thread):
errormsg = T('Cannot connect to server %s [%s]') % ('', display_msg) errormsg = T('Cannot connect to server %s [%s]') % ('', display_msg)
if server.errormsg != errormsg: if server.errormsg != errormsg:
server.errormsg = errormsg server.errormsg = errormsg
logging.warning(T('Cannot connect to server %s [%s]'), server.id, msg) logging.warning(T('Cannot connect to server %s [%s]'), server.host, msg)
if clues_pay(msg): if clues_pay(msg):
penalty = _PENALTY_PERM penalty = _PENALTY_PERM
else: else:
@ -673,7 +673,7 @@ class Downloader(Thread):
elif ecode == 400: elif ecode == 400:
# Temp connection problem? # Temp connection problem?
if server.active: if server.active:
logging.debug('Unspecified error 400 from server %s', server.id) logging.debug('Unspecified error 400 from server %s', server.host)
penalty = _PENALTY_VERYSHORT penalty = _PENALTY_VERYSHORT
block = True block = True
else: else:
@ -682,25 +682,25 @@ class Downloader(Thread):
errormsg = T('Cannot connect to server %s [%s]') % ('', display_msg) errormsg = T('Cannot connect to server %s [%s]') % ('', display_msg)
if server.errormsg != errormsg: if server.errormsg != errormsg:
server.errormsg = errormsg server.errormsg = errormsg
logging.warning(T('Cannot connect to server %s [%s]'), server.id, msg) logging.warning(T('Cannot connect to server %s [%s]'), server.host, msg)
penalty = _PENALTY_UNKNOWN penalty = _PENALTY_UNKNOWN
block = True block = True
if block or (penalty and server.optional): if block or (penalty and server.optional):
if server.active: if server.active:
server.active = False server.active = False
if penalty and (block or server.optional): if penalty and (block or server.optional):
self.plan_server(server.id, penalty) self.plan_server(server, penalty)
sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists() sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists()
self.__reset_nw(nw, None, warn=False, quit=True) self.__reset_nw(nw, None, warn=False, quit=True)
continue continue
except: except:
logging.error(T('Connecting %s@%s failed, message=%s'), logging.error(T('Connecting %s@%s failed, message=%s'),
nw.thrdnum, nw.server.id, nntp_to_msg(nw.data)) nw.thrdnum, nw.server.host, nntp_to_msg(nw.data))
# No reset-warning needed, above logging is sufficient # No reset-warning needed, above logging is sufficient
self.__reset_nw(nw, None, warn=False) self.__reset_nw(nw, None, warn=False)
if nw.connected: if nw.connected:
logging.info("Connecting %s@%s finished", nw.thrdnum, nw.server.id) logging.info("Connecting %s@%s finished", nw.thrdnum, nw.server.host)
self.__request_article(nw) self.__request_article(nw)
elif nw.status_code == 223: elif nw.status_code == 223:
@ -717,27 +717,27 @@ class Downloader(Thread):
elif nw.status_code in (411, 423, 430): elif nw.status_code in (411, 423, 430):
done = True done = True
logging.debug('Thread %s@%s: Article %s missing (error=%s)', logging.debug('Thread %s@%s: Article %s missing (error=%s)',
nw.thrdnum, nw.server.id, article.article, nw.status_code) nw.thrdnum, nw.server.host, article.article, nw.status_code)
nw.clear_data() nw.clear_data()
elif nw.status_code == 480: elif nw.status_code == 480:
if server.active: if server.active:
server.active = False server.active = False
server.errormsg = T('Server %s requires user/password') % '' server.errormsg = T('Server %s requires user/password') % ''
self.plan_server(server.id, 0) self.plan_server(server, 0)
sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists() sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists()
msg = T('Server %s requires user/password') % nw.server.id msg = T('Server %s requires user/password') % nw.server.host
self.__reset_nw(nw, msg, quit=True) self.__reset_nw(nw, msg, quit=True)
elif nw.status_code == 500: elif nw.status_code == 500:
if nzo.precheck: if nzo.precheck:
# Assume "STAT" command is not supported # Assume "STAT" command is not supported
server.have_stat = False server.have_stat = False
logging.debug('Server %s does not support STAT', server.id) logging.debug('Server %s does not support STAT', server.host)
else: else:
# Assume "BODY" command is not supported # Assume "BODY" command is not supported
server.have_body = False server.have_body = False
logging.debug('Server %s does not support BODY', server.id) logging.debug('Server %s does not support BODY', server.host)
nw.clear_data() nw.clear_data()
self.__request_article(nw) self.__request_article(nw)
@ -745,7 +745,7 @@ class Downloader(Thread):
server.bad_cons = 0  # Successful data, clear "bad" counter server.bad_cons = 0  # Successful data, clear "bad" counter
server.errormsg = server.warning = '' server.errormsg = server.warning = ''
if sabnzbd.LOG_ALL: if sabnzbd.LOG_ALL:
logging.debug('Thread %s@%s: %s done', nw.thrdnum, server.id, article.article) logging.debug('Thread %s@%s: %s done', nw.thrdnum, server.host, article.article)
self.decode(article, nw.lines, nw.data) self.decode(article, nw.lines, nw.data)
nw.soft_reset() nw.soft_reset()
@ -777,9 +777,9 @@ class Downloader(Thread):
if warn and errormsg: if warn and errormsg:
server.warning = errormsg server.warning = errormsg
logging.info('Thread %s@%s: ' + errormsg, nw.thrdnum, server.id) logging.info('Thread %s@%s: ' + errormsg, nw.thrdnum, server.host)
elif errormsg: elif errormsg:
logging.info('Thread %s@%s: ' + errormsg, nw.thrdnum, server.id) logging.info('Thread %s@%s: ' + errormsg, nw.thrdnum, server.host)
if nw in server.busy_threads: if nw in server.busy_threads:
server.busy_threads.remove(nw) server.busy_threads.remove(nw)
@ -813,11 +813,11 @@ class Downloader(Thread):
if nw.server.send_group and nzo.group != nw.group: if nw.server.send_group and nzo.group != nw.group:
group = nzo.group group = nzo.group
if sabnzbd.LOG_ALL: if sabnzbd.LOG_ALL:
logging.debug('Thread %s@%s: GROUP <%s>', nw.thrdnum, nw.server.id, group) logging.debug('Thread %s@%s: GROUP <%s>', nw.thrdnum, nw.server.host, group)
nw.send_group(group) nw.send_group(group)
else: else:
if sabnzbd.LOG_ALL: if sabnzbd.LOG_ALL:
logging.debug('Thread %s@%s: BODY %s', nw.thrdnum, nw.server.id, nw.article.article) logging.debug('Thread %s@%s: BODY %s', nw.thrdnum, nw.server.host, nw.article.article)
nw.body(nzo.precheck) nw.body(nzo.precheck)
fileno = nw.nntp.sock.fileno() fileno = nw.nntp.sock.fileno()
@ -839,24 +839,24 @@ class Downloader(Thread):
# Each server has a dictionary entry, consisting of a list of timestamps. # Each server has a dictionary entry, consisting of a list of timestamps.
@synchronized(TIMER_LOCK) @synchronized(TIMER_LOCK)
def plan_server(self, server_id, interval): def plan_server(self, server, interval):
""" Plan the restart of a server in 'interval' minutes """ """ Plan the restart of a server in 'interval' minutes """
if cfg.no_penalties() and interval > _PENALTY_SHORT: if cfg.no_penalties() and interval > _PENALTY_SHORT:
# Overwrite in case of no_penalties # Overwrite in case of no_penalties
interval = _PENALTY_SHORT interval = _PENALTY_SHORT
logging.debug('Set planned server resume %s in %s mins', server_id, interval) logging.debug('Set planned server resume %s in %s mins', server.host, interval)
if server_id not in self._timers: if server.id not in self._timers:
self._timers[server_id] = [] self._timers[server.id] = []
stamp = time.time() + 60.0 * interval stamp = time.time() + 60.0 * interval
self._timers[server_id].append(stamp) self._timers[server.id].append(stamp)
if interval: if interval:
sabnzbd.scheduler.plan_server(self.trigger_server, [server_id, stamp], interval) sabnzbd.scheduler.plan_server(self.trigger_server, [server.id, stamp], interval)
@synchronized(TIMER_LOCK) @synchronized(TIMER_LOCK)
def trigger_server(self, server_id, timestamp): def trigger_server(self, server_id, timestamp):
""" Called by scheduler, start server if timer still valid """ """ Called by scheduler, start server if timer still valid """
logging.debug('Trigger planned server resume %s', server_id) logging.debug('Trigger planned server resume for server-id %s', server_id)
if server_id in self._timers: if server_id in self._timers:
if timestamp in self._timers[server_id]: if timestamp in self._timers[server_id]:
del self._timers[server_id] del self._timers[server_id]
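plan_server now receives the full server object so the log lines can show server.host, but the timer table stays keyed by server.id, and the scheduler still calls trigger_server with only the id and the timestamp. A condensed sketch of that timer bookkeeping (schedule_cb is a hypothetical scheduler callback, not SABnzbd's API):

import time

class ServerTimers:
    """ Track planned resume times per server-id, as in Downloader.plan_server """
    def __init__(self):
        self._timers = {}

    def plan_server(self, server, interval_minutes, schedule_cb):
        stamp = time.time() + 60.0 * interval_minutes
        self._timers.setdefault(server.id, []).append(stamp)
        if interval_minutes:
            # The scheduler only needs the id and the stamp, not the object
            schedule_cb(self.trigger_server, server.id, stamp, interval_minutes)

    def trigger_server(self, server_id, stamp):
        """ Resume only if the timer was not cancelled in the meantime """
        if stamp in self._timers.get(server_id, []):
            del self._timers[server_id]
            return True   # caller re-activates the server
        return False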
@ -873,7 +873,7 @@ class Downloader(Thread):
# Activate server if it was inactive # Activate server if it was inactive
for server in self.servers: for server in self.servers:
if server.id == server_id and not server.active: if server.id == server_id and not server.active:
logging.debug('Unblock server %s', server_id) logging.debug('Unblock server %s', server.host)
self.init_server(server_id, server_id) self.init_server(server_id, server_id)
break break
@ -890,7 +890,7 @@ class Downloader(Thread):
kicked = [] kicked = []
for server_id in self._timers.keys(): for server_id in self._timers.keys():
if not [stamp for stamp in self._timers[server_id] if stamp >= now]: if not [stamp for stamp in self._timers[server_id] if stamp >= now]:
logging.debug('Forcing re-evaluation of server %s', server_id) logging.debug('Forcing re-evaluation of server-id %s', server_id)
del self._timers[server_id] del self._timers[server_id]
self.init_server(server_id, server_id) self.init_server(server_id, server_id)
kicked.append(server_id) kicked.append(server_id)
@ -898,7 +898,7 @@ class Downloader(Thread):
for server in self.servers: for server in self.servers:
if server.id not in self._timers: if server.id not in self._timers:
if server.id not in kicked and not server.active: if server.id not in kicked and not server.active:
logging.debug('Forcing activation of server %s', server.id) logging.debug('Forcing activation of server %s', server.host)
self.init_server(server.id, server.id) self.init_server(server.id, server.id)
def update_server(self, oldserver, newserver): def update_server(self, oldserver, newserver):

55
sabnzbd/interface.py

@ -44,7 +44,6 @@ from sabnzbd.misc import to_units, from_units, time_format, calc_age, \
cat_to_opts, int_conv, get_base_url, probablyipv4 cat_to_opts, int_conv, get_base_url, probablyipv4
from sabnzbd.filesystem import real_path, long_path, globber, globber_full, remove_all, clip_path, same_file from sabnzbd.filesystem import real_path, long_path, globber, globber_full, remove_all, clip_path, same_file
from sabnzbd.newswrapper import GetServerParms from sabnzbd.newswrapper import GetServerParms
from sabnzbd.rating import Rating
from sabnzbd.bpsmeter import BPSMeter from sabnzbd.bpsmeter import BPSMeter
from sabnzbd.encoding import TRANS, xml_name, LatinFilter, unicoder, special_fixer, \ from sabnzbd.encoding import TRANS, xml_name, LatinFilter, unicoder, special_fixer, \
platform_encode platform_encode
@ -59,7 +58,7 @@ from sabnzbd.decoder import SABYENC_ENABLED
from sabnzbd.utils.diskspeed import diskspeedmeasure from sabnzbd.utils.diskspeed import diskspeedmeasure
from sabnzbd.utils.getperformance import getpystone from sabnzbd.utils.getperformance import getpystone
from sabnzbd.constants import NORMAL_PRIORITY, MEBI, DEF_SKIN_COLORS, DEF_STDINTF, \ from sabnzbd.constants import NORMAL_PRIORITY, MEBI, DEF_SKIN_COLORS, \
DEF_STDCONFIG, DEF_MAIN_TMPL, DEFAULT_PRIORITY DEF_STDCONFIG, DEF_MAIN_TMPL, DEFAULT_PRIORITY
from sabnzbd.lang import list_languages from sabnzbd.lang import list_languages
@ -237,8 +236,7 @@ def check_login():
def get_users(): def get_users():
users = {} users = {cfg.username(): cfg.password()}
users[cfg.username()] = cfg.password()
return users return users
@ -501,7 +499,7 @@ class MainPage(object):
# No session key check, due to fixed URLs # No session key check, due to fixed URLs
name = kwargs.get('name') name = kwargs.get('name')
if name: if name:
history_db = sabnzbd.connect_db() history_db = sabnzbd.get_db_connection()
return ShowString(history_db.get_name(name), history_db.get_script_log(name)) return ShowString(history_db.get_name(name), history_db.get_script_log(name))
else: else:
raise Raiser(self.__root) raise Raiser(self.__root)
@ -775,7 +773,7 @@ class NzoPage(object):
# /SABnzbd_nzo_xxxxx/files # /SABnzbd_nzo_xxxxx/files
elif 'files' in args: elif 'files' in args:
info = self.nzo_files(info, pnfo_list, nzo_id) info = self.nzo_files(info, nzo_id)
# /SABnzbd_nzo_xxxxx/save # /SABnzbd_nzo_xxxxx/save
elif 'save' in args: elif 'save' in args:
@ -785,7 +783,7 @@ class NzoPage(object):
# /SABnzbd_nzo_xxxxx/ # /SABnzbd_nzo_xxxxx/
else: else:
info = self.nzo_details(info, pnfo_list, nzo_id) info = self.nzo_details(info, pnfo_list, nzo_id)
info = self.nzo_files(info, pnfo_list, nzo_id) info = self.nzo_files(info, nzo_id)
template = Template(file=os.path.join(sabnzbd.WEB_DIR, 'nzo.tmpl'), template = Template(file=os.path.join(sabnzbd.WEB_DIR, 'nzo.tmpl'),
filter=FILTER, searchList=[info], compilerSettings=DIRECTIVES) filter=FILTER, searchList=[info], compilerSettings=DIRECTIVES)
@ -837,7 +835,7 @@ class NzoPage(object):
return info return info
def nzo_files(self, info, pnfo_list, nzo_id): def nzo_files(self, info, nzo_id):
active = [] active = []
nzo = NzbQueue.do.get_nzo(nzo_id) nzo = NzbQueue.do.get_nzo(nzo_id)
if nzo: if nzo:
@ -1108,7 +1106,7 @@ class HistoryPage(object):
@secured_expose(check_session_key=True) @secured_expose(check_session_key=True)
def purge(self, **kwargs): def purge(self, **kwargs):
history_db = sabnzbd.connect_db() history_db = sabnzbd.get_db_connection()
history_db.remove_history() history_db.remove_history()
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@ -1135,7 +1133,7 @@ class HistoryPage(object):
@secured_expose(check_session_key=True) @secured_expose(check_session_key=True)
def purge_failed(self, **kwargs): def purge_failed(self, **kwargs):
del_files = bool(int_conv(kwargs.get('del_files'))) del_files = bool(int_conv(kwargs.get('del_files')))
history_db = sabnzbd.connect_db() history_db = sabnzbd.get_db_connection()
if del_files: if del_files:
del_job_files(history_db.get_failed_paths()) del_job_files(history_db.get_failed_paths())
history_db.remove_failed() history_db.remove_failed()
@ -1175,7 +1173,7 @@ class HistoryPage(object):
# No session key check, due to fixed URLs # No session key check, due to fixed URLs
name = kwargs.get('name') name = kwargs.get('name')
if name: if name:
history_db = sabnzbd.connect_db() history_db = sabnzbd.get_db_connection()
return ShowString(history_db.get_name(name), history_db.get_script_log(name)) return ShowString(history_db.get_name(name), history_db.get_script_log(name))
else: else:
raise Raiser(self.__root) raise Raiser(self.__root)
@ -1881,9 +1879,13 @@ class ConfigRss(object):
@secured_expose(check_session_key=True, check_configlock=True) @secured_expose(check_session_key=True, check_configlock=True)
def upd_rss_filter(self, **kwargs): def upd_rss_filter(self, **kwargs):
""" Wrapper, so we can call from api.py """
self.internal_upd_rss_filter(**kwargs)
def internal_upd_rss_filter(self, **kwargs):
""" Save updated filter definition """ """ Save updated filter definition """
try: try:
cfg = config.get_rss()[kwargs.get('feed')] feed_cfg = config.get_rss()[kwargs.get('feed')]
except KeyError: except KeyError:
raise rssRaiser(self.__root, kwargs) raise rssRaiser(self.__root, kwargs)
@ -1897,14 +1899,14 @@ class ConfigRss(object):
enabled = kwargs.get('enabled', '0') enabled = kwargs.get('enabled', '0')
if filt: if filt:
cfg.filters.update(int(kwargs.get('index', 0)), (cat, pp, script, kwargs.get('filter_type'), feed_cfg.filters.update(int(kwargs.get('index', 0)), (cat, pp, script, kwargs.get('filter_type'),
platform_encode(filt), prio, enabled)) platform_encode(filt), prio, enabled))
# Move filter if requested # Move filter if requested
index = int_conv(kwargs.get('index', '')) index = int_conv(kwargs.get('index', ''))
new_index = kwargs.get('new_index', '') new_index = kwargs.get('new_index', '')
if new_index and int_conv(new_index) != index: if new_index and int_conv(new_index) != index:
cfg.filters.move(int(index), int_conv(new_index)) feed_cfg.filters.move(int(index), int_conv(new_index))
config.save_config() config.save_config()
self.__evaluate = False self.__evaluate = False
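Splitting upd_rss_filter into a thin exposed wrapper plus internal_upd_rss_filter lets api.py call the logic directly without going through the @secured_expose session-key check. A generic sketch of that pattern (secured_expose here is a simplified stand-in, not SABnzbd's real decorator):

def secured_expose(check_session_key=False):
    """ Simplified stand-in: verify the session key, then call the handler """
    def wrap(func):
        def wrapper(self, **kwargs):
            if check_session_key and kwargs.get('session') != 'secret':
                raise PermissionError('Missing session key')
            return func(self, **kwargs)
        return wrapper
    return wrap

class ConfigRss:
    @secured_expose(check_session_key=True)
    def upd_rss_filter(self, **kwargs):
        """ Web handler: the session key is checked by the decorator """
        return self.internal_upd_rss_filter(**kwargs)

    def internal_upd_rss_filter(self, **kwargs):
        """ API code calls this directly; it already validated the apikey """
        return 'updated filter %s of feed %s' % (kwargs.get('index'), kwargs.get('feed'))

page = ConfigRss()
print(page.internal_upd_rss_filter(feed='example', index=0))  # no session key needed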
@ -1922,13 +1924,17 @@ class ConfigRss(object):
@secured_expose(check_session_key=True, check_configlock=True) @secured_expose(check_session_key=True, check_configlock=True)
def del_rss_filter(self, **kwargs): def del_rss_filter(self, **kwargs):
""" Wrapper, so we can call from api.py """
self.internal_del_rss_filter(**kwargs)
def internal_del_rss_filter(self, **kwargs):
""" Remove one RSS filter """ """ Remove one RSS filter """
try: try:
cfg = config.get_rss()[kwargs.get('feed')] feed_cfg = config.get_rss()[kwargs.get('feed')]
except KeyError: except KeyError:
raise rssRaiser(self.__root, kwargs) raise rssRaiser(self.__root, kwargs)
cfg.filters.delete(int(kwargs.get('index', 0))) feed_cfg.filters.delete(int(kwargs.get('index', 0)))
config.save_config() config.save_config()
self.__evaluate = False self.__evaluate = False
self.__show_eval_button = True self.__show_eval_button = True
@ -2043,15 +2049,8 @@ class ConfigScheduling(object):
@secured_expose(check_configlock=True) @secured_expose(check_configlock=True)
def index(self, **kwargs): def index(self, **kwargs):
def get_days(): def get_days():
days = {} days = {"*": T('Daily'), "1": T('Monday'), "2": T('Tuesday'), "3": T('Wednesday'), "4": T('Thursday'),
days["*"] = T('Daily') "5": T('Friday'), "6": T('Saturday'), "7": T('Sunday')}
days["1"] = T('Monday')
days["2"] = T('Tuesday')
days["3"] = T('Wednesday')
days["4"] = T('Thursday')
days["5"] = T('Friday')
days["6"] = T('Saturday')
days["7"] = T('Sunday')
return days return days
conf = build_header(sabnzbd.WEB_DIR_CONFIG) conf = build_header(sabnzbd.WEB_DIR_CONFIG)
@ -2080,7 +2079,7 @@ class ConfigScheduling(object):
if '%' not in value and from_units(value) < 1.0: if '%' not in value and from_units(value) < 1.0:
value = T('off') # : "Off" value for speedlimit in scheduler value = T('off') # : "Off" value for speedlimit in scheduler
else: else:
if '%' not in value and int_conv(value) > 1 and int_conv(value) < 101: if '%' not in value and 1 < int_conv(value) < 101:
value += '%' value += '%'
value = value.upper() value = value.upper()
if action in actions: if action in actions:
@ -2135,7 +2134,6 @@ class ConfigScheduling(object):
@secured_expose(check_session_key=True, check_configlock=True) @secured_expose(check_session_key=True, check_configlock=True)
def addSchedule(self, **kwargs): def addSchedule(self, **kwargs):
servers = config.get_servers() servers = config.get_servers()
categories = list_cats(False)
minute = kwargs.get('minute') minute = kwargs.get('minute')
hour = kwargs.get('hour') hour = kwargs.get('hour')
days_of_week = ''.join([str(x) for x in kwargs.get('daysofweek', '')]) days_of_week = ''.join([str(x) for x in kwargs.get('daysofweek', '')])
@ -2534,6 +2532,7 @@ def GetRssLog(feed):
# These fields could be empty # These fields could be empty
job['cat'] = job.get('cat', '') job['cat'] = job.get('cat', '')
job['size'] = job.get('size', '') job['size'] = job.get('size', '')
job['infourl'] = job.get('infourl', '')
# Auto-fetched jobs didn't have these fields set # Auto-fetched jobs didn't have these fields set
if job.get('url'): if job.get('url'):
@ -2769,7 +2768,7 @@ def rss_history(url, limit=50, search=None):
stageLine.append("<tr><dt>Stage %s</dt>" % stage['name']) stageLine.append("<tr><dt>Stage %s</dt>" % stage['name'])
actions = [] actions = []
for action in stage['actions']: for action in stage['actions']:
actions.append("<dd>%s</dd>" % (action)) actions.append("<dd>%s</dd>" % action)
actions.sort() actions.sort()
actions.reverse() actions.reverse()
for act in actions: for act in actions:

11
sabnzbd/misc.py

@ -41,7 +41,7 @@ from sabnzbd.constants import DEFAULT_PRIORITY, FUTURE_Q_FOLDER, JOB_ADMIN, \
GIGI, MEBI, DEF_ARTICLE_CACHE_DEFAULT, DEF_ARTICLE_CACHE_MAX GIGI, MEBI, DEF_ARTICLE_CACHE_DEFAULT, DEF_ARTICLE_CACHE_MAX
import sabnzbd.config as config import sabnzbd.config as config
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
from sabnzbd.encoding import ubtou, unicoder, special_fixer, gUTF import sabnzbd.utils.rarfile as rarfile
TAB_UNITS = ('', 'K', 'M', 'G', 'T', 'P') TAB_UNITS = ('', 'K', 'M', 'G', 'T', 'P')
RE_UNITS = re.compile(r'(\d+\.*\d*)\s*([KMGTP]{0,1})', re.I) RE_UNITS = re.compile(r'(\d+\.*\d*)\s*([KMGTP]{0,1})', re.I)
@ -426,7 +426,6 @@ def to_units(val, spaces=0, postfix=''):
Show single decimal for M and higher Show single decimal for M and higher
""" """
dec_limit = 1 dec_limit = 1
decimals = 0
if val < 0: if val < 0:
sign = '-' sign = '-'
else: else:
@ -503,7 +502,7 @@ def split_host(srv):
port = int(port) port = int(port)
except: except:
port = None port = None
return (host, port) return host, port
def get_cache_limit(): def get_cache_limit():
@ -573,8 +572,8 @@ def memory_usage():
except: except:
logging.debug('Error retrieving memory usage') logging.debug('Error retrieving memory usage')
logging.info("Traceback: ", exc_info=True) logging.info("Traceback: ", exc_info=True)
else:
return ''
try: try:
_PAGE_SIZE = os.sysconf("SC_PAGE_SIZE") _PAGE_SIZE = os.sysconf("SC_PAGE_SIZE")
except: except:
@ -643,7 +642,7 @@ def create_https_certificates(ssl_cert, ssl_key):
try: try:
from sabnzbd.utils.certgen import generate_key, generate_local_cert from sabnzbd.utils.certgen import generate_key, generate_local_cert
private_key = generate_key(key_size=2048, output_file=ssl_key) private_key = generate_key(key_size=2048, output_file=ssl_key)
generate_local_cert(private_key, days_valid=3560, output_file=ssl_cert, LN='SABnzbd', ON='SABnzbd', CN='localhost') generate_local_cert(private_key, days_valid=3560, output_file=ssl_cert, LN=u'SABnzbd', ON=u'SABnzbd')
logging.info('Self-signed certificates generated successfully') logging.info('Self-signed certificates generated successfully')
except: except:
logging.error(T('Error creating SSL key and certificate')) logging.error(T('Error creating SSL key and certificate'))
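The updated call no longer passes CN='localhost'; what generate_local_cert does with LN/ON lives in sabnzbd/utils/certgen.py and is not shown in this hunk. For orientation only, a self-signed certificate of this kind can be produced with the cryptography package roughly as follows; the names and the 3560-day lifetime come from the call above, the rest is illustrative:

from datetime import datetime, timedelta
from cryptography import x509
from cryptography.x509.oid import NameOID
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
name = x509.Name([x509.NameAttribute(NameOID.ORGANIZATION_NAME, u'SABnzbd'),
                  x509.NameAttribute(NameOID.COMMON_NAME, u'SABnzbd')])
cert = (x509.CertificateBuilder()
        .subject_name(name).issuer_name(name)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.utcnow())
        .not_valid_after(datetime.utcnow() + timedelta(days=3560))
        .sign(key, hashes.SHA256(), default_backend()))

with open('server.key', 'wb') as f:
    f.write(key.private_bytes(serialization.Encoding.PEM,
                              serialization.PrivateFormat.TraditionalOpenSSL,
                              serialization.NoEncryption()))
with open('server.cert', 'wb') as f:
    f.write(cert.public_bytes(serialization.Encoding.PEM))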

109
sabnzbd/newsunpack.py

@ -159,14 +159,7 @@ def external_processing(extern_proc, nzo, complete_dir, nicename, status):
'download_time': nzo.nzo_info.get('download_time', ''), 'download_time': nzo.nzo_info.get('download_time', ''),
'avg_bps': int(nzo.avg_bps_total / nzo.avg_bps_freq) if nzo.avg_bps_freq else 0, 'avg_bps': int(nzo.avg_bps_total / nzo.avg_bps_freq) if nzo.avg_bps_freq else 0,
'age': calc_age(nzo.avg_date), 'age': calc_age(nzo.avg_date),
'orig_nzb_gz': clip_path(nzb_paths[0]) if nzb_paths else '', 'orig_nzb_gz': clip_path(nzb_paths[0]) if nzb_paths else ''}
'program_dir': sabnzbd.DIR_PROG,
'par2_command': sabnzbd.newsunpack.PAR2_COMMAND,
'multipar_command': sabnzbd.newsunpack.MULTIPAR_COMMAND,
'rar_command': sabnzbd.newsunpack.RAR_COMMAND,
'zip_command': sabnzbd.newsunpack.ZIP_COMMAND,
'7zip_command': sabnzbd.newsunpack.SEVEN_COMMAND,
'version': sabnzbd.__version__}
try: try:
stup, need_shell, command, creationflags = build_command(command) stup, need_shell, command, creationflags = build_command(command)
@ -182,7 +175,7 @@ def external_processing(extern_proc, nzo, complete_dir, nicename, status):
proc = p.stdout proc = p.stdout
if p.stdin: if p.stdin:
p.stdin.close() p.stdin.close()
line = ''
lines = [] lines = []
while 1: while 1:
line = proc.readline() line = proc.readline()
@ -243,11 +236,10 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi
else: else:
xjoinables, xzips, xrars, xsevens, xts = build_filelists(workdir, workdir_complete, check_both=dele) xjoinables, xzips, xrars, xsevens, xts = build_filelists(workdir, workdir_complete, check_both=dele)
rerun = False
force_rerun = False force_rerun = False
newfiles = [] newfiles = []
error = None error = None
new_joins = new_rars = new_zips = new_ts = None new_joins = new_ts = None
if cfg.enable_filejoin(): if cfg.enable_filejoin():
new_joins = [jn for jn in xjoinables if jn not in joinables] new_joins = [jn for jn in xjoinables if jn not in joinables]
@ -443,16 +435,17 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
if seq_error: if seq_error:
msg = T('Incomplete sequence of joinable files') msg = T('Incomplete sequence of joinable files')
nzo.fail_msg = T('File join of %s failed') % unicoder(joinable_set) nzo.fail_msg = T('File join of %s failed') % unicoder(os.path.basename(joinable_set))
nzo.set_unpack_info('Filejoin', T('[%s] Error "%s" while joining files') % (unicoder(joinable_set), msg)) nzo.set_unpack_info('Filejoin', T('[%s] Error "%s" while joining files') % (unicoder(os.path.basename(joinable_set)), msg))
logging.error(T('Error "%s" while running file_join on %s'), msg, nzo.final_name) logging.error(T('Error "%s" while running file_join on %s'), msg, nzo.final_name)
return True, []
else: else:
msg = T('[%s] Joined %s files') % (unicoder(joinable_set), size) msg = T('[%s] Joined %s files') % (unicoder(joinable_set), size)
nzo.set_unpack_info('Filejoin', msg) nzo.set_unpack_info('Filejoin', msg)
except: except:
msg = sys.exc_info()[1] msg = sys.exc_info()[1]
nzo.fail_msg = T('File join of %s failed') % msg nzo.fail_msg = T('File join of %s failed') % msg
nzo.set_unpack_info('Filejoin', T('[%s] Error "%s" while joining files') % (unicoder(joinable_set), msg)) nzo.set_unpack_info('Filejoin', T('[%s] Error "%s" while joining files') % (unicoder(os.path.basename(joinable_set)), msg))
logging.error(T('Error "%s" while running file_join on %s'), msg, nzo.final_name) logging.error(T('Error "%s" while running file_join on %s'), msg, nzo.final_name)
return True, [] return True, []
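file_join concatenates a numbered .001/.002/... set into its base file and flags a broken sequence; the change above only shortens the set name shown to the user with os.path.basename. A self-contained sketch of the joining step itself (assuming the parts are numbered from .001 without gaps):

import re
import shutil

SPLIT_RE = re.compile(r'\.(\d{3})$')

def join_files(joinables, destination):
    """ Join 'movie.avi.001', 'movie.avi.002', ... into 'movie.avi'.
        Assumes every name ends in a three-digit extension.
        Returns (ok, message); a gap in the numbering counts as an error. """
    parts = sorted(joinables, key=lambda p: int(SPLIT_RE.search(p).group(1)))
    numbers = [int(SPLIT_RE.search(p).group(1)) for p in parts]
    if numbers != list(range(1, len(parts) + 1)):
        return False, 'Incomplete sequence of joinable files'
    with open(destination, 'wb') as out:
        for part in parts:
            with open(part, 'rb') as chunk:
                shutil.copyfileobj(chunk, out)
    return True, 'Joined %d files' % len(parts)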
@ -467,9 +460,7 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars):
When 'delete' is set, originals will be deleted. When 'delete' is set, originals will be deleted.
When 'one_folder' is set, all files will be in a single folder When 'one_folder' is set, all files will be in a single folder
""" """
extracted_files = [] newfiles = extracted_files = []
success = False
rar_sets = {} rar_sets = {}
for rar in rars: for rar in rars:
rar_set = os.path.splitext(os.path.basename(rar))[0] rar_set = os.path.splitext(os.path.basename(rar))[0]
@ -510,6 +501,8 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars):
if wait_count > 60: if wait_count > 60:
# We abort after 2 minutes of no changes # We abort after 2 minutes of no changes
nzo.direct_unpacker.abort() nzo.direct_unpacker.abort()
else:
wait_count = 0
last_stats = nzo.direct_unpacker.get_formatted_stats() last_stats = nzo.direct_unpacker.get_formatted_stats()
# Did we already direct-unpack it? Not when recursive-unpacking # Did we already direct-unpack it? Not when recursive-unpacking
@ -656,7 +649,7 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction
stup, need_shell, command, creationflags = build_command(command, flatten_command=True) stup, need_shell, command, creationflags = build_command(command, flatten_command=True)
# Get list of all the volumes part of this set # Get list of all the volumes part of this set
logging.debug("Analyzing rar file ... %s found", rarfile.is_rarfile(rarfile_path)) logging.debug("Analyzing rar file ... %s found", is_rarfile(rarfile_path))
logging.debug("Running unrar %s", command) logging.debug("Running unrar %s", command)
p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, p = Popen(command, shell=need_shell, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
@ -994,7 +987,9 @@ def seven_extract(nzo, sevenset, extensions, extraction_path, one_folder, delete
nzo.fail_msg = '' nzo.fail_msg = ''
if fail == 2: if fail == 2:
msg = '%s (%s)' % (T('Unpacking failed, archive requires a password'), os.path.basename(sevenset)) msg = '%s (%s)' % (T('Unpacking failed, archive requires a password'), os.path.basename(sevenset))
if fail > 0:
nzo.fail_msg = msg nzo.fail_msg = msg
nzo.status = Status.FAILED
logging.error(msg) logging.error(msg)
return fail, new_files, msg return fail, new_files, msg
@ -1028,7 +1023,7 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete
parm = '-tzip' if sevenset.lower().endswith('.zip') else '-t7z' parm = '-tzip' if sevenset.lower().endswith('.zip') else '-t7z'
if not os.path.exists(name): if not os.path.exists(name):
return 1, T('7ZIP set "%s" is incomplete, cannot unpack') % unicoder(sevenset) return 1, T('7ZIP set "%s" is incomplete, cannot unpack') % os.path.basename(sevenset)
# For file-bookkeeping # For file-bookkeeping
orig_dir_content = recursive_listdir(extraction_path) orig_dir_content = recursive_listdir(extraction_path)
@ -1047,6 +1042,15 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete
ret = p.wait() ret = p.wait()
# Return-code for CRC and Password is the same
if ret == 2 and 'ERROR: CRC Failed' in output:
# We can output a more general error
ret = 1
msg = T('ERROR: CRC failed in "%s"') % os.path.basename(sevenset)
else:
# Default message
msg = T('Could not unpack %s') % os.path.basename(sevenset)
# What's new? # What's new?
new_files = list(set(orig_dir_content + recursive_listdir(extraction_path))) new_files = list(set(orig_dir_content + recursive_listdir(extraction_path)))
@ -1065,7 +1069,7 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete
logging.warning(T('Deleting %s failed!'), sevenset) logging.warning(T('Deleting %s failed!'), sevenset)
# Always return an error message, even when return code is 0 # Always return an error message, even when return code is 0
return ret, new_files, T('Could not unpack %s') % unicoder(sevenset) return ret, new_files, msg
############################################################################## ##############################################################################
@ -1127,9 +1131,9 @@ def par2_repair(parfile_nzf, nzo, workdir, setname, single):
# Multipar or not? # Multipar or not?
if sabnzbd.WIN32 and cfg.multipar(): if sabnzbd.WIN32 and cfg.multipar():
finished, readd, datafiles, used_joinables, used_for_repair = MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=single) finished, readd, datafiles, used_joinables, used_for_repair = MultiPar_Verify(parfile, nzo, setname, joinables, single=single)
else: else:
finished, readd, datafiles, used_joinables, used_for_repair = PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=single) finished, readd, datafiles, used_joinables, used_for_repair = PAR_Verify(parfile, nzo, setname, joinables, single=single)
if finished: if finished:
result = True result = True
@ -1196,7 +1200,7 @@ _RE_LOADING_PAR2 = re.compile(r'Loading "([^"]+)"\.')
_RE_LOADED_PAR2 = re.compile(r'Loaded (\d+) new packets') _RE_LOADED_PAR2 = re.compile(r'Loaded (\d+) new packets')
def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False): def PAR_Verify(parfile, nzo, setname, joinables, single=False):
""" Run par2 on par-set """ """ Run par2 on par-set """
used_joinables = [] used_joinables = []
used_for_repair = [] used_for_repair = []
@ -1337,7 +1341,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False):
block_table = {} block_table = {}
for nzf in nzo.extrapars[setname]: for nzf in nzo.extrapars[setname]:
if not nzf.completed: if not nzf.completed:
block_table[int_conv(nzf.blocks)] = nzf block_table[nzf.blocks] = nzf
if block_table: if block_table:
nzf = block_table[min(block_table.keys())] nzf = block_table[min(block_table.keys())]
@ -1374,7 +1378,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False):
elif line.startswith('Repair is possible'): elif line.startswith('Repair is possible'):
start = time.time() start = time.time()
nzo.set_action_line(T('Repairing'), '%2d%%' % (0)) nzo.set_action_line(T('Repairing'), '%2d%%' % 0)
elif line.startswith('Repairing:'): elif line.startswith('Repairing:'):
chunks = line.split() chunks = line.split()
@ -1533,7 +1537,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False):
_RE_FILENAME = re.compile(r'"([^"]+)"') _RE_FILENAME = re.compile(r'"([^"]+)"')
def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False): def MultiPar_Verify(parfile, nzo, setname, joinables, single=False):
""" Run par2 on par-set """ """ Run par2 on par-set """
parfolder = os.path.split(parfile)[0] parfolder = os.path.split(parfile)[0]
used_joinables = [] used_joinables = []
@ -1650,7 +1654,7 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False)
block_table = {} block_table = {}
for nzf in nzo.extrapars[setname]: for nzf in nzo.extrapars[setname]:
if not nzf.completed: if not nzf.completed:
block_table[int_conv(nzf.blocks)] = nzf block_table[nzf.blocks] = nzf
if block_table: if block_table:
nzf = block_table[min(block_table.keys())] nzf = block_table[min(block_table.keys())]
@ -1841,13 +1845,17 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False)
# Set message for user in case of joining # Set message for user in case of joining
if line.startswith('Ready to rejoin'): if line.startswith('Ready to rejoin'):
nzo.set_action_line(T('Joining'), '%2d' % len(used_joinables)) nzo.set_action_line(T('Joining'), '%2d' % len(used_joinables))
else:
# If we are repairing a joinable set, it won't actually
# do the joining. So we can't remove those files!
used_joinables = []
# ----------------- Repair stage # ----------------- Repair stage
elif 'Recovering slice' in line: elif 'Recovering slice' in line:
# Before this it will calculate matrix, here is where it starts # Before this it will calculate matrix, here is where it starts
start = time.time() start = time.time()
in_repair = True in_repair = True
nzo.set_action_line(T('Repairing'), '%2d%%' % (0)) nzo.set_action_line(T('Repairing'), '%2d%%' % 0)
elif in_repair and line.startswith('Verifying repair'): elif in_repair and line.startswith('Verifying repair'):
in_repair = False in_repair = False
@ -1921,7 +1929,7 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False)
return finished, readd, datafiles, used_joinables, used_for_repair return finished, readd, datafiles, used_joinables, used_for_repair
def create_env(nzo=None, extra_env_fields=None): def create_env(nzo=None, extra_env_fields={}):
""" Modify the environment for pp-scripts with extra information """ Modify the environment for pp-scripts with extra information
OSX: Return copy of environment without PYTHONPATH and PYTHONHOME OSX: Return copy of environment without PYTHONPATH and PYTHONHOME
other: return None other: return None
@ -1945,6 +1953,15 @@ def create_env(nzo=None, extra_env_fields=None):
# Catch key/unicode errors # Catch key/unicode errors
pass pass
# Always supply basic info
extra_env_fields.update({'program_dir': sabnzbd.DIR_PROG,
'par2_command': sabnzbd.newsunpack.PAR2_COMMAND,
'multipar_command': sabnzbd.newsunpack.MULTIPAR_COMMAND,
'rar_command': sabnzbd.newsunpack.RAR_COMMAND,
'zip_command': sabnzbd.newsunpack.ZIP_COMMAND,
'7zip_command': sabnzbd.newsunpack.SEVEN_COMMAND,
'version': sabnzbd.__version__})
# Add extra fields # Add extra fields
for field in extra_env_fields: for field in extra_env_fields:
try: try:
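With create_env now always injecting the program and tool information, external_processing (earlier in this file) no longer builds those fields per call. Two cautions for a sketch: the real function is assumed to prefix each field (SAB_...) before exporting it, and extra_env_fields={} is a shared mutable default that the new code updates in place, so a standalone version is safer with None:

import os

def build_script_env(extra_env_fields=None):
    """ Sketch of the merge done by create_env: tool info plus per-call extras,
        exported on top of a copy of os.environ (SAB_ prefix is an assumption) """
    env = dict(os.environ)
    fields = {'program_dir': '/opt/sabnzbd',       # illustrative values only
              'version': '2.3.5',
              'rar_command': '/usr/bin/unrar'}
    fields.update(extra_env_fields or {})          # None default avoids shared state
    for field, value in fields.items():
        env['SAB_' + field.upper()] = str(value) if value is not None else ''
    return env

env = build_script_env({'show_name': 'Example Show'})
print(env['SAB_VERSION'], env['SAB_SHOW_NAME'])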
@ -2099,11 +2116,7 @@ def build_filelists(workdir, workdir_complete=None, check_both=False, check_rar=
# Extra check for rar (takes CPU/disk) # Extra check for rar (takes CPU/disk)
file_is_rar = False file_is_rar = False
if check_rar: if check_rar:
try: file_is_rar = is_rarfile(file)
# Can fail on Windows due to long-path after recursive-unpack
file_is_rar = rarfile.is_rarfile(file)
except:
pass
# Run through all the checks # Run through all the checks
if SEVENZIP_RE.search(file) or SEVENMULTI_RE.search(file): if SEVENZIP_RE.search(file) or SEVENMULTI_RE.search(file):
@ -2295,23 +2308,33 @@ def analyse_show(name):
info.get('ep_name', '') info.get('ep_name', '')
def pre_queue(name, pp, cat, script, priority, size, groups): def pre_queue(nzo, pp, cat):
""" Run pre-queue script (if any) and process results """ """ Run pre-queue script (if any) and process results.
pp and cat are supplied separately since they can change.
"""
def fix(p): def fix(p):
if not p or str(p).lower() == 'none': if not p or str(p).lower() == 'none':
return '' return ''
return unicoder(p) return unicoder(p)
values = [1, name, pp, cat, script, priority, None] values = [1, nzo.final_name_pw_clean, pp, cat, nzo.script, nzo.priority, None]
script_path = make_script_path(cfg.pre_script()) script_path = make_script_path(cfg.pre_script())
if script_path: if script_path:
command = [script_path, name, pp, cat, script, priority, str(size), ' '.join(groups)] # Basic command-line parameters
command.extend(analyse_show(name)) command = [script_path, nzo.final_name_pw_clean, pp, cat, nzo.script, nzo.priority, str(nzo.bytes), ' '.join(nzo.groups)]
command.extend(analyse_show(nzo.final_name_pw_clean))
command = [fix(arg) for arg in command] command = [fix(arg) for arg in command]
# Fields not in the NZO directly
extra_env_fields = {'groups': ' '.join(nzo.groups),
'show_name': command[8],
'show_season': command[9],
'show_episode': command[10],
'show_episode_name': command[11]}
try: try:
stup, need_shell, command, creationflags = build_command(command) stup, need_shell, command, creationflags = build_command(command)
env = create_env() env = create_env(nzo, extra_env_fields)
logging.info('Running pre-queue script %s', command) logging.info('Running pre-queue script %s', command)
p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, p = Popen(command, shell=need_shell, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
@ -2332,11 +2355,11 @@ def pre_queue(name, pp, cat, script, priority, size, groups):
n += 1 n += 1
accept = int_conv(values[0]) accept = int_conv(values[0])
if accept < 1: if accept < 1:
logging.info('Pre-Q refuses %s', name) logging.info('Pre-Q refuses %s', nzo.final_name_pw_clean)
elif accept == 2: elif accept == 2:
logging.info('Pre-Q accepts&fails %s', name) logging.info('Pre-Q accepts&fails %s', nzo.final_name_pw_clean)
else: else:
logging.info('Pre-Q accepts %s', name) logging.info('Pre-Q accepts %s', nzo.final_name_pw_clean)
return values return values
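pre_queue now reads name, size, groups and priority straight from the nzo; the contract with the script itself is unchanged: its stdout is read line by line and every non-empty line overrides one of the seven queue values (accept, name, pp, category, script, priority, group). A rough sketch of that read-back:

def apply_pre_queue_output(defaults, script_output):
    """ Merge non-empty stdout lines of the pre-queue script over the defaults.
        defaults = [accept, name, pp, cat, script, priority, group] """
    values = list(defaults)
    for n, line in enumerate(script_output.splitlines()):
        if n < len(values) and line.strip():
            values[n] = line.strip()
    return values

defaults = [1, 'Example.Job', '', '*', 'None', -100, None]
# The script only wants to change the category (line 4) and the priority (line 6)
print(apply_pre_queue_output(defaults, '\n\n\nmovies\n\n2\n'))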

14
sabnzbd/newswrapper.py

@ -25,7 +25,6 @@ from threading import Thread
from nntplib import NNTPPermanentError from nntplib import NNTPPermanentError
import time import time
import logging import logging
import re
import ssl import ssl
import sabnzbd import sabnzbd
@ -135,7 +134,7 @@ class NNTP(object):
# Pre-define attributes to save memory # Pre-define attributes to save memory
__slots__ = ('host', 'port', 'nw', 'blocking', 'error_msg', 'sock') __slots__ = ('host', 'port', 'nw', 'blocking', 'error_msg', 'sock')
def __init__(self, host, port, info, sslenabled, send_group, nw, user=None, password=None, block=False, write_fds=None): def __init__(self, host, port, info, sslenabled, nw, block=False, write_fds=None):
self.host = host self.host = host
self.port = port self.port = port
self.nw = nw self.nw = nw
@ -160,14 +159,14 @@ class NNTP(object):
ctx = ssl.create_default_context() ctx = ssl.create_default_context()
# Only verify hostname when we're strict # Only verify hostname when we're strict
if(nw.server.ssl_verify < 2): if nw.server.ssl_verify < 2:
ctx.check_hostname = False ctx.check_hostname = False
# Certificates optional # Certificates optional
if(nw.server.ssl_verify == 0): if nw.server.ssl_verify == 0:
ctx.verify_mode = ssl.CERT_NONE ctx.verify_mode = ssl.CERT_NONE
# Did the user set a custom cipher-string? # Did the user set a custom cipher-string?
if(nw.server.ssl_ciphers): if nw.server.ssl_ciphers:
# At their own risk, socket will error out in case it was invalid # At their own risk, socket will error out in case it was invalid
ctx.set_ciphers(nw.server.ssl_ciphers) ctx.set_ciphers(nw.server.ssl_ciphers)
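The three ssl_verify levels map onto a standard ssl.SSLContext: 0 disables certificate checks entirely, 1 accepts any valid certificate without matching the hostname, and 2 (strict) keeps the library defaults. A standalone sketch of that mapping:

import ssl

def build_context(ssl_verify, ssl_ciphers=None):
    """ Mirror the verify levels used for NNTP-over-TLS connections """
    ctx = ssl.create_default_context()
    if ssl_verify < 2:
        # Medium: certificate must be valid but the hostname is not checked
        ctx.check_hostname = False
    if ssl_verify == 0:
        # Disabled: accept self-signed or otherwise unverifiable certificates
        ctx.verify_mode = ssl.CERT_NONE
    if ssl_ciphers:
        # User-supplied cipher string; raises ssl.SSLError when nothing matches
        ctx.set_ciphers(ssl_ciphers)
    return ctx

ctx = build_context(ssl_verify=1)
print(ctx.check_hostname, ctx.verify_mode == ssl.CERT_REQUIRED)  # False True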
@ -295,8 +294,7 @@ class NewsWrapper(object):
# Construct NNTP object and shorthands # Construct NNTP object and shorthands
self.nntp = NNTP(self.server.hostip, self.server.port, self.server.info, self.server.ssl, self.nntp = NNTP(self.server.hostip, self.server.port, self.server.info, self.server.ssl,
self.server.send_group, self, self.server.username, self.server.password, self, self.blocking, write_fds)
self.blocking, write_fds)
self.recv = self.nntp.sock.recv self.recv = self.nntp.sock.recv
self.timeout = time.time() + self.server.timeout self.timeout = time.time() + self.server.timeout
@ -395,7 +393,7 @@ class NewsWrapper(object):
# time.sleep(0.0001) # time.sleep(0.0001)
continue continue
else: else:
return (0, False, True) return 0, False, True
# Append so we can do 1 join(), much faster than multiple! # Append so we can do 1 join(), much faster than multiple!
self.data.append(chunk) self.data.append(chunk)

5
sabnzbd/notifier.py

@ -26,7 +26,6 @@ import logging
import socket import socket
import urllib.request, urllib.error, urllib.parse import urllib.request, urllib.error, urllib.parse
import http.client import http.client
import urllib.request, urllib.parse, urllib.error
import time import time
import subprocess import subprocess
import json import json
@ -145,7 +144,7 @@ def check_cat(section, job_cat, keyword=None):
if not keyword: if not keyword:
keyword = section keyword = section
section_cats = sabnzbd.config.get_config(section, '%s_cats' % keyword)() section_cats = sabnzbd.config.get_config(section, '%s_cats' % keyword)()
return (['*'] == section_cats or job_cat in section_cats) return ['*'] == section_cats or job_cat in section_cats
except TypeError: except TypeError:
logging.debug('Incorrect Notify option %s:%s_cats', section, section) logging.debug('Incorrect Notify option %s:%s_cats', section, section)
return True return True
@ -463,7 +462,7 @@ def send_pushover(title, msg, gtype, force=False, test=None):
"expire": emergency_expire "expire": emergency_expire
} }
return do_send_pushover(body) return do_send_pushover(body)
if prio > -3 and prio < 2: if -3 < prio < 2:
body = { "token": apikey, body = { "token": apikey,
"user": userkey, "user": userkey,
"device": device, "device": device,

25
sabnzbd/nzbqueue.py

@ -148,22 +148,6 @@ class NzbQueue(object):
logging.info('Skipping repair for job %s', folder) logging.info('Skipping repair for job %s', folder)
return result return result
def retry_all_jobs(self, history_db):
""" Retry all retryable jobs in History """
result = []
# Retryable folders from History
items = sabnzbd.api.build_history()[0]
registered = [(platform_encode(os.path.basename(item['path'])),
item['nzo_id'])
for item in items if item['retry']]
for job in registered:
logging.info('Repairing job %s', job[0])
result.append(self.repair_job(job[0]))
history_db.remove_history(job[1])
return bool(result)
def repair_job(self, folder, new_nzb=None, password=None): def repair_job(self, folder, new_nzb=None, password=None):
""" Reconstruct admin for a single job folder, optionally with new NZB """ """ Reconstruct admin for a single job folder, optionally with new NZB """
def all_verified(path): def all_verified(path):
@ -171,7 +155,6 @@ class NzbQueue(object):
verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False) or {'x': False} verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False) or {'x': False}
return all(verified[x] for x in verified) return all(verified[x] for x in verified)
nzo_id = None
name = os.path.basename(folder) name = os.path.basename(folder)
path = os.path.join(folder, JOB_ADMIN) path = os.path.join(folder, JOB_ADMIN)
if hasattr(new_nzb, 'filename'): if hasattr(new_nzb, 'filename'):
@ -508,10 +491,10 @@ class NzbQueue(object):
nzo2 = self.__nzo_table[item_id_2] nzo2 = self.__nzo_table[item_id_2]
except KeyError: except KeyError:
# One or both jobs missing # One or both jobs missing
return (-1, 0) return -1, 0
if nzo1 == nzo2: if nzo1 == nzo2:
return (-1, 0) return -1, 0
# get the priorities of the two items # get the priorities of the two items
nzo1_priority = nzo1.priority nzo1_priority = nzo1.priority
@ -540,9 +523,9 @@ class NzbQueue(object):
logging.info('Switching job [%s] %s => [%s] %s', item_id_pos1, item.final_name, item_id_pos2, self.__nzo_list[item_id_pos2].final_name) logging.info('Switching job [%s] %s => [%s] %s', item_id_pos1, item.final_name, item_id_pos2, self.__nzo_list[item_id_pos2].final_name)
del self.__nzo_list[item_id_pos1] del self.__nzo_list[item_id_pos1]
self.__nzo_list.insert(item_id_pos2, item) self.__nzo_list.insert(item_id_pos2, item)
return (item_id_pos2, nzo1.priority) return item_id_pos2, nzo1.priority
# If moving failed/no movement took place # If moving failed/no movement took place
return (-1, nzo1.priority) return -1, nzo1.priority
@NzbQueueLocker @NzbQueueLocker
def move_up_bulk(self, nzo_id, nzf_ids, size): def move_up_bulk(self, nzo_id, nzf_ids, size):

55
sabnzbd/nzbstuff.py

@ -163,7 +163,7 @@ class Article(TryList):
# if (server_check.priority() < found_priority and server_check.priority() < server.priority and not self.server_in_try_list(server_check)): # if (server_check.priority() < found_priority and server_check.priority() < server.priority and not self.server_in_try_list(server_check)):
if server_check.active and (server_check.priority < found_priority): if server_check.active and (server_check.priority < found_priority):
if server_check.priority < server.priority: if server_check.priority < server.priority:
if (not self.server_in_try_list(server_check)): if not self.server_in_try_list(server_check):
if log: if log:
logging.debug('Article %s | Server: %s | setting found priority to %s', self.article, server.host, server_check.priority) logging.debug('Article %s | Server: %s | setting found priority to %s', self.article, server.host, server_check.priority)
found_priority = server_check.priority found_priority = server_check.priority
@ -313,14 +313,14 @@ class NzbFile(TryList):
if found: if found:
self.bytes_left -= article.bytes self.bytes_left -= article.bytes
return (not self.articles) return not self.articles
def set_par2(self, setname, vol, blocks): def set_par2(self, setname, vol, blocks):
""" Designate this this file as a par2 file """ """ Designate this this file as a par2 file """
self.is_par2 = True self.is_par2 = True
self.setname = setname self.setname = setname
self.vol = vol self.vol = vol
self.blocks = int(blocks) self.blocks = int_conv(blocks)
def get_article(self, server, servers): def get_article(self, server, servers):
""" Get next article to be downloaded """ """ Get next article to be downloaded """
@ -619,9 +619,9 @@ class NzbObject(TryList):
# Run user pre-queue script if needed # Run user pre-queue script if needed
if not reuse and cfg.pre_script(): if not reuse and cfg.pre_script():
accept, name, pp, cat_pp, script_pp, priority, group = \ # Call the script
sabnzbd.newsunpack.pre_queue(self.final_name_pw_clean, pp, cat, script, accept, name, pp, cat_pp, script_pp, priority, group = sabnzbd.newsunpack.pre_queue(self, pp, cat)
priority, self.bytes, self.groups)
# Accept or reject # Accept or reject
accept = int_conv(accept) accept = int_conv(accept)
if accept < 1: if accept < 1:
@ -816,7 +816,7 @@ class NzbObject(TryList):
# Sort the sets # Sort the sets
for setname in self.extrapars: for setname in self.extrapars:
self.extrapars[parset].sort(key=lambda x: x.blocks) self.extrapars[setname].sort(key=lambda x: x.blocks)
# Also re-parse all filenames in case par2 came after first articles # Also re-parse all filenames in case par2 came after first articles
self.verify_all_filenames_and_resort() self.verify_all_filenames_and_resort()
@ -892,38 +892,37 @@ class NzbObject(TryList):
def get_extra_blocks(self, setname, needed_blocks): def get_extra_blocks(self, setname, needed_blocks):
""" We want par2-files of all sets that are similar to this one """ We want par2-files of all sets that are similar to this one
So that we also can handle multi-sets with duplicate filenames So that we also can handle multi-sets with duplicate filenames
Block-table has as keys the nr-blocks
Returns number of added blocks in case they are available Returns number of added blocks in case they are available
In case of duplicate files for the same set, we might add too
little par2 on the first add-run, but that's a risk we need to take.
""" """
logging.info('Need %s more blocks, checking blocks', needed_blocks) logging.info('Need %s more blocks, checking blocks', needed_blocks)
avail_blocks = 0 avail_blocks = 0
block_table = {} block_list = []
for setname_search in self.extrapars: for setname_search in self.extrapars:
# Do it for our set, or highlight matching one # Do it for our set, or highlight matching one
# We might catch to many par2's, but that's okay # We might catch too many par2's, but that's okay
if setname_search == setname or difflib.SequenceMatcher(None, setname, setname_search).ratio() > 0.85: if setname_search == setname or difflib.SequenceMatcher(None, setname, setname_search).ratio() > 0.85:
for nzf in self.extrapars[setname_search]: for nzf in self.extrapars[setname_search]:
# Don't count extrapars that are completed already # Don't count extrapars that are completed already
if nzf.completed: if nzf.completed:
continue continue
blocks = int_conv(nzf.blocks) block_list.append(nzf)
if blocks not in block_table: avail_blocks += nzf.blocks
block_table[blocks] = []
# We assume same block-vol-naming for each set
avail_blocks += blocks
block_table[blocks].append(nzf)
# Sort by smallest blocks last, to be popped first
block_list.sort(key=lambda x: x.blocks, reverse=True)
logging.info('%s blocks available', avail_blocks) logging.info('%s blocks available', avail_blocks)
# Enough? # Enough?
if avail_blocks >= needed_blocks: if avail_blocks >= needed_blocks:
added_blocks = 0 added_blocks = 0
while added_blocks < needed_blocks: while added_blocks < needed_blocks:
block_size = min(block_table.keys()) new_nzf = block_list.pop()
for new_nzf in block_table[block_size]:
self.add_parfile(new_nzf) self.add_parfile(new_nzf)
added_blocks += block_size added_blocks += new_nzf.blocks
block_table.pop(block_size)
logging.info('Added %s blocks to %s', added_blocks, self.final_name) logging.info('Added %s blocks to %s', added_blocks, self.final_name)
return added_blocks return added_blocks
else: else:
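The rewritten get_extra_blocks drops the per-blocksize table: every incomplete extra par2 file goes into one list, sorted so the smallest volumes sit at the end and are popped first until enough blocks are gathered. A small sketch of that selection with a hypothetical Par2Vol stand-in:

class Par2Vol:
    """ Hypothetical stand-in for an NzbFile carrying repair blocks """
    def __init__(self, name, blocks):
        self.name, self.blocks = name, blocks

def pick_extra_blocks(volumes, needed_blocks):
    """ Return the volumes to queue, smallest first, until needed_blocks is reached """
    avail = sorted(volumes, key=lambda v: v.blocks, reverse=True)
    if sum(v.blocks for v in avail) < needed_blocks:
        return []          # not enough repair data available at all
    picked, added = [], 0
    while added < needed_blocks:
        vol = avail.pop()  # smallest remaining volume
        picked.append(vol)
        added += vol.blocks
    return picked

vols = [Par2Vol('set.vol00+01.par2', 1), Par2Vol('set.vol01+02.par2', 2), Par2Vol('set.vol03+04.par2', 4)]
print([v.name for v in pick_extra_blocks(vols, 3)])  # the 1- and 2-block volumes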
@ -985,7 +984,7 @@ class NzbObject(TryList):
self.status = Status.QUEUED self.status = Status.QUEUED
self.set_download_report() self.set_download_report()
return (file_done, post_done) return file_done, post_done
@synchronized(NZO_LOCK) @synchronized(NZO_LOCK)
def remove_saved_article(self, article): def remove_saved_article(self, article):
@ -1086,7 +1085,7 @@ class NzbObject(TryList):
# Convert input # Convert input
value = int_conv(value) value = int_conv(value)
if value in (REPAIR_PRIORITY, TOP_PRIORITY, HIGH_PRIORITY, NORMAL_PRIORITY, \ if value in (REPAIR_PRIORITY, TOP_PRIORITY, HIGH_PRIORITY, NORMAL_PRIORITY,
LOW_PRIORITY, DEFAULT_PRIORITY, PAUSED_PRIORITY, DUP_PRIORITY, STOP_PRIORITY): LOW_PRIORITY, DEFAULT_PRIORITY, PAUSED_PRIORITY, DUP_PRIORITY, STOP_PRIORITY):
self.priority = value self.priority = value
return return
@ -1201,7 +1200,7 @@ class NzbObject(TryList):
if (parset in nzf.filename or parset in original_filename) and self.extrapars[parset]: if (parset in nzf.filename or parset in original_filename) and self.extrapars[parset]:
for new_nzf in self.extrapars[parset]: for new_nzf in self.extrapars[parset]:
self.add_parfile(new_nzf) self.add_parfile(new_nzf)
blocks_new += int_conv(new_nzf.blocks) blocks_new += new_nzf.blocks
# Enough now? # Enough now?
if blocks_new >= self.bad_articles: if blocks_new >= self.bad_articles:
logging.info('Prospectively added %s repair blocks to %s', blocks_new, self.final_name) logging.info('Prospectively added %s repair blocks to %s', blocks_new, self.final_name)
@ -1296,11 +1295,11 @@ class NzbObject(TryList):
self.set_unpack_info('Servers', ', '.join(msgs), unique=True) self.set_unpack_info('Servers', ', '.join(msgs), unique=True)
@synchronized(NZO_LOCK) @synchronized(NZO_LOCK)
def increase_bad_articles_counter(self, type): def increase_bad_articles_counter(self, article_type):
""" Record information about bad articles """ """ Record information about bad articles """
if type not in self.nzo_info: if article_type not in self.nzo_info:
self.nzo_info[type] = 0 self.nzo_info[article_type] = 0
self.nzo_info[type] += 1 self.nzo_info[article_type] += 1
self.bad_articles += 1 self.bad_articles += 1
def get_article(self, server, servers): def get_article(self, server, servers):
@ -1800,7 +1799,7 @@ def scan_password(name):
slash = name.find('/') slash = name.find('/')
# Look for name/password, but make sure that '/' comes before any {{ # Look for name/password, but make sure that '/' comes before any {{
if slash >= 0 and slash < braces and 'password=' not in name: if 0 <= slash < braces and 'password=' not in name:
# Is it maybe in 'name / password' notation? # Is it maybe in 'name / password' notation?
if slash == name.find(' / ') + 1: if slash == name.find(' / ') + 1:
# Remove the extra space after name and before password # Remove the extra space after name and before password
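scan_password supports both the name{{password}} and the name/password notations; the chained 0 <= slash < braces comparison is a purely stylistic rewrite of the same check that a slash only counts when it appears before any braces. A condensed sketch of the two notations (not the full function, which also handles the ' / ' spacing case shown above):

def scan_password(name):
    """ Split 'job{{secret}}' or 'job/secret' into (job, password) """
    if 'password=' in name:
        name, password = name.split('password=', 1)
        return name.strip(), password
    braces = name.find('{{')
    if braces < 0:
        braces = len(name)
    slash = name.find('/')
    if braces < len(name) and name.endswith('}}'):
        return name[:braces].strip(), name[braces + 2:len(name) - 2]
    if 0 <= slash < braces:
        return name[:slash].strip(), name[slash + 1:]
    return name, None

print(scan_password('Example Job{{s3cret}}'))   # ('Example Job', 's3cret')
print(scan_password('Example Job/s3cret'))      # ('Example Job', 's3cret')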

8
sabnzbd/osxmenu.py

@ -208,7 +208,7 @@ class SABnzbdDelegate(NSObject):
for speed in sorted(speeds.keys()): for speed in sorted(speeds.keys()):
menu_speed_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_('%s' % (speeds[speed]), 'speedlimitAction:', '') menu_speed_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_('%s' % (speeds[speed]), 'speedlimitAction:', '')
menu_speed_item.setRepresentedObject_("%s" % (speed)) menu_speed_item.setRepresentedObject_("%s" % speed)
self.menu_speed.addItem_(menu_speed_item) self.menu_speed.addItem_(menu_speed_item)
self.speed_menu_item.setSubmenu_(self.menu_speed) self.speed_menu_item.setSubmenu_(self.menu_speed)
@ -414,7 +414,7 @@ class SABnzbdDelegate(NSObject):
if history['status'] != Status.COMPLETED: if history['status'] != Status.COMPLETED:
jobfailed = NSAttributedString.alloc().initWithString_attributes_(job, self.failedAttributes) jobfailed = NSAttributedString.alloc().initWithString_attributes_(job, self.failedAttributes)
menu_history_item.setAttributedTitle_(jobfailed) menu_history_item.setAttributedTitle_(jobfailed)
menu_history_item.setRepresentedObject_("%s" % (path)) menu_history_item.setRepresentedObject_("%s" % path)
self.menu_history.addItem_(menu_history_item) self.menu_history.addItem_(menu_history_item)
else: else:
menu_history_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Empty'), '', '') menu_history_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Empty'), '', '')
@ -483,9 +483,9 @@ class SABnzbdDelegate(NSObject):
if self.state != "" and self.info != "": if self.state != "" and self.info != "":
self.state_menu_item.setTitle_("%s - %s" % (self.state, self.info)) self.state_menu_item.setTitle_("%s - %s" % (self.state, self.info))
if self.info == "": if self.info == "":
self.state_menu_item.setTitle_("%s" % (self.state)) self.state_menu_item.setTitle_("%s" % self.state)
else: else:
self.state_menu_item.setTitle_("%s" % (self.info)) self.state_menu_item.setTitle_("%s" % self.info)
except: except:
logging.info("[osx] stateUpdate Exception %s" % (sys.exc_info()[0])) logging.info("[osx] stateUpdate Exception %s" % (sys.exc_info()[0]))

16
sabnzbd/par2file.py

@ -26,7 +26,9 @@ import struct
PROBABLY_PAR2_RE = re.compile(r'(.*)\.vol(\d*)[\+\-](\d*)\.par2', re.I) PROBABLY_PAR2_RE = re.compile(r'(.*)\.vol(\d*)[\+\-](\d*)\.par2', re.I)
PAR_ID = "PAR2\x00PKT" PAR_PKT_ID = "PAR2\x00PKT"
PAR_FILE_ID = "PAR 2.0\x00FileDesc"
PAR_CREATOR_ID = "PAR 2.0\x00Creator"
PAR_RECOVERY_ID = "RecvSlic" PAR_RECOVERY_ID = "RecvSlic"
@ -35,7 +37,7 @@ def is_parfile(filename):
try: try:
with open(filename, "rb") as f: with open(filename, "rb") as f:
buf = f.read(8) buf = f.read(8)
return buf.startswith(PAR_ID) return buf.startswith(PAR_PKT_ID)
except: except:
pass pass
return False return False
@ -47,7 +49,6 @@ def analyse_par2(name, filepath=None):
setname is empty when not a par2 file setname is empty when not a par2 file
""" """
name = name.strip() name = name.strip()
setname = None
vol = block = 0 vol = block = 0
m = PROBABLY_PAR2_RE.search(name) m = PROBABLY_PAR2_RE.search(name)
if m: if m:
@ -129,7 +130,8 @@ def parse_par2_file_packet(f, header):
nothing = None, None, None nothing = None, None, None
if header != PAR_ID: if header != PAR_PKT_ID:
print header
return nothing return nothing
# Length must be multiple of 4 and at least 20 # Length must be multiple of 4 and at least 20
@ -157,10 +159,14 @@ def parse_par2_file_packet(f, header):
# See if it's the right packet and get name + hash # See if it's the right packet and get name + hash
for offset in range(0, len, 8): for offset in range(0, len, 8):
if data[offset:offset + 16] == "PAR 2.0\0FileDesc": if data[offset:offset + 16] == PAR_FILE_ID:
hash = data[offset + 32:offset + 48] hash = data[offset + 32:offset + 48]
hash16k = data[offset + 48:offset + 64] hash16k = data[offset + 48:offset + 64]
filename = data[offset + 72:].strip('\0') filename = data[offset + 72:].strip('\0')
return filename, hash, hash16k return filename, hash, hash16k
elif data[offset:offset + 15] == PAR_CREATOR_ID:
# From here until the end is the creator text
# Useful in case of bugs in the par2-creating software
logging.debug('Par2-creator of %s is: %s', os.path.basename(f.name), data[offset+16:].rstrip())
return nothing return nothing
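The new PAR_PKT_ID / PAR_FILE_ID / PAR_CREATOR_ID constants replace the hard-coded magic strings, so FileDesc and Creator packets can be told apart by name. A minimal sketch of the same 8-byte magic check, written for Python 3 bytes (the filename is a placeholder):

PAR_PKT_ID = b"PAR2\x00PKT"

def looks_like_par2(filename):
    # Read the first 8 bytes and compare against the PAR2 packet magic
    try:
        with open(filename, "rb") as f:
            return f.read(8).startswith(PAR_PKT_ID)
    except OSError:
        return False

print(looks_like_par2("example.vol00+01.par2"))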

4
sabnzbd/postproc.py

@ -271,7 +271,6 @@ def process_job(nzo):
nzb_list = [] nzb_list = []
# These need to be initialized in case of a crash # These need to be initialized in case of a crash
workdir_complete = '' workdir_complete = ''
postproc_time = 0
script_log = '' script_log = ''
script_line = '' script_line = ''
@ -326,15 +325,12 @@ def process_job(nzo):
unpack_error = 1 unpack_error = 1
script = nzo.script script = nzo.script
cat = nzo.cat
logging.info('Starting Post-Processing on %s' + logging.info('Starting Post-Processing on %s' +
' => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s', ' => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s',
filename, flag_repair, flag_unpack, flag_delete, script, nzo.cat) filename, flag_repair, flag_unpack, flag_delete, script, nzo.cat)
# Set complete dir to workdir in case we need to abort # Set complete dir to workdir in case we need to abort
workdir_complete = workdir workdir_complete = workdir
marker_file = None
# Par processing, if enabled # Par processing, if enabled
if all_ok and flag_repair: if all_ok and flag_repair:

44
sabnzbd/rss.py

@ -287,10 +287,10 @@ class RSSQueue(object):
status = feed_parsed.get('status', 999) status = feed_parsed.get('status', 999)
if status in (401, 402, 403): if status in (401, 402, 403):
msg = T('Do not have valid authentication for feed %s') % feed msg = T('Do not have valid authentication for feed %s') % uri
logging.info(msg) logging.info(msg)
if status >= 500 and status <= 599: if 500 <= status <= 599:
msg = T('Server side error (server code %s); could not get %s on %s') % (status, feed, uri) msg = T('Server side error (server code %s); could not get %s on %s') % (status, feed, uri)
logging.info(msg) logging.info(msg)
@ -301,11 +301,14 @@ class RSSQueue(object):
msg = T('Server %s uses an untrusted HTTPS certificate') % get_urlbase(uri) msg = T('Server %s uses an untrusted HTTPS certificate') % get_urlbase(uri)
msg += ' - https://sabnzbd.org/certificate-errors' msg += ' - https://sabnzbd.org/certificate-errors'
logging.error(msg) logging.error(msg)
elif feed_parsed['href'] != uri and 'login' in feed_parsed['href']:
# Redirect to login page!
msg = T('Do not have valid authentication for feed %s') % uri
else: else:
msg = T('Failed to retrieve RSS from %s: %s') % (uri, xml_name(msg)) msg = T('Failed to retrieve RSS from %s: %s') % (uri, xml_name(msg))
logging.info(msg) logging.info(msg)
if not entries: if not entries and not msg:
msg = T('RSS Feed %s was empty') % uri msg = T('RSS Feed %s was empty') % uri
logging.info(msg) logging.info(msg)
all_entries.extend(entries) all_entries.extend(entries)
@ -330,12 +333,8 @@ class RSSQueue(object):
if readout: if readout:
try: try:
link, category, size, age, season, episode = _get_link(uri, entry) link, infourl, category, size, age, season, episode = _get_link(entry)
except (AttributeError, IndexError): except (AttributeError, IndexError):
link = None
category = ''
size = 0
age = None
logging.info(T('Incompatible feed') + ' ' + uri) logging.info(T('Incompatible feed') + ' ' + uri)
logging.info("Traceback: ", exc_info=True) logging.info("Traceback: ", exc_info=True)
return T('Incompatible feed') return T('Incompatible feed')
@ -482,13 +481,13 @@ class RSSQueue(object):
else: else:
star = first star = first
if result: if result:
_HandleLink(jobs, feed, link, title, size, age, season, episode, 'G', category, myCat, myPP, _HandleLink(jobs, feed, link, infourl, title, size, age, season, episode, 'G', category, myCat,
myScript, act, star, priority=myPrio, rule=str(n)) myPP, myScript, act, star, priority=myPrio, rule=str(n))
if act: if act:
new_downloads.append(title) new_downloads.append(title)
else: else:
_HandleLink(jobs, feed, link, title, size, age, season, episode, 'B', category, myCat, myPP, _HandleLink(jobs, feed, link, infourl, title, size, age, season, episode, 'B', category, myCat,
myScript, False, star, priority=myPrio, rule=str(n)) myPP, myScript, False, star, priority=myPrio, rule=str(n))
# Send email if wanted and not "forced" # Send email if wanted and not "forced"
if new_downloads and cfg.email_rss() and not force: if new_downloads and cfg.email_rss() and not force:
@ -588,7 +587,7 @@ class RSSQueue(object):
return '' return ''
def _HandleLink(jobs, feed, link, title, size, age, season, episode, flag, orgcat, cat, pp, script, def _HandleLink(jobs, feed, link, infourl, title, size, age, season, episode, flag, orgcat, cat, pp, script,
download, star, priority=NORMAL_PRIORITY, rule=0): download, star, priority=NORMAL_PRIORITY, rule=0):
""" Process one link """ """ Process one link """
if script == '': if script == '':
@ -599,6 +598,7 @@ def _HandleLink(jobs, feed, link, title, size, age, season, episode, flag, orgca
jobs[link] = {} jobs[link] = {}
jobs[link]['title'] = title jobs[link]['title'] = title
jobs[link]['url'] = link jobs[link]['url'] = link
jobs[link]['infourl'] = infourl
jobs[link]['cat'] = cat jobs[link]['cat'] = cat
jobs[link]['pp'] = pp jobs[link]['pp'] = pp
jobs[link]['script'] = script jobs[link]['script'] = script
@ -627,14 +627,11 @@ def _HandleLink(jobs, feed, link, title, size, age, season, episode, flag, orgca
else: else:
jobs[link]['status'] = flag jobs[link]['status'] = flag
def _get_link(uri, entry): def _get_link(entry):
""" Retrieve the post link from this entry """ Retrieve the post link from this entry
Returns (link, category, size) Returns (link, category, size)
""" """
link = None size = 0L
category = ''
size = 0
uri = uri.lower()
age = datetime.datetime.now() age = datetime.datetime.now()
# Try standard link and enclosures first # Try standard link and enclosures first
@ -648,7 +645,12 @@ def _get_link(uri, entry):
except: except:
pass pass
if size == 0: # GUID usually has URL to result on page
infourl = None
if entry.id and entry.id != link and entry.id.startswith('http'):
infourl = entry.id
if size == 0L:
_RE_SIZE1 = re.compile(r'Size:\s*(\d+\.\d+\s*[KMG]{0,1})B\W*', re.I) _RE_SIZE1 = re.compile(r'Size:\s*(\d+\.\d+\s*[KMG]{0,1})B\W*', re.I)
_RE_SIZE2 = re.compile(r'\W*(\d+\.\d+\s*[KMG]{0,1})B\W*', re.I) _RE_SIZE2 = re.compile(r'\W*(\d+\.\d+\s*[KMG]{0,1})B\W*', re.I)
# Try to find size in Description # Try to find size in Description
@ -697,10 +699,10 @@ def _get_link(uri, entry):
except: except:
category = '' category = ''
return link, category, size, age, season, episode return link, infourl, category, size, age, season, episode
else: else:
logging.warning(T('Empty RSS entry found (%s)'), link) logging.warning(T('Empty RSS entry found (%s)'), link)
return None, '', 0, None, 0, 0 return None, None, '', 0L, None, 0, 0
def special_rss_site(url): def special_rss_site(url):
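_get_link() now also returns an info URL taken from the entry GUID when that GUID is a real link distinct from the download URL. A small sketch of that selection, using a stand-in for a feedparser entry (the URLs are invented):

class FakeEntry:
    link = 'https://indexer.example/getnzb/abc123'
    id = 'https://indexer.example/details/abc123'

def pick_infourl(entry):
    # The GUID usually points at the result page; only use it when it is an
    # http(s) URL and not simply a copy of the download link
    infourl = None
    if entry.id and entry.id != entry.link and entry.id.startswith('http'):
        infourl = entry.id
    return infourl

print(pick_infourl(FakeEntry()))  # -> https://indexer.example/details/abc123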

8
sabnzbd/sabtray.py

@ -19,6 +19,7 @@
sabtray.py - Systray icon for SABnzbd on Windows, contributed by Jan Schejbal sabtray.py - Systray icon for SABnzbd on Windows, contributed by Jan Schejbal
""" """
import os
import logging import logging
from time import sleep from time import sleep
@ -29,8 +30,6 @@ import sabnzbd.scheduler as scheduler
from sabnzbd.downloader import Downloader from sabnzbd.downloader import Downloader
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
from sabnzbd.misc import to_units from sabnzbd.misc import to_units
import os
import cherrypy
# contains the tray icon, which demands its own thread # contains the tray icon, which demands its own thread
from sabnzbd.utils.systrayiconthread import SysTrayIconThread from sabnzbd.utils.systrayiconthread import SysTrayIconThread
@ -98,10 +97,13 @@ class SABTrayThread(SysTrayIconThread):
speed = to_units(bpsnow) speed = to_units(bpsnow)
if self.sabpaused: if self.sabpaused:
if bytes_left > 0:
self.hover_text = "%s - %s: %sB" % (self.txt_paused, self.txt_remaining, mb_left)
else:
self.hover_text = self.txt_paused self.hover_text = self.txt_paused
self.icon = self.sabicons['pause'] self.icon = self.sabicons['pause']
elif bytes_left > 0: elif bytes_left > 0:
self.hover_text = "%sB/s %s: %sB (%s)" % (speed, self.txt_remaining, mb_left, time_left) self.hover_text = "%sB/s - %s: %sB (%s)" % (speed, self.txt_remaining, mb_left, time_left)
self.icon = self.sabicons['green'] self.icon = self.sabicons['green']
else: else:
self.hover_text = self.txt_idle self.hover_text = self.txt_idle

1
sabnzbd/sabtraylinux.py

@ -21,7 +21,6 @@ sabnzbd.sabtraylinux - System tray icon for Linux, inspired from the Windows one
import gtk import gtk
import gobject import gobject
import cherrypy
from time import sleep from time import sleep
import subprocess import subprocess
from threading import Thread from threading import Thread

8
sabnzbd/skintext.py

@ -47,16 +47,10 @@ SKIN_TEXT = {
'post-Propagating' : TT('Propagation delay'), 'post-Propagating' : TT('Propagation delay'),
'post-Checking' : TT('Checking'), #: PP status 'post-Checking' : TT('Checking'), #: PP status
'sch-frequency' : TT('Frequency'), #: #: Config->Scheduler
'sch-action' : TT('Action'), #: #: Config->Scheduler
'sch-arguments' : TT('Arguments'), #: #: Config->Scheduler
'sch-task' : TT('Task'), #: #: Config->Scheduler 'sch-task' : TT('Task'), #: #: Config->Scheduler
'sch-disable_server' : TT('disable server'), #: #: Config->Scheduler 'sch-disable_server' : TT('disable server'), #: #: Config->Scheduler
'sch-enable_server' : TT('enable server'), #: #: Config->Scheduler 'sch-enable_server' : TT('enable server'), #: #: Config->Scheduler
'sch-resume' : TT('Resume'), #: #: Config->Scheduler
'sch-pause' : TT('Pause'), #: #: Config->Scheduler
'sch-shutdown' : TT('Shutdown'), #: #: Config->Scheduler
'sch-restart' : TT('Restart'), #: #: Config->Scheduler
'sch-speedlimit' : TT('Speedlimit'), #: #: Config->Scheduler 'sch-speedlimit' : TT('Speedlimit'), #: #: Config->Scheduler
'sch-pause_all' : TT('Pause All'), #: #: Config->Scheduler 'sch-pause_all' : TT('Pause All'), #: #: Config->Scheduler
'sch-pause_post' : TT('Pause post-processing'), #: #: Config->Scheduler 'sch-pause_post' : TT('Pause post-processing'), #: #: Config->Scheduler

2
sabnzbd/sorting.py

@ -237,7 +237,7 @@ class SeriesSorter(object):
one = '-'.join(extra_list) one = '-'.join(extra_list)
two = '-'.join(extra2_list) two = '-'.join(extra2_list)
return (one, two) return one, two
def get_shownames(self): def get_shownames(self):
""" Get the show name from the match object and format it """ """ Get the show name from the match object and format it """

4
sabnzbd/urlgrabber.py

@ -200,7 +200,7 @@ class URLGrabber(Thread):
retry = True retry = True
fetch_request = None fetch_request = None
elif retry: elif retry:
fetch_request, msg, retry, wait, data = _analyse(fetch_request, url, future_nzo) fetch_request, msg, retry, wait, data = _analyse(fetch_request, future_nzo)
if not fetch_request: if not fetch_request:
if retry: if retry:
@ -352,7 +352,7 @@ def _build_request(url):
return urllib.request.urlopen(req) return urllib.request.urlopen(req)
def _analyse(fetch_request, url, future_nzo): def _analyse(fetch_request, future_nzo):
""" Analyze response of indexer """ Analyze response of indexer
returns fetch_request|None, error-message|None, retry, wait-seconds, data returns fetch_request|None, error-message|None, retry, wait-seconds, data
""" """

5
sabnzbd/utils/certgen.py

@ -52,7 +52,7 @@ def generate_key(key_size=2048, output_file='key.pem'):
# Ported from cryptography docs/x509/tutorial.rst # Ported from cryptography docs/x509/tutorial.rst
def generate_local_cert(private_key, days_valid=3560, output_file='cert.cert', LN='SABnzbd', ON='SABnzbd', CN='localhost'): def generate_local_cert(private_key, days_valid=3560, output_file='cert.cert', LN=u'SABnzbd', ON=u'SABnzbd'):
# Various details about who we are. For a self-signed certificate the # Various details about who we are. For a self-signed certificate the
# subject and issuer are always the same. # subject and issuer are always the same.
subject = issuer = x509.Name([ subject = issuer = x509.Name([
@ -64,8 +64,7 @@ def generate_local_cert(private_key, days_valid=3560, output_file='cert.cert', L
# build Subject Alternate Names (aka SAN) list # build Subject Alternate Names (aka SAN) list
# First the host names, add with x509.DNSName(): # First the host names, add with x509.DNSName():
san_list = [x509.DNSName("localhost")] san_list = [x509.DNSName(u"localhost"), x509.DNSName(unicode(socket.gethostname()))]
san_list.append(x509.DNSName(str(socket.gethostname())))
# Then the host IP addresses, add with x509.IPAddress() # Then the host IP addresses, add with x509.IPAddress()
# Inside a try-except, just to be sure # Inside a try-except, just to be sure

2
sabnzbd/utils/checkdir.py

@ -6,7 +6,6 @@ Functions to check if the path filesystem uses FAT
import sys import sys
import os import os
import subprocess
debug = False debug = False
@ -71,7 +70,6 @@ def isFAT(dir):
''' '''
dfcmd = "df " + dir dfcmd = "df " + dir
device = ''
for thisline in os.popen(dfcmd).readlines(): for thisline in os.popen(dfcmd).readlines():
if thisline.find('/')==0: if thisline.find('/')==0:
if debug: print(thisline) if debug: print(thisline)

9
sabnzbd/utils/diskspeed.py

@ -3,7 +3,6 @@
import time import time
import os import os
import sys import sys
import logging
_DUMP_DATA_SIZE = 10 * 1024 * 1024 _DUMP_DATA_SIZE = 10 * 1024 * 1024
_DUMP_DATA = os.urandom(_DUMP_DATA_SIZE) _DUMP_DATA = os.urandom(_DUMP_DATA_SIZE)
@ -14,11 +13,14 @@ def diskspeedmeasure(dirname):
method: keep writing a file, until 1 second is passed. method: keep writing a file, until 1 second is passed.
Then divide bytes written by time passed Then divide bytes written by time passed
""" """
maxtime = 0.5 # sec maxtime = 1.0 # sec
total_written = 0 total_written = 0
filename = os.path.join(dirname, 'outputTESTING.txt') filename = os.path.join(dirname, 'outputTESTING.txt')
fp = os.open(filename, os.O_CREAT | os.O_WRONLY, 0o777) # low-level I/O fp = os.open(filename, os.O_CREAT | os.O_WRONLY, 0o777) # low-level I/O
# Use low-level I/O
fp = os.open(filename, os.O_CREAT | os.O_WRONLY, 0o777)
# Start looping # Start looping
total_time = 0.0 total_time = 0.0
while total_time < maxtime: while total_time < maxtime:
@ -30,7 +32,8 @@ def diskspeedmeasure(dirname):
# Remove the file # Remove the file
try: try:
fp.close() # Have to use low-level close
os.close(fp)
os.remove(filename) os.remove(filename)
except: except:
pass pass
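The measurement now runs for a full second and uses os.close() to match the low-level os.open(). A standalone sketch of the same write-until-time-is-up loop (directory and sizes are assumptions, not the module's exact values):

import os
import tempfile
import time

def diskspeed(dirname, maxtime=1.0, chunk=10 * 1024 * 1024):
    data = os.urandom(chunk)
    filename = os.path.join(dirname, 'outputTESTING.txt')
    fp = os.open(filename, os.O_CREAT | os.O_WRONLY, 0o777)  # low-level I/O
    written = 0
    total_time = 0.0
    try:
        while total_time < maxtime:
            start = time.time()
            os.write(fp, data)
            total_time += time.time() - start
            written += chunk
    finally:
        os.close(fp)  # low-level close to match os.open()
        os.remove(filename)
    return written / max(total_time, 1e-6) / 1024 / 1024  # MB/s

print('%.1f MB/s' % diskspeed(tempfile.gettempdir()))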

13
sabnzbd/utils/getperformance.py

@ -20,7 +20,7 @@ def getcpu():
elif platform.system() == "Linux": elif platform.system() == "Linux":
for myline in open("/proc/cpuinfo"): for myline in open("/proc/cpuinfo"):
if myline.startswith(('model name')): if myline.startswith('model name'):
# Typical line: # Typical line:
# model name : Intel(R) Xeon(R) CPU E5335 @ 2.00GHz # model name : Intel(R) Xeon(R) CPU E5335 @ 2.00GHz
cputype = myline.split(":", 1)[1] # get everything after the first ":" cputype = myline.split(":", 1)[1] # get everything after the first ":"
@ -45,6 +45,17 @@ def getpystone():
except: except:
return None return None
# if we arrive here, we were able to successfully import pystone, so start the calculation
maxpystone = None
# Start with a short run, find the pystone score, and increase the run time until a run takes > 0.1 second
for pyseed in [1000, 2000, 5000, 10000, 20000, 50000, 100000, 200000]:
duration, pystonefloat = pystones(pyseed)
maxpystone = max(maxpystone, int(pystonefloat))
# Stop when pystone() has been running for at least 0.1 second
if duration > 0.1:
break
return maxpystone
if __name__ == '__main__': if __name__ == '__main__':
print((getpystone())) print((getpystone()))
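getpystone() now searches for a stable score by growing the loop count until one run takes more than 0.1 second. A sketch of that search, assuming the bundled sabnzbd.utils.pystone module is importable (the loop counts mirror the ones added above):

from sabnzbd.utils.pystone import pystones

def best_pystone():
    maxpystone = None
    # Grow the loop count until a single run takes long enough to be meaningful
    for loops in (1000, 2000, 5000, 10000, 20000, 50000, 100000, 200000):
        duration, score = pystones(loops)
        maxpystone = max(maxpystone or 0, int(score))
        if duration > 0.1:
            break
    return maxpystone

print(best_pystone())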

6
sabnzbd/utils/happyeyeballs.py

@ -6,10 +6,10 @@
# If the HOST has an IPv6 address, IPv6 is given a head start by delaying IPv4. See https://tools.ietf.org/html/rfc6555#section-4.1 # If the HOST has an IPv6 address, IPv6 is given a head start by delaying IPv4. See https://tools.ietf.org/html/rfc6555#section-4.1
# You can run this as a standalone program, or as a module: # You can run this as a standalone program, or as a module:
''' """
from happyeyeballs import happyeyeballs from happyeyeballs import happyeyeballs
print happyeyeballs('newszilla.xs4all.nl', port=119) print happyeyeballs('newszilla.xs4all.nl', port=119)
''' """
# or with more logging: # or with more logging:
''' '''
from happyeyeballs import happyeyeballs from happyeyeballs import happyeyeballs
@ -108,7 +108,7 @@ def happyeyeballs(HOST, **kwargs):
ipv4delay = 0 ipv4delay = 0
try: try:
# Check if there is an AAAA / IPv6 result for this host: # Check if there is an AAAA / IPv6 result for this host:
info = socket.getaddrinfo(HOST, PORT, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_IP, socket.AI_CANONNAME) socket.getaddrinfo(HOST, PORT, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_IP, socket.AI_CANONNAME)
if DEBUG: logging.debug("IPv6 address found for %s", HOST) if DEBUG: logging.debug("IPv6 address found for %s", HOST)
if preferipv6: if preferipv6:
ipv4delay=0.1 # preferipv6, AND at least one IPv6 found, so give IPv4 (!) a delay so that IPv6 has a head start and is preferred ipv4delay=0.1 # preferipv6, AND at least one IPv6 found, so give IPv4 (!) a delay so that IPv6 has a head start and is preferred
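The AAAA lookup decides whether IPv4 should get a small head-start delay, per RFC 6555. A compact sketch of that check (host and port are the examples from the module's own docstring):

import socket

def has_ipv6_address(host, port=119):
    # getaddrinfo() raises socket.gaierror when no AAAA record exists
    try:
        socket.getaddrinfo(host, port, socket.AF_INET6, socket.SOCK_STREAM)
        return True
    except socket.gaierror:
        return False

ipv4delay = 0.1 if has_ipv6_address('newszilla.xs4all.nl') else 0
print(ipv4delay)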

1
sabnzbd/utils/kronos.py

@ -78,7 +78,6 @@ import os
import sys import sys
import sched import sched
import time import time
import traceback
import weakref import weakref
import logging import logging

2
sabnzbd/utils/pystone.py

@ -236,7 +236,7 @@ def Func2(StrParI1, StrParI2):
if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1: if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1:
CharLoc = 'A' CharLoc = 'A'
IntLoc = IntLoc + 1 IntLoc = IntLoc + 1
if CharLoc >= 'W' and CharLoc <= 'Z': if 'W' <= CharLoc <= 'Z':
IntLoc = 7 IntLoc = 7
if CharLoc == 'X': if CharLoc == 'X':
return TRUE return TRUE

4
sabnzbd/utils/servertests.py

@ -90,7 +90,7 @@ def test_nntp_server(host, port, server=None, username=None, password=None, ssl=
nw.recv_chunk(block=True) nw.recv_chunk(block=True)
nw.finish_connect(nw.status_code) nw.finish_connect(nw.status_code)
except socket.timeout as e: except socket.timeout:
if port != 119 and not ssl: if port != 119 and not ssl:
return False, T('Timed out: Try enabling SSL or connecting on a different port.') return False, T('Timed out: Try enabling SSL or connecting on a different port.')
else: else:
@ -103,7 +103,7 @@ def test_nntp_server(host, port, server=None, username=None, password=None, ssl=
return False, str(e) return False, str(e)
except TypeError as e: except TypeError:
return False, T('Invalid server address.') return False, T('Invalid server address.')
except IndexError: except IndexError:

1
sabnzbd/utils/upload.py

@ -25,7 +25,6 @@ import os
from sabnzbd.encoding import unicoder from sabnzbd.encoding import unicoder
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
from sabnzbd.filesystem import get_ext, get_filename from sabnzbd.filesystem import get_ext, get_filename
import sabnzbd.newsunpack
from sabnzbd.constants import VALID_ARCHIVES, VALID_NZB_FILES from sabnzbd.constants import VALID_ARCHIVES, VALID_NZB_FILES
from sabnzbd.dirscanner import ProcessArchiveFile, ProcessSingleFile from sabnzbd.dirscanner import ProcessArchiveFile, ProcessSingleFile

6
sabnzbd/zconfig.py

@ -21,7 +21,6 @@ sabnzbd.zconfig - bonjour/zeroconfig support
import os import os
import logging import logging
import cherrypy
_HOST_PORT = (None, None) _HOST_PORT = (None, None)
@ -80,11 +79,6 @@ def set_bonjour(host=None, port=None):
return return
name = hostname() name = hostname()
if '.local' in name:
suffix = ''
else:
suffix = '.local'
logging.debug('Try to publish in Bonjour as "%s" (%s:%s)', name, host, port) logging.debug('Try to publish in Bonjour as "%s" (%s:%s)', name, host, port)
try: try:
refObject = pybonjour.DNSServiceRegister( refObject = pybonjour.DNSServiceRegister(

125
scripts/Deobfuscate.py

@ -28,7 +28,7 @@ NOTES:
1) To use this script you need Python installed on your system and 1) To use this script you need Python installed on your system and
select "Add to path" during its installation. Select this folder in select "Add to path" during its installation. Select this folder in
Config > Folders > Scripts Folder and select this script for each job Config > Folders > Scripts Folder and select this script for each job
you want it sued for, or link it to a category in Config > Categories. you want it used for, or link it to a category in Config > Categories.
2) Beware that files on the 'Cleanup List' are removed before 2) Beware that files on the 'Cleanup List' are removed before
scripts are called and if any of them happen to be required by scripts are called and if any of them happen to be required by
the found par2 file, it will fail. the found par2 file, it will fail.
@ -39,37 +39,116 @@ NOTES:
5) Feedback or bugs in this script can be reported on our forum: 5) Feedback or bugs in this script can be reported on our forum:
https://forums.sabnzbd.org/viewforum.php?f=9 https://forums.sabnzbd.org/viewforum.php?f=9
Improved by P1nGu1n
""" """
import os import os
import sys import sys
import time import time
import fnmatch import fnmatch
import subprocess import struct
import hashlib
from os import path
# Files to exclude and minimal file size for renaming
EXCLUDED_FILE_EXTS = ('.vob', '.bin')
MIN_FILE_SIZE = 40*1024*1024
# Are we being called from SABnzbd? # Are we being called from SABnzbd?
if not os.environ.get('SAB_VERSION'): if not os.environ.get('SAB_VERSION'):
print("This script needs to be called from SABnzbd as post-processing script.") print("This script needs to be called from SABnzbd as post-processing script.")
sys.exit(1) sys.exit(1)
# Files to exclude and minimal file size for renaming
EXCLUDED_FILE_EXTS = ('.vob', '.bin')
MIN_FILE_SIZE = 40*1024*1024
# see: http://parchive.sourceforge.net/docs/specifications/parity-volume-spec/article-spec.html
STRUCT_PACKET_HEADER = struct.Struct("<"
"8s" # Magic sequence
"Q" # Length of the entire packet (including header), must be multiple of 4
"16s" # MD5 Hash of packet
"16s" # Recovery Set ID
"16s" # Packet type
)
PACKET_TYPE_FILE_DESC = 'PAR 2.0\x00FileDesc'
STRUCT_FILE_DESC_PACKET = struct.Struct("<"
"16s" # File ID
"16s" # MD5 hash of the entire file
"16s" # MD5 hash of the first 16KiB of the file
"Q" # Length of the file
)
# Supporting functions
def print_splitter(): def print_splitter():
""" Simple helper function """ """ Simple helper function """
print('\n------------------------\n') print('\n------------------------\n')
# Windows or others?
par2_command = os.environ['SAB_PAR2_COMMAND']
if os.environ['SAB_MULTIPAR_COMMAND']:
par2_command = os.environ['SAB_MULTIPAR_COMMAND']
# Diagnostic info def decodePar(parfile):
result = False
dir = os.path.dirname(parfile)
with open(parfile, 'rb') as parfileToDecode:
while True:
header = parfileToDecode.read(STRUCT_PACKET_HEADER.size)
if not header: break # file fully read
(_, packetLength, _, _, packetType) = STRUCT_PACKET_HEADER.unpack(header)
bodyLength = packetLength - STRUCT_PACKET_HEADER.size
# only process File Description packets
if packetType != PACKET_TYPE_FILE_DESC:
# skip this packet
parfileToDecode.seek(bodyLength, os.SEEK_CUR)
continue
chunck = parfileToDecode.read(STRUCT_FILE_DESC_PACKET.size)
(_, _, hash16k, filelength) = STRUCT_FILE_DESC_PACKET.unpack(chunck)
# the filename makes up the rest of the packet, padded with null characters
targetName = parfileToDecode.read(bodyLength - STRUCT_FILE_DESC_PACKET.size).rstrip('\0')
targetPath = path.join(dir, targetName)
# file already exists, skip it
if path.exists(targetPath):
print "File already exists: " + targetName
continue
# find and rename file
srcPath = findFile(dir, filelength, hash16k)
if srcPath is not None:
os.rename(srcPath, targetPath)
print "Renamed file from " + path.basename(srcPath) + " to " + targetName
result = True
else:
print "No match found for: " + targetName
return result
def findFile(dir, filelength, hash16k):
for filename in os.listdir(dir):
filepath = path.join(dir, filename)
# check if the size matches as an indication
if path.getsize(filepath) != filelength: continue
with open(filepath, 'rb') as fileToMatch:
data = fileToMatch.read(16 * 1024)
m = hashlib.md5()
m.update(data)
# compare hash to confirm the match
if m.digest() == hash16k:
return filepath
return None
# Run main program
print_splitter() print_splitter()
print(('SABnzbd version: ', os.environ['SAB_VERSION'])) print(('SABnzbd version: ', os.environ['SAB_VERSION']))
print(('Job location: ', os.environ['SAB_COMPLETE_DIR'])) print(('Job location: ', os.environ['SAB_COMPLETE_DIR']))
print(('Par2-command: ', par2_command))
print_splitter() print_splitter()
# Search for par2 files # Search for par2 files
@ -86,34 +165,14 @@ if not matches:
# Run par2 from SABnzbd on them # Run par2 from SABnzbd on them
for par2_file in matches: for par2_file in matches:
# Build command, make it check the whole directory # Analyse data and analyse result
wildcard = os.path.join(os.environ['SAB_COMPLETE_DIR'], '*')
command = [str(par2_command), 'r', par2_file, wildcard]
# Start command
print_splitter() print_splitter()
print(('Starting command: ', repr(command))) if decodePar(par2_file):
try:
result = subprocess.check_output(command)
except subprocess.CalledProcessError as e:
# Multipar also gives non-zero in case of succes
result = e.output
# Show output
print_splitter()
print(result)
print_splitter()
# Last status-line for the History
# Check if the magic words are there
if 'Repaired successfully' in result or 'All files are correct' in result or \
'Repair complete' in result or 'All Files Complete' in result or 'PAR File(s) Incomplete' in result:
print('Recursive repair/verify finished.') print('Recursive repair/verify finished.')
run_renamer = False run_renamer = False
else: else:
print('Recursive repair/verify did not complete!') print('Recursive repair/verify did not complete!')
# No matches? Then we try to rename the largest file to the job-name # No matches? Then we try to rename the largest file to the job-name
if run_renamer: if run_renamer:
print_splitter() print_splitter()
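The rewritten Deobfuscate.py no longer shells out to par2/MultiPar: it reads the FileDesc packets itself and matches obfuscated files by size plus the MD5 of the first 16 KiB. A sketch of that matching step (paths and hash values are placeholders):

import hashlib
import os

def md5_of_first_16k(filepath):
    with open(filepath, 'rb') as f:
        return hashlib.md5(f.read(16 * 1024)).digest()

def matches(filepath, expected_length, expected_hash16k):
    # Size is a cheap pre-filter; the 16 KiB hash confirms the match
    return (os.path.getsize(filepath) == expected_length and
            md5_of_first_16k(filepath) == expected_hash16k)

A candidate that matches is then renamed to the name stored in the FileDesc packet, as decodePar() above does.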

71
tests/conftest.py

@ -1,71 +0,0 @@
#!/usr/bin/python -OO
# Copyright 2007-2018 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
tests.conftest - Wrappers to start SABnzbd for testing
"""
import os
import itertools
import urllib.request, urllib.error, urllib.parse
import pytest
import shutil
import time
import testhelper
from xprocess import ProcessStarter
@pytest.fixture(scope='session')
def sabnzbd_connect(request, xprocess):
# Get cache directory
base_path = os.path.dirname(os.path.abspath(__file__))
cache_dir = os.path.join(base_path, 'cache')
# Copy basic config file
try:
os.mkdir(cache_dir)
shutil.copyfile(os.path.join(base_path, 'sabnzbd.basic.ini'), os.path.join(cache_dir, 'sabnzbd.ini'))
except:
pass
class Starter(ProcessStarter):
# Wait for SABnzbd to start
pattern = "ENGINE Bus STARTED"
# Start without browser and with basic logging
args = 'python ../../SABnzbd.py -l1 -s %s:%s -b0 -f %s' % (testhelper.SAB_HOST, testhelper.SAB_PORT, cache_dir)
args = args.split()
# We have to wait a bit longer than default
def filter_lines(self, lines):
return itertools.islice(lines, 500)
# Shut it down at the end
def shutdown_sabnzbd():
# Gracefull shutdown request
testhelper.get_url_result('shutdown')
# Takes a second to shutdown
for x in range(5):
try:
shutil.rmtree(cache_dir)
break
except:
time.sleep(1)
request.addfinalizer(shutdown_sabnzbd)
return xprocess.ensure("sabnzbd", Starter)

4
tests/requirements.txt

@ -1,8 +1,8 @@
# SAB-Specific # SAB-Specific
cheetah cheetah3
cryptography cryptography
sabyenc sabyenc
# Testing # Testing
pytest-xprocess selenium
requests requests

7
tests/sabnzbd.basic.ini

@ -2,10 +2,3 @@ __version__ = 19
__encoding__ = utf-8 __encoding__ = utf-8
[misc] [misc]
api_key = apikey api_key = apikey
[servers]
[[sabnzbd.test]]
enable = 1
host = sabnzd.test
username = sabnzbd
password = sabnzbd

61
tests/test_api_pages.py

@ -1,61 +0,0 @@
#!/usr/bin/python -OO
# Copyright 2007-2018 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
tests.test_api_pages - The most basic testing if things work
"""
import pytest
import testhelper
def test_basic_api(sabnzbd_connect):
# Basic API test
assert 'queue' in testhelper.get_api_result('queue')
assert 'history' in testhelper.get_api_result('history')
assert 'status' in testhelper.get_api_result('fullstatus')
assert 'config' in testhelper.get_api_result('get_config')
def test_main_pages(sabnzbd_connect):
# See if the basic pages work
assert 'Traceback' not in testhelper.get_url_result()
assert 'Traceback' not in testhelper.get_url_result('history')
assert 'Traceback' not in testhelper.get_url_result('queue')
assert 'Traceback' not in testhelper.get_url_result('status')
def test_wizard_pages(sabnzbd_connect):
# Test if wizard pages work
assert 'Traceback' not in testhelper.get_url_result('wizard')
assert 'Traceback' not in testhelper.get_url_result('wizard/one')
assert 'Traceback' not in testhelper.get_url_result('wizard/two')
def test_config_pages(sabnzbd_connect):
# Test if config pages work
assert 'Traceback' not in testhelper.get_url_result('config')
assert 'Traceback' not in testhelper.get_url_result('config/general')
assert 'Traceback' not in testhelper.get_url_result('config/server')
assert 'Traceback' not in testhelper.get_url_result('config/categories')
assert 'Traceback' not in testhelper.get_url_result('config/switches')
assert 'Traceback' not in testhelper.get_url_result('config/sorting')
assert 'Traceback' not in testhelper.get_url_result('config/notify')
assert 'Traceback' not in testhelper.get_url_result('config/scheduling')
assert 'Traceback' not in testhelper.get_url_result('config/rss')
assert 'Traceback' not in testhelper.get_url_result('config/special')

295
tests/test_functional.py

@ -0,0 +1,295 @@
#!/usr/bin/python -OO
# Copyright 2007-2018 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
tests.test_functional - The most basic testing if things work
"""
import unittest
import random
from selenium import webdriver
from selenium.common.exceptions import WebDriverException, NoSuchElementException
from selenium.webdriver.chrome.options import Options as ChromeOptions
from selenium.webdriver.firefox.options import Options as FirefoxOptions
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from testhelper import *
class SABnzbdBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
# We try Chrome, fallback to Firefox
try:
driver_options = ChromeOptions()
# Headless on Appveyor/Travis
if "CI" in os.environ:
driver_options.add_argument("--headless")
driver_options.add_argument("--no-sandbox")
cls.driver = webdriver.Chrome(chrome_options=driver_options)
except WebDriverException:
driver_options = FirefoxOptions()
# Headless on Appveyor/Travis
if "CI" in os.environ:
driver_options.headless = True
cls.driver = webdriver.Firefox(firefox_options=driver_options)
# Get the newsserver-info, if available
if "SAB_NEWSSERVER_HOST" in os.environ:
cls.newsserver_host = os.environ['SAB_NEWSSERVER_HOST']
cls.newsserver_user = os.environ['SAB_NEWSSERVER_USER']
cls.newsserver_password = os.environ['SAB_NEWSSERVER_PASSWORD']
@classmethod
def tearDownClass(cls):
cls.driver.close()
cls.driver.quit()
def no_page_crash(self):
# Basic check that CherryPy did not return an error page
self.assertNotIn('500 Internal Server Error', self.driver.title)
def open_page(self, url):
# Open a page and test for crash
self.driver.get(url)
self.no_page_crash()
def scroll_to_top(self):
self.driver.find_element_by_tag_name('body').send_keys(Keys.CONTROL + Keys.HOME)
time.sleep(2)
def wait_for_ajax(self):
wait = WebDriverWait(self.driver, 15)
wait.until(lambda driver_wait: self.driver.execute_script('return jQuery.active') == 0)
wait.until(lambda driver_wait: self.driver.execute_script('return document.readyState') == 'complete')
@unittest.skipIf("SAB_NEWSSERVER_HOST" not in os.environ, "Test-server not specified")
class SABnzbdDownloadFlow(SABnzbdBaseTest):
def test_full(self):
# Wrapper for all the tests in order
self.start_wizard()
# Basic test
self.add_nzb_from_url("http://sabnzbd.org/tests/basic_rar5.nzb", "testfile.bin")
# Unicode test
self.add_nzb_from_url("http://sabnzbd.org/tests/unicode_rar.nzb", u"\u4f60\u597d\u4e16\u754c.bin")
# Unicode test with a missing article
#self.add_nzb_from_url("http://sabnzbd.org/tests/unicode_rar_broken.nzb", u"\u4f60\u597d\u4e16\u754c.bin")
def start_wizard(self):
# Language-selection
self.open_page("http://%s:%s/sabnzbd/wizard/" % (SAB_HOST, SAB_PORT))
self.driver.find_element_by_id("en").click()
self.driver.find_element_by_css_selector('.btn.btn-default').click()
# Fill server-info
self.no_page_crash()
host_inp = self.driver.find_element_by_name("host")
host_inp.clear()
host_inp.send_keys(self.newsserver_host)
username_imp = self.driver.find_element_by_name("username")
username_imp.clear()
username_imp.send_keys(self.newsserver_user)
pass_inp = self.driver.find_element_by_name("password")
pass_inp.clear()
pass_inp.send_keys(self.newsserver_password)
# With SSL
ssl_imp = self.driver.find_element_by_name("ssl")
if not ssl_imp.get_attribute('checked'):
ssl_imp.click()
# Test server-check
self.driver.find_element_by_id("serverTest").click()
self.wait_for_ajax()
self.assertIn("Connection Successful", self.driver.find_element_by_id("serverResponse").text)
# Final page done
self.driver.find_element_by_id("next-button").click()
self.no_page_crash()
self.assertIn("http://%s:%s/sabnzbd" % (SAB_HOST, SAB_PORT), self.driver.find_element_by_class_name("quoteBlock").text)
# Go to SAB!
self.driver.find_element_by_css_selector('.btn.btn-success').click()
self.no_page_crash()
def add_nzb_from_url(self, file_url, file_output):
test_job_name = 'testfile_%s' % random.randint(500, 1000)
self.open_page("http://%s:%s/sabnzbd/" % (SAB_HOST, SAB_PORT))
# Wait for modal to open, add URL
self.driver.find_element_by_css_selector('a[href="#modal-add-nzb"]').click()
time.sleep(1)
self.driver.find_element_by_name("nzbURL").send_keys(file_url)
self.driver.find_element_by_name("nzbname").send_keys(test_job_name)
self.driver.find_element_by_css_selector('form[data-bind="submit: addNZBFromURL"] input[type="submit"]').click()
# We wait for 30 seconds to let it complete
for _ in range(120):
try:
# Locate resulting row
result_row = self.driver.find_element_by_xpath('//*[@id="history-tab"]//tr[td//text()[contains(., "%s")]]' % test_job_name)
# Did it complete?
if result_row.find_element_by_css_selector('td.status').text == 'Completed':
break
else:
time.sleep(1)
except NoSuchElementException:
time.sleep(1)
else:
self.fail("Download did not complete")
# Check if the file exists on disk
file_to_find = os.path.join(SAB_COMPLETE_DIR, test_job_name, file_output)
self.assertTrue(os.path.exists(file_to_find), "File not found")
# Shutil can't handle unicode, need to remove the file here
os.remove(file_to_find)
class SABnzbdBasicPagesTest(SABnzbdBaseTest):
def test_base_pages(self):
# Quick-check of all Config pages
test_urls = ['config',
'config/general',
'config/folders',
'config/server',
'config/categories',
'config/switches',
'config/sorting',
'config/notify',
'config/scheduling',
'config/rss',
'config/special']
for test_url in test_urls:
self.open_page("http://%s:%s/%s" % (SAB_HOST, SAB_PORT, test_url))
@unittest.skipIf("SAB_NEWSSERVER_HOST" not in os.environ, "Test-server not specified")
class SABnzbdConfigServers(SABnzbdBaseTest):
server_name = "_SeleniumServer"
def open_config_servers(self):
# Test if base page works
self.open_page("http://%s:%s/sabnzbd/config/server" % (SAB_HOST, SAB_PORT))
self.scroll_to_top()
# Show advanced options
advanced_btn = self.driver.find_element_by_name("advanced-settings-button")
if not advanced_btn.get_attribute('checked'):
advanced_btn.click()
def add_test_server(self):
# Add server
self.driver.find_element_by_id("addServerButton").click()
host_inp = self.driver.find_element_by_name("host")
host_inp.clear()
host_inp.send_keys(self.newsserver_host)
username_imp = self.driver.find_element_by_css_selector("#addServerContent input[data-hide='username']")
username_imp.clear()
username_imp.send_keys(self.newsserver_user)
pass_inp = self.driver.find_element_by_css_selector("#addServerContent input[data-hide='password']")
pass_inp.clear()
pass_inp.send_keys(self.newsserver_password)
# With SSL
ssl_imp = self.driver.find_element_by_name("ssl")
if not ssl_imp.get_attribute('checked'):
ssl_imp.click()
# Check that we filled the right port automatically
self.assertEqual(self.driver.find_element_by_id("port").get_attribute('value'), '563')
# Test server-check
self.driver.find_element_by_css_selector("#addServerContent .testServer").click()
self.wait_for_ajax()
self.assertIn("Connection Successful", self.driver.find_element_by_css_selector('#addServerContent .result-box').text)
# Set test-servername
self.driver.find_element_by_id("displayname").send_keys(self.server_name)
# Add and show details
pass_inp.send_keys(Keys.RETURN)
time.sleep(1)
if not self.driver.find_element_by_id("host0").is_displayed():
self.driver.find_element_by_class_name("showserver").click()
def remove_server(self):
# Remove the first server and accept the confirmation
self.driver.find_element_by_class_name("delServer").click()
self.driver.switch_to.alert.accept()
# Check that it's gone
time.sleep(2)
self.assertNotIn(self.server_name, self.driver.page_source)
def test_add_and_remove_server(self):
self.open_config_servers()
self.add_test_server()
self.remove_server()
def test_empty_bad_password(self):
self.open_config_servers()
self.add_test_server()
# Test server-check with empty password
pass_inp = self.driver.find_elements_by_css_selector("input[data-hide='password']")[1]
pass_inp.clear()
self.driver.find_elements_by_css_selector(".testServer")[1].click()
self.wait_for_ajax()
check_result = self.driver.find_elements_by_css_selector('.result-box')[1].text.lower()
self.assertTrue("authentication failed" in check_result or "invalid username or password" in check_result)
# Test server-check with bad password
pass_inp.send_keys("bad")
self.driver.find_elements_by_css_selector(".testServer")[1].click()
self.wait_for_ajax()
self.assertTrue("authentication failed" in check_result or "invalid username or password" in check_result)
# Finish
self.remove_server()
class SABnzbdConfigCategories(SABnzbdBaseTest):
category_name = "testCat"
def test_page(self):
# Test if base page works
self.open_page("http://%s:%s/sabnzbd/config/categories" % (SAB_HOST, SAB_PORT))
# Add new category
self.driver.find_elements_by_name("newname")[1].send_keys("testCat")
self.driver.find_element_by_xpath("//button/text()[normalize-space(.)='Add']/parent::*").click()
self.no_page_crash()
self.assertNotIn(self.category_name, self.driver.page_source)
if __name__ == "__main__":
unittest.main(failfast=True)

58
tests/test_nzb.py

@ -1,58 +0,0 @@
#!/usr/bin/python -OO
# Copyright 2007-2018 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
tests.test_nzb - Basic NZB adding support
"""
import os
import pytest
import testhelper
# Where are we now?
base_path = os.path.dirname(os.path.abspath(__file__))
def nzo_in_queue(nzo_response):
""" Helper function for checking if file is in queue and then remove it """
queue_res = testhelper.get_api_result('queue')
nzo_id = nzo_response['nzo_ids'][0]
# Was it added?
assert nzo_response['status'] == True
assert queue_res['queue']['slots'][0]['nzo_id'] == nzo_response['nzo_ids'][0]
# Let's remove it
remove_response = testhelper.get_api_result('queue', {'name': 'delete', 'value': nzo_id})
assert nzo_response['status'] == True
# Really gone?
queue_res = testhelper.get_api_result('queue')
assert not queue_res['queue']['slots']
def test_addfile(sabnzbd_connect):
# See if basic upload works
nzo_response = testhelper.upload_nzb(os.path.join(base_path, 'data', 'reftestnzb.nzb'))
nzo_in_queue(nzo_response)
def test_addlocalfile(sabnzbd_connect):
# See if basic adding from disk-file works
nzo_response = testhelper.get_api_result('addlocalfile', {'name': os.path.join(base_path, 'data', 'reftestnzb.nzb')})
nzo_in_queue(nzo_response)

63
tests/testhelper.py

@ -19,12 +19,18 @@
tests.testhelper - Basic helper functions tests.testhelper - Basic helper functions
""" """
import urllib.request, urllib.error, urllib.parse import os
import json import shutil
import subprocess
import time
import requests import requests
SAB_HOST = 'localhost' SAB_HOST = 'localhost'
SAB_PORT = 8081 SAB_PORT = 8081
SAB_BASE_DIR = os.path.dirname(os.path.abspath(__file__))
SAB_CACHE_DIR = os.path.join(SAB_BASE_DIR, 'cache')
SAB_COMPLETE_DIR = os.path.join(SAB_CACHE_DIR, 'Downloads', 'complete')
def get_url_result(url=''): def get_url_result(url=''):
@ -41,8 +47,57 @@ def get_api_result(mode, extra_arguments={}):
return r.json() return r.json()
def upload_nzb(file): def upload_nzb(filename):
""" Upload file and return nzo_id reponse """ """ Upload file and return nzo_id reponse """
files = {'name': open(file, 'rb')} files = {'name': open(filename, 'rb')}
arguments = {'apikey': 'apikey', 'mode': 'addfile', 'output': 'json'} arguments = {'apikey': 'apikey', 'mode': 'addfile', 'output': 'json'}
return requests.post('http://%s:%s/api' % (SAB_HOST, SAB_PORT), files=files, data=arguments).json() return requests.post('http://%s:%s/api' % (SAB_HOST, SAB_PORT), files=files, data=arguments).json()
def setUpModule():
# Remove cache if already there
if os.path.isdir(SAB_CACHE_DIR):
shutil.rmtree(SAB_CACHE_DIR)
# Copy basic config file with API key
os.mkdir(SAB_CACHE_DIR)
shutil.copyfile(os.path.join(SAB_BASE_DIR, 'sabnzbd.basic.ini'), os.path.join(SAB_CACHE_DIR, 'sabnzbd.ini'))
# Check if we have language files
if not os.path.exists(os.path.join(SAB_BASE_DIR, '..', 'locale')):
lang_command = 'python %s/../tools/make_mo.py' % SAB_BASE_DIR
subprocess.Popen(lang_command.split())
# Start SABnzbd
sab_command = 'python %s/../SABnzbd.py --new -l2 -s %s:%s -b0 -f %s' % (SAB_BASE_DIR, SAB_HOST, SAB_PORT, SAB_CACHE_DIR)
subprocess.Popen(sab_command.split())
# Wait for SAB to respond
for _ in range(10):
try:
get_url_result()
# Woohoo, we're up!
break
except requests.ConnectionError:
time.sleep(1)
else:
# Make sure we clean up
tearDownModule()
raise requests.ConnectionError()
def tearDownModule():
# Graceful shutdown request
try:
get_url_result('shutdown')
except requests.ConnectionError:
pass
# Takes a second to shutdown
for x in range(10):
try:
shutil.rmtree(SAB_CACHE_DIR)
break
except OSError:
print "Unable to remove cache dir (try %d)" % x
time.sleep(1)
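setUpModule() starts SABnzbd in a subprocess and then polls the web interface until it answers. A sketch of that readiness poll (host, port and the 10-attempt limit mirror the constants above):

import time

import requests

def wait_for_sabnzbd(host='localhost', port=8081, attempts=10):
    # Keep probing until the web interface responds or we run out of attempts
    for _ in range(attempts):
        try:
            requests.get('http://%s:%s/' % (host, port))
            return True
        except requests.ConnectionError:
            time.sleep(1)
    return False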

11
tools/extract_pot.py

@ -27,7 +27,7 @@ import re
f = open('sabnzbd/version.py') f = open('sabnzbd/version.py')
code = f.read() code = f.read()
f.close() f.close()
exec(code) exec code
# Fixed information for the POT header # Fixed information for the POT header
HEADER = r'''# HEADER = r'''#
@ -53,7 +53,7 @@ EMAIL_DIR = 'email'
DOMAIN = 'SABnzbd' DOMAIN = 'SABnzbd'
DOMAIN_EMAIL = 'SABemail' DOMAIN_EMAIL = 'SABemail'
DOMAIN_NSIS = 'SABnsis' DOMAIN_NSIS = 'SABnsis'
PARMS = '-d %s -p %s -k T -k Ta -k TT -o %s.pot.tmp' % (DOMAIN, PO_DIR, DOMAIN) PARMS = '-d %s -p %s -w500 -k T -k Ta -k TT -o %s.pot.tmp' % (DOMAIN, PO_DIR, DOMAIN)
FILES = 'SABnzbd.py SABHelper.py SABnzbdDelegate.py sabnzbd/*.py sabnzbd/utils/*.py' FILES = 'SABnzbd.py SABHelper.py SABnzbdDelegate.py sabnzbd/*.py sabnzbd/utils/*.py'
FILE_CACHE = {} FILE_CACHE = {}
@ -108,8 +108,11 @@ def get_context(line):
item = item.split(':')[0] item = item.split(':')[0]
if context: if context:
newlines.append('%s [%s]' % (item, context)) # Format context
else: item = '%s [%s]' % (item, context)
# Only add new texts
if item not in newlines:
newlines.append(item) newlines.append(item)
return '#: ' + ' # '.join(newlines) + '\n' return '#: ' + ' # '.join(newlines) + '\n'

38
util/apireg.py

@ -69,7 +69,7 @@ def set_connection_info(url, user=True):
try: try:
hive = winreg.ConnectRegistry(None, section) hive = winreg.ConnectRegistry(None, section)
try: try:
key = winreg.CreateKey(hive, keypath) _winreg.CreateKey(hive, keypath)
except: except:
pass pass
key = winreg.OpenKey(hive, keypath) key = winreg.OpenKey(hive, keypath)
@ -105,18 +105,42 @@ def get_install_lng():
""" Return language-code used by the installer """ """ Return language-code used by the installer """
lng = 0 lng = 0
try: try:
hive = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) hive = _winreg.ConnectRegistry(None, _winreg.HKEY_CURRENT_USER)
key = winreg.OpenKey(hive, r"Software\SABnzbd") key = _winreg.OpenKey(hive, r"Software\SABnzbd")
for i in range(0, winreg.QueryInfoKey(key)[1]): for i in range(0, _winreg.QueryInfoKey(key)[1]):
name, value, val_type = winreg.EnumValue(key, i) name, value, val_type = _winreg.EnumValue(key, i)
if name == 'Installer Language': if name == 'Installer Language':
lng = value lng = value
winreg.CloseKey(key) winreg.CloseKey(key)
except WindowsError: except WindowsError:
pass pass
finally: finally:
winreg.CloseKey(hive) _winreg.CloseKey(hive)
return lng
if lng in LanguageMap:
return LanguageMap[lng]
return 'en'
# Map from NSIS-codepage to our language-strings
LanguageMap = {
'1033': 'en',
'1036': 'fr',
'1031': 'de',
'1043': 'nl',
'1035': 'fi',
'1045': 'pl',
'1053': 'sv',
'1030': 'da',
'2068': 'nb',
'1048': 'ro',
'1034': 'es',
'1046': 'pr_BR',
'3098': 'sr',
'1037': 'he',
'1049': 'ru',
'2052': 'zh_CN'
}
if __name__ == '__main__': if __name__ == '__main__':

1
util/mailslot.py

@ -19,7 +19,6 @@
sabnzbd.mailslot - Mailslot communication sabnzbd.mailslot - Mailslot communication
""" """
import os
from win32file import GENERIC_WRITE, FILE_SHARE_READ, \ from win32file import GENERIC_WRITE, FILE_SHARE_READ, \
OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL
from ctypes import c_uint, c_buffer, byref, sizeof, windll from ctypes import c_uint, c_buffer, byref, sizeof, windll

BIN
win/par2/multipar/par2j.exe

Binary file not shown.

BIN
win/par2/multipar/par2j64.exe

Binary file not shown.

BIN
win/unrar/UnRAR.exe

Binary file not shown.

BIN
win/unrar/x64/UnRAR.exe

Binary file not shown.