Browse Source

Merge branch 'develop' into py3

pull/1219/head
Safihre 7 years ago
parent
commit
579e07adc3
  1. 4
      .gitignore
  2. 2
      ABOUT.txt
  3. 2
      INSTALL.txt
  4. 4
      ISSUES.txt
  5. 1
      README.md
  6. 33
      README.mkd
  7. 1
      SABHelper.py
  8. 47
      SABnzbd.py
  9. 2
      interfaces/Config/templates/config_notify.tmpl
  10. 24
      interfaces/Config/templates/config_rss.tmpl
  11. 27
      interfaces/Config/templates/staticcfg/css/style.css
  12. 2
      interfaces/Glitter/templates/static/stylesheets/colorschemes/Night.css
  13. 24
      interfaces/Glitter/templates/static/stylesheets/glitter.css
  14. 14
      interfaces/wizard/static/style.css
  15. 4
      interfaces/wizard/two.html
  16. BIN
      osx/unrar/unrar
  17. 340
      po/main/SABnzbd.pot
  18. 344
      po/main/da.po
  19. 360
      po/main/de.po
  20. 344
      po/main/es.po
  21. 344
      po/main/fi.po
  22. 346
      po/main/fr.po
  23. 448
      po/main/he.po
  24. 344
      po/main/nb.po
  25. 344
      po/main/nl.po
  26. 344
      po/main/pl.po
  27. 344
      po/main/pt_BR.po
  28. 344
      po/main/ro.po
  29. 348
      po/main/ru.po
  30. 344
      po/main/sr.po
  31. 344
      po/main/sv.po
  32. 344
      po/main/zh_CN.po
  33. 57
      sabnzbd/__init__.py
  34. 121
      sabnzbd/api.py
  35. 15
      sabnzbd/assembler.py
  36. 4
      sabnzbd/config.py
  37. 2
      sabnzbd/constants.py
  38. 15
      sabnzbd/database.py
  39. 6
      sabnzbd/decoder.py
  40. 14
      sabnzbd/directunpacker.py
  41. 4
      sabnzbd/dirscanner.py
  42. 84
      sabnzbd/downloader.py
  43. 55
      sabnzbd/interface.py
  44. 11
      sabnzbd/misc.py
  45. 129
      sabnzbd/newsunpack.py
  46. 14
      sabnzbd/newswrapper.py
  47. 5
      sabnzbd/notifier.py
  48. 25
      sabnzbd/nzbqueue.py
  49. 59
      sabnzbd/nzbstuff.py
  50. 8
      sabnzbd/osxmenu.py
  51. 16
      sabnzbd/par2file.py
  52. 4
      sabnzbd/postproc.py
  53. 44
      sabnzbd/rss.py
  54. 10
      sabnzbd/sabtray.py
  55. 1
      sabnzbd/sabtraylinux.py
  56. 8
      sabnzbd/skintext.py
  57. 2
      sabnzbd/sorting.py
  58. 4
      sabnzbd/urlgrabber.py
  59. 5
      sabnzbd/utils/certgen.py
  60. 2
      sabnzbd/utils/checkdir.py
  61. 9
      sabnzbd/utils/diskspeed.py
  62. 13
      sabnzbd/utils/getperformance.py
  63. 222
      sabnzbd/utils/happyeyeballs.py
  64. 1
      sabnzbd/utils/kronos.py
  65. 2
      sabnzbd/utils/pystone.py
  66. 4
      sabnzbd/utils/servertests.py
  67. 1
      sabnzbd/utils/upload.py
  68. 6
      sabnzbd/zconfig.py
  69. 125
      scripts/Deobfuscate.py
  70. 71
      tests/conftest.py
  71. 4
      tests/requirements.txt
  72. 9
      tests/sabnzbd.basic.ini
  73. 61
      tests/test_api_pages.py
  74. 295
      tests/test_functional.py
  75. 58
      tests/test_nzb.py
  76. 65
      tests/testhelper.py
  77. 11
      tools/extract_pot.py
  78. 38
      util/apireg.py
  79. 1
      util/mailslot.py
  80. BIN
      win/par2/multipar/par2j.exe
  81. BIN
      win/par2/multipar/par2j64.exe
  82. BIN
      win/unrar/UnRAR.exe
  83. BIN
      win/unrar/x64/UnRAR.exe

4
.gitignore

@ -16,12 +16,14 @@ SABnzbd*.exe
SABnzbd*.gz
SABnzbd*.dmg
# WingIDE project files
# WingIDE/PyCharm project files
*.wp[ru]
.idea
# Testing folders
.cache
.xprocess
tests/cache
# General junk
*.keep

2
ABOUT.txt

@ -1,5 +1,5 @@
*******************************************
*** This is SABnzbd 2.3.4 ***
*** This is SABnzbd 2.3.5 ***
*******************************************
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically,

2
INSTALL.txt

@ -1,4 +1,4 @@
SABnzbd 2.3.4
SABnzbd 2.3.5
-------------------------------------------------------------------------------
0) LICENSE

4
ISSUES.txt

@ -66,3 +66,7 @@
Config->Special->wait_for_dfolder to 1.
SABnzbd will appear to hang until the drive is mounted.
- If you experience speed-drops to KB/s when using a VPN, try setting the number of connections
to your servers to a total of 7. There is a CPU-usage reduction feature in SABnzbd that
gets confused by the way some VPN's handle the state of a connection. Below 8 connections
this feature is not active.

1
README.md

@ -21,7 +21,6 @@ Optional:
- `python-cryptography` (enables certificate generation and detection of encrypted RAR-files during download)
- `python-dbus` (enable option to Shutdown/Restart/Standby PC on queue finish)
- `7zip`
- `unzip`
Your package manager should supply these. If not, we've got links in our more in-depth [installation guide](https://github.com/sabnzbd/sabnzbd/blob/master/INSTALL.txt).

33
README.mkd

@ -1,18 +1,25 @@
Release Notes - SABnzbd 2.3.4
Release Notes - SABnzbd 2.3.5
=========================================================
## Changes since 2.3.3
- Device hostname in hostname-verification always lowercased
- Hostnames ending in ".local" are always accepted
- URLGrabber would not always detect correct filename
- URLGrabber would ignore some successful downloads
- Always send NNTP QUIT after server-test
- Added option "--disable-file-log" to disable file-based logging
- Added CORS-header to API
- Windows: Service compatibility with Windows 10 April update
- Windows: Update Python to 2.7.15
- Windows: Update 7zip to 18.05
- macOS: Restore compatibility with El Capitan (10.11)
## Bug fixes since 2.3.4
- Reworked Deobfuscate.py script for much faster renaming
- All scripts can now receive input through environment variables
- Unable to set only one Indexer Category per category
- Could falsely report not enough blocks are available for repair
- Failures in un-(7)zip or file-joining would not fail the job
- Direct Unpack could abort unnecessarily
- Rare crash during file assembly
- Server hostname is now used in warnings and logs
- Improved disk performance measurement
- Overall improvements in stability and reliability
- Windows: MultiPar repair of joinable files could fail
- Windows: Tray icon also shows remaining size when paused
- Windows: Wizard would not default to installer language
- Windows: Update MultiPar to 1.3.0.1
- Windows and macOS: Update UnRar to 5.60
Looking for help with SABnzbd development:
https://www.reddit.com/r/usenet/918nxv/
## Upgrading from 2.2.x and older
- Finish queue

1
SABHelper.py

@ -20,7 +20,6 @@ if sys.version_info[:2] < (2, 6) or sys.version_info[:2] >= (3, 0):
print("Sorry, requires Python 2.6 or 2.7.")
sys.exit(1)
import os
import time
import subprocess

47
SABnzbd.py

@ -476,7 +476,7 @@ def all_localhosts():
def check_resolve(host):
""" Return True if 'host' resolves """
try:
dummy = socket.getaddrinfo(host, None)
socket.getaddrinfo(host, None)
except:
# Does not resolve
return False
@ -572,7 +572,7 @@ def get_webhost(cherryhost, cherryport, https_port):
cherryhost = cherryhost.strip('[]')
else:
try:
info = socket.getaddrinfo(cherryhost, None)
socket.getaddrinfo(cherryhost, None)
except:
cherryhost = cherryhost.strip('[]')
@ -633,12 +633,12 @@ def attach_server(host, port, cert=None, key=None, chain=None):
def is_sabnzbd_running(url):
""" Return True when there's already a SABnzbd instance running. """
try:
url = '%s&mode=version' % (url)
url = '%s&mode=version' % url
# Do this without certificate verification, few installations will have that
prev = sabnzbd.set_https_verification(False)
ver = get_from_url(url)
sabnzbd.set_https_verification(prev)
return (ver and (re.search(r'\d+\.\d+\.', ver) or ver.strip() == sabnzbd.__version__))
return ver and (re.search(r'\d+\.\d+\.', ver) or ver.strip() == sabnzbd.__version__)
except:
return False
@ -702,7 +702,7 @@ def evaluate_inipath(path):
return path
def commandline_handler(frozen=True):
def commandline_handler():
""" Split win32-service commands are true parameters
Returns:
service, sab_opts, serv_opts, upload_nzbs
@ -803,7 +803,6 @@ def main():
vista_plus = False
win64 = False
repair = 0
api_url = None
no_login = False
sabnzbd.RESTART_ARGS = [sys.argv[0]]
pid_path = None
@ -839,9 +838,9 @@ def main():
elif opt in ('-b', '--browser'):
try:
autobrowser = bool(int(arg))
except:
except ValueError:
autobrowser = True
elif opt in ('--autorestarted', ):
elif opt == '--autorestarted':
autorestarted = True
elif opt in ('-c', '--clean'):
clean_up = True
@ -860,36 +859,36 @@ def main():
exit_sab(0)
elif opt in ('-p', '--pause'):
pause = True
elif opt in ('--https',):
elif opt == '--https':
https_port = int(arg)
sabnzbd.RESTART_ARGS.append(opt)
sabnzbd.RESTART_ARGS.append(arg)
elif opt in ('--repair',):
elif opt == '--repair':
repair = 1
pause = True
elif opt in ('--repair-all',):
elif opt == '--repair-all':
repair = 2
pause = True
elif opt in ('--log-all',):
elif opt == '--log-all':
sabnzbd.LOG_ALL = True
elif opt in ('--disable-file-log'):
elif opt == '--disable-file-log':
no_file_log = True
elif opt in ('--no-login',):
elif opt == '--no-login':
no_login = True
elif opt in ('--pid',):
elif opt == '--pid':
pid_path = arg
sabnzbd.RESTART_ARGS.append(opt)
sabnzbd.RESTART_ARGS.append(arg)
elif opt in ('--pidfile',):
elif opt == '--pidfile':
pid_file = arg
sabnzbd.RESTART_ARGS.append(opt)
sabnzbd.RESTART_ARGS.append(arg)
elif opt in ('--new',):
elif opt == '--new':
new_instance = True
elif opt in ('--console',):
elif opt == '--console':
sabnzbd.RESTART_ARGS.append(opt)
osx_console = True
elif opt in ('--ipv6_hosting',):
elif opt == '--ipv6_hosting':
ipv6_hosting = arg
sabnzbd.MY_FULLNAME = os.path.normpath(os.path.abspath(sabnzbd.MY_FULLNAME))
@ -983,13 +982,13 @@ def main():
if enable_https and https_port:
try:
portend.free(cherryhost, https_port, timeout=0.05)
except IOError as error:
except IOError:
Bail_Out(browserhost, cherryport)
except:
Bail_Out(browserhost, cherryport, '49')
try:
portend.free(cherryhost, cherryport, timeout=0.05)
except IOError as error:
except IOError:
Bail_Out(browserhost, cherryport)
except:
Bail_Out(browserhost, cherryport, '49')
@ -1026,7 +1025,7 @@ def main():
else:
# In case HTTPS == HTTP port
cherryport = newport
sabnzbd.cfg.port.set(newport)
sabnzbd.cfg.cherryport.set(newport)
except:
# Something else wrong, probably badly specified host
Bail_Out(browserhost, cherryport, '49')
@ -1208,8 +1207,6 @@ def main():
if autobrowser is not None:
sabnzbd.cfg.autobrowser.set(autobrowser)
else:
autobrowser = sabnzbd.cfg.autobrowser()
if not sabnzbd.WIN_SERVICE and not getattr(sys, 'frozen', None) == 'macosx_app':
signal.signal(signal.SIGINT, sabnzbd.sig_handler)
@ -1574,7 +1571,7 @@ if sabnzbd.WIN32:
win32serviceutil.ServiceFramework.__init__(self, args)
self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
self.overlapped = pywintypes.OVERLAPPED() # @UndefinedVariable
self.overlapped = pywintypes.OVERLAPPED()
self.overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None)
sabnzbd.WIN_SERVICE = self

2
interfaces/Config/templates/config_notify.tmpl

@ -194,7 +194,7 @@
<fieldset>
<div class="field-pair">
<label class="config" for="nscript_script">$T('opt-nscript_script')</label>
<select name="nscript_script">
<select name="nscript_script" id="nscript_script">
<!--#for $sc in $scripts#-->
<option value="$sc" <!--#if $nscript_script == $sc then 'selected="selected"' else ""#-->>$Tspec($sc)</option>
<!--#end for#-->

24
interfaces/Config/templates/config_rss.tmpl

@ -390,9 +390,10 @@
<th class="no-sort">$T('link-download')</th>
<th>$T('rss-filter')</th>
<th>$T('size')</th>
<th width="65%">$T('sort-title')</th>
<th width="60%">$T('sort-title')</th>
<th>$T('category')</th>
<th class="default-sort">$T('nzo-age')</th>
<th>$T('source')</th>
</tr>
</thead>
<!--#for $job in $matched#-->
@ -411,6 +412,13 @@
<td>$job['title']</td>
<td>$job['cat']</td>
<td data-sort-value="$job['age_ms']">$job['age']</td>
<td data-sort-value="$job['baselink']" title="$job['baselink']">
<!--#if not $job['infourl']#-->
<div class="favicon source-icon" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></div>
<!--#else#-->
<a class="favicon source-icon" href="$job['infourl']" target="_blank" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></a>
<!--#end if#-->
</td>
</tr>
<!--#end for#-->
</table>
@ -426,9 +434,10 @@
<th class="no-sort">$T('link-download')</th>
<th>$T('rss-filter')</th>
<th>$T('size')</th>
<th width="65%">$T('sort-title')</th>
<th width="60%">$T('sort-title')</th>
<th>$T('category')</th>
<th class="default-sort">$T('nzo-age')</th>
<th>$T('source')</th>
</tr>
</thead>
<!--#for $job in $unmatched#-->
@ -447,6 +456,13 @@
<td>$job['title']</td>
<td>$job['cat']</td>
<td data-sort-value="$job['age_ms']">$job['age']</td>
<td data-sort-value="$job['baselink']" title="$job['baselink']">
<!--#if not $job['infourl']#-->
<div class="favicon source-icon" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></div>
<!--#else#-->
<a class="favicon source-icon" href="$job['infourl']" target="_blank" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></a>
<!--#end if#-->
</td>
</tr>
<!--#end for#-->
</table>
@ -476,8 +492,10 @@
<td>$job['title']</td>
<td>$job['cat']</td>
<td data-sort-value="$job['baselink']" title="$job['baselink']">
<!--#if $job['baselink']#-->
<!--#if not $job['infourl']#-->
<div class="favicon source-icon" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></div>
<!--#else#-->
<a class="favicon source-icon" href="$job['infourl']" target="_blank" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></a>
<!--#end if#-->
</td>
</tr>

27
interfaces/Config/templates/staticcfg/css/style.css

@ -4,16 +4,13 @@ body {
}
#logo {
display: block;
margin: auto;
margin-top: 3px;
margin: 3px auto auto;
}
#content {
color: #000;
padding: 15px 20px 20px;
padding: 65px 20px 20px;
font-size: 13px;
padding-top: 65px;
padding-bottom: 20px;
}
.colmask {
z-index: 20;
@ -529,7 +526,7 @@ tr.separator {
}
#filebrowser_modal .checkbox {
float: left;
margin: 8px 5px 0x;
margin: 8px 5px 0px;
}
#filebrowser_modal .checkbox input {
margin-top: 1px;
@ -576,6 +573,7 @@ h2.activeRSS {
float: left;
margin: 0 6px 0 2px;
text-align: center;
color: black !important;
}
.source-icon span {
top: -3px;
@ -600,8 +598,7 @@ h2.activeRSS {
padding-top: .4em;
}
#subscriptions .chk {
padding: 5px;
padding-top: 8px;
padding: 8px 5px 5px;
vertical-align: middle;
}
#subscriptions .title {
@ -773,7 +770,6 @@ input[type=radio] {
input[type="button"],
input[type="submit"] {
color: #333;
background-color: #fff;
display:inline-block;
padding:6px 12px;
margin-bottom: 0;
@ -784,7 +780,7 @@ input[type="submit"] {
white-space:nowrap;
vertical-align:middle;
cursor:pointer;
background-image:none;
background: #fff none;
border:1px solid #ccc;
height: 34px;
}
@ -1002,7 +998,7 @@ input[type="checkbox"] {
}
.Servers .col2.server-disabled .label {
color: ##777 !important;
color: #777 !important;
}
.Servers .col2 .label:nth-child(2) {
@ -1063,9 +1059,7 @@ input[type="checkbox"] {
.Servers .col2 label,
.Email .col2 label {
margin: 0;
margin-left: 4px;
margin-top: 2px;
margin: 2px 0 0 4px;
cursor: pointer;
}
@ -1141,6 +1135,7 @@ input[type="checkbox"] {
}
.value-and-select select {
min-width: 30px;
margin-top: 1px;
}
.dotOne, .dotTwo, .dotThree {
@ -1341,9 +1336,7 @@ input[type="checkbox"] {
}
.desc {
margin: 0;
margin-left: 3px;
margin-top: 2px;
margin: 2px 0 0 3px;
padding: 0 !important;
}

2
interfaces/Glitter/templates/static/stylesheets/colorschemes/Night.css

@ -76,7 +76,7 @@ legend,
background-color: #666;
}
.navbar-collapse.in .dropdown-menu, {
.navbar-collapse.in .dropdown-menu {
border: none;
}

24
interfaces/Glitter/templates/static/stylesheets/glitter.css

@ -105,10 +105,7 @@ h2 {
.navbar-logo {
vertical-align: middle;
display: inline-block;
margin-right: 12px;
margin-left: 15px;
margin-top: 4px;
margin-bottom: -1px;
margin: 4px 12px -1px 15px;
}
.navbar-logo svg {
@ -288,8 +285,7 @@ li.dropdown {
opacity: 0.9;
color: black;
z-index: 2000;
padding: 1em;
padding-top: 15%;
padding: 15% 1em 1em;
}
.main-filedrop.in span {
@ -721,8 +717,7 @@ td.delete .dropdown>a {
td.delete input[type="checkbox"],
.add-nzb-inputbox-options input[type="checkbox"]{
margin: 0;
margin-bottom: -2px;
margin: 0 0 -2px;
display: block;
}
@ -1155,8 +1150,7 @@ tr.queue-item>td:first-child>a {
#history-options {
margin-top: 0;
margin-left: 10px;
padding: 0;
padding-left: 4px;
padding: 0 0 0 4px;
}
#history-options .hover-button {
@ -1536,8 +1530,7 @@ tr.queue-item>td:first-child>a {
.add-nzb-inputbox span {
display: inline-block;
margin: 8px 2px 0px 5px;
margin-left: -20px;
margin: 8px 2px 0px -20px;
}
.btn-file {
@ -1630,11 +1623,9 @@ input[name="nzbURL"] {
#modal-item-files .multioperations-selector {
clear: left;
margin: 0;
float: left;
padding: 5px 8px;
margin-bottom: 5px;
margin-right: 10px;
margin: 0 10px 5px 0;
border: 1px solid #cccccc;
}
@ -2045,9 +2036,8 @@ a:focus {
right: 17px;
display: inline-block;
border-right: 6px solid transparent;
border-bottom: 6px solid #ccc;
border-bottom: 6px solid rgba(0, 0, 0, 0.2);
border-left: 6px solid transparent;
border-bottom-color: rgba(0, 0, 0, 0.2);
content: '';
}

14
interfaces/wizard/static/style.css

@ -88,19 +88,12 @@ label {
float: right;
margin: 0;
}
.sup {
vertical-align: sup !important;
}
.align-right {
text-align: right;
}
.align-center {
text-align: center;
}
.float-center {
float: center;
}
.unselected,
.selected {
display: inline-block;
@ -123,9 +116,6 @@ label {
.bigger {
font-size: 14px;
}
.padded {
padding: 12px;
}
.bigger input {
font-size: 16px;
}
@ -135,9 +125,6 @@ label {
.full-width {
width: 100%;
}
.bigbutton {
font-size: 18px !important;
}
.correct {
border: 2px solid #00cc22;
}
@ -153,7 +140,6 @@ label {
.text-input-wide {
width: 230px;
}
.text-input-thin,
#server-hidden-settings input[type="number"] {
width: 100px;
}

4
interfaces/wizard/two.html

@ -22,13 +22,13 @@
<p><strong>$T('opt-complete_dir')</strong></p>
<div class="quoteBlock">
$complete_dir
<a href="${access_url}config/folders" class="indented"><span class="glyphicon glyphicon-cog"></span></a>
<a href="${access_url}/config/folders#complete_dir" class="indented"><span class="glyphicon glyphicon-cog"></span></a>
</div>
<p><strong>$T('opt-download_dir')</strong></p>
<div class="quoteBlock">
$download_dir
<a href="${access_url}config/folders" class="indented"><span class="glyphicon glyphicon-cog"></span></a>
<a href="${access_url}/config/folders#complete_dir" class="indented"><span class="glyphicon glyphicon-cog"></span></a>
</div>
<hr/>

BIN
osx/unrar/unrar

Binary file not shown.

340
po/main/SABnzbd.pot

File diff suppressed because it is too large

344
po/main/da.po

File diff suppressed because it is too large

360
po/main/de.po

File diff suppressed because it is too large

344
po/main/es.po

File diff suppressed because it is too large

344
po/main/fi.po

File diff suppressed because it is too large

346
po/main/fr.po

File diff suppressed because it is too large

448
po/main/he.po

File diff suppressed because it is too large

344
po/main/nb.po

File diff suppressed because it is too large

344
po/main/nl.po

File diff suppressed because it is too large

344
po/main/pl.po

File diff suppressed because it is too large

344
po/main/pt_BR.po

File diff suppressed because it is too large

344
po/main/ro.po

File diff suppressed because it is too large

348
po/main/ru.po

File diff suppressed because it is too large

344
po/main/sr.po

File diff suppressed because it is too large

344
po/main/sv.po

File diff suppressed because it is too large

344
po/main/zh_CN.po

File diff suppressed because it is too large

57
sabnzbd/__init__.py

@ -196,7 +196,7 @@ def sig_handler(signum=None, frame=None):
INIT_LOCK = Lock()
def connect_db(thread_index=0):
def get_db_connection(thread_index=0):
# Create a connection and store it in the current thread
if not (hasattr(cherrypy.thread_data, 'history_db') and cherrypy.thread_data.history_db):
cherrypy.thread_data.history_db = sabnzbd.database.HistoryDB()
@ -217,7 +217,7 @@ def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0
__SHUTTING_DOWN__ = False
# Set global database connection for Web-UI threads
cherrypy.engine.subscribe('start_thread', connect_db)
cherrypy.engine.subscribe('start_thread', get_db_connection)
# Paused?
pause_downloader = pause_downloader or cfg.start_paused()
@ -653,13 +653,13 @@ def add_nzbfile(nzbfile, pp=None, script=None, cat=None, priority=NORMAL_PRIORIT
try:
filename = nzbfile.filename.encode('cp1252').decode('utf-8')
except:
# Correct encoding afterall!
# Correct encoding after all!
filename = nzbfile.filename
filename = encoding.special_fixer(filename)
keep = False
if not sabnzbd.WIN32:
# If windows client sends file to Unix server backslashed may
# If windows client sends file to Unix server backslashes may
# be included, so convert these
filename = filename.replace('\\', '/')
@ -988,12 +988,12 @@ def pp_to_opts(pp):
# Convert the pp to an int
pp = sabnzbd.interface.int_conv(pp)
if pp == 0:
return (False, False, False)
return False, False, False
if pp == 1:
return (True, False, False)
return True, False, False
if pp == 2:
return (True, True, False)
return (True, True, True)
return True, True, False
return True, True, True
def opts_to_pp(repair, unpack, delete):
@ -1156,24 +1156,29 @@ def test_cert_checking():
""" Test quality of certificate validation
On systems with at least Python > 2.7.9
"""
try:
import ssl
ctx = ssl.create_default_context()
base_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
ssl_sock = ctx.wrap_socket(base_sock, server_hostname=cfg.selftest_host())
ssl_sock.settimeout(2.0)
ssl_sock.connect((cfg.selftest_host(), 443))
ssl_sock.close()
return True
except (socket.gaierror, socket.timeout) as e:
# Non-SSL related error.
# We now assume that certificates work instead of forcing
# lower quality just because some (temporary) internet problem
logging.info('Could not determine system certificate validation quality due to connection problems')
return True
except:
# Seems something is still wrong
sabnzbd.set_https_verification(0)
if sabnzbd.HAVE_SSL_CONTEXT:
# User disabled the test, assume proper SSL certificates
if not cfg.selftest_host():
return True
# Try a connection to our test-host
try:
import ssl
ctx = ssl.create_default_context()
base_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
ssl_sock = ctx.wrap_socket(base_sock, server_hostname=cfg.selftest_host())
ssl_sock.settimeout(2.0)
ssl_sock.connect((cfg.selftest_host(), 443))
ssl_sock.close()
return True
except (socket.gaierror, socket.timeout):
# Non-SSL related error.
# We now assume that certificates work instead of forcing
# lower quality just because some (temporary) internet problem
logging.info('Could not determine system certificate validation quality due to connection problems')
return True
except:
# Seems something is still wrong
sabnzbd.set_https_verification(0)
return False

121
sabnzbd/api.py

@ -29,6 +29,7 @@ import cherrypy
import locale
from threading import Thread
try:
import win32api
import win32file
@ -37,8 +38,8 @@ except ImportError:
import sabnzbd
from sabnzbd.constants import VALID_ARCHIVES, VALID_NZB_FILES, Status, \
TOP_PRIORITY, REPAIR_PRIORITY, HIGH_PRIORITY, NORMAL_PRIORITY, LOW_PRIORITY, \
KIBI, MEBI, GIGI, JOB_ADMIN
TOP_PRIORITY, REPAIR_PRIORITY, HIGH_PRIORITY, NORMAL_PRIORITY, LOW_PRIORITY, \
KIBI, MEBI, GIGI, JOB_ADMIN
import sabnzbd.config as config
import sabnzbd.cfg as cfg
from sabnzbd.downloader import Downloader
@ -67,7 +68,6 @@ import sabnzbd.rss
import sabnzbd.emailer
import sabnzbd.getipaddress as getipaddress
##############################################################################
# API error messages
##############################################################################
@ -82,7 +82,6 @@ _MSG_OUTPUT_FORMAT = 'Format not supported'
_MSG_NO_SUCH_CONFIG = 'Config item does not exist'
_MSG_BAD_SERVER_PARMS = 'Incorrect server settings'
# For Windows: determine executable extensions
if os.name == 'nt':
PATHEXT = os.environ.get('PATHEXT', '').lower().split(';')
@ -211,6 +210,8 @@ def _api_queue_pause(output, value, kwargs):
if value:
items = value.split(',')
handled = NzbQueue.do.pause_multiple_nzo(items)
else:
handled = False
return report(output, keyword='', data={'status': bool(handled), 'nzo_ids': handled})
@ -219,6 +220,8 @@ def _api_queue_resume(output, value, kwargs):
if value:
items = value.split(',')
handled = NzbQueue.do.resume_multiple_nzo(items)
else:
handled = False
return report(output, keyword='', data={'status': bool(handled), 'nzo_ids': handled})
@ -332,7 +335,7 @@ def _api_addfile(name, output, kwargs):
# Indexer category, so do mapping
cat = cat_convert(xcat)
res = sabnzbd.add_nzbfile(name, kwargs.get('pp'), kwargs.get('script'), cat,
kwargs.get('priority'), kwargs.get('nzbname'))
kwargs.get('priority'), kwargs.get('nzbname'))
return report(output, keyword='', data={'status': res[0] == 0, 'nzo_ids': res[1]}, compat=True)
else:
return report(output, _MSG_NO_VALUE)
@ -453,6 +456,7 @@ def _api_change_opts(name, output, kwargs):
""" API: accepts output, value(=nzo_id), value2(=pp) """
value = kwargs.get('value')
value2 = kwargs.get('value2')
result = 0
if value and value2 and value2.isdigit():
result = NzbQueue.do.change_opts(value, int(value2))
return report(output, keyword='status', data=bool(result > 0))
@ -474,7 +478,6 @@ def _api_history(name, output, kwargs):
failed_only = kwargs.get('failed_only')
categories = kwargs.get('category')
# Do we need to send anything?
if last_history_update == sabnzbd.LAST_HISTORY_UPDATE:
return report(output, keyword='history', data=False)
@ -489,7 +492,7 @@ def _api_history(name, output, kwargs):
special = value.lower()
del_files = bool(int_conv(kwargs.get('del_files')))
if special in ('all', 'failed', 'completed'):
history_db = sabnzbd.connect_db()
history_db = sabnzbd.get_db_connection()
if special in ('all', 'failed'):
if del_files:
del_job_files(history_db.get_failed_paths(search))
@ -510,7 +513,7 @@ def _api_history(name, output, kwargs):
history = {}
grand, month, week, day = BPSMeter.do.get_sums()
history['total_size'], history['month_size'], history['week_size'], history['day_size'] = \
to_units(grand), to_units(month), to_units(week), to_units(day)
to_units(grand), to_units(month), to_units(week), to_units(day)
history['slots'], fetched_items, history['noofslots'] = build_history(start=start,
limit=limit, verbose=True,
search=search, failed_only=failed_only,
@ -715,12 +718,10 @@ def _api_reset_quota(name, output, kwargs):
def _api_test_email(name, output, kwargs):
""" API: send a test email, return result """
logging.info("Sending test email")
pack = {}
pack['download'] = ['action 1', 'action 2']
pack['unpack'] = ['action 1', 'action 2']
res = sabnzbd.emailer.endjob('I had a d\xe8ja vu', 'unknown', True,
os.path.normpath(os.path.join(cfg.complete_dir.get_path(), '/unknown/I had a d\xe8ja vu')),
123 * MEBI, None, pack, 'my_script', 'Line 1\nLine 2\nLine 3\nd\xe8ja vu\n', 0,
pack = {'download': ['action 1', 'action 2'], 'unpack': ['action 1', 'action 2']}
res = sabnzbd.emailer.endjob(u'I had a d\xe8ja vu', 'unknown', True,
os.path.normpath(os.path.join(cfg.complete_dir.get_path(), u'/unknown/I had a d\xe8ja vu')),
123 * MEBI, None, pack, 'my_script', u'Line 1\nLine 2\nLine 3\nd\xe8ja vu\n', 0,
test=kwargs)
if res == 'Email succeeded':
res = None
@ -793,7 +794,6 @@ def _api_browse(name, output, kwargs):
compact = kwargs.get('compact')
if compact and compact == '1':
paths = []
name = platform_encode(kwargs.get('term', ''))
paths = [entry['path'] for entry in folders_at_path(os.path.dirname(name)) if 'path' in entry]
return report(output, keyword='', data=paths)
@ -883,12 +883,11 @@ def _api_config_undefined(output, kwargs):
def _api_server_stats(name, output, kwargs):
""" API: accepts output """
sum_t, sum_m, sum_w, sum_d = BPSMeter.do.get_sums()
stats = {'total': sum_t, 'month': sum_m, 'week': sum_w, 'day': sum_d}
stats = {'total': sum_t, 'month': sum_m, 'week': sum_w, 'day': sum_d, 'servers': {}}
stats['servers'] = {}
for svr in config.get_servers():
t, m, w, d, daily = BPSMeter.do.amounts(svr)
stats['servers'][svr] = {'total': t or 0, 'month': m or 0, 'week': w or 0, 'day': d or 0, 'daily': daily or {} }
stats['servers'][svr] = {'total': t or 0, 'month': m or 0, 'week': w or 0, 'day': d or 0, 'daily': daily or {}}
return report(output, keyword='', data=stats)
@ -1131,6 +1130,24 @@ def handle_rss_api(output, kwargs):
feed.set_dict(kwargs)
else:
config.ConfigRSS(name, kwargs)
action = kwargs.get('filter_action')
if action in ('add', 'update'):
# Use the general function, but catch the redirect-raise
try:
kwargs['feed'] = name
sabnzbd.interface.ConfigRss('/').internal_upd_rss_filter(**kwargs)
except cherrypy.HTTPRedirect:
pass
elif action == 'delete':
# Use the general function, but catch the redirect-raise
try:
kwargs['feed'] = name
sabnzbd.interface.ConfigRss('/').internal_del_rss_filter(**kwargs)
except cherrypy.HTTPRedirect:
pass
return name
@ -1214,10 +1231,10 @@ def build_status(skip_dashboard=False, output=None):
# For the templates or for JSON
if output:
thread_info = { 'thrdnum': nw.thrdnum,
'art_name': art_name,
'nzf_name': nzf_name,
'nzo_name': nzo_name }
thread_info = {'thrdnum': nw.thrdnum,
'art_name': art_name,
'nzf_name': nzf_name,
'nzo_name': nzo_name}
serverconnections.append(thread_info)
else:
serverconnections.append((nw.thrdnum, art_name, nzf_name, nzo_name))
@ -1233,20 +1250,20 @@ def build_status(skip_dashboard=False, output=None):
# For the templates or for JSON
if output:
server_info = { 'servername': server.displayname,
'serveractiveconn': connected,
'servertotalconn': server.threads,
'serverconnections': serverconnections,
'serverssl': server.ssl,
'serversslinfo': server.ssl_info,
'serveractive': server.active,
'servererror': server.errormsg,
'serverpriority': server.priority,
'serveroptional': server.optional }
server_info = {'servername': server.displayname,
'serveractiveconn': connected,
'servertotalconn': server.threads,
'serverconnections': serverconnections,
'serverssl': server.ssl,
'serversslinfo': server.ssl_info,
'serveractive': server.active,
'servererror': server.errormsg,
'serverpriority': server.priority,
'serveroptional': server.optional}
info['servers'].append(server_info)
else:
info['servers'].append((server.displayname, '', connected, serverconnections, server.ssl,
server.active, server.errormsg, server.priority, server.optional))
server.active, server.errormsg, server.priority, server.optional))
info['warnings'] = sabnzbd.GUIHANDLER.content()
@ -1326,10 +1343,10 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None):
# Ensure compatibility of API status
if status == Status.DELETED or priority == TOP_PRIORITY:
status = Status.DOWNLOADING
slot['status'] = "%s" % (status)
slot['status'] = "%s" % status
if (Downloader.do.paused or Downloader.do.postproc or is_propagating or \
status not in (Status.DOWNLOADING, Status.FETCHING, Status.QUEUED)) and priority != TOP_PRIORITY:
if (Downloader.do.paused or Downloader.do.postproc or is_propagating or
status not in (Status.DOWNLOADING, Status.FETCHING, Status.QUEUED)) and priority != TOP_PRIORITY:
slot['timeleft'] = '0:00:00'
slot['eta'] = 'unknown'
else:
@ -1490,16 +1507,17 @@ def options_list(output):
})
def retry_job(job, new_nzb, password):
def retry_job(job, new_nzb=None, password=None):
""" Re enter failed job in the download queue """
if job:
history_db = sabnzbd.connect_db()
history_db = sabnzbd.get_db_connection()
futuretype, url, pp, script, cat = history_db.get_other(job)
if futuretype:
if pp == 'X':
pp = None
sabnzbd.add_url(url, pp, script, cat)
nzo_id = sabnzbd.add_url(url, pp, script, cat)
history_db.remove_history(job)
return nzo_id
else:
path = history_db.get_path(job)
if path:
@ -1511,8 +1529,13 @@ def retry_job(job, new_nzb, password):
def retry_all_jobs():
""" Re enter all failed jobs in the download queue """
history_db = sabnzbd.connect_db()
return NzbQueue.do.retry_all_jobs(history_db)
# Fetch all retryable folders from History
items = sabnzbd.api.build_history()[0]
nzo_ids = []
for item in items:
if item['retry']:
nzo_ids.append(retry_job(item['nzo_id']))
return nzo_ids
def del_job_files(job_paths):
@ -1529,7 +1552,7 @@ def del_hist_job(job, del_files):
if path:
PostProcessor.do.delete(job, del_files=del_files)
else:
history_db = sabnzbd.connect_db()
history_db = sabnzbd.get_db_connection()
path = history_db.get_path(job)
history_db.remove_history(job)
@ -1548,7 +1571,9 @@ def Tspec(txt):
return txt
_SKIN_CACHE = {} # Stores pre-translated acronyms
_SKIN_CACHE = {} # Stores pre-translated acronyms
# This special is to be used in interface.py for template processing
# to be passed for the $T function: so { ..., 'T' : Ttemplate, ...}
def Ttemplate(txt):
@ -1668,7 +1693,6 @@ def build_queue_header(search=None, start=0, limit=0, output=None):
header['size'] = format_bytes(bytes)
header['noofslots_total'] = qnfo.q_fullsize
status = ''
if Downloader.do.paused or Downloader.do.postproc:
status = Status.PAUSED
elif bytespersec > 0:
@ -1683,15 +1707,13 @@ def build_queue_header(search=None, start=0, limit=0, output=None):
# new eta format: 16:00 Fri 07 Feb
header['eta'] = datestart.strftime(time_format('%H:%M %a %d %b'))
except:
datestart = datetime.datetime.now()
header['eta'] = T('unknown')
return (header, qnfo.list, bytespersec, qnfo.q_fullsize, qnfo.bytes_left_previous_page)
return header, qnfo.list, bytespersec, qnfo.q_fullsize, qnfo.bytes_left_previous_page
def build_history(start=None, limit=None, verbose=False, verbose_list=None, search=None, failed_only=0,
categories=None, output=None):
if output:
converter = unicoder
else:
@ -1744,7 +1766,7 @@ def build_history(start=None, limit=None, verbose=False, verbose_list=None, sear
# Aquire the db instance
try:
history_db = sabnzbd.connect_db()
history_db = sabnzbd.get_db_connection()
close_db = False
except:
# Required for repairs at startup because Cherrypy isn't active yet
@ -1755,7 +1777,6 @@ def build_history(start=None, limit=None, verbose=False, verbose_list=None, sear
if not h_limit:
items, fetched_items, total_items = history_db.fetch_history(h_start, 1, search, failed_only, categories)
items = []
fetched_items = 0
else:
items, fetched_items, total_items = history_db.fetch_history(h_start, h_limit, search, failed_only, categories)
@ -1840,7 +1861,7 @@ def build_history(start=None, limit=None, verbose=False, verbose_list=None, sear
if close_db:
history_db.close()
return (items, fetched_items, total_items)
return items, fetched_items, total_items
def get_active_history(queue=None, items=None):

15
sabnzbd/assembler.py

@ -78,11 +78,6 @@ class Assembler(Thread):
# Abort all direct unpackers, just to be sure
sabnzbd.directunpacker.abort_all()
# Place job back in queue and wait 30 seconds to hope it gets resolved
self.process(job)
sleep(30)
continue
# Prepare filename
nzo.verify_nzf_filename(nzf)
nzf.filename = sanitize_filename(nzf.filename)
@ -114,7 +109,7 @@ class Assembler(Thread):
nzf.remove_admin()
# Do rar-related processing
if rarfile.is_rarfile(filepath):
if is_rarfile(filepath):
# Encryption and unwanted extension detection
rar_encrypted, unwanted_file = check_encrypted_and_unwanted_files(nzo, filepath)
if rar_encrypted:
@ -243,7 +238,7 @@ def check_encrypted_and_unwanted_files(nzo, filepath):
return encrypted, unwanted
# Is it even a rarfile?
if rarfile.is_rarfile(filepath):
if is_rarfile(filepath):
# Open the rar
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
zf = rarfile.RarFile(filepath, all_names=True)
@ -331,11 +326,11 @@ def nzo_filtered_by_rating(nzo):
nzo.rating_filtered = 1
reason = rating_filtered(rating, nzo.filename.lower(), True)
if reason is not None:
return (2, reason)
return 2, reason
reason = rating_filtered(rating, nzo.filename.lower(), False)
if reason is not None:
return (1, reason)
return (0, "")
return 1, reason
return 0, ""
def rating_filtered(rating, filename, abort):

4
sabnzbd/config.py

@ -897,7 +897,7 @@ def get_servers():
return {}
def define_categories(force=False):
def define_categories():
""" Define categories listed in the Setup file
return a list of ConfigCat instances
"""
@ -991,7 +991,7 @@ def get_rss():
for feed_uri in feed.uri():
if new_feed_uris and not urlparse(feed_uri).scheme and urlparse(new_feed_uris[-1]).scheme:
# Current one has no scheme but previous one does, append to previous
new_feed_uris[-1] += '%2C' + feed_uri
new_feed_uris[-1] += ',' + feed_uri
have_new_uri = True
continue
# Add full working URL

2
sabnzbd/constants.py

@ -123,7 +123,7 @@ year_match = r'[\W]([1|2]\d{3})([^\w]|$)' # Something '(YYYY)' or '.YYYY.' or '
sample_match = r'((^|[\W_])(sample|proof))' # something-sample or something-proof
class Status():
class Status:
COMPLETED = 'Completed' # PP: Job is finished
CHECKING = 'Checking' # Q: Pre-check is running
DOWNLOADING = 'Downloading' # Q: Normal downloading

15
sabnzbd/database.py

@ -315,7 +315,7 @@ class HistoryDB(object):
# Stage Name is separated by ::: stage lines by ; and stages by \r\n
items = [unpack_history_info(item) for item in items]
return (items, fetched_items, total_items)
return items, fetched_items, total_items
def have_episode(self, series, season, episode):
""" Check whether History contains this series episode """
@ -376,7 +376,7 @@ class HistoryDB(object):
except AttributeError:
pass
return (total, month, week)
return total, month, week
def get_script_log(self, nzo_id):
""" Return decompressed log file """
@ -401,7 +401,7 @@ class HistoryDB(object):
return name
def get_path(self, nzo_id):
""" Return the `incomplete` path of the job `nzo_id` """
""" Return the `incomplete` path of the job `nzo_id` if it is still there """
t = (nzo_id,)
path = ''
if self.execute('SELECT path FROM history WHERE nzo_id=?', t):
@ -409,7 +409,9 @@ class HistoryDB(object):
path = self.c.fetchone().get('path')
except AttributeError:
pass
return path
if os.path.exists(path):
return path
return None
def get_other(self, nzo_id):
""" Return additional data for job `nzo_id` """
@ -422,9 +424,10 @@ class HistoryDB(object):
pp = items.get('pp')
script = items.get('script')
cat = items.get('category')
return dtype, url, pp, script, cat
except (AttributeError, IndexError):
return '', '', '', '', ''
return dtype, url, pp, script, cat
pass
return '', '', '', '', ''
def dict_factory(cursor, row):

6
sabnzbd/decoder.py

@ -118,7 +118,7 @@ class Decoder(Thread):
nzf.article_count += 1
found = True
except IOError as e:
except IOError:
logme = T('Decoding %s failed') % art_id
logging.warning(logme)
logging.info("Traceback: ", exc_info=True)
@ -127,7 +127,7 @@ class Decoder(Thread):
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article)
register = False
except MemoryError as e:
except MemoryError:
logme = T('Decoder failure: Out of memory')
logging.warning(logme)
anfo = sabnzbd.articlecache.ArticleCache.do.cache_info()
@ -295,7 +295,7 @@ def yCheck(data):
except IndexError:
break
return ((ybegin, ypart, yend), data)
return (ybegin, ypart, yend), data
# Example: =ybegin part=1 line=128 size=123 name=-=DUMMY=- abc.par
YSPLIT_RE = re.compile(r'([a-zA-Z0-9]+)=')

14
sabnzbd/directunpacker.py

@ -33,6 +33,7 @@ from sabnzbd.misc import int_conv, format_time_string
from sabnzbd.filesystem import clip_path, long_path, remove_all, globber, \
has_win_device, real_path
from sabnzbd.encoding import TRANS, unicoder
from sabnzbd.decorators import synchronized
from sabnzbd.newsunpack import build_command, EXTRACTFROM_RE, EXTRACTED_RE, rar_volumelist
from sabnzbd.postproc import prepare_extraction_path
from sabnzbd.utils.rarfile import RarFile
@ -102,6 +103,7 @@ class DirectUnpacker(threading.Thread):
if none_counter > found_counter:
self.total_volumes = {}
@synchronized(START_STOP_LOCK)
def add(self, nzf):
""" Add jobs and start instance of DirectUnpack """
if not cfg.direct_unpack_tested():
@ -162,10 +164,10 @@ class DirectUnpacker(threading.Thread):
break
# Error? Let PP-handle it
if linebuf.endswith(('ERROR: ', 'Cannot create', 'in the encrypted file', 'CRC failed', \
'checksum failed', 'You need to start extraction from a previous volume', \
'password is incorrect', 'Write error', 'checksum error', \
'start extraction from a previous volume')):
if linebuf.endswith(('ERROR: ', 'Cannot create', 'in the encrypted file', 'CRC failed',
'checksum failed', 'You need to start extraction from a previous volume',
'password is incorrect', 'Write error', 'checksum error',
'start extraction from a previous volume')):
logging.info('Error in DirectUnpack of %s', self.cur_setname)
self.abort()
@ -301,6 +303,7 @@ class DirectUnpacker(threading.Thread):
with self.next_file_lock:
self.next_file_lock.wait()
@synchronized(START_STOP_LOCK)
def create_unrar_instance(self):
""" Start the unrar instance using the user's options """
# Generate extraction path and save for post-proc
@ -358,9 +361,10 @@ class DirectUnpacker(threading.Thread):
# Doing the first
logging.info('DirectUnpacked volume %s for %s', self.cur_volume, self.cur_setname)
@synchronized(START_STOP_LOCK)
def abort(self):
""" Abort running instance and delete generated files """
if not self.killed:
if not self.killed and self.cur_setname:
logging.info('Aborting DirectUnpack for %s', self.cur_setname)
self.killed = True

4
sabnzbd/dirscanner.py

@ -76,7 +76,7 @@ def is_archive(path):
except:
logging.info(T('Cannot read %s'), path, exc_info=True)
return -1, None, ''
elif rarfile.is_rarfile(path):
elif misc.is_rarfile(path):
try:
# Set path to tool to open it
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
@ -233,7 +233,7 @@ def ProcessSingleFile(filename, path, pp=None, script=None, cat=None, catdir=Non
# # Empty, but correct file
# return -1, nzo_ids
except:
if data.find("<nzb") >= 0 and data.find("</nzb") < 0:
if data.find("<nzb") >= 0 > data.find("</nzb"):
# Looks like an incomplete file, retry
return -2, nzo_ids
else:

84
sabnzbd/downloader.py

@ -305,13 +305,13 @@ class Downloader(Thread):
self.force_disconnect = True
def limit_speed(self, value):
''' Set the actual download speed in Bytes/sec
""" Set the actual download speed in Bytes/sec
When 'value' ends with a '%' sign or is within 1-100, it is interpreted as a pecentage of the maximum bandwidth
When no '%' is found, it is interpreted as an absolute speed (including KMGT notation).
'''
"""
if value:
mx = cfg.bandwidth_max.get_int()
if '%' in str(value) or (from_units(value) > 0 and from_units(value) < 101):
if '%' in str(value) or (0 < from_units(value) < 101):
limit = value.strip(' %')
self.bandwidth_perc = from_units(limit)
if mx:
@ -369,24 +369,24 @@ class Downloader(Thread):
# Was it resolving problem?
if server.info is False:
# Warn about resolving issues
errormsg = T('Cannot connect to server %s [%s]') % (server.id, T('Server name does not resolve'))
errormsg = T('Cannot connect to server %s [%s]') % (server.host, T('Server name does not resolve'))
if server.errormsg != errormsg:
server.errormsg = errormsg
logging.warning(errormsg)
logging.warning(T('Server %s will be ignored for %s minutes'), server.id, _PENALTY_TIMEOUT)
logging.warning(T('Server %s will be ignored for %s minutes'), server.host, _PENALTY_TIMEOUT)
# Not fully the same as the code below for optional servers
server.bad_cons = 0
server.active = False
self.plan_server(server.id, _PENALTY_TIMEOUT)
self.plan_server(server, _PENALTY_TIMEOUT)
# Optional and active server had too many problems.
# Disable it now and send a re-enable plan to the scheduler
if server.optional and server.active and (server.bad_cons / server.threads) > 3:
server.bad_cons = 0
server.active = False
logging.warning(T('Server %s will be ignored for %s minutes'), server.id, _PENALTY_TIMEOUT)
self.plan_server(server.id, _PENALTY_TIMEOUT)
logging.warning(T('Server %s will be ignored for %s minutes'), server.host, _PENALTY_TIMEOUT)
self.plan_server(server, _PENALTY_TIMEOUT)
# Remove all connections to server
for nw in server.idle_threads + server.busy_threads:
@ -472,7 +472,7 @@ class Downloader(Thread):
if server.retention and article.nzf.nzo.avg_stamp < time.time() - server.retention:
# Let's get rid of all the articles for this server at once
logging.info('Job %s too old for %s, moving on', article.nzf.nzo.work_name, server.id)
logging.info('Job %s too old for %s, moving on', article.nzf.nzo.work_name, server.host)
while article:
self.decode(article, None, None)
article = article.nzf.nzo.get_article(server, self.servers)
@ -487,10 +487,10 @@ class Downloader(Thread):
self.__request_article(nw)
else:
try:
logging.info("%s@%s: Initiating connection", nw.thrdnum, server.id)
logging.info("%s@%s: Initiating connection", nw.thrdnum, server.host)
nw.init_connect(self.write_fds)
except:
logging.error(T('Failed to initialize %s@%s with reason: %s'), nw.thrdnum, server.id, sys.exc_info()[1])
logging.error(T('Failed to initialize %s@%s with reason: %s'), nw.thrdnum, server.host, sys.exc_info()[1])
self.__reset_nw(nw, "failed to initialize")
# Exit-point
@ -618,7 +618,7 @@ class Downloader(Thread):
try:
nw.finish_connect(nw.status_code)
if sabnzbd.LOG_ALL:
logging.debug("%s@%s last message -> %s", nw.thrdnum, nw.server.id, nntp_to_msg(nw.data))
logging.debug("%s@%s last message -> %s", nw.thrdnum, nw.server.host, nntp_to_msg(nw.data))
nw.clear_data()
except NNTPPermanentError as error:
# Handle login problems
@ -635,9 +635,9 @@ class Downloader(Thread):
errormsg = T('Too many connections to server %s') % display_msg
if server.errormsg != errormsg:
server.errormsg = errormsg
logging.warning(T('Too many connections to server %s'), server.id)
logging.warning(T('Too many connections to server %s'), server.host)
self.__reset_nw(nw, None, warn=False, destroy=True, quit=True)
self.plan_server(server.id, _PENALTY_TOOMANY)
self.plan_server(server, _PENALTY_TOOMANY)
server.threads -= 1
elif ecode in (502, 481, 482) and clues_too_many_ip(msg):
# Account sharing?
@ -645,7 +645,7 @@ class Downloader(Thread):
errormsg = T('Probable account sharing') + display_msg
if server.errormsg != errormsg:
server.errormsg = errormsg
name = ' (%s)' % server.id
name = ' (%s)' % server.host
logging.warning(T('Probable account sharing') + name)
penalty = _PENALTY_SHARE
block = True
@ -655,7 +655,7 @@ class Downloader(Thread):
errormsg = T('Failed login for server %s') % display_msg
if server.errormsg != errormsg:
server.errormsg = errormsg
logging.error(T('Failed login for server %s'), server.id)
logging.error(T('Failed login for server %s'), server.host)
penalty = _PENALTY_PERM
block = True
elif ecode in (502, 482):
@ -664,7 +664,7 @@ class Downloader(Thread):
errormsg = T('Cannot connect to server %s [%s]') % ('', display_msg)
if server.errormsg != errormsg:
server.errormsg = errormsg
logging.warning(T('Cannot connect to server %s [%s]'), server.id, msg)
logging.warning(T('Cannot connect to server %s [%s]'), server.host, msg)
if clues_pay(msg):
penalty = _PENALTY_PERM
else:
@ -673,7 +673,7 @@ class Downloader(Thread):
elif ecode == 400:
# Temp connection problem?
if server.active:
logging.debug('Unspecified error 400 from server %s', server.id)
logging.debug('Unspecified error 400 from server %s', server.host)
penalty = _PENALTY_VERYSHORT
block = True
else:
@ -682,25 +682,25 @@ class Downloader(Thread):
errormsg = T('Cannot connect to server %s [%s]') % ('', display_msg)
if server.errormsg != errormsg:
server.errormsg = errormsg
logging.warning(T('Cannot connect to server %s [%s]'), server.id, msg)
logging.warning(T('Cannot connect to server %s [%s]'), server.host, msg)
penalty = _PENALTY_UNKNOWN
block = True
if block or (penalty and server.optional):
if server.active:
server.active = False
if penalty and (block or server.optional):
self.plan_server(server.id, penalty)
self.plan_server(server, penalty)
sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists()
self.__reset_nw(nw, None, warn=False, quit=True)
continue
except:
logging.error(T('Connecting %s@%s failed, message=%s'),
nw.thrdnum, nw.server.id, nntp_to_msg(nw.data))
nw.thrdnum, nw.server.host, nntp_to_msg(nw.data))
# No reset-warning needed, above logging is sufficient
self.__reset_nw(nw, None, warn=False)
if nw.connected:
logging.info("Connecting %s@%s finished", nw.thrdnum, nw.server.id)
logging.info("Connecting %s@%s finished", nw.thrdnum, nw.server.host)
self.__request_article(nw)
elif nw.status_code == 223:
@ -717,27 +717,27 @@ class Downloader(Thread):
elif nw.status_code in (411, 423, 430):
done = True
logging.debug('Thread %s@%s: Article %s missing (error=%s)',
nw.thrdnum, nw.server.id, article.article, nw.status_code)
nw.thrdnum, nw.server.host, article.article, nw.status_code)
nw.clear_data()
elif nw.status_code == 480:
if server.active:
server.active = False
server.errormsg = T('Server %s requires user/password') % ''
self.plan_server(server.id, 0)
self.plan_server(server, 0)
sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists()
msg = T('Server %s requires user/password') % nw.server.id
msg = T('Server %s requires user/password') % nw.server.host
self.__reset_nw(nw, msg, quit=True)
elif nw.status_code == 500:
if nzo.precheck:
# Assume "STAT" command is not supported
server.have_stat = False
logging.debug('Server %s does not support STAT', server.id)
logging.debug('Server %s does not support STAT', server.host)
else:
# Assume "BODY" command is not supported
server.have_body = False
logging.debug('Server %s does not support BODY', server.id)
logging.debug('Server %s does not support BODY', server.host)
nw.clear_data()
self.__request_article(nw)
@ -745,7 +745,7 @@ class Downloader(Thread):
server.bad_cons = 0 # Succesful data, clear "bad" counter
server.errormsg = server.warning = ''
if sabnzbd.LOG_ALL:
logging.debug('Thread %s@%s: %s done', nw.thrdnum, server.id, article.article)
logging.debug('Thread %s@%s: %s done', nw.thrdnum, server.host, article.article)
self.decode(article, nw.lines, nw.data)
nw.soft_reset()
@ -777,9 +777,9 @@ class Downloader(Thread):
if warn and errormsg:
server.warning = errormsg
logging.info('Thread %s@%s: ' + errormsg, nw.thrdnum, server.id)
logging.info('Thread %s@%s: ' + errormsg, nw.thrdnum, server.host)
elif errormsg:
logging.info('Thread %s@%s: ' + errormsg, nw.thrdnum, server.id)
logging.info('Thread %s@%s: ' + errormsg, nw.thrdnum, server.host)
if nw in server.busy_threads:
server.busy_threads.remove(nw)
@ -813,11 +813,11 @@ class Downloader(Thread):
if nw.server.send_group and nzo.group != nw.group:
group = nzo.group
if sabnzbd.LOG_ALL:
logging.debug('Thread %s@%s: GROUP <%s>', nw.thrdnum, nw.server.id, group)
logging.debug('Thread %s@%s: GROUP <%s>', nw.thrdnum, nw.server.host, group)
nw.send_group(group)
else:
if sabnzbd.LOG_ALL:
logging.debug('Thread %s@%s: BODY %s', nw.thrdnum, nw.server.id, nw.article.article)
logging.debug('Thread %s@%s: BODY %s', nw.thrdnum, nw.server.host, nw.article.article)
nw.body(nzo.precheck)
fileno = nw.nntp.sock.fileno()
@ -839,24 +839,24 @@ class Downloader(Thread):
# Each server has a dictionary entry, consisting of a list of timestamps.
@synchronized(TIMER_LOCK)
def plan_server(self, server_id, interval):
def plan_server(self, server, interval):
""" Plan the restart of a server in 'interval' minutes """
if cfg.no_penalties() and interval > _PENALTY_SHORT:
# Overwrite in case of no_penalties
interval = _PENALTY_SHORT
logging.debug('Set planned server resume %s in %s mins', server_id, interval)
if server_id not in self._timers:
self._timers[server_id] = []
logging.debug('Set planned server resume %s in %s mins', server.host, interval)
if server.id not in self._timers:
self._timers[server.id] = []
stamp = time.time() + 60.0 * interval
self._timers[server_id].append(stamp)
self._timers[server.id].append(stamp)
if interval:
sabnzbd.scheduler.plan_server(self.trigger_server, [server_id, stamp], interval)
sabnzbd.scheduler.plan_server(self.trigger_server, [server.id, stamp], interval)
@synchronized(TIMER_LOCK)
def trigger_server(self, server_id, timestamp):
""" Called by scheduler, start server if timer still valid """
logging.debug('Trigger planned server resume %s', server_id)
logging.debug('Trigger planned server resume for server-id %s', server_id)
if server_id in self._timers:
if timestamp in self._timers[server_id]:
del self._timers[server_id]
@ -873,7 +873,7 @@ class Downloader(Thread):
# Activate server if it was inactive
for server in self.servers:
if server.id == server_id and not server.active:
logging.debug('Unblock server %s', server_id)
logging.debug('Unblock server %s', server.host)
self.init_server(server_id, server_id)
break
@ -890,7 +890,7 @@ class Downloader(Thread):
kicked = []
for server_id in self._timers.keys():
if not [stamp for stamp in self._timers[server_id] if stamp >= now]:
logging.debug('Forcing re-evaluation of server %s', server_id)
logging.debug('Forcing re-evaluation of server-id %s', server_id)
del self._timers[server_id]
self.init_server(server_id, server_id)
kicked.append(server_id)
@ -898,7 +898,7 @@ class Downloader(Thread):
for server in self.servers:
if server.id not in self._timers:
if server.id not in kicked and not server.active:
logging.debug('Forcing activation of server %s', server.id)
logging.debug('Forcing activation of server %s', server.host)
self.init_server(server.id, server.id)
def update_server(self, oldserver, newserver):

55
sabnzbd/interface.py

@ -44,7 +44,6 @@ from sabnzbd.misc import to_units, from_units, time_format, calc_age, \
cat_to_opts, int_conv, get_base_url, probablyipv4
from sabnzbd.filesystem import real_path, long_path, globber, globber_full, remove_all, clip_path, same_file
from sabnzbd.newswrapper import GetServerParms
from sabnzbd.rating import Rating
from sabnzbd.bpsmeter import BPSMeter
from sabnzbd.encoding import TRANS, xml_name, LatinFilter, unicoder, special_fixer, \
platform_encode
@ -59,7 +58,7 @@ from sabnzbd.decoder import SABYENC_ENABLED
from sabnzbd.utils.diskspeed import diskspeedmeasure
from sabnzbd.utils.getperformance import getpystone
from sabnzbd.constants import NORMAL_PRIORITY, MEBI, DEF_SKIN_COLORS, DEF_STDINTF, \
from sabnzbd.constants import NORMAL_PRIORITY, MEBI, DEF_SKIN_COLORS, \
DEF_STDCONFIG, DEF_MAIN_TMPL, DEFAULT_PRIORITY
from sabnzbd.lang import list_languages
@ -237,8 +236,7 @@ def check_login():
def get_users():
users = {}
users[cfg.username()] = cfg.password()
users = {cfg.username(): cfg.password()}
return users
@ -501,7 +499,7 @@ class MainPage(object):
# No session key check, due to fixed URLs
name = kwargs.get('name')
if name:
history_db = sabnzbd.connect_db()
history_db = sabnzbd.get_db_connection()
return ShowString(history_db.get_name(name), history_db.get_script_log(name))
else:
raise Raiser(self.__root)
@ -775,7 +773,7 @@ class NzoPage(object):
# /SABnzbd_nzo_xxxxx/files
elif 'files' in args:
info = self.nzo_files(info, pnfo_list, nzo_id)
info = self.nzo_files(info, nzo_id)
# /SABnzbd_nzo_xxxxx/save
elif 'save' in args:
@ -785,7 +783,7 @@ class NzoPage(object):
# /SABnzbd_nzo_xxxxx/
else:
info = self.nzo_details(info, pnfo_list, nzo_id)
info = self.nzo_files(info, pnfo_list, nzo_id)
info = self.nzo_files(info, nzo_id)
template = Template(file=os.path.join(sabnzbd.WEB_DIR, 'nzo.tmpl'),
filter=FILTER, searchList=[info], compilerSettings=DIRECTIVES)
@ -837,7 +835,7 @@ class NzoPage(object):
return info
def nzo_files(self, info, pnfo_list, nzo_id):
def nzo_files(self, info, nzo_id):
active = []
nzo = NzbQueue.do.get_nzo(nzo_id)
if nzo:
@ -1108,7 +1106,7 @@ class HistoryPage(object):
@secured_expose(check_session_key=True)
def purge(self, **kwargs):
history_db = sabnzbd.connect_db()
history_db = sabnzbd.get_db_connection()
history_db.remove_history()
raise queueRaiser(self.__root, kwargs)
@ -1135,7 +1133,7 @@ class HistoryPage(object):
@secured_expose(check_session_key=True)
def purge_failed(self, **kwargs):
del_files = bool(int_conv(kwargs.get('del_files')))
history_db = sabnzbd.connect_db()
history_db = sabnzbd.get_db_connection()
if del_files:
del_job_files(history_db.get_failed_paths())
history_db.remove_failed()
@ -1175,7 +1173,7 @@ class HistoryPage(object):
# No session key check, due to fixed URLs
name = kwargs.get('name')
if name:
history_db = sabnzbd.connect_db()
history_db = sabnzbd.get_db_connection()
return ShowString(history_db.get_name(name), history_db.get_script_log(name))
else:
raise Raiser(self.__root)
@ -1881,9 +1879,13 @@ class ConfigRss(object):
@secured_expose(check_session_key=True, check_configlock=True)
def upd_rss_filter(self, **kwargs):
""" Wrapper, so we can call from api.py """
self.internal_upd_rss_filter(**kwargs)
def internal_upd_rss_filter(self, **kwargs):
""" Save updated filter definition """
try:
cfg = config.get_rss()[kwargs.get('feed')]
feed_cfg = config.get_rss()[kwargs.get('feed')]
except KeyError:
raise rssRaiser(self.__root, kwargs)
@ -1897,14 +1899,14 @@ class ConfigRss(object):
enabled = kwargs.get('enabled', '0')
if filt:
cfg.filters.update(int(kwargs.get('index', 0)), (cat, pp, script, kwargs.get('filter_type'),
feed_cfg.filters.update(int(kwargs.get('index', 0)), (cat, pp, script, kwargs.get('filter_type'),
platform_encode(filt), prio, enabled))
# Move filter if requested
index = int_conv(kwargs.get('index', ''))
new_index = kwargs.get('new_index', '')
if new_index and int_conv(new_index) != index:
cfg.filters.move(int(index), int_conv(new_index))
feed_cfg.filters.move(int(index), int_conv(new_index))
config.save_config()
self.__evaluate = False
@ -1922,13 +1924,17 @@ class ConfigRss(object):
@secured_expose(check_session_key=True, check_configlock=True)
def del_rss_filter(self, **kwargs):
""" Wrapper, so we can call from api.py """
self.internal_del_rss_filter(**kwargs)
def internal_del_rss_filter(self, **kwargs):
""" Remove one RSS filter """
try:
cfg = config.get_rss()[kwargs.get('feed')]
feed_cfg = config.get_rss()[kwargs.get('feed')]
except KeyError:
raise rssRaiser(self.__root, kwargs)
cfg.filters.delete(int(kwargs.get('index', 0)))
feed_cfg.filters.delete(int(kwargs.get('index', 0)))
config.save_config()
self.__evaluate = False
self.__show_eval_button = True
@ -2043,15 +2049,8 @@ class ConfigScheduling(object):
@secured_expose(check_configlock=True)
def index(self, **kwargs):
def get_days():
days = {}
days["*"] = T('Daily')
days["1"] = T('Monday')
days["2"] = T('Tuesday')
days["3"] = T('Wednesday')
days["4"] = T('Thursday')
days["5"] = T('Friday')
days["6"] = T('Saturday')
days["7"] = T('Sunday')
days = {"*": T('Daily'), "1": T('Monday'), "2": T('Tuesday'), "3": T('Wednesday'), "4": T('Thursday'),
"5": T('Friday'), "6": T('Saturday'), "7": T('Sunday')}
return days
conf = build_header(sabnzbd.WEB_DIR_CONFIG)
@ -2080,7 +2079,7 @@ class ConfigScheduling(object):
if '%' not in value and from_units(value) < 1.0:
value = T('off') # : "Off" value for speedlimit in scheduler
else:
if '%' not in value and int_conv(value) > 1 and int_conv(value) < 101:
if '%' not in value and 1 < int_conv(value) < 101:
value += '%'
value = value.upper()
if action in actions:
@ -2135,7 +2134,6 @@ class ConfigScheduling(object):
@secured_expose(check_session_key=True, check_configlock=True)
def addSchedule(self, **kwargs):
servers = config.get_servers()
categories = list_cats(False)
minute = kwargs.get('minute')
hour = kwargs.get('hour')
days_of_week = ''.join([str(x) for x in kwargs.get('daysofweek', '')])
@ -2534,6 +2532,7 @@ def GetRssLog(feed):
# These fields could be empty
job['cat'] = job.get('cat', '')
job['size'] = job.get('size', '')
job['infourl'] = job.get('infourl', '')
# Auto-fetched jobs didn't have these fields set
if job.get('url'):
@ -2769,7 +2768,7 @@ def rss_history(url, limit=50, search=None):
stageLine.append("<tr><dt>Stage %s</dt>" % stage['name'])
actions = []
for action in stage['actions']:
actions.append("<dd>%s</dd>" % (action))
actions.append("<dd>%s</dd>" % action)
actions.sort()
actions.reverse()
for act in actions:

11
sabnzbd/misc.py

@ -41,7 +41,7 @@ from sabnzbd.constants import DEFAULT_PRIORITY, FUTURE_Q_FOLDER, JOB_ADMIN, \
GIGI, MEBI, DEF_ARTICLE_CACHE_DEFAULT, DEF_ARTICLE_CACHE_MAX
import sabnzbd.config as config
import sabnzbd.cfg as cfg
from sabnzbd.encoding import ubtou, unicoder, special_fixer, gUTF
import sabnzbd.utils.rarfile as rarfile
TAB_UNITS = ('', 'K', 'M', 'G', 'T', 'P')
RE_UNITS = re.compile(r'(\d+\.*\d*)\s*([KMGTP]{0,1})', re.I)
@ -426,7 +426,6 @@ def to_units(val, spaces=0, postfix=''):
Show single decimal for M and higher
"""
dec_limit = 1
decimals = 0
if val < 0:
sign = '-'
else:
@ -503,7 +502,7 @@ def split_host(srv):
port = int(port)
except:
port = None
return (host, port)
return host, port
def get_cache_limit():
@ -573,8 +572,8 @@ def memory_usage():
except:
logging.debug('Error retrieving memory usage')
logging.info("Traceback: ", exc_info=True)
else:
return ''
try:
_PAGE_SIZE = os.sysconf("SC_PAGE_SIZE")
except:
@ -643,7 +642,7 @@ def create_https_certificates(ssl_cert, ssl_key):
try:
from sabnzbd.utils.certgen import generate_key, generate_local_cert
private_key = generate_key(key_size=2048, output_file=ssl_key)
generate_local_cert(private_key, days_valid=3560, output_file=ssl_cert, LN='SABnzbd', ON='SABnzbd', CN='localhost')
generate_local_cert(private_key, days_valid=3560, output_file=ssl_cert, LN=u'SABnzbd', ON=u'SABnzbd')
logging.info('Self-signed certificates generated successfully')
except:
logging.error(T('Error creating SSL key and certificate'))

129
sabnzbd/newsunpack.py

@ -159,14 +159,7 @@ def external_processing(extern_proc, nzo, complete_dir, nicename, status):
'download_time': nzo.nzo_info.get('download_time', ''),
'avg_bps': int(nzo.avg_bps_total / nzo.avg_bps_freq) if nzo.avg_bps_freq else 0,
'age': calc_age(nzo.avg_date),
'orig_nzb_gz': clip_path(nzb_paths[0]) if nzb_paths else '',
'program_dir': sabnzbd.DIR_PROG,
'par2_command': sabnzbd.newsunpack.PAR2_COMMAND,
'multipar_command': sabnzbd.newsunpack.MULTIPAR_COMMAND,
'rar_command': sabnzbd.newsunpack.RAR_COMMAND,
'zip_command': sabnzbd.newsunpack.ZIP_COMMAND,
'7zip_command': sabnzbd.newsunpack.SEVEN_COMMAND,
'version': sabnzbd.__version__}
'orig_nzb_gz': clip_path(nzb_paths[0]) if nzb_paths else ''}
try:
stup, need_shell, command, creationflags = build_command(command)
@ -182,7 +175,7 @@ def external_processing(extern_proc, nzo, complete_dir, nicename, status):
proc = p.stdout
if p.stdin:
p.stdin.close()
line = ''
lines = []
while 1:
line = proc.readline()
@ -243,11 +236,10 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi
else:
xjoinables, xzips, xrars, xsevens, xts = build_filelists(workdir, workdir_complete, check_both=dele)
rerun = False
force_rerun = False
newfiles = []
error = None
new_joins = new_rars = new_zips = new_ts = None
new_joins = new_ts = None
if cfg.enable_filejoin():
new_joins = [jn for jn in xjoinables if jn not in joinables]
@ -443,16 +435,17 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
if seq_error:
msg = T('Incomplete sequence of joinable files')
nzo.fail_msg = T('File join of %s failed') % unicoder(joinable_set)
nzo.set_unpack_info('Filejoin', T('[%s] Error "%s" while joining files') % (unicoder(joinable_set), msg))
nzo.fail_msg = T('File join of %s failed') % unicoder(os.path.basename(joinable_set))
nzo.set_unpack_info('Filejoin', T('[%s] Error "%s" while joining files') % (unicoder(os.path.basename(joinable_set)), msg))
logging.error(T('Error "%s" while running file_join on %s'), msg, nzo.final_name)
return True, []
else:
msg = T('[%s] Joined %s files') % (unicoder(joinable_set), size)
nzo.set_unpack_info('Filejoin', msg)
except:
msg = sys.exc_info()[1]
nzo.fail_msg = T('File join of %s failed') % msg
nzo.set_unpack_info('Filejoin', T('[%s] Error "%s" while joining files') % (unicoder(joinable_set), msg))
nzo.set_unpack_info('Filejoin', T('[%s] Error "%s" while joining files') % (unicoder(os.path.basename(joinable_set)), msg))
logging.error(T('Error "%s" while running file_join on %s'), msg, nzo.final_name)
return True, []
@ -467,9 +460,7 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars):
When 'delete' is set, originals will be deleted.
When 'one_folder' is set, all files will be in a single folder
"""
extracted_files = []
success = False
newfiles = extracted_files = []
rar_sets = {}
for rar in rars:
rar_set = os.path.splitext(os.path.basename(rar))[0]
@ -510,6 +501,8 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars):
if wait_count > 60:
# We abort after 2 minutes of no changes
nzo.direct_unpacker.abort()
else:
wait_count = 0
last_stats = nzo.direct_unpacker.get_formatted_stats()
# Did we already direct-unpack it? Not when recursive-unpacking
@ -656,7 +649,7 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction
stup, need_shell, command, creationflags = build_command(command, flatten_command=True)
# Get list of all the volumes part of this set
logging.debug("Analyzing rar file ... %s found", rarfile.is_rarfile(rarfile_path))
logging.debug("Analyzing rar file ... %s found", is_rarfile(rarfile_path))
logging.debug("Running unrar %s", command)
p = Popen(command, shell=need_shell, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
@ -994,7 +987,9 @@ def seven_extract(nzo, sevenset, extensions, extraction_path, one_folder, delete
nzo.fail_msg = ''
if fail == 2:
msg = '%s (%s)' % (T('Unpacking failed, archive requires a password'), os.path.basename(sevenset))
if fail > 0:
nzo.fail_msg = msg
nzo.status = Status.FAILED
logging.error(msg)
return fail, new_files, msg
@ -1028,7 +1023,7 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete
parm = '-tzip' if sevenset.lower().endswith('.zip') else '-t7z'
if not os.path.exists(name):
return 1, T('7ZIP set "%s" is incomplete, cannot unpack') % unicoder(sevenset)
return 1, T('7ZIP set "%s" is incomplete, cannot unpack') % os.path.basename(sevenset)
# For file-bookkeeping
orig_dir_content = recursive_listdir(extraction_path)
@ -1047,6 +1042,15 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete
ret = p.wait()
# Return-code for CRC and Password is the same
if ret == 2 and 'ERROR: CRC Failed' in output:
# We can output a more general error
ret = 1
msg = T('ERROR: CRC failed in "%s"') % os.path.basename(sevenset)
else:
# Default message
msg = T('Could not unpack %s') % os.path.basename(sevenset)
# What's new?
new_files = list(set(orig_dir_content + recursive_listdir(extraction_path)))
@ -1065,7 +1069,7 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete
logging.warning(T('Deleting %s failed!'), sevenset)
# Always return an error message, even when return code is 0
return ret, new_files, T('Could not unpack %s') % unicoder(sevenset)
return ret, new_files, msg
##############################################################################
@ -1127,9 +1131,9 @@ def par2_repair(parfile_nzf, nzo, workdir, setname, single):
# Multipar or not?
if sabnzbd.WIN32 and cfg.multipar():
finished, readd, datafiles, used_joinables, used_for_repair = MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=single)
finished, readd, datafiles, used_joinables, used_for_repair = MultiPar_Verify(parfile, nzo, setname, joinables, single=single)
else:
finished, readd, datafiles, used_joinables, used_for_repair = PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=single)
finished, readd, datafiles, used_joinables, used_for_repair = PAR_Verify(parfile, nzo, setname, joinables, single=single)
if finished:
result = True
@ -1196,7 +1200,7 @@ _RE_LOADING_PAR2 = re.compile(r'Loading "([^"]+)"\.')
_RE_LOADED_PAR2 = re.compile(r'Loaded (\d+) new packets')
def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False):
def PAR_Verify(parfile, nzo, setname, joinables, single=False):
""" Run par2 on par-set """
used_joinables = []
used_for_repair = []
@ -1337,7 +1341,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False):
block_table = {}
for nzf in nzo.extrapars[setname]:
if not nzf.completed:
block_table[int_conv(nzf.blocks)] = nzf
block_table[nzf.blocks] = nzf
if block_table:
nzf = block_table[min(block_table.keys())]
@ -1374,7 +1378,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False):
elif line.startswith('Repair is possible'):
start = time.time()
nzo.set_action_line(T('Repairing'), '%2d%%' % (0))
nzo.set_action_line(T('Repairing'), '%2d%%' % 0)
elif line.startswith('Repairing:'):
chunks = line.split()
@ -1533,7 +1537,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False):
_RE_FILENAME = re.compile(r'"([^"]+)"')
def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False):
def MultiPar_Verify(parfile, nzo, setname, joinables, single=False):
""" Run par2 on par-set """
parfolder = os.path.split(parfile)[0]
used_joinables = []
@ -1650,7 +1654,7 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False)
block_table = {}
for nzf in nzo.extrapars[setname]:
if not nzf.completed:
block_table[int_conv(nzf.blocks)] = nzf
block_table[nzf.blocks] = nzf
if block_table:
nzf = block_table[min(block_table.keys())]
@ -1841,13 +1845,17 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False)
# Set message for user in case of joining
if line.startswith('Ready to rejoin'):
nzo.set_action_line(T('Joining'), '%2d' % len(used_joinables))
else:
# If we are repairing a joinable set, it won't actually
# do the joining. So we can't remove those files!
used_joinables = []
# ----------------- Repair stage
elif 'Recovering slice' in line:
# Before this it will calculate matrix, here is where it starts
start = time.time()
in_repair = True
nzo.set_action_line(T('Repairing'), '%2d%%' % (0))
nzo.set_action_line(T('Repairing'), '%2d%%' % 0)
elif in_repair and line.startswith('Verifying repair'):
in_repair = False
@ -1921,7 +1929,7 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False)
return finished, readd, datafiles, used_joinables, used_for_repair
def create_env(nzo=None, extra_env_fields=None):
def create_env(nzo=None, extra_env_fields={}):
""" Modify the environment for pp-scripts with extra information
OSX: Return copy of environment without PYTHONPATH and PYTHONHOME
other: return None
@ -1945,16 +1953,25 @@ def create_env(nzo=None, extra_env_fields=None):
# Catch key/unicode errors
pass
# Add extra fields
for field in extra_env_fields:
try:
if extra_env_fields[field] is not None:
env['SAB_' + field.upper()] = extra_env_fields[field]
else:
env['SAB_' + field.upper()] = ''
except:
# Catch key/unicode errors
pass
# Always supply basic info
extra_env_fields.update({'program_dir': sabnzbd.DIR_PROG,
'par2_command': sabnzbd.newsunpack.PAR2_COMMAND,
'multipar_command': sabnzbd.newsunpack.MULTIPAR_COMMAND,
'rar_command': sabnzbd.newsunpack.RAR_COMMAND,
'zip_command': sabnzbd.newsunpack.ZIP_COMMAND,
'7zip_command': sabnzbd.newsunpack.SEVEN_COMMAND,
'version': sabnzbd.__version__})
# Add extra fields
for field in extra_env_fields:
try:
if extra_env_fields[field] is not None:
env['SAB_' + field.upper()] = extra_env_fields[field]
else:
env['SAB_' + field.upper()] = ''
except:
# Catch key/unicode errors
pass
if sabnzbd.DARWIN:
if 'PYTHONPATH' in env:
@ -2099,11 +2116,7 @@ def build_filelists(workdir, workdir_complete=None, check_both=False, check_rar=
# Extra check for rar (takes CPU/disk)
file_is_rar = False
if check_rar:
try:
# Can fail on Windows due to long-path after recursive-unpack
file_is_rar = rarfile.is_rarfile(file)
except:
pass
file_is_rar = is_rarfile(file)
# Run through all the checks
if SEVENZIP_RE.search(file) or SEVENMULTI_RE.search(file):
@ -2295,23 +2308,33 @@ def analyse_show(name):
info.get('ep_name', '')
def pre_queue(name, pp, cat, script, priority, size, groups):
""" Run pre-queue script (if any) and process results """
def pre_queue(nzo, pp, cat):
""" Run pre-queue script (if any) and process results.
pp and cat are supplied seperate since they can change.
"""
def fix(p):
if not p or str(p).lower() == 'none':
return ''
return unicoder(p)
values = [1, name, pp, cat, script, priority, None]
values = [1, nzo.final_name_pw_clean, pp, cat, nzo.script, nzo.priority, None]
script_path = make_script_path(cfg.pre_script())
if script_path:
command = [script_path, name, pp, cat, script, priority, str(size), ' '.join(groups)]
command.extend(analyse_show(name))
# Basic command-line parameters
command = [script_path, nzo.final_name_pw_clean, pp, cat, nzo.script, nzo.priority, str(nzo.bytes), ' '.join(nzo.groups)]
command.extend(analyse_show(nzo.final_name_pw_clean))
command = [fix(arg) for arg in command]
# Fields not in the NZO directly
extra_env_fields = {'groups': ' '.join(nzo.groups),
'show_name': command[8],
'show_season': command[9],
'show_episode': command[10],
'show_episode_name': command[11]}
try:
stup, need_shell, command, creationflags = build_command(command)
env = create_env()
env = create_env(nzo, extra_env_fields)
logging.info('Running pre-queue script %s', command)
p = Popen(command, shell=need_shell, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
@ -2332,11 +2355,11 @@ def pre_queue(name, pp, cat, script, priority, size, groups):
n += 1
accept = int_conv(values[0])
if accept < 1:
logging.info('Pre-Q refuses %s', name)
logging.info('Pre-Q refuses %s', nzo.final_name_pw_clean)
elif accept == 2:
logging.info('Pre-Q accepts&fails %s', name)
logging.info('Pre-Q accepts&fails %s', nzo.final_name_pw_clean)
else:
logging.info('Pre-Q accepts %s', name)
logging.info('Pre-Q accepts %s', nzo.final_name_pw_clean)
return values

14
sabnzbd/newswrapper.py

@ -25,7 +25,6 @@ from threading import Thread
from nntplib import NNTPPermanentError
import time
import logging
import re
import ssl
import sabnzbd
@ -135,7 +134,7 @@ class NNTP(object):
# Pre-define attributes to save memory
__slots__ = ('host', 'port', 'nw', 'blocking', 'error_msg', 'sock')
def __init__(self, host, port, info, sslenabled, send_group, nw, user=None, password=None, block=False, write_fds=None):
def __init__(self, host, port, info, sslenabled, nw, block=False, write_fds=None):
self.host = host
self.port = port
self.nw = nw
@ -160,14 +159,14 @@ class NNTP(object):
ctx = ssl.create_default_context()
# Only verify hostname when we're strict
if(nw.server.ssl_verify < 2):
if nw.server.ssl_verify < 2:
ctx.check_hostname = False
# Certificates optional
if(nw.server.ssl_verify == 0):
if nw.server.ssl_verify == 0:
ctx.verify_mode = ssl.CERT_NONE
# Did the user set a custom cipher-string?
if(nw.server.ssl_ciphers):
if nw.server.ssl_ciphers:
# At their own risk, socket will error out in case it was invalid
ctx.set_ciphers(nw.server.ssl_ciphers)
@ -295,8 +294,7 @@ class NewsWrapper(object):
# Construct NNTP object and shorthands
self.nntp = NNTP(self.server.hostip, self.server.port, self.server.info, self.server.ssl,
self.server.send_group, self, self.server.username, self.server.password,
self.blocking, write_fds)
self, self.blocking, write_fds)
self.recv = self.nntp.sock.recv
self.timeout = time.time() + self.server.timeout
@ -395,7 +393,7 @@ class NewsWrapper(object):
# time.sleep(0.0001)
continue
else:
return (0, False, True)
return 0, False, True
# Append so we can do 1 join(), much faster than multiple!
self.data.append(chunk)

5
sabnzbd/notifier.py

@ -26,7 +26,6 @@ import logging
import socket
import urllib.request, urllib.error, urllib.parse
import http.client
import urllib.request, urllib.parse, urllib.error
import time
import subprocess
import json
@ -145,7 +144,7 @@ def check_cat(section, job_cat, keyword=None):
if not keyword:
keyword = section
section_cats = sabnzbd.config.get_config(section, '%s_cats' % keyword)()
return (['*'] == section_cats or job_cat in section_cats)
return ['*'] == section_cats or job_cat in section_cats
except TypeError:
logging.debug('Incorrect Notify option %s:%s_cats', section, section)
return True
@ -463,7 +462,7 @@ def send_pushover(title, msg, gtype, force=False, test=None):
"expire": emergency_expire
}
return do_send_pushover(body)
if prio > -3 and prio < 2:
if -3 < prio < 2:
body = { "token": apikey,
"user": userkey,
"device": device,

25
sabnzbd/nzbqueue.py

@ -148,22 +148,6 @@ class NzbQueue(object):
logging.info('Skipping repair for job %s', folder)
return result
def retry_all_jobs(self, history_db):
""" Retry all retryable jobs in History """
result = []
# Retryable folders from History
items = sabnzbd.api.build_history()[0]
registered = [(platform_encode(os.path.basename(item['path'])),
item['nzo_id'])
for item in items if item['retry']]
for job in registered:
logging.info('Repairing job %s', job[0])
result.append(self.repair_job(job[0]))
history_db.remove_history(job[1])
return bool(result)
def repair_job(self, folder, new_nzb=None, password=None):
""" Reconstruct admin for a single job folder, optionally with new NZB """
def all_verified(path):
@ -171,7 +155,6 @@ class NzbQueue(object):
verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False) or {'x': False}
return all(verified[x] for x in verified)
nzo_id = None
name = os.path.basename(folder)
path = os.path.join(folder, JOB_ADMIN)
if hasattr(new_nzb, 'filename'):
@ -508,10 +491,10 @@ class NzbQueue(object):
nzo2 = self.__nzo_table[item_id_2]
except KeyError:
# One or both jobs missing
return (-1, 0)
return -1, 0
if nzo1 == nzo2:
return (-1, 0)
return -1, 0
# get the priorities of the two items
nzo1_priority = nzo1.priority
@ -540,9 +523,9 @@ class NzbQueue(object):
logging.info('Switching job [%s] %s => [%s] %s', item_id_pos1, item.final_name, item_id_pos2, self.__nzo_list[item_id_pos2].final_name)
del self.__nzo_list[item_id_pos1]
self.__nzo_list.insert(item_id_pos2, item)
return (item_id_pos2, nzo1.priority)
return item_id_pos2, nzo1.priority
# If moving failed/no movement took place
return (-1, nzo1.priority)
return -1, nzo1.priority
@NzbQueueLocker
def move_up_bulk(self, nzo_id, nzf_ids, size):

59
sabnzbd/nzbstuff.py

@ -163,7 +163,7 @@ class Article(TryList):
# if (server_check.priority() < found_priority and server_check.priority() < server.priority and not self.server_in_try_list(server_check)):
if server_check.active and (server_check.priority < found_priority):
if server_check.priority < server.priority:
if (not self.server_in_try_list(server_check)):
if not self.server_in_try_list(server_check):
if log:
logging.debug('Article %s | Server: %s | setting found priority to %s', self.article, server.host, server_check.priority)
found_priority = server_check.priority
@ -313,14 +313,14 @@ class NzbFile(TryList):
if found:
self.bytes_left -= article.bytes
return (not self.articles)
return not self.articles
def set_par2(self, setname, vol, blocks):
""" Designate this this file as a par2 file """
self.is_par2 = True
self.setname = setname
self.vol = vol
self.blocks = int(blocks)
self.blocks = int_conv(blocks)
def get_article(self, server, servers):
""" Get next article to be downloaded """
@ -619,9 +619,9 @@ class NzbObject(TryList):
# Run user pre-queue script if needed
if not reuse and cfg.pre_script():
accept, name, pp, cat_pp, script_pp, priority, group = \
sabnzbd.newsunpack.pre_queue(self.final_name_pw_clean, pp, cat, script,
priority, self.bytes, self.groups)
# Call the script
accept, name, pp, cat_pp, script_pp, priority, group = sabnzbd.newsunpack.pre_queue(self, pp, cat)
# Accept or reject
accept = int_conv(accept)
if accept < 1:
@ -816,7 +816,7 @@ class NzbObject(TryList):
# Sort the sets
for setname in self.extrapars:
self.extrapars[parset].sort(key=lambda x: x.blocks)
self.extrapars[setname].sort(key=lambda x: x.blocks)
# Also re-parse all filenames in case par2 came after first articles
self.verify_all_filenames_and_resort()
@ -892,38 +892,37 @@ class NzbObject(TryList):
def get_extra_blocks(self, setname, needed_blocks):
""" We want par2-files of all sets that are similar to this one
So that we also can handle multi-sets with duplicate filenames
Block-table has as keys the nr-blocks
Returns number of added blocks in case they are available
In case of duplicate files for the same set, we might add too
little par2 on the first add-run, but that's a risk we need to take.
"""
logging.info('Need %s more blocks, checking blocks', needed_blocks)
avail_blocks = 0
block_table = {}
block_list = []
for setname_search in self.extrapars:
# Do it for our set, or highlight matching one
# We might catch to many par2's, but that's okay
# We might catch too many par2's, but that's okay
if setname_search == setname or difflib.SequenceMatcher(None, setname, setname_search).ratio() > 0.85:
for nzf in self.extrapars[setname_search]:
# Don't count extrapars that are completed already
if nzf.completed:
continue
blocks = int_conv(nzf.blocks)
if blocks not in block_table:
block_table[blocks] = []
# We assume same block-vol-naming for each set
avail_blocks += blocks
block_table[blocks].append(nzf)
block_list.append(nzf)
avail_blocks += nzf.blocks
# Sort by smallest blocks last, to be popped first
block_list.sort(key=lambda x: x.blocks, reverse=True)
logging.info('%s blocks available', avail_blocks)
# Enough?
if avail_blocks >= needed_blocks:
added_blocks = 0
while added_blocks < needed_blocks:
block_size = min(block_table.keys())
for new_nzf in block_table[block_size]:
self.add_parfile(new_nzf)
added_blocks += block_size
block_table.pop(block_size)
new_nzf = block_list.pop()
self.add_parfile(new_nzf)
added_blocks += new_nzf.blocks
logging.info('Added %s blocks to %s', added_blocks, self.final_name)
return added_blocks
else:
@ -985,7 +984,7 @@ class NzbObject(TryList):
self.status = Status.QUEUED
self.set_download_report()
return (file_done, post_done)
return file_done, post_done
@synchronized(NZO_LOCK)
def remove_saved_article(self, article):
@ -1086,8 +1085,8 @@ class NzbObject(TryList):
# Convert input
value = int_conv(value)
if value in (REPAIR_PRIORITY, TOP_PRIORITY, HIGH_PRIORITY, NORMAL_PRIORITY, \
LOW_PRIORITY, DEFAULT_PRIORITY, PAUSED_PRIORITY, DUP_PRIORITY, STOP_PRIORITY):
if value in (REPAIR_PRIORITY, TOP_PRIORITY, HIGH_PRIORITY, NORMAL_PRIORITY,
LOW_PRIORITY, DEFAULT_PRIORITY, PAUSED_PRIORITY, DUP_PRIORITY, STOP_PRIORITY):
self.priority = value
return
@ -1201,7 +1200,7 @@ class NzbObject(TryList):
if (parset in nzf.filename or parset in original_filename) and self.extrapars[parset]:
for new_nzf in self.extrapars[parset]:
self.add_parfile(new_nzf)
blocks_new += int_conv(new_nzf.blocks)
blocks_new += new_nzf.blocks
# Enough now?
if blocks_new >= self.bad_articles:
logging.info('Prospectively added %s repair blocks to %s', blocks_new, self.final_name)
@ -1296,11 +1295,11 @@ class NzbObject(TryList):
self.set_unpack_info('Servers', ', '.join(msgs), unique=True)
@synchronized(NZO_LOCK)
def increase_bad_articles_counter(self, type):
def increase_bad_articles_counter(self, article_type):
""" Record information about bad articles """
if type not in self.nzo_info:
self.nzo_info[type] = 0
self.nzo_info[type] += 1
if article_type not in self.nzo_info:
self.nzo_info[article_type] = 0
self.nzo_info[article_type] += 1
self.bad_articles += 1
def get_article(self, server, servers):
@ -1800,7 +1799,7 @@ def scan_password(name):
slash = name.find('/')
# Look for name/password, but make sure that '/' comes before any {{
if slash >= 0 and slash < braces and 'password=' not in name:
if 0 <= slash < braces and 'password=' not in name:
# Is it maybe in 'name / password' notation?
if slash == name.find(' / ') + 1:
# Remove the extra space after name and before password

8
sabnzbd/osxmenu.py

@ -208,7 +208,7 @@ class SABnzbdDelegate(NSObject):
for speed in sorted(speeds.keys()):
menu_speed_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_('%s' % (speeds[speed]), 'speedlimitAction:', '')
menu_speed_item.setRepresentedObject_("%s" % (speed))
menu_speed_item.setRepresentedObject_("%s" % speed)
self.menu_speed.addItem_(menu_speed_item)
self.speed_menu_item.setSubmenu_(self.menu_speed)
@ -414,7 +414,7 @@ class SABnzbdDelegate(NSObject):
if history['status'] != Status.COMPLETED:
jobfailed = NSAttributedString.alloc().initWithString_attributes_(job, self.failedAttributes)
menu_history_item.setAttributedTitle_(jobfailed)
menu_history_item.setRepresentedObject_("%s" % (path))
menu_history_item.setRepresentedObject_("%s" % path)
self.menu_history.addItem_(menu_history_item)
else:
menu_history_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Empty'), '', '')
@ -483,9 +483,9 @@ class SABnzbdDelegate(NSObject):
if self.state != "" and self.info != "":
self.state_menu_item.setTitle_("%s - %s" % (self.state, self.info))
if self.info == "":
self.state_menu_item.setTitle_("%s" % (self.state))
self.state_menu_item.setTitle_("%s" % self.state)
else:
self.state_menu_item.setTitle_("%s" % (self.info))
self.state_menu_item.setTitle_("%s" % self.info)
except:
logging.info("[osx] stateUpdate Exception %s" % (sys.exc_info()[0]))

16
sabnzbd/par2file.py

@ -26,7 +26,9 @@ import struct
PROBABLY_PAR2_RE = re.compile(r'(.*)\.vol(\d*)[\+\-](\d*)\.par2', re.I)
PAR_ID = "PAR2\x00PKT"
PAR_PKT_ID = "PAR2\x00PKT"
PAR_FILE_ID = "PAR 2.0\x00FileDesc"
PAR_CREATOR_ID = "PAR 2.0\x00Creator"
PAR_RECOVERY_ID = "RecvSlic"
@ -35,7 +37,7 @@ def is_parfile(filename):
try:
with open(filename, "rb") as f:
buf = f.read(8)
return buf.startswith(PAR_ID)
return buf.startswith(PAR_PKT_ID)
except:
pass
return False
@ -47,7 +49,6 @@ def analyse_par2(name, filepath=None):
setname is empty when not a par2 file
"""
name = name.strip()
setname = None
vol = block = 0
m = PROBABLY_PAR2_RE.search(name)
if m:
@ -129,7 +130,8 @@ def parse_par2_file_packet(f, header):
nothing = None, None, None
if header != PAR_ID:
if header != PAR_PKT_ID:
print header
return nothing
# Length must be multiple of 4 and at least 20
@ -157,10 +159,14 @@ def parse_par2_file_packet(f, header):
# See if it's the right packet and get name + hash
for offset in range(0, len, 8):
if data[offset:offset + 16] == "PAR 2.0\0FileDesc":
if data[offset:offset + 16] == PAR_FILE_ID:
hash = data[offset + 32:offset + 48]
hash16k = data[offset + 48:offset + 64]
filename = data[offset + 72:].strip('\0')
return filename, hash, hash16k
elif data[offset:offset + 15] == PAR_CREATOR_ID:
# Here untill the end is the creator-text
# Usefull in case of bugs in the par2-creating software
logging.debug('Par2-creator of %s is: %s', os.path.basename(f.name), data[offset+16:].rstrip())
return nothing

4
sabnzbd/postproc.py

@ -271,7 +271,6 @@ def process_job(nzo):
nzb_list = []
# These need to be initialized in case of a crash
workdir_complete = ''
postproc_time = 0
script_log = ''
script_line = ''
@ -326,15 +325,12 @@ def process_job(nzo):
unpack_error = 1
script = nzo.script
cat = nzo.cat
logging.info('Starting Post-Processing on %s' +
' => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s',
filename, flag_repair, flag_unpack, flag_delete, script, nzo.cat)
# Set complete dir to workdir in case we need to abort
workdir_complete = workdir
marker_file = None
# Par processing, if enabled
if all_ok and flag_repair:

44
sabnzbd/rss.py

@ -287,10 +287,10 @@ class RSSQueue(object):
status = feed_parsed.get('status', 999)
if status in (401, 402, 403):
msg = T('Do not have valid authentication for feed %s') % feed
msg = T('Do not have valid authentication for feed %s') % uri
logging.info(msg)
if status >= 500 and status <= 599:
if 500 <= status <= 599:
msg = T('Server side error (server code %s); could not get %s on %s') % (status, feed, uri)
logging.info(msg)
@ -301,11 +301,14 @@ class RSSQueue(object):
msg = T('Server %s uses an untrusted HTTPS certificate') % get_urlbase(uri)
msg += ' - https://sabnzbd.org/certificate-errors'
logging.error(msg)
elif feed_parsed['href'] != uri and 'login' in feed_parsed['href']:
# Redirect to login page!
msg = T('Do not have valid authentication for feed %s') % uri
else:
msg = T('Failed to retrieve RSS from %s: %s') % (uri, xml_name(msg))
logging.info(msg)
if not entries:
if not entries and not msg:
msg = T('RSS Feed %s was empty') % uri
logging.info(msg)
all_entries.extend(entries)
@ -330,12 +333,8 @@ class RSSQueue(object):
if readout:
try:
link, category, size, age, season, episode = _get_link(uri, entry)
link, infourl, category, size, age, season, episode = _get_link(entry)
except (AttributeError, IndexError):
link = None
category = ''
size = 0
age = None
logging.info(T('Incompatible feed') + ' ' + uri)
logging.info("Traceback: ", exc_info=True)
return T('Incompatible feed')
@ -482,13 +481,13 @@ class RSSQueue(object):
else:
star = first
if result:
_HandleLink(jobs, feed, link, title, size, age, season, episode, 'G', category, myCat, myPP,
myScript, act, star, priority=myPrio, rule=str(n))
_HandleLink(jobs, feed, link, infourl, title, size, age, season, episode, 'G', category, myCat,
myPP, myScript, act, star, priority=myPrio, rule=str(n))
if act:
new_downloads.append(title)
else:
_HandleLink(jobs, feed, link, title, size, age, season, episode, 'B', category, myCat, myPP,
myScript, False, star, priority=myPrio, rule=str(n))
_HandleLink(jobs, feed, link, infourl, title, size, age, season, episode, 'B', category, myCat,
myPP, myScript, False, star, priority=myPrio, rule=str(n))
# Send email if wanted and not "forced"
if new_downloads and cfg.email_rss() and not force:
@ -588,7 +587,7 @@ class RSSQueue(object):
return ''
def _HandleLink(jobs, feed, link, title, size, age, season, episode, flag, orgcat, cat, pp, script,
def _HandleLink(jobs, feed, link, infourl, title, size, age, season, episode, flag, orgcat, cat, pp, script,
download, star, priority=NORMAL_PRIORITY, rule=0):
""" Process one link """
if script == '':
@ -599,6 +598,7 @@ def _HandleLink(jobs, feed, link, title, size, age, season, episode, flag, orgca
jobs[link] = {}
jobs[link]['title'] = title
jobs[link]['url'] = link
jobs[link]['infourl'] = infourl
jobs[link]['cat'] = cat
jobs[link]['pp'] = pp
jobs[link]['script'] = script
@ -627,14 +627,11 @@ def _HandleLink(jobs, feed, link, title, size, age, season, episode, flag, orgca
else:
jobs[link]['status'] = flag
def _get_link(uri, entry):
def _get_link(entry):
""" Retrieve the post link from this entry
Returns (link, category, size)
"""
link = None
category = ''
size = 0
uri = uri.lower()
size = 0L
age = datetime.datetime.now()
# Try standard link and enclosures first
@ -648,7 +645,12 @@ def _get_link(uri, entry):
except:
pass
if size == 0:
# GUID usually has URL to result on page
infourl = None
if entry.id and entry.id != link and entry.id.startswith('http'):
infourl = entry.id
if size == 0L:
_RE_SIZE1 = re.compile(r'Size:\s*(\d+\.\d+\s*[KMG]{0,1})B\W*', re.I)
_RE_SIZE2 = re.compile(r'\W*(\d+\.\d+\s*[KMG]{0,1})B\W*', re.I)
# Try to find size in Description
@ -697,10 +699,10 @@ def _get_link(uri, entry):
except:
category = ''
return link, category, size, age, season, episode
return link, infourl, category, size, age, season, episode
else:
logging.warning(T('Empty RSS entry found (%s)'), link)
return None, '', 0, None, 0, 0
return None, None, '', 0L, None, 0, 0
def special_rss_site(url):

10
sabnzbd/sabtray.py

@ -19,6 +19,7 @@
sabtray.py - Systray icon for SABnzbd on Windows, contributed by Jan Schejbal
"""
import os
import logging
from time import sleep
@ -29,8 +30,6 @@ import sabnzbd.scheduler as scheduler
from sabnzbd.downloader import Downloader
import sabnzbd.cfg as cfg
from sabnzbd.misc import to_units
import os
import cherrypy
# contains the tray icon, which demands its own thread
from sabnzbd.utils.systrayiconthread import SysTrayIconThread
@ -98,10 +97,13 @@ class SABTrayThread(SysTrayIconThread):
speed = to_units(bpsnow)
if self.sabpaused:
self.hover_text = self.txt_paused
if bytes_left > 0:
self.hover_text = "%s - %s: %sB" % (self.txt_paused, self.txt_remaining, mb_left)
else:
self.hover_text = self.txt_paused
self.icon = self.sabicons['pause']
elif bytes_left > 0:
self.hover_text = "%sB/s %s: %sB (%s)" % (speed, self.txt_remaining, mb_left, time_left)
self.hover_text = "%sB/s - %s: %sB (%s)" % (speed, self.txt_remaining, mb_left, time_left)
self.icon = self.sabicons['green']
else:
self.hover_text = self.txt_idle

1
sabnzbd/sabtraylinux.py

@ -21,7 +21,6 @@ sabnzbd.sabtraylinux - System tray icon for Linux, inspired from the Windows one
import gtk
import gobject
import cherrypy
from time import sleep
import subprocess
from threading import Thread

8
sabnzbd/skintext.py

@ -47,16 +47,10 @@ SKIN_TEXT = {
'post-Propagating' : TT('Propagation delay'),
'post-Checking' : TT('Checking'), #: PP status
'sch-frequency' : TT('Frequency'), #: #: Config->Scheduler
'sch-action' : TT('Action'), #: #: Config->Scheduler
'sch-arguments' : TT('Arguments'), #: #: Config->Scheduler
'sch-task' : TT('Task'), #: #: Config->Scheduler
'sch-disable_server' : TT('disable server'), #: #: Config->Scheduler
'sch-enable_server' : TT('enable server'), #: #: Config->Scheduler
'sch-resume' : TT('Resume'), #: #: Config->Scheduler
'sch-pause' : TT('Pause'), #: #: Config->Scheduler
'sch-shutdown' : TT('Shutdown'), #: #: Config->Scheduler
'sch-restart' : TT('Restart'), #: #: Config->Scheduler
'sch-speedlimit' : TT('Speedlimit'), #: #: Config->Scheduler
'sch-pause_all' : TT('Pause All'), #: #: Config->Scheduler
'sch-pause_post' : TT('Pause post-processing'), #: #: Config->Scheduler

2
sabnzbd/sorting.py

@ -237,7 +237,7 @@ class SeriesSorter(object):
one = '-'.join(extra_list)
two = '-'.join(extra2_list)
return (one, two)
return one, two
def get_shownames(self):
""" Get the show name from the match object and format it """

4
sabnzbd/urlgrabber.py

@ -200,7 +200,7 @@ class URLGrabber(Thread):
retry = True
fetch_request = None
elif retry:
fetch_request, msg, retry, wait, data = _analyse(fetch_request, url, future_nzo)
fetch_request, msg, retry, wait, data = _analyse(fetch_request, future_nzo)
if not fetch_request:
if retry:
@ -352,7 +352,7 @@ def _build_request(url):
return urllib.request.urlopen(req)
def _analyse(fetch_request, url, future_nzo):
def _analyse(fetch_request, future_nzo):
""" Analyze response of indexer
returns fetch_request|None, error-message|None, retry, wait-seconds, data
"""

5
sabnzbd/utils/certgen.py

@ -52,7 +52,7 @@ def generate_key(key_size=2048, output_file='key.pem'):
# Ported from cryptography docs/x509/tutorial.rst
def generate_local_cert(private_key, days_valid=3560, output_file='cert.cert', LN='SABnzbd', ON='SABnzbd', CN='localhost'):
def generate_local_cert(private_key, days_valid=3560, output_file='cert.cert', LN=u'SABnzbd', ON=u'SABnzbd'):
# Various details about who we are. For a self-signed certificate the
# subject and issuer are always the same.
subject = issuer = x509.Name([
@ -64,8 +64,7 @@ def generate_local_cert(private_key, days_valid=3560, output_file='cert.cert', L
# build Subject Alternate Names (aka SAN) list
# First the host names, add with x509.DNSName():
san_list = [x509.DNSName("localhost")]
san_list.append(x509.DNSName(str(socket.gethostname())))
san_list = [x509.DNSName(u"localhost"), x509.DNSName(unicode(socket.gethostname()))]
# Then the host IP addresses, add with x509.IPAddress()
# Inside a try-except, just to be sure

2
sabnzbd/utils/checkdir.py

@ -6,7 +6,6 @@ Functions to check if the path filesystem uses FAT
import sys
import os
import subprocess
debug = False
@ -71,7 +70,6 @@ def isFAT(dir):
'''
dfcmd = "df " + dir
device = ''
for thisline in os.popen(dfcmd).readlines():
if thisline.find('/')==0:
if debug: print(thisline)

9
sabnzbd/utils/diskspeed.py

@ -3,7 +3,6 @@
import time
import os
import sys
import logging
_DUMP_DATA_SIZE = 10 * 1024 * 1024
_DUMP_DATA = os.urandom(_DUMP_DATA_SIZE)
@ -14,11 +13,14 @@ def diskspeedmeasure(dirname):
method: keep writing a file, until 1 second is passed.
Then divide bytes written by time passed
"""
maxtime = 0.5 # sec
maxtime = 1.0 # sec
total_written = 0
filename = os.path.join(dirname, 'outputTESTING.txt')
fp = os.open(filename, os.O_CREAT | os.O_WRONLY, 0o777) # low-level I/O
# Use low-level I/O
fp = os.open(filename, os.O_CREAT | os.O_WRONLY, 0o777)
# Start looping
total_time = 0.0
while total_time < maxtime:
@ -30,7 +32,8 @@ def diskspeedmeasure(dirname):
# Remove the file
try:
fp.close()
# Have to use low-level close
os.close(fp)
os.remove(filename)
except:
pass

13
sabnzbd/utils/getperformance.py

@ -20,7 +20,7 @@ def getcpu():
elif platform.system() == "Linux":
for myline in open("/proc/cpuinfo"):
if myline.startswith(('model name')):
if myline.startswith('model name'):
# Typical line:
# model name : Intel(R) Xeon(R) CPU E5335 @ 2.00GHz
cputype = myline.split(":", 1)[1] # get everything after the first ":"
@ -45,6 +45,17 @@ def getpystone():
except:
return None
# if we arrive here, we were able to succesfully import pystone, so start calculation
maxpystone = None
# Start with a short run, find the the pystone, and increase runtime until duration took > 0.1 second
for pyseed in [1000, 2000, 5000, 10000, 20000, 50000, 100000, 200000]:
duration, pystonefloat = pystones(pyseed)
maxpystone = max(maxpystone, int(pystonefloat))
# Stop when pystone() has been running for at least 0.1 second
if duration > 0.1:
break
return maxpystone
if __name__ == '__main__':
print((getpystone()))

222
sabnzbd/utils/happyeyeballs.py

@ -6,10 +6,10 @@
# If the HOST has an IPv6 address, IPv6 is given a head start by delaying IPv4. See https://tools.ietf.org/html/rfc6555#section-4.1
# You can run this as a standalone program, or as a module:
'''
"""
from happyeyeballs import happyeyeballs
print happyeyeballs('newszilla.xs4all.nl', port=119)
'''
"""
# or with more logging:
'''
from happyeyeballs import happyeyeballs
@ -31,119 +31,119 @@ DEBUG = False
# called by each thread
def do_socket_connect(queue, ip, PORT, SSL, ipv4delay):
# connect to the ip, and put the result into the queue
if DEBUG: logging.debug("Input for thread is %s %s %s", ip, PORT, SSL)
try:
# CREATE SOCKET
if ip.find(':') >= 0:
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
if ip.find('.') >= 0:
time.sleep(ipv4delay) # IPv4 ... so a delay for IPv4 as we prefer IPv6. Note: ipv4delay could be 0
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(3)
if not SSL:
# Connect ...
s.connect((ip, PORT))
# ... and close
s.close()
else:
# WRAP SOCKET
wrappedSocket = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
# CONNECT
wrappedSocket.connect((ip, PORT))
# CLOSE SOCKET CONNECTION
wrappedSocket.close()
queue.put((ip, True))
if DEBUG: logging.debug("connect to %s OK", ip)
except:
queue.put((ip, False))
if DEBUG: logging.debug("connect to %s not OK", ip)
pass
# connect to the ip, and put the result into the queue
if DEBUG: logging.debug("Input for thread is %s %s %s", ip, PORT, SSL)
try:
# CREATE SOCKET
if ip.find(':') >= 0:
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
if ip.find('.') >= 0:
time.sleep(ipv4delay) # IPv4 ... so a delay for IPv4 as we prefer IPv6. Note: ipv4delay could be 0
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(3)
if not SSL:
# Connect ...
s.connect((ip, PORT))
# ... and close
s.close()
else:
# WRAP SOCKET
wrappedSocket = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
# CONNECT
wrappedSocket.connect((ip, PORT))
# CLOSE SOCKET CONNECTION
wrappedSocket.close()
queue.put((ip, True))
if DEBUG: logging.debug("connect to %s OK", ip)
except:
queue.put((ip, False))
if DEBUG: logging.debug("connect to %s not OK", ip)
pass
def happyeyeballs(HOST, **kwargs):
# Happyeyeballs function, with caching of the results
# Fill out the parameters into the variables
try:
PORT = kwargs['port']
except:
PORT = 80
try:
SSL = kwargs['ssl']
except:
SSL = False
try:
preferipv6 = kwargs['preferipv6']
except:
preferipv6 = True # prefer IPv6, so give IPv6 connects a head start by delaying IPv4
# Find out if a cached result is available, and recent enough:
timecurrent = int(time.time()) # current time in seconds since epoch
retentionseconds = 100
hostkey = (HOST, PORT, SSL, preferipv6) # Example key: (u'ssl.astraweb.com', 563, True, True)
try:
happyeyeballs.happylist[hostkey] # just to check: does it exist?
# No exception, so entry exists, so let's check the time:
timecached = happyeyeballs.happylist[hostkey][1]
if timecurrent - timecached <= retentionseconds:
if DEBUG: logging.debug("existing cached result recent enough")
return happyeyeballs.happylist[hostkey][0]
else:
if DEBUG: logging.debug("existing cached result too old. Find a new one")
# Continue a few lines down
except:
# Exception, so entry not there, so we have to fill it out
if DEBUG: logging.debug("Host not yet in the cache. Find entry")
pass
# we only arrive here if the entry has to be determined. So let's do that:
# We have to determine the (new) best IP address
start = time.clock()
if DEBUG: logging.debug("\n\n%s %s %s %s", HOST, PORT, SSL, preferipv6)
ipv4delay = 0
try:
# Check if there is an AAAA / IPv6 result for this host:
info = socket.getaddrinfo(HOST, PORT, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_IP, socket.AI_CANONNAME)
if DEBUG: logging.debug("IPv6 address found for %s", HOST)
if preferipv6:
ipv4delay=0.1 # preferipv6, AND at least one IPv6 found, so give IPv4 (!) a delay so that IPv6 has a head start and is preferred
except:
if DEBUG: logging.debug("No IPv6 address found for %s", HOST)
# Happyeyeballs function, with caching of the results
# Fill out the parameters into the variables
try:
PORT = kwargs['port']
except:
PORT = 80
try:
SSL = kwargs['ssl']
except:
SSL = False
try:
preferipv6 = kwargs['preferipv6']
except:
preferipv6 = True # prefer IPv6, so give IPv6 connects a head start by delaying IPv4
# Find out if a cached result is available, and recent enough:
timecurrent = int(time.time()) # current time in seconds since epoch
retentionseconds = 100
hostkey = (HOST, PORT, SSL, preferipv6) # Example key: (u'ssl.astraweb.com', 563, True, True)
try:
happyeyeballs.happylist[hostkey] # just to check: does it exist?
# No exception, so entry exists, so let's check the time:
timecached = happyeyeballs.happylist[hostkey][1]
if timecurrent - timecached <= retentionseconds:
if DEBUG: logging.debug("existing cached result recent enough")
return happyeyeballs.happylist[hostkey][0]
else:
if DEBUG: logging.debug("existing cached result too old. Find a new one")
# Continue a few lines down
except:
# Exception, so entry not there, so we have to fill it out
if DEBUG: logging.debug("Host not yet in the cache. Find entry")
pass
# we only arrive here if the entry has to be determined. So let's do that:
# We have to determine the (new) best IP address
start = time.clock()
if DEBUG: logging.debug("\n\n%s %s %s %s", HOST, PORT, SSL, preferipv6)
ipv4delay = 0
try:
# Check if there is an AAAA / IPv6 result for this host:
socket.getaddrinfo(HOST, PORT, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_IP, socket.AI_CANONNAME)
if DEBUG: logging.debug("IPv6 address found for %s", HOST)
if preferipv6:
ipv4delay=0.1 # preferipv6, AND at least one IPv6 found, so give IPv4 (!) a delay so that IPv6 has a head start and is preferred
except:
if DEBUG: logging.debug("No IPv6 address found for %s", HOST)
myqueue = queue.Queue() # queue used for threads giving back the results
try:
try:
# Get all IP (IPv4 and IPv6) addresses:
allinfo = socket.getaddrinfo(HOST, PORT, 0, 0, socket.IPPROTO_TCP)
for info in allinfo:
address = info[4][0]
thisthread = threading.Thread(target=do_socket_connect, args=(myqueue, address, PORT, SSL, ipv4delay))
thisthread.daemon = True
thisthread.start()
result = None # default return value, used if none of threads says True/"OK", so no connect on any IP address
# start reading from the Queue for message from the threads:
for i in range(len(allinfo)):
s = myqueue.get() # get a response
if s[1] == True:
result = s[0]
break # the first True/"OK" is enough, so break out of for loop
except:
if DEBUG: logging.debug("something went wrong in the try block")
result = None
logging.info("Quickest IP address for %s (port %s, ssl %s, preferipv6 %s) is %s", HOST, PORT, SSL, preferipv6, result)
delay = int(1000 * (time.clock() - start))
logging.debug("Happy Eyeballs lookup and port connect took %s ms", delay)
# We're done. Store and return the result
if result:
happyeyeballs.happylist[hostkey] = ( result, timecurrent )
if DEBUG: logging.debug("Determined new result for %s with result %s", (hostkey, happyeyeballs.happylist[hostkey]) )
return result
allinfo = socket.getaddrinfo(HOST, PORT, 0, 0, socket.IPPROTO_TCP)
for info in allinfo:
address = info[4][0]
thisthread = threading.Thread(target=do_socket_connect, args=(myqueue, address, PORT, SSL, ipv4delay))
thisthread.daemon = True
thisthread.start()
result = None # default return value, used if none of threads says True/"OK", so no connect on any IP address
# start reading from the Queue for message from the threads:
for i in range(len(allinfo)):
s = myqueue.get() # get a response
if s[1] == True:
result = s[0]
break # the first True/"OK" is enough, so break out of for loop
except:
if DEBUG: logging.debug("something went wrong in the try block")
result = None
logging.info("Quickest IP address for %s (port %s, ssl %s, preferipv6 %s) is %s", HOST, PORT, SSL, preferipv6, result)
delay = int(1000 * (time.clock() - start))
logging.debug("Happy Eyeballs lookup and port connect took %s ms", delay)
# We're done. Store and return the result
if result:
happyeyeballs.happylist[hostkey] = ( result, timecurrent )
if DEBUG: logging.debug("Determined new result for %s with result %s", (hostkey, happyeyeballs.happylist[hostkey]) )
return result
happyeyeballs.happylist = {} # The cached results. This static variable must be after the def happyeyeballs()
@ -152,9 +152,9 @@ happyeyeballs.happylist = {} # The cached results. This static variable must
if __name__ == '__main__':
logger = logging.getLogger('')
logger.setLevel(logging.INFO)
if DEBUG: logger.setLevel(logging.DEBUG)
logger = logging.getLogger('')
logger.setLevel(logging.INFO)
if DEBUG: logger.setLevel(logging.DEBUG)
# plain HTTP/HTTPS sites:
print((happyeyeballs('www.google.com')))

1
sabnzbd/utils/kronos.py

@ -78,7 +78,6 @@ import os
import sys
import sched
import time
import traceback
import weakref
import logging

2
sabnzbd/utils/pystone.py

@ -236,7 +236,7 @@ def Func2(StrParI1, StrParI2):
if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1:
CharLoc = 'A'
IntLoc = IntLoc + 1
if CharLoc >= 'W' and CharLoc <= 'Z':
if 'W' <= CharLoc <= 'Z':
IntLoc = 7
if CharLoc == 'X':
return TRUE

4
sabnzbd/utils/servertests.py

@ -90,7 +90,7 @@ def test_nntp_server(host, port, server=None, username=None, password=None, ssl=
nw.recv_chunk(block=True)
nw.finish_connect(nw.status_code)
except socket.timeout as e:
except socket.timeout:
if port != 119 and not ssl:
return False, T('Timed out: Try enabling SSL or connecting on a different port.')
else:
@ -103,7 +103,7 @@ def test_nntp_server(host, port, server=None, username=None, password=None, ssl=
return False, str(e)
except TypeError as e:
except TypeError:
return False, T('Invalid server address.')
except IndexError:

1
sabnzbd/utils/upload.py

@ -25,7 +25,6 @@ import os
from sabnzbd.encoding import unicoder
import sabnzbd.cfg as cfg
from sabnzbd.filesystem import get_ext, get_filename
import sabnzbd.newsunpack
from sabnzbd.constants import VALID_ARCHIVES, VALID_NZB_FILES
from sabnzbd.dirscanner import ProcessArchiveFile, ProcessSingleFile

6
sabnzbd/zconfig.py

@ -21,7 +21,6 @@ sabnzbd.zconfig - bonjour/zeroconfig support
import os
import logging
import cherrypy
_HOST_PORT = (None, None)
@ -80,11 +79,6 @@ def set_bonjour(host=None, port=None):
return
name = hostname()
if '.local' in name:
suffix = ''
else:
suffix = '.local'
logging.debug('Try to publish in Bonjour as "%s" (%s:%s)', name, host, port)
try:
refObject = pybonjour.DNSServiceRegister(

125
scripts/Deobfuscate.py

@ -28,7 +28,7 @@ NOTES:
1) To use this script you need Python installed on your system and
select "Add to path" during its installation. Select this folder in
Config > Folders > Scripts Folder and select this script for each job
you want it sued for, or link it to a category in Config > Categories.
you want it used for, or link it to a category in Config > Categories.
2) Beware that files on the 'Cleanup List' are removed before
scripts are called and if any of them happen to be required by
the found par2 file, it will fail.
@ -39,37 +39,116 @@ NOTES:
5) Feedback or bugs in this script can be reported in on our forum:
https://forums.sabnzbd.org/viewforum.php?f=9
Improved by P1nGu1n
"""
import os
import sys
import time
import fnmatch
import subprocess
import struct
import hashlib
from os import path
# Files to exclude and minimal file size for renaming
EXCLUDED_FILE_EXTS = ('.vob', '.bin')
MIN_FILE_SIZE = 40*1024*1024
# Are we being called from SABnzbd?
if not os.environ.get('SAB_VERSION'):
print("This script needs to be called from SABnzbd as post-processing script.")
sys.exit(1)
# Files to exclude and minimal file size for renaming
EXCLUDED_FILE_EXTS = ('.vob', '.bin')
MIN_FILE_SIZE = 40*1024*1024
# see: http://parchive.sourceforge.net/docs/specifications/parity-volume-spec/article-spec.html
STRUCT_PACKET_HEADER = struct.Struct("<"
"8s" # Magic sequence
"Q" # Length of the entire packet (including header), must be multiple of 4
"16s" # MD5 Hash of packet
"16s" # Recovery Set ID
"16s" # Packet type
)
PACKET_TYPE_FILE_DESC = 'PAR 2.0\x00FileDesc'
STRUCT_FILE_DESC_PACKET = struct.Struct("<"
"16s" # File ID
"16s" # MD5 hash of the entire file
"16s" # MD5 hash of the first 16KiB of the file
"Q" # Length of the file
)
# Supporting functions
def print_splitter():
""" Simple helper function """
print('\n------------------------\n')
# Windows or others?
par2_command = os.environ['SAB_PAR2_COMMAND']
if os.environ['SAB_MULTIPAR_COMMAND']:
par2_command = os.environ['SAB_MULTIPAR_COMMAND']
# Diagnostic info
def decodePar(parfile):
result = False
dir = os.path.dirname(parfile)
with open(parfile, 'rb') as parfileToDecode:
while True:
header = parfileToDecode.read(STRUCT_PACKET_HEADER.size)
if not header: break # file fully read
(_, packetLength, _, _, packetType) = STRUCT_PACKET_HEADER.unpack(header)
bodyLength = packetLength - STRUCT_PACKET_HEADER.size
# only process File Description packets
if packetType != PACKET_TYPE_FILE_DESC:
# skip this packet
parfileToDecode.seek(bodyLength, os.SEEK_CUR)
continue
chunck = parfileToDecode.read(STRUCT_FILE_DESC_PACKET.size)
(_, _, hash16k, filelength) = STRUCT_FILE_DESC_PACKET.unpack(chunck)
# filename makes up for the rest of the packet, padded with null characters
targetName = parfileToDecode.read(bodyLength - STRUCT_FILE_DESC_PACKET.size).rstrip('\0')
targetPath = path.join(dir, targetName)
# file already exists, skip it
if path.exists(targetPath):
print "File already exists: " + targetName
continue
# find and rename file
srcPath = findFile(dir, filelength, hash16k)
if srcPath is not None:
os.rename(srcPath, targetPath)
print "Renamed file from " + path.basename(srcPath) + " to " + targetName
result = True
else:
print "No match found for: " + targetName
return result
def findFile(dir, filelength, hash16k):
for filename in os.listdir(dir):
filepath = path.join(dir, filename)
# check if the size matches as an indication
if path.getsize(filepath) != filelength: continue
with open(filepath, 'rb') as fileToMatch:
data = fileToMatch.read(16 * 1024)
m = hashlib.md5()
m.update(data)
# compare hash to confirm the match
if m.digest() == hash16k:
return filepath
return None
# Run main program
print_splitter()
print(('SABnzbd version: ', os.environ['SAB_VERSION']))
print(('Job location: ', os.environ['SAB_COMPLETE_DIR']))
print(('Par2-command: ', par2_command))
print_splitter()
# Search for par2 files
@ -86,34 +165,14 @@ if not matches:
# Run par2 from SABnzbd on them
for par2_file in matches:
# Build command, make it check the whole directory
wildcard = os.path.join(os.environ['SAB_COMPLETE_DIR'], '*')
command = [str(par2_command), 'r', par2_file, wildcard]
# Start command
print_splitter()
print(('Starting command: ', repr(command)))
try:
result = subprocess.check_output(command)
except subprocess.CalledProcessError as e:
# Multipar also gives non-zero in case of succes
result = e.output
# Show output
# Analyse data and analyse result
print_splitter()
print(result)
print_splitter()
# Last status-line for the History
# Check if the magic words are there
if 'Repaired successfully' in result or 'All files are correct' in result or \
'Repair complete' in result or 'All Files Complete' in result or 'PAR File(s) Incomplete' in result:
if decodePar(par2_file):
print('Recursive repair/verify finished.')
run_renamer = False
else:
print('Recursive repair/verify did not complete!')
# No matches? Then we try to rename the largest file to the job-name
if run_renamer:
print_splitter()

71
tests/conftest.py

@ -1,71 +0,0 @@
#!/usr/bin/python -OO
# Copyright 2007-2018 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
tests.conftest - Wrappers to start SABnzbd for testing
"""
import os
import itertools
import urllib.request, urllib.error, urllib.parse
import pytest
import shutil
import time
import testhelper
from xprocess import ProcessStarter
@pytest.fixture(scope='session')
def sabnzbd_connect(request, xprocess):
# Get cache directory
base_path = os.path.dirname(os.path.abspath(__file__))
cache_dir = os.path.join(base_path, 'cache')
# Copy basic config file
try:
os.mkdir(cache_dir)
shutil.copyfile(os.path.join(base_path, 'sabnzbd.basic.ini'), os.path.join(cache_dir, 'sabnzbd.ini'))
except:
pass
class Starter(ProcessStarter):
# Wait for SABnzbd to start
pattern = "ENGINE Bus STARTED"
# Start without browser and with basic logging
args = 'python ../../SABnzbd.py -l1 -s %s:%s -b0 -f %s' % (testhelper.SAB_HOST, testhelper.SAB_PORT, cache_dir)
args = args.split()
# We have to wait a bit longer than default
def filter_lines(self, lines):
return itertools.islice(lines, 500)
# Shut it down at the end
def shutdown_sabnzbd():
# Gracefull shutdown request
testhelper.get_url_result('shutdown')
# Takes a second to shutdown
for x in range(5):
try:
shutil.rmtree(cache_dir)
break
except:
time.sleep(1)
request.addfinalizer(shutdown_sabnzbd)
return xprocess.ensure("sabnzbd", Starter)

4
tests/requirements.txt

@ -1,8 +1,8 @@
# SAB-Specific
cheetah
cheetah3
cryptography
sabyenc
# Testing
pytest-xprocess
selenium
requests

9
tests/sabnzbd.basic.ini

@ -1,11 +1,4 @@
__version__ = 19
__encoding__ = utf-8
[misc]
api_key = apikey
[servers]
[[sabnzbd.test]]
enable = 1
host = sabnzd.test
username = sabnzbd
password = sabnzbd
api_key = apikey

61
tests/test_api_pages.py

@ -1,61 +0,0 @@
#!/usr/bin/python -OO
# Copyright 2007-2018 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
tests.test_api_pages - The most basic testing if things work
"""
import pytest
import testhelper
def test_basic_api(sabnzbd_connect):
# Basic API test
assert 'queue' in testhelper.get_api_result('queue')
assert 'history' in testhelper.get_api_result('history')
assert 'status' in testhelper.get_api_result('fullstatus')
assert 'config' in testhelper.get_api_result('get_config')
def test_main_pages(sabnzbd_connect):
# See if the basic pages work
assert 'Traceback' not in testhelper.get_url_result()
assert 'Traceback' not in testhelper.get_url_result('history')
assert 'Traceback' not in testhelper.get_url_result('queue')
assert 'Traceback' not in testhelper.get_url_result('status')
def test_wizard_pages(sabnzbd_connect):
# Test if wizard pages work
assert 'Traceback' not in testhelper.get_url_result('wizard')
assert 'Traceback' not in testhelper.get_url_result('wizard/one')
assert 'Traceback' not in testhelper.get_url_result('wizard/two')
def test_config_pages(sabnzbd_connect):
# Test if config pages work
assert 'Traceback' not in testhelper.get_url_result('config')
assert 'Traceback' not in testhelper.get_url_result('config/general')
assert 'Traceback' not in testhelper.get_url_result('config/server')
assert 'Traceback' not in testhelper.get_url_result('config/categories')
assert 'Traceback' not in testhelper.get_url_result('config/switches')
assert 'Traceback' not in testhelper.get_url_result('config/sorting')
assert 'Traceback' not in testhelper.get_url_result('config/notify')
assert 'Traceback' not in testhelper.get_url_result('config/scheduling')
assert 'Traceback' not in testhelper.get_url_result('config/rss')
assert 'Traceback' not in testhelper.get_url_result('config/special')

295
tests/test_functional.py

@ -0,0 +1,295 @@
#!/usr/bin/python -OO
# Copyright 2007-2018 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
tests.test_functional - The most basic testing if things work
"""
import unittest
import random
from selenium import webdriver
from selenium.common.exceptions import WebDriverException, NoSuchElementException
from selenium.webdriver.chrome.options import Options as ChromeOptions
from selenium.webdriver.firefox.options import Options as FirefoxOptions
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from testhelper import *
class SABnzbdBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
# We try Chrome, fallback to Firefox
try:
driver_options = ChromeOptions()
# Headless on Appveyor/Travis
if "CI" in os.environ:
driver_options.add_argument("--headless")
driver_options.add_argument("--no-sandbox")
cls.driver = webdriver.Chrome(chrome_options=driver_options)
except WebDriverException:
driver_options = FirefoxOptions()
# Headless on Appveyor/Travis
if "CI" in os.environ:
driver_options.headless = True
cls.driver = webdriver.Firefox(firefox_options=driver_options)
# Get the newsserver-info, if available
if "SAB_NEWSSERVER_HOST" in os.environ:
cls.newsserver_host = os.environ['SAB_NEWSSERVER_HOST']
cls.newsserver_user = os.environ['SAB_NEWSSERVER_USER']
cls.newsserver_password = os.environ['SAB_NEWSSERVER_PASSWORD']
@classmethod
def tearDownClass(cls):
cls.driver.close()
cls.driver.quit()
def no_page_crash(self):
# Do a base test if CherryPy did not report test
self.assertNotIn('500 Internal Server Error', self.driver.title)
def open_page(self, url):
# Open a page and test for crash
self.driver.get(url)
self.no_page_crash()
def scroll_to_top(self):
self.driver.find_element_by_tag_name('body').send_keys(Keys.CONTROL + Keys.HOME)
time.sleep(2)
def wait_for_ajax(self):
wait = WebDriverWait(self.driver, 15)
wait.until(lambda driver_wait: self.driver.execute_script('return jQuery.active') == 0)
wait.until(lambda driver_wait: self.driver.execute_script('return document.readyState') == 'complete')
@unittest.skipIf("SAB_NEWSSERVER_HOST" not in os.environ, "Test-server not specified")
class SABnzbdDownloadFlow(SABnzbdBaseTest):
def test_full(self):
# Wrapper for all the tests in order
self.start_wizard()
# Basic test
self.add_nzb_from_url("http://sabnzbd.org/tests/basic_rar5.nzb", "testfile.bin")
# Unicode test
self.add_nzb_from_url("http://sabnzbd.org/tests/unicode_rar.nzb", u"\u4f60\u597d\u4e16\u754c.bin")
# Unicode test with a missing article
#self.add_nzb_from_url("http://sabnzbd.org/tests/unicode_rar_broken.nzb", u"\u4f60\u597d\u4e16\u754c.bin")
def start_wizard(self):
# Language-selection
self.open_page("http://%s:%s/sabnzbd/wizard/" % (SAB_HOST, SAB_PORT))
self.driver.find_element_by_id("en").click()
self.driver.find_element_by_css_selector('.btn.btn-default').click()
# Fill server-info
self.no_page_crash()
host_inp = self.driver.find_element_by_name("host")
host_inp.clear()
host_inp.send_keys(self.newsserver_host)
username_imp = self.driver.find_element_by_name("username")
username_imp.clear()
username_imp.send_keys(self.newsserver_user)
pass_inp = self.driver.find_element_by_name("password")
pass_inp.clear()
pass_inp.send_keys(self.newsserver_password)
# With SSL
ssl_imp = self.driver.find_element_by_name("ssl")
if not ssl_imp.get_attribute('checked'):
ssl_imp.click()
# Test server-check
self.driver.find_element_by_id("serverTest").click()
self.wait_for_ajax()
self.assertIn("Connection Successful", self.driver.find_element_by_id("serverResponse").text)
# Final page done
self.driver.find_element_by_id("next-button").click()
self.no_page_crash()
self.assertIn("http://%s:%s/sabnzbd" % (SAB_HOST, SAB_PORT), self.driver.find_element_by_class_name("quoteBlock").text)
# Go to SAB!
self.driver.find_element_by_css_selector('.btn.btn-success').click()
self.no_page_crash()
def add_nzb_from_url(self, file_url, file_output):
test_job_name = 'testfile_%s' % random.randint(500, 1000)
self.open_page("http://%s:%s/sabnzbd/" % (SAB_HOST, SAB_PORT))
# Wait for modal to open, add URL
self.driver.find_element_by_css_selector('a[href="#modal-add-nzb"]').click()
time.sleep(1)
self.driver.find_element_by_name("nzbURL").send_keys(file_url)
self.driver.find_element_by_name("nzbname").send_keys(test_job_name)
self.driver.find_element_by_css_selector('form[data-bind="submit: addNZBFromURL"] input[type="submit"]').click()
# We wait for 30 seconds to let it complete
for _ in range(120):
try:
# Locate resulting row
result_row = self.driver.find_element_by_xpath('//*[@id="history-tab"]//tr[td//text()[contains(., "%s")]]' % test_job_name)
# Did it complete?
if result_row.find_element_by_css_selector('td.status').text == 'Completed':
break
else:
time.sleep(1)
except NoSuchElementException:
time.sleep(1)
else:
self.fail("Download did not complete")
# Check if the file exists on disk
file_to_find = os.path.join(SAB_COMPLETE_DIR, test_job_name, file_output)
self.assertTrue(os.path.exists(file_to_find), "File not found")
# Shutil can't handle unicode, need to remove the file here
os.remove(file_to_find)
class SABnzbdBasicPagesTest(SABnzbdBaseTest):
def test_base_pages(self):
# Quick-check of all Config pages
test_urls = ['config',
'config/general',
'config/folders',
'config/server',
'config/categories',
'config/switches',
'config/sorting',
'config/notify',
'config/scheduling',
'config/rss',
'config/special']
for test_url in test_urls:
self.open_page("http://%s:%s/%s" % (SAB_HOST, SAB_PORT, test_url))
@unittest.skipIf("SAB_NEWSSERVER_HOST" not in os.environ, "Test-server not specified")
class SABnzbdConfigServers(SABnzbdBaseTest):
server_name = "_SeleniumServer"
def open_config_servers(self):
# Test if base page works
self.open_page("http://%s:%s/sabnzbd/config/server" % (SAB_HOST, SAB_PORT))
self.scroll_to_top()
# Show advanced options
advanced_btn = self.driver.find_element_by_name("advanced-settings-button")
if not advanced_btn.get_attribute('checked'):
advanced_btn.click()
def add_test_server(self):
# Add server
self.driver.find_element_by_id("addServerButton").click()
host_inp = self.driver.find_element_by_name("host")
host_inp.clear()
host_inp.send_keys(self.newsserver_host)
username_imp = self.driver.find_element_by_css_selector("#addServerContent input[data-hide='username']")
username_imp.clear()
username_imp.send_keys(self.newsserver_user)
pass_inp = self.driver.find_element_by_css_selector("#addServerContent input[data-hide='password']")
pass_inp.clear()
pass_inp.send_keys(self.newsserver_password)
# With SSL
ssl_imp = self.driver.find_element_by_name("ssl")
if not ssl_imp.get_attribute('checked'):
ssl_imp.click()
# Check that we filled the right port automatically
self.assertEqual(self.driver.find_element_by_id("port").get_attribute('value'), '563')
# Test server-check
self.driver.find_element_by_css_selector("#addServerContent .testServer").click()
self.wait_for_ajax()
self.assertIn("Connection Successful", self.driver.find_element_by_css_selector('#addServerContent .result-box').text)
# Set test-servername
self.driver.find_element_by_id("displayname").send_keys(self.server_name)
# Add and show details
pass_inp.send_keys(Keys.RETURN)
time.sleep(1)
if not self.driver.find_element_by_id("host0").is_displayed():
self.driver.find_element_by_class_name("showserver").click()
def remove_server(self):
# Remove the first server and accept the confirmation
self.driver.find_element_by_class_name("delServer").click()
self.driver.switch_to.alert.accept()
# Check that it's gone
time.sleep(2)
self.assertNotIn(self.server_name, self.driver.page_source)
def test_add_and_remove_server(self):
self.open_config_servers()
self.add_test_server()
self.remove_server()
def test_empty_bad_password(self):
self.open_config_servers()
self.add_test_server()
# Test server-check with empty password
pass_inp = self.driver.find_elements_by_css_selector("input[data-hide='password']")[1]
pass_inp.clear()
self.driver.find_elements_by_css_selector(".testServer")[1].click()
self.wait_for_ajax()
check_result = self.driver.find_elements_by_css_selector('.result-box')[1].text.lower()
self.assertTrue("authentication failed" in check_result or "invalid username or password" in check_result)
# Test server-check with bad password
pass_inp.send_keys("bad")
self.driver.find_elements_by_css_selector(".testServer")[1].click()
self.wait_for_ajax()
self.assertTrue("authentication failed" in check_result or "invalid username or password" in check_result)
# Finish
self.remove_server()
class SABnzbdConfigCategories(SABnzbdBaseTest):
category_name = "testCat"
def test_page(self):
# Test if base page works
self.open_page("http://%s:%s/sabnzbd/config/categories" % (SAB_HOST, SAB_PORT))
# Add new category
self.driver.find_elements_by_name("newname")[1].send_keys("testCat")
self.driver.find_element_by_xpath("//button/text()[normalize-space(.)='Add']/parent::*").click()
self.no_page_crash()
self.assertNotIn(self.category_name, self.driver.page_source)
if __name__ == "__main__":
unittest.main(failfast=True)

58
tests/test_nzb.py

@ -1,58 +0,0 @@
#!/usr/bin/python -OO
# Copyright 2007-2018 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
tests.test_nzb - Basic NZB adding support
"""
import os
import pytest
import testhelper
# Where are we now?
base_path = os.path.dirname(os.path.abspath(__file__))
def nzo_in_queue(nzo_response):
    """ Helper function for checking if file is in queue and then remove it.

        nzo_response: JSON response from an add-NZB API call; must contain
        'status' (bool) and 'nzo_ids' (list with at least one id).
    """
    queue_res = testhelper.get_api_result('queue')
    nzo_id = nzo_response['nzo_ids'][0]

    # Was it added?
    assert nzo_response['status'] is True
    assert queue_res['queue']['slots'][0]['nzo_id'] == nzo_id

    # Let's remove it
    remove_response = testhelper.get_api_result('queue', {'name': 'delete', 'value': nzo_id})
    # BUG FIX: check the response of the *delete* call, not the original
    # add-response a second time (copy-paste error)
    assert remove_response['status'] is True

    # Really gone?
    queue_res = testhelper.get_api_result('queue')
    assert not queue_res['queue']['slots']
def test_addfile(sabnzbd_connect):
    """ Basic upload via the API: the NZB must land in (and leave) the queue. """
    nzb_path = os.path.join(base_path, 'data', 'reftestnzb.nzb')
    nzo_in_queue(testhelper.upload_nzb(nzb_path))
def test_addlocalfile(sabnzbd_connect):
    """ Basic adding of an on-disk NZB via 'addlocalfile' must queue it. """
    nzb_path = os.path.join(base_path, 'data', 'reftestnzb.nzb')
    add_response = testhelper.get_api_result('addlocalfile', {'name': nzb_path})
    nzo_in_queue(add_response)

65
tests/testhelper.py

@ -19,12 +19,18 @@
tests.testhelper - Basic helper functions
"""
import urllib.request, urllib.error, urllib.parse
import json
import os
import shutil
import subprocess
import time
import requests
SAB_HOST = 'localhost'
SAB_PORT = 8081
SAB_BASE_DIR = os.path.dirname(os.path.abspath(__file__))
SAB_CACHE_DIR = os.path.join(SAB_BASE_DIR, 'cache')
SAB_COMPLETE_DIR = os.path.join(SAB_CACHE_DIR, 'Downloads', 'complete')
def get_url_result(url=''):
@ -41,8 +47,57 @@ def get_api_result(mode, extra_arguments={}):
return r.json()
def upload_nzb(filename):
    """ Upload the NZB at *filename* via the 'addfile' API call and return
        the parsed JSON response (contains 'status' and 'nzo_ids').
    """
    arguments = {'apikey': 'apikey', 'mode': 'addfile', 'output': 'json'}
    # Use a context manager so the file handle is closed after the POST
    # instead of being leaked
    with open(filename, 'rb') as nzb_file:
        files = {'name': nzb_file}
        return requests.post('http://%s:%s/api' % (SAB_HOST, SAB_PORT), files=files, data=arguments).json()
def setUpModule():
    """ Start a fresh SABnzbd instance on SAB_HOST:SAB_PORT with a clean
        cache directory, waiting until it responds (max ~10 seconds).

        Raises requests.ConnectionError if SABnzbd never comes up.
    """
    # Remove cache if already there
    if os.path.isdir(SAB_CACHE_DIR):
        shutil.rmtree(SAB_CACHE_DIR)

    # Copy basic config file with API key
    os.mkdir(SAB_CACHE_DIR)
    shutil.copyfile(os.path.join(SAB_BASE_DIR, 'sabnzbd.basic.ini'), os.path.join(SAB_CACHE_DIR, 'sabnzbd.ini'))

    # Check if we have language files
    if not os.path.exists(os.path.join(SAB_BASE_DIR, '..', 'locale')):
        lang_command = 'python %s/../tools/make_mo.py' % SAB_BASE_DIR
        # BUG FIX: wait for the translation build to finish before starting
        # SABnzbd; a fire-and-forget Popen races with the SABnzbd startup
        subprocess.Popen(lang_command.split()).communicate()

    # Start SABnzbd
    sab_command = 'python %s/../SABnzbd.py --new -l2 -s %s:%s -b0 -f %s' % (SAB_BASE_DIR, SAB_HOST, SAB_PORT, SAB_CACHE_DIR)
    subprocess.Popen(sab_command.split())

    # Wait for SAB to respond
    for _ in range(10):
        try:
            get_url_result()
            # Woohoo, we're up!
            break
        except requests.ConnectionError:
            time.sleep(1)
    else:
        # Make sure we clean up
        tearDownModule()
        raise requests.ConnectionError()
def tearDownModule():
    """ Request a graceful SABnzbd shutdown and remove the cache directory,
        retrying for up to ~10 seconds while SABnzbd releases its files.
    """
    # Graceful shutdown request
    try:
        get_url_result('shutdown')
    except requests.ConnectionError:
        pass

    # Takes a second to shutdown; files may still be locked at first
    for x in range(10):
        try:
            shutil.rmtree(SAB_CACHE_DIR)
            break
        except OSError:
            # BUG FIX: Python 2 print statement is a SyntaxError on Python 3
            # (this file already uses py3-only urllib.request imports)
            print("Unable to remove cache dir (try %d)" % x)
            time.sleep(1)

11
tools/extract_pot.py

@ -27,7 +27,7 @@ import re
f = open('sabnzbd/version.py')
code = f.read()
f.close()
exec(code)
exec code
# Fixed information for the POT header
HEADER = r'''#
@ -53,7 +53,7 @@ EMAIL_DIR = 'email'
DOMAIN = 'SABnzbd'
DOMAIN_EMAIL = 'SABemail'
DOMAIN_NSIS = 'SABnsis'
PARMS = '-d %s -p %s -k T -k Ta -k TT -o %s.pot.tmp' % (DOMAIN, PO_DIR, DOMAIN)
PARMS = '-d %s -p %s -w500 -k T -k Ta -k TT -o %s.pot.tmp' % (DOMAIN, PO_DIR, DOMAIN)
FILES = 'SABnzbd.py SABHelper.py SABnzbdDelegate.py sabnzbd/*.py sabnzbd/utils/*.py'
FILE_CACHE = {}
@ -108,8 +108,11 @@ def get_context(line):
item = item.split(':')[0]
if context:
newlines.append('%s [%s]' % (item, context))
else:
# Format context
item = '%s [%s]' % (item, context)
# Only add new texts
if item not in newlines:
newlines.append(item)
return '#: ' + ' # '.join(newlines) + '\n'

38
util/apireg.py

@ -69,7 +69,7 @@ def set_connection_info(url, user=True):
try:
hive = winreg.ConnectRegistry(None, section)
try:
key = winreg.CreateKey(hive, keypath)
_winreg.CreateKey(hive, keypath)
except:
pass
key = winreg.OpenKey(hive, keypath)
@ -105,18 +105,42 @@ def get_install_lng():
""" Return language-code used by the installer """
lng = 0
try:
hive = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
key = winreg.OpenKey(hive, r"Software\SABnzbd")
for i in range(0, winreg.QueryInfoKey(key)[1]):
name, value, val_type = winreg.EnumValue(key, i)
hive = _winreg.ConnectRegistry(None, _winreg.HKEY_CURRENT_USER)
key = _winreg.OpenKey(hive, r"Software\SABnzbd")
for i in range(0, _winreg.QueryInfoKey(key)[1]):
name, value, val_type = _winreg.EnumValue(key, i)
if name == 'Installer Language':
lng = value
winreg.CloseKey(key)
except WindowsError:
pass
finally:
winreg.CloseKey(hive)
return lng
_winreg.CloseKey(hive)
if lng in LanguageMap:
return LanguageMap[lng]
return 'en'
# Map from NSIS-codepage to our language-strings
LanguageMap = {
    '1033': 'en',
    '1036': 'fr',
    '1031': 'de',
    '1043': 'nl',
    '1035': 'fi',
    '1045': 'pl',
    '1053': 'sv',
    '1030': 'da',
    '2068': 'nb',
    '1048': 'ro',
    '1034': 'es',
    # BUG FIX: the Brazilian Portuguese locale code is 'pt_BR', not 'pr_BR'
    # (the translation file shipped with the project is pt_BR.po)
    '1046': 'pt_BR',
    '3098': 'sr',
    '1037': 'he',
    '1049': 'ru',
    '2052': 'zh_CN'
}
if __name__ == '__main__':

1
util/mailslot.py

@ -19,7 +19,6 @@
sabnzbd.mailslot - Mailslot communication
"""
import os
from win32file import GENERIC_WRITE, FILE_SHARE_READ, \
OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL
from ctypes import c_uint, c_buffer, byref, sizeof, windll

BIN
win/par2/multipar/par2j.exe

Binary file not shown.

BIN
win/par2/multipar/par2j64.exe

Binary file not shown.

BIN
win/unrar/UnRAR.exe

Binary file not shown.

BIN
win/unrar/x64/UnRAR.exe

Binary file not shown.
Loading…
Cancel
Save