49 changed files with 13580 additions and 182 deletions
@ -1,14 +1,15 @@ |
|||
from ..base import * |
|||
|
|||
|
|||
@Js
def console():
    # JS-callable placeholder for the global `console` object; it carries no
    # behavior itself — the logging methods are attached below via .put().
    pass
|||
|
|||
|
|||
@Js
def log():
    # Shared implementation for every console level: print only the first
    # JS argument (the `arguments` object is supplied by the @Js wrapper).
    print(arguments[0])
|||
|
|||
|
|||
# Expose the standard console methods; every severity level funnels through
# the same `log` implementation above.
console.put('log', log)
console.put('debug', log)
console.put('info', log)
console.put('warn', log)
console.put('error', log)
|||
|
File diff suppressed because it is too large
@ -0,0 +1,608 @@ |
|||
#!/usr/bin/env python |
|||
# -*- coding: utf-8 -*- |
|||
# Copyright (c) 2005-2010 ActiveState Software Inc. |
|||
# Copyright (c) 2013 Eddy Petrișor |
|||
|
|||
"""Utilities for determining application-specific dirs. |
|||
|
|||
See <http://github.com/ActiveState/appdirs> for details and usage. |
|||
""" |
|||
# Dev Notes: |
|||
# - MSDN on where to store app data files: |
|||
# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 |
|||
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html |
|||
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html |
|||
|
|||
# Package version, exposed both as a tuple and as a dotted string.
__version_info__ = (1, 4, 3)
__version__ = '.'.join(map(str, __version_info__))


import sys
import os

PY3 = sys.version_info[0] == 3

if PY3:
    # Py2/Py3 shim: the pywin32 helper below calls unicode() on its result.
    unicode = str

if sys.platform.startswith('java'):
    # On Jython sys.platform reports the JVM, not the host OS, so derive a
    # *sys.platform*-style string from the Java runtime information instead.
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'):  # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'):  # "Mac OS X", etc.
        system = 'darwin'
    else:  # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform
|||
|
|||
|
|||
|
|||
def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return the per-user data directory for this application.

    "appname": application name; when None the bare system directory
        is returned.
    "appauthor": (Windows only) publisher/company name; defaults to
        appname, pass False to omit the author path segment.
    "version": optional version path segment, e.g. "<major>.<minor>";
        only applied when appname is given.
    "roaming": (Windows only, default False) use the roaming AppData
        directory so data follows the user on roaming-profile networks.
        See <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>.

    Typical user data directories:
        Mac OS X: ~/Library/Application Support/<AppName>
        Unix:     ~/.local/share/<AppName>   # $XDG_DATA_HOME honoured
        Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7 (not roaming):  C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7 (roaming):      C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        folder = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(folder))
        if appname:
            # The author segment is skipped only when explicitly disabled.
            segments = [path, appname] if appauthor is False else [path, appauthor, appname]
            path = os.path.join(*segments)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG base-directory spec: honour $XDG_DATA_HOME when set.
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
|||
|
|||
|
|||
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return the system-wide (shared) data directory for this application.

    "appname": application name; when None the bare system directory
        is returned.
    "appauthor": (Windows only) publisher/company name; defaults to
        appname, pass False to omit the author path segment.
    "version": optional version path segment; only applied when appname
        is given.
    "multipath": (*nix only) when True return the entire os.pathsep-joined
        list of data dirs; otherwise only the first entry of
        $XDG_DATA_DIRS (default '/usr/local/share') is returned.

    Typical site data directories:
        Mac OS X: /Library/Application Support/<AppName>
        Unix:     /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:   C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
        Vista:    (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
        Win 7:    C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            segments = [path, appname] if appauthor is False else [path, appauthor, appname]
            path = os.path.join(*segments)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG default for $XDG_DATA_DIRS; version is folded into appname
        # here, so this branch returns early and skips the join below.
        raw = os.getenv('XDG_DATA_DIRS',
                        os.pathsep.join(['/usr/local/share', '/usr/share']))
        dirs = [os.path.expanduser(d.rstrip(os.sep)) for d in raw.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            dirs = [os.sep.join([d, appname]) for d in dirs]
        return os.pathsep.join(dirs) if multipath else dirs[0]

    if appname and version:
        path = os.path.join(path, version)
    return path
|||
|
|||
|
|||
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return the per-user config directory for this application.

    "appname": application name; when None the bare system directory
        is returned.
    "appauthor": (Windows only) publisher/company name; defaults to
        appname, pass False to omit the author path segment.
    "version": optional version path segment; only applied when appname
        is given.
    "roaming": (Windows only, default False) use the roaming AppData
        directory so data follows the user on roaming-profile networks.

    Typical user config directories:
        Mac OS X: same as user_data_dir
        Unix:     ~/.config/<AppName>   # $XDG_CONFIG_HOME honoured
        Win *:    same as user_data_dir
    """
    if system not in ("win32", "darwin"):
        # XDG base-directory spec: honour $XDG_CONFIG_HOME when set.
        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        if appname:
            path = os.path.join(path, appname)
    else:
        # Windows and macOS keep config alongside data.
        path = user_data_dir(appname, appauthor, None, roaming)
    if appname and version:
        path = os.path.join(path, version)
    return path
|||
|
|||
|
|||
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return the system-wide (shared) config directory for this application.

    "appname": application name; when None the bare system directory
        is returned.
    "appauthor": (Windows only) publisher/company name; defaults to
        appname, pass False to omit the author path segment.
    "version": optional version path segment; only applied when appname
        is given.
    "multipath": (*nix only) when True return the entire os.pathsep-joined
        list of config dirs; otherwise only the first entry of
        $XDG_CONFIG_DIRS (default '/etc/xdg') is returned.

    Typical site config directories:
        Mac OS X: same as site_data_dir
        Unix:     /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName>
        Win *:    same as site_data_dir
        Vista:    (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system in ("win32", "darwin"):
        # Windows and macOS keep shared config alongside shared data.
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
        return path

    # XDG default for $XDG_CONFIG_DIRS; version is folded into appname.
    raw = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
    dirs = [os.path.expanduser(d.rstrip(os.sep)) for d in raw.split(os.pathsep)]
    if appname:
        if version:
            appname = os.path.join(appname, version)
        dirs = [os.sep.join([d, appname]) for d in dirs]
    return os.pathsep.join(dirs) if multipath else dirs[0]
|||
|
|||
|
|||
def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return the per-user cache directory for this application.

    "appname": application name; when None the bare system directory
        is returned.
    "appauthor": (Windows only) publisher/company name; defaults to
        appname, pass False to omit the author path segment.
    "version": optional version path segment; only applied when appname
        is given.
    "opinion": (boolean) set False to stop "Cache" being appended to the
        base app data dir on Windows.

    Typical user cache directories:
        Mac OS X: ~/Library/Caches/<AppName>
        Unix:     ~/.cache/<AppName>   (XDG default)
        Win XP:   C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:    C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows MSDN only says local settings belong in `CSIDL_LOCAL_APPDATA`
    (the same dir `user_data_dir` returns); apps conventionally nest their
    cache under it, hence the opinionated "Cache" suffix.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            segments = [path, appname] if appauthor is False else [path, appauthor, appname]
            path = os.path.join(*segments)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG base-directory spec: honour $XDG_CACHE_HOME when set.
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
|||
|
|||
|
|||
def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return the per-user state directory for this application.

    "appname": application name; when None the bare system directory
        is returned.
    "appauthor": (Windows only) publisher/company name; defaults to
        appname, pass False to omit the author path segment.
    "version": optional version path segment; only applied when appname
        is given.
    "roaming": (Windows only, default False) use the roaming AppData
        directory so data follows the user on roaming-profile networks.

    Typical user state directories:
        Mac OS X: same as user_data_dir
        Unix:     ~/.local/state/<AppName>   # $XDG_STATE_HOME honoured
        Win *:    same as user_data_dir

    On Unix this follows the Debian proposal
    <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
    extending the XDG spec with $XDG_STATE_HOME.
    """
    if system not in ("win32", "darwin"):
        path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
        if appname:
            path = os.path.join(path, appname)
    else:
        # Windows and macOS have no separate state location.
        path = user_data_dir(appname, appauthor, None, roaming)
    if appname and version:
        path = os.path.join(path, version)
    return path
|||
|
|||
|
|||
def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

    "appname" is the name of application.
        If None, just the system directory is returned.
    "appauthor" (only used on Windows) is the name of the
        appauthor or distributing body for this application. Typically
        it is the owning company name. This falls back to appname. You may
        pass False to disable it.
    "version" is an optional version path element to append to the
        path. You might want to use this if you want multiple versions
        of your app to be able to run independently. If used, this
        would typically be "<major>.<minor>".
        Only applied when appname is present.
    "opinion" (boolean) can be False to disable the appending of
        "Logs" to the base app data dir for Windows, and "log" to the
        base cache dir for Unix. See discussion below.

    Typical user log directories are:
        Mac OS X:   ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory.

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    """
    if system == "darwin":
        # BUG FIX: only append appname when it is given; the previous code
        # passed appname straight to os.path.join, which raises TypeError
        # for appname=None instead of returning the bare system directory
        # as documented (and as every sibling function does).
        path = os.path.expanduser('~/Library/Logs')
        if appname:
            path = os.path.join(path, appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        # user_data_dir already applied the version segment; don't re-add it.
        version = False
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        # user_cache_dir already applied the version segment; don't re-add it.
        version = False
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path
|||
|
|||
|
|||
class AppDirs(object):
    """Convenience wrapper bundling the module-level *_dir functions.

    Construct once with the application's identity and query the matching
    directories through read-only properties.
    """

    def __init__(self, appname=None, appauthor=None, version=None,
                 roaming=False, multipath=False):
        # Stored verbatim; each property forwards them to the matching
        # module-level function on access.
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        """Per-user data directory."""
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        """Shared (system-wide) data directory."""
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        """Per-user config directory."""
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        """Shared (system-wide) config directory."""
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        """Per-user cache directory."""
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_state_dir(self):
        """Per-user state directory."""
        return user_state_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        """Per-user log directory."""
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)
|||
|
|||
|
|||
#---- internal support stuff |
|||
|
|||
def _get_win_folder_from_registry(csidl_name):
    """This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.

    Returns the folder path registered under the user's "Shell Folders"
    key for the given CSIDL_* constant name.
    """
    if PY3:
        import winreg as _winreg
    else:
        import _winreg

    # Registry value names under "Shell Folders" for the CSIDLs we support;
    # an unsupported name raises KeyError.
    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    # QueryValueEx returns (value, type); only the folder path is needed.
    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
    return dir
|||
|
|||
|
|||
def _get_win_folder_with_pywin32(csidl_name):
    """Resolve a CSIDL_* folder via the pywin32 shell bindings."""
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                # win32api missing: keep the long (possibly high-bit) path.
                pass
    except UnicodeError:
        # Conversion failed: fall back to whatever SHGetFolderPath returned.
        pass
    return dir
|||
|
|||
|
|||
def _get_win_folder_with_ctypes(csidl_name):
    """Resolve a CSIDL_* folder via ctypes and SHGetFolderPathW."""
    import ctypes

    # Numeric CSIDL constants for the names this module supports;
    # an unsupported name raises KeyError.
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value
|||
|
|||
def _get_win_folder_with_jna(csidl_name):
    """Resolve a CSIDL_* folder on Jython via the JNA Win32 bindings."""
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    # MAX_PATH chars, 2 bytes each ('c' array of Java chars).
    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    # Strip the trailing NUL padding from the fixed-size buffer.
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir
|||
|
|||
if system == "win32":
    # Pick the best available backend for resolving CSIDL_* folders.
    # Preference order: pywin32 -> ctypes -> JNA (Jython) -> registry.
    try:
        import win32com.shell
        _get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            _get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                import com.sun.jna
                _get_win_folder = _get_win_folder_with_jna
            except ImportError:
                _get_win_folder = _get_win_folder_from_registry
|||
|
|||
|
|||
#---- self test code |
|||
|
|||
if __name__ == "__main__":
    # Self-test: print every directory for a sample app under the four
    # interesting constructor configurations.
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir",
             "user_config_dir",
             "user_cache_dir",
             "user_state_dir",
             "user_log_dir",
             "site_data_dir",
             "site_config_dir")

    print("-- app dirs %s --" % __version__)

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
@ -0,0 +1,21 @@ |
|||
# This file is dual licensed under the terms of the Apache License, Version |
|||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository |
|||
# for complete details. |
|||
from __future__ import absolute_import, division, print_function |
|||
|
|||
__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]

# Distribution metadata, kept in one place so both setup machinery and the
# package itself can import it.
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "16.8"

__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"

__license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2014-2016 %s" % __author__
@ -0,0 +1,14 @@ |
|||
# This file is dual licensed under the terms of the Apache License, Version |
|||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository |
|||
# for complete details. |
|||
from __future__ import absolute_import, division, print_function |
|||
|
|||
from .__about__ import ( |
|||
__author__, __copyright__, __email__, __license__, __summary__, __title__, |
|||
__uri__, __version__ |
|||
) |
|||
|
|||
# Public API of the package root: just the metadata re-exported from
# .__about__ above.
__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]
@ -0,0 +1,30 @@ |
|||
# This file is dual licensed under the terms of the Apache License, Version |
|||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository |
|||
# for complete details. |
|||
from __future__ import absolute_import, division, print_function |
|||
|
|||
import sys |
|||
|
|||
|
|||
# Major-version flags used for the Py2/Py3 branches below.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3

# flake8: noqa

# Alias the Py2 `basestring` family so isinstance checks on text work on
# both majors (note the trailing commas: these are 1-tuples).
if PY3:
    string_types = str,
else:
    string_types = basestring,
|||
|
|||
|
|||
def with_metaclass(meta, *bases):
    """
    Create a base class with a metaclass.

    Returns a throwaway class that, when subclassed, creates the subclass
    with metaclass ``meta`` and bases ``bases`` — portable across Py2 and
    Py3 metaclass syntax (same technique as ``six.with_metaclass``).
    """
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):
        def __new__(cls, name, this_bases, d):
            # this_bases is the temporary class; substitute the real bases.
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})
@ -0,0 +1,68 @@ |
|||
# This file is dual licensed under the terms of the Apache License, Version |
|||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository |
|||
# for complete details. |
|||
from __future__ import absolute_import, division, print_function |
|||
|
|||
|
|||
class Infinity(object):
    """Singleton that sorts after every other value.

    The comparison operators are hard-wired: never less than anything,
    always greater, equal only to instances of its own class. Negation
    yields the NegativeInfinity singleton. The class name is immediately
    rebound to its sole instance below.
    """

    def __repr__(self):
        return "Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __lt__(self, other):
        return False

    # <= is identical to <: unconditionally False.
    __le__ = __lt__

    def __gt__(self, other):
        return True

    # >= is identical to >: unconditionally True.
    __ge__ = __gt__

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    def __neg__(self):
        return NegativeInfinity


Infinity = Infinity()
|||
|
|||
|
|||
class NegativeInfinity(object):
    """Singleton that sorts before every other value.

    Mirror image of Infinity: always less than anything, never greater,
    equal only to instances of its own class. Negation yields the
    Infinity singleton. The class name is immediately rebound to its
    sole instance below.
    """

    def __repr__(self):
        return "-Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __lt__(self, other):
        return True

    # <= is identical to <: unconditionally True.
    __le__ = __lt__

    def __gt__(self, other):
        return False

    # >= is identical to >: unconditionally False.
    __ge__ = __gt__

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    def __neg__(self):
        return Infinity


NegativeInfinity = NegativeInfinity()
@ -0,0 +1,301 @@ |
|||
# This file is dual licensed under the terms of the Apache License, Version |
|||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository |
|||
# for complete details. |
|||
from __future__ import absolute_import, division, print_function |
|||
|
|||
import operator |
|||
import os |
|||
import platform |
|||
import sys |
|||
|
|||
from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd |
|||
from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString |
|||
from pkg_resources.extern.pyparsing import Literal as L # noqa |
|||
|
|||
from ._compat import string_types |
|||
from .specifiers import Specifier, InvalidSpecifier |
|||
|
|||
|
|||
__all__ = [ |
|||
"InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", |
|||
"Marker", "default_environment", |
|||
] |
|||
|
|||
|
|||
class InvalidMarker(ValueError):
    """Raised when a marker string does not conform to PEP 508."""
|||
|
|||
|
|||
class UndefinedComparison(ValueError):
    """Raised when a marker comparison is attempted on a value that does
    not support the requested operator."""
|||
|
|||
|
|||
class UndefinedEnvironmentName(ValueError):
    """Raised when a marker references a name that is absent from the
    evaluation environment."""
|||
|
|||
|
|||
class Node(object):
    """Base AST node wrapping a single parsed marker token.

    Subclasses override serialize() to render the token back into valid
    marker syntax.
    """

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return str(self.value)

    def __repr__(self):
        cls_name = type(self).__name__
        return "<{0}({1!r})>".format(cls_name, str(self))

    def serialize(self):
        # Abstract: each concrete node type defines its own rendering.
        raise NotImplementedError
|||
|
|||
|
|||
class Variable(Node):
    """Marker AST leaf naming an environment variable (e.g. os_name)."""

    def serialize(self):
        # Variables render as their bare, unquoted name.
        return str(self)
|||
|
|||
|
|||
class Value(Node):
    """Marker AST leaf holding a literal string value."""

    def serialize(self):
        # Literals render double-quoted, as required by marker syntax.
        return '"{0}"'.format(self)
|||
|
|||
|
|||
class Op(Node):
    """Marker AST leaf holding a comparison operator (e.g. ==, in)."""

    def serialize(self):
        # Operators render verbatim.
        return str(self)
|||
|
|||
|
|||
# pyparsing grammar for PEP 508 environment markers. Longer literals are
# listed first so e.g. "platform_python_implementation" is not matched as
# a shorter prefix.
VARIABLE = (
    L("implementation_version") |
    L("platform_python_implementation") |
    L("implementation_name") |
    L("python_full_version") |
    L("platform_release") |
    L("platform_version") |
    L("platform_machine") |
    L("platform_system") |
    L("python_version") |
    L("sys_platform") |
    L("os_name") |
    L("os.name") |  # PEP-345
    L("sys.platform") |  # PEP-345
    L("platform.version") |  # PEP-345
    L("platform.machine") |  # PEP-345
    L("platform.python_implementation") |  # PEP-345
    L("python_implementation") |  # undocumented setuptools legacy
    L("extra")
)
# Map the deprecated PEP-345 dotted spellings (and the setuptools legacy
# name) onto the canonical PEP 508 variable names.
ALIASES = {
    'os.name': 'os_name',
    'sys.platform': 'sys_platform',
    'platform.version': 'platform_version',
    'platform.machine': 'platform_machine',
    'platform.python_implementation': 'platform_python_implementation',
    'python_implementation': 'platform_python_implementation'
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))

# Version comparison operators; "===" before "==" so the longest matches.
VERSION_CMP = (
    L("===") |
    L("==") |
    L(">=") |
    L("<=") |
    L("!=") |
    L("~=") |
    L(">") |
    L("<")
)

MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))

# Literal operands may be single- or double-quoted.
MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))

BOOLOP = L("and") | L("or")

MARKER_VAR = VARIABLE | MARKER_VALUE

# One comparison, e.g. os_name == "posix" -> a (Node, Op, Node) tuple.
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))

LPAREN = L("(").suppress()
RPAREN = L(")").suppress()

# Comparisons combined with and/or, possibly parenthesised (recursive).
MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)

# A complete marker must consume the entire input string.
MARKER = stringStart + MARKER_EXPR + stringEnd
|||
|
|||
|
|||
def _coerce_parse_result(results):
    """Recursively convert a pyparsing ParseResults tree into plain lists.

    Leaves (Variable/Op/Value nodes, tuples, strings) are returned
    unchanged; only ParseResults containers are rewritten.
    """
    if not isinstance(results, ParseResults):
        return results
    return [_coerce_parse_result(child) for child in results]
|||
|
|||
|
|||
def _format_marker(marker, first=True):
    """Serialize a parsed marker tree back into its textual form.

    *first* is True only for the outermost call; nested lists are wrapped
    in parentheses, the outermost one is not.
    """
    assert isinstance(marker, (list, tuple, string_types))

    # A [[...]] structure is a single-item list whose only item is itself
    # a list/tuple; unwrap it so we don't emit extraneous () around the
    # whole expression.
    unwrappable = (
        isinstance(marker, list) and
        len(marker) == 1 and
        isinstance(marker[0], (list, tuple))
    )
    if unwrappable:
        return _format_marker(marker[0])

    if isinstance(marker, tuple):
        # A single comparison: serialize lhs, op and rhs nodes.
        return " ".join(part.serialize() for part in marker)
    if isinstance(marker, list):
        rendered = " ".join(_format_marker(m, first=False) for m in marker)
        return rendered if first else "(" + rendered + ")"
    # Bare string: "and" / "or" connectives.
    return marker
|||
|
|||
|
|||
# Fallback comparison implementations keyed by operator text.  _eval_op
# first tries PEP 440 specifier semantics; these plain comparisons are
# used when the operator/value pair is not a valid specifier.
_operators = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
|||
|
|||
|
|||
def _eval_op(lhs, op, rhs):
    """Apply a single marker comparison, preferring PEP 440 semantics.

    If ``op`` + ``rhs`` parses as a valid version specifier, containment
    is tested with PEP 440 rules; otherwise the plain comparison from
    ``_operators`` is used.  Raises UndefinedComparison when neither
    interpretation applies.
    """
    serialized = op.serialize()

    try:
        spec = Specifier(serialized + rhs)
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs)

    comparison = _operators.get(serialized)
    if comparison is None:
        raise UndefinedComparison(
            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
        )

    return comparison(lhs, rhs)
|||
|
|||
|
|||
# Sentinel distinguishing "name missing" from a legitimate None value.
_undefined = object()


def _get_env(environment, name):
    # Look up *name* in the marker evaluation *environment*, raising
    # UndefinedEnvironmentName (instead of returning a default) when the
    # variable is not present.
    value = environment.get(name, _undefined)

    if value is _undefined:
        raise UndefinedEnvironmentName(
            "{0!r} does not exist in evaluation environment.".format(name)
        )

    return value
|||
|
|||
|
|||
def _evaluate_markers(markers, environment):
    """Evaluate a parsed marker tree against *environment*.

    The tree is a flat sequence of operands interleaved with "and"/"or"
    strings.  "and" binds tighter than "or", so the sequence is split on
    "or" into groups: each group is all()-ed, the groups are any()-ed.
    """
    groups = [[]]

    for node in markers:
        assert isinstance(node, (list, tuple, string_types))

        if isinstance(node, list):
            # Parenthesised sub-expression: evaluate recursively.
            groups[-1].append(_evaluate_markers(node, environment))
        elif isinstance(node, tuple):
            lhs, op, rhs = node

            # Exactly one side of a comparison is a Variable; resolve it
            # from the environment and take the other side literally.
            if isinstance(lhs, Variable):
                lhs_value, rhs_value = _get_env(environment, lhs.value), rhs.value
            else:
                lhs_value, rhs_value = lhs.value, _get_env(environment, rhs.value)

            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert node in ["and", "or"]
            if node == "or":
                # Start a new all()-group; "and" keeps extending the
                # current one.
                groups.append([])

    return any(all(group) for group in groups)
|||
|
|||
|
|||
def format_full_version(info):
    """Render a ``sys.implementation.version``-style struct as a string.

    Produces "major.minor.micro"; for non-final releases the first letter
    of the release level plus the serial is appended (e.g. "3.9.0b2").
    """
    base = '{0.major}.{0.minor}.{0.micro}'.format(info)
    if info.releaselevel == 'final':
        return base
    return base + info.releaselevel[0] + str(info.serial)
|||
|
|||
|
|||
def default_environment():
    """Return the PEP 508 marker environment for the running interpreter.

    Maps each marker variable name to its value for the current Python
    process; Marker.evaluate() uses this as the base environment before
    applying any caller-supplied overrides.
    """
    if hasattr(sys, 'implementation'):
        iver = format_full_version(sys.implementation.version)
        implementation_name = sys.implementation.name
    else:
        # Interpreters without sys.implementation (CPython < 3.3) get
        # empty/zero placeholders.
        iver = '0'
        implementation_name = ''

    return {
        "implementation_name": implementation_name,
        "implementation_version": iver,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        # Join the first two version components instead of slicing the
        # string: python_version()[:3] yields "3.1" on Python 3.10+.
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }
|||
|
|||
|
|||
class Marker(object):
    """A parsed PEP 508 environment marker expression."""

    def __init__(self, marker):
        # Parse the raw marker string; _coerce_parse_result turns the
        # pyparsing ParseResults tree into plain lists/tuples.
        try:
            self._markers = _coerce_parse_result(MARKER.parseString(marker))
        except ParseException as e:
            err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
                marker, marker[e.loc:e.loc + 8])
            raise InvalidMarker(err_str)

    def __str__(self):
        # Re-serialize the parsed tree into canonical marker text.
        return _format_marker(self._markers)

    def __repr__(self):
        return "<Marker({0!r})>".format(str(self))

    def evaluate(self, environment=None):
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        if environment is not None:
            current_environment.update(environment)

        return _evaluate_markers(self._markers, current_environment)
@ -0,0 +1,127 @@ |
|||
# This file is dual licensed under the terms of the Apache License, Version |
|||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository |
|||
# for complete details. |
|||
from __future__ import absolute_import, division, print_function |
|||
|
|||
import string |
|||
import re |
|||
|
|||
from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException |
|||
from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine |
|||
from pkg_resources.extern.pyparsing import Literal as L # noqa |
|||
from pkg_resources.extern.six.moves.urllib import parse as urlparse |
|||
|
|||
from .markers import MARKER_EXPR, Marker |
|||
from .specifiers import LegacySpecifier, Specifier, SpecifierSet |
|||
|
|||
|
|||
class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.
    """
|||
|
|||
|
|||
# pyparsing grammar for PEP 508 requirement strings
# (e.g. "name[extra1,extra2] >=1.0,<2.0 ; python_version < '3'").

ALPHANUM = Word(string.ascii_letters + string.digits)

LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()

# An identifier is alphanumeric runs joined by -, _ or ., and must end
# with an alphanumeric character.
PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))

NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER

# Direct-reference URLs ("name @ url") take everything up to whitespace.
URI = Regex(r'[^ ]+')("url")
URL = (AT + URI)

EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")

# Version clauses reuse the raw regex strings from the specifiers module,
# accepting both PEP 440 and legacy forms.
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)

VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
                       joinString=",", adjacent=False)("_raw_spec")
# The whole spec may be parenthesised; it is optional entirely.
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')

VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])

# Rebind the imported MARKER_EXPR so the marker text is captured verbatim
# and handed to Marker() for full parsing.
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
    lambda s, l, t: Marker(s[t._original_start:t._original_end])
)
MARKER_SEPERATOR = SEMICOLON  # NOTE: historical misspelling, kept as-is
MARKER = MARKER_SEPERATOR + MARKER_EXPR

VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)

NAMED_REQUIREMENT = \
    NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)

# A complete requirement must consume the entire input string.
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
|||
|
|||
|
|||
class Requirement(object):
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string):
        try:
            req = REQUIREMENT.parseString(requirement_string)
        except ParseException as e:
            raise InvalidRequirement(
                "Invalid requirement, parse error at \"{0!r}\"".format(
                    requirement_string[e.loc:e.loc + 8]))

        self.name = req.name
        if req.url:
            # A direct-reference URL must carry both a scheme and a
            # network location to be accepted.
            parsed_url = urlparse.urlparse(req.url)
            if not (parsed_url.scheme and parsed_url.netloc) or (
                    not parsed_url.scheme and not parsed_url.netloc):
                raise InvalidRequirement("Invalid URL given")
            self.url = req.url
        else:
            self.url = None
        # extras: set of extra names; specifier: possibly-empty
        # SpecifierSet; marker: Marker instance or None.
        self.extras = set(req.extras.asList() if req.extras else [])
        self.specifier = SpecifierSet(req.specifier)
        self.marker = req.marker if req.marker else None

    def __str__(self):
        # Reassemble a canonical PEP 508 requirement string from the
        # parsed parts.
        parts = [self.name]

        if self.extras:
            parts.append("[{0}]".format(",".join(sorted(self.extras))))

        if self.specifier:
            parts.append(str(self.specifier))

        if self.url:
            parts.append("@ {0}".format(self.url))

        if self.marker:
            parts.append("; {0}".format(self.marker))

        return "".join(parts)

    def __repr__(self):
        return "<Requirement({0!r})>".format(str(self))
@ -0,0 +1,774 @@ |
|||
# This file is dual licensed under the terms of the Apache License, Version |
|||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository |
|||
# for complete details. |
|||
from __future__ import absolute_import, division, print_function |
|||
|
|||
import abc |
|||
import functools |
|||
import itertools |
|||
import re |
|||
|
|||
from ._compat import string_types, with_metaclass |
|||
from .version import Version, LegacyVersion, parse |
|||
|
|||
|
|||
class InvalidSpecifier(ValueError):
    """
    An invalid specifier was found, users should refer to PEP 440.
    """
|||
|
|||
|
|||
class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
    """Abstract interface implemented by individual specifiers and by
    SpecifierSet."""

    @abc.abstractmethod
    def __str__(self):
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self):
        """
        Returns a hash value for this Specifier like object.
        """

    @abc.abstractmethod
    def __eq__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """

    @abc.abstractmethod
    def __ne__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        """

    @abc.abstractproperty
    def prereleases(self):
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @prereleases.setter
    def prereleases(self, value):
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @abc.abstractmethod
    def contains(self, item, prereleases=None):
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(self, iterable, prereleases=None):
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
|||
|
|||
|
|||
class _IndividualSpecifier(BaseSpecifier):
    """Shared implementation for single-clause specifiers.

    Subclasses provide ``_regex`` (parsing an "<operator><version>"
    string) and ``_operators`` (mapping operator text to the suffix of
    the ``_compare_*`` method implementing it).
    """

    _operators = {}

    def __init__(self, spec="", prereleases=None):
        # Parse "<operator><version>"; anything the subclass regex
        # rejects is an invalid specifier.
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))

        self._spec = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<{0}({1!r}{2})>".format(
            self.__class__.__name__,
            str(self),
            pre,
        )

    def __str__(self):
        return "{0}{1}".format(*self._spec)

    def __hash__(self):
        return hash(self._spec)

    def __eq__(self, other):
        # Strings are coerced through the subclass constructor so that
        # e.g. Specifier("==1.0") == "==1.0" holds.
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec == other._spec

    def __ne__(self, other):
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec != other._spec

    def _get_operator(self, op):
        # Dispatch "==" -> self._compare_equal etc. via _operators.
        return getattr(self, "_compare_{0}".format(self._operators[op]))

    def _coerce_version(self, version):
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self):
        # The operator half of the parsed spec, e.g. ">=".
        return self._spec[0]

    @property
    def version(self):
        # The version half of the parsed spec, e.g. "1.0".
        return self._spec[1]

    @property
    def prereleases(self):
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")
        item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases than we can short circuit
        # logic if this version is a prereleases.
        if item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        return self._get_operator(self.operator)(item, self.version)

    def filter(self, iterable, prereleases=None):
        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later incase nothing
                # else matches this specifier.
                if (parsed_version.is_prerelease and not
                        (prereleases or self.prereleases)):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the begining.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
|||
|
|||
|
|||
class LegacySpecifier(_IndividualSpecifier):
    """A single setuptools-style (pre-PEP 440) version specifier clause."""

    _regex_str = (
        r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    # Operator text -> _compare_* method suffix (see _get_operator).
    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def _coerce_version(self, version):
        # Legacy comparisons always operate on LegacyVersion, never
        # PEP 440 Version.
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version

    def _compare_equal(self, prospective, spec):
        return prospective == self._coerce_version(spec)

    def _compare_not_equal(self, prospective, spec):
        return prospective != self._coerce_version(spec)

    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= self._coerce_version(spec)

    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= self._coerce_version(spec)

    def _compare_less_than(self, prospective, spec):
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective, spec):
        return prospective > self._coerce_version(spec)
|||
|
|||
|
|||
def _require_version_compare(fn):
    """Decorator: short-circuit a comparison method to False when the
    prospective version is not a PEP 440 ``Version``.

    LegacyVersion (and anything else) can never satisfy a PEP 440
    comparison, so the wrapped method is only invoked for real Versions.
    """
    @functools.wraps(fn)
    def wrapped(self, prospective, spec):
        if isinstance(prospective, Version):
            return fn(self, prospective, spec)
        return False
    return wrapped
|||
|
|||
|
|||
class Specifier(_IndividualSpecifier):
    """A single PEP 440 version specifier clause (e.g. ">=1.0", "~=2.2")."""

    # Verbose regex validating "<operator><version>"; each alternative in
    # the version group is gated by a look-behind on the operator, since
    # different operators accept different version syntax.
    _regex_str = (
        r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
            )
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    # Operator text -> _compare_* method suffix (see _get_operator).
    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective, spec):
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore post and dev releases and we want to treat the pre-release as
        # it's own separate segment.
        prefix = ".".join(
            list(
                itertools.takewhile(
                    lambda x: (not x.startswith("post") and not
                               x.startswith("dev")),
                    _version_split(spec),
                )
            )[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return (self._get_operator(">=")(prospective, spec) and
                self._get_operator("==")(prospective, prefix))

    @_require_version_compare
    def _compare_equal(self, prospective, spec):
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            prospective = prospective[:len(spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            spec, prospective = _pad_version(spec, prospective)
        else:
            # Convert our spec string into a Version
            spec = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec.local:
                prospective = Version(prospective.public)

        return prospective == spec

    @_require_version_compare
    def _compare_not_equal(self, prospective, spec):
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is techincally greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective, spec):
        # "===" is a plain case-insensitive string comparison; no version
        # semantics are applied.
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self):
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release than this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value
|||
|
|||
|
|||
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") |
|||
|
|||
|
|||
def _version_split(version): |
|||
result = [] |
|||
for item in version.split("."): |
|||
match = _prefix_regex.search(item) |
|||
if match: |
|||
result.extend(match.groups()) |
|||
else: |
|||
result.append(item) |
|||
return result |
|||
|
|||
|
|||
def _pad_version(left, right): |
|||
left_split, right_split = [], [] |
|||
|
|||
# Get the release segment of our versions |
|||
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) |
|||
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) |
|||
|
|||
# Get the rest of our versions |
|||
left_split.append(left[len(left_split[0]):]) |
|||
right_split.append(right[len(right_split[0]):]) |
|||
|
|||
# Insert our padding |
|||
left_split.insert( |
|||
1, |
|||
["0"] * max(0, len(right_split[0]) - len(left_split[0])), |
|||
) |
|||
right_split.insert( |
|||
1, |
|||
["0"] * max(0, len(left_split[0]) - len(right_split[0])), |
|||
) |
|||
|
|||
return ( |
|||
list(itertools.chain(*left_split)), |
|||
list(itertools.chain(*right_split)), |
|||
) |
|||
|
|||
|
|||
class SpecifierSet(BaseSpecifier): |
|||
|
|||
def __init__(self, specifiers="", prereleases=None): |
|||
# Split on , to break each indidivual specifier into it's own item, and |
|||
# strip each item to remove leading/trailing whitespace. |
|||
specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] |
|||
|
|||
# Parsed each individual specifier, attempting first to make it a |
|||
# Specifier and falling back to a LegacySpecifier. |
|||
parsed = set() |
|||
for specifier in specifiers: |
|||
try: |
|||
parsed.add(Specifier(specifier)) |
|||
except InvalidSpecifier: |
|||
parsed.add(LegacySpecifier(specifier)) |
|||
|
|||
# Turn our parsed specifiers into a frozen set and save them for later. |
|||
self._specs = frozenset(parsed) |
|||
|
|||
# Store our prereleases value so we can use it later to determine if |
|||
# we accept prereleases or not. |
|||
self._prereleases = prereleases |
|||
|
|||
def __repr__(self): |
|||
pre = ( |
|||
", prereleases={0!r}".format(self.prereleases) |
|||
if self._prereleases is not None |
|||
else "" |
|||
) |
|||
|
|||
return "<SpecifierSet({0!r}{1})>".format(str(self), pre) |
|||
|
|||
def __str__(self): |
|||
return ",".join(sorted(str(s) for s in self._specs)) |
|||
|
|||
def __hash__(self): |
|||
return hash(self._specs) |
|||
|
|||
def __and__(self, other): |
|||
if isinstance(other, string_types): |
|||
other = SpecifierSet(other) |
|||
elif not isinstance(other, SpecifierSet): |
|||
return NotImplemented |
|||
|
|||
specifier = SpecifierSet() |
|||
specifier._specs = frozenset(self._specs | other._specs) |
|||
|
|||
if self._prereleases is None and other._prereleases is not None: |
|||
specifier._prereleases = other._prereleases |
|||
elif self._prereleases is not None and other._prereleases is None: |
|||
specifier._prereleases = self._prereleases |
|||
elif self._prereleases == other._prereleases: |
|||
specifier._prereleases = self._prereleases |
|||
else: |
|||
raise ValueError( |
|||
"Cannot combine SpecifierSets with True and False prerelease " |
|||
"overrides." |
|||
) |
|||
|
|||
return specifier |
|||
|
|||
def __eq__(self, other): |
|||
if isinstance(other, string_types): |
|||
other = SpecifierSet(other) |
|||
elif isinstance(other, _IndividualSpecifier): |
|||
other = SpecifierSet(str(other)) |
|||
elif not isinstance(other, SpecifierSet): |
|||
return NotImplemented |
|||
|
|||
return self._specs == other._specs |
|||
|
|||
def __ne__(self, other): |
|||
if isinstance(other, string_types): |
|||
other = SpecifierSet(other) |
|||
elif isinstance(other, _IndividualSpecifier): |
|||
other = SpecifierSet(str(other)) |
|||
elif not isinstance(other, SpecifierSet): |
|||
return NotImplemented |
|||
|
|||
return self._specs != other._specs |
|||
|
|||
def __len__(self): |
|||
return len(self._specs) |
|||
|
|||
def __iter__(self): |
|||
return iter(self._specs) |
|||
|
|||
@property
def prereleases(self):
    """Whether this set should admit pre-release versions.

    An explicit override (set at construction or via the setter) wins.
    Otherwise the answer is derived from the member specifiers, and is
    None for an empty set with no override (undecidable).
    """
    override = self._prereleases
    if override is not None:
        return override

    # No specifiers and no override: we cannot tell, report None.
    if not self._specs:
        return None

    # Derived policy: True as soon as any member accepts prereleases.
    return any(spec.prereleases for spec in self._specs)
|||
|
|||
@prereleases.setter
def prereleases(self, value):
    # Explicitly override the prerelease policy derived from the members.
    self._prereleases = value
|||
|
|||
def __contains__(self, item): |
|||
return self.contains(item) |
|||
|
|||
def contains(self, item, prereleases=None):
    """Return True when *item* satisfies every specifier in the set.

    ``prereleases`` overrides the set-level prerelease policy for this
    single call. An empty set matches everything by design (all() over
    an empty iterable is True).
    """
    # Normalize the candidate into a Version/LegacyVersion object.
    if not isinstance(item, (LegacyVersion, Version)):
        item = parse(item)

    # Without a per-call override, fall back to the set-wide policy.
    if prereleases is None:
        prereleases = self.prereleases

    # Reject pre-release candidates outright when nothing allows them.
    # Note: 1.0.dev1 would not be contained in something like
    # >=1.0.devabc, however it would be in >=1.0.devabc,>0.0.dev0
    if item.is_prerelease and not prereleases:
        return False

    # Every member specifier must accept the item.
    return all(
        spec.contains(item, prereleases=prereleases)
        for spec in self._specs
    )
|||
|
|||
def filter(self, iterable, prereleases=None):
    """Filter *iterable* down to the versions satisfying this set.

    ``prereleases`` overrides the set-level policy for this call.  With
    member specifiers present, each specifier's filter is applied in
    sequence (a logical AND).  With an empty set, LegacyVersion items
    are dropped and pre-releases are excluded unless only pre-releases
    exist and no policy was forced.
    """
    # Determine if we're forcing a prerelease or not, if we're not forcing
    # one for this particular filter call, then we'll use whatever the
    # SpecifierSet thinks for whether or not we should support prereleases.
    if prereleases is None:
        prereleases = self.prereleases

    # If we have any specifiers, then we want to wrap our iterable in the
    # filter method for each one, this will act as a logical AND amongst
    # each specifier.
    if self._specs:
        for spec in self._specs:
            # bool() collapses a None policy to False for member filters.
            iterable = spec.filter(iterable, prereleases=bool(prereleases))
        return iterable
    # If we do not have any specifiers, then we need to have a rough filter
    # which will filter out any pre-releases, unless there are no final
    # releases, and which will filter out LegacyVersion in general.
    else:
        filtered = []
        found_prereleases = []

        for item in iterable:
            # Ensure that we have some kind of Version class for this item.
            if not isinstance(item, (LegacyVersion, Version)):
                parsed_version = parse(item)
            else:
                parsed_version = item

            # Filter out any item which is parsed as a LegacyVersion
            if isinstance(parsed_version, LegacyVersion):
                continue

            # Store any item which is a pre-release for later unless we've
            # already found a final version or we are accepting prereleases
            if parsed_version.is_prerelease and not prereleases:
                if not filtered:
                    found_prereleases.append(item)
            else:
                filtered.append(item)

        # If we've found no items except for pre-releases, then we'll go
        # ahead and use the pre-releases
        if not filtered and found_prereleases and prereleases is None:
            return found_prereleases

        return filtered
@ -0,0 +1,14 @@ |
|||
# This file is dual licensed under the terms of the Apache License, Version |
|||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository |
|||
# for complete details. |
|||
from __future__ import absolute_import, division, print_function |
|||
|
|||
import re |
|||
|
|||
|
|||
# Runs of dashes, underscores and dots are treated as one separator.
_canonicalize_regex = re.compile(r"[-_.]+")


def canonicalize_name(name):
    """Normalize a project name per PEP 503.

    Separator runs collapse to a single '-' and the result is lowercased.
    """
    # This is taken from PEP 503.
    collapsed = _canonicalize_regex.sub("-", name)
    return collapsed.lower()
@ -0,0 +1,393 @@ |
|||
# This file is dual licensed under the terms of the Apache License, Version |
|||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository |
|||
# for complete details. |
|||
from __future__ import absolute_import, division, print_function |
|||
|
|||
import collections |
|||
import itertools |
|||
import re |
|||
|
|||
from ._structures import Infinity |
|||
|
|||
|
|||
__all__ = [ |
|||
"parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" |
|||
] |
|||
|
|||
|
|||
# Raw parsed components of a PEP 440 version string; absent segments are
# represented by None (or 0 for the epoch) by the Version constructor.
_Version = collections.namedtuple(
    "_Version",
    ["epoch", "release", "dev", "pre", "post", "local"],
)
|||
|
|||
|
|||
def parse(version):
    """
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    """
    try:
        return Version(version)
    except InvalidVersion:
        # Anything that is not PEP 440 falls back to the setuptools-style
        # LegacyVersion, which sorts before all PEP 440 versions.
        return LegacyVersion(version)
|||
|
|||
|
|||
class InvalidVersion(ValueError):
    """
    An invalid version was found, users should refer to PEP 440.

    Raised by Version() and caught by parse() to fall back to LegacyVersion.
    """
|||
|
|||
|
|||
class _BaseVersion(object): |
|||
|
|||
def __hash__(self): |
|||
return hash(self._key) |
|||
|
|||
def __lt__(self, other): |
|||
return self._compare(other, lambda s, o: s < o) |
|||
|
|||
def __le__(self, other): |
|||
return self._compare(other, lambda s, o: s <= o) |
|||
|
|||
def __eq__(self, other): |
|||
return self._compare(other, lambda s, o: s == o) |
|||
|
|||
def __ge__(self, other): |
|||
return self._compare(other, lambda s, o: s >= o) |
|||
|
|||
def __gt__(self, other): |
|||
return self._compare(other, lambda s, o: s > o) |
|||
|
|||
def __ne__(self, other): |
|||
return self._compare(other, lambda s, o: s != o) |
|||
|
|||
def _compare(self, other, method): |
|||
if not isinstance(other, _BaseVersion): |
|||
return NotImplemented |
|||
|
|||
return method(self._key, other._key) |
|||
|
|||
|
|||
class LegacyVersion(_BaseVersion):
    """A non-PEP 440 version string, ordered by setuptools' legacy rules.

    The sort key from _legacy_cmpkey uses epoch -1, so every
    LegacyVersion sorts before every PEP 440 Version.
    """

    def __init__(self, version):
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

    def __str__(self):
        return self._version

    def __repr__(self):
        return "<LegacyVersion({0})>".format(repr(str(self)))

    # Legacy versions carry no epoch/local/pre/post structure, so the
    # derived properties below are trivial constants.

    @property
    def public(self):
        return self._version

    @property
    def base_version(self):
        return self._version

    @property
    def local(self):
        return None

    @property
    def is_prerelease(self):
        return False

    @property
    def is_postrelease(self):
        return False
|||
|
|||
|
|||
# Splits a legacy version into digit runs, letter runs, "." and "-" tokens
# (capturing split so the tokens themselves are kept).
_legacy_version_component_re = re.compile(
    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)

# Normalizes setuptools-era spellings: pre/preview/rc all map to "c",
# "dev" maps to "@" (which sorts before any letter), and "-" becomes the
# "final-" marker that _legacy_cmpkey strips before prerelease tags.
_legacy_version_replacement_map = {
    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}
|||
|
|||
|
|||
def _parse_version_parts(s):
    """Yield comparable chunks for a legacy version string.

    Numeric chunks are zero-padded to width 8 for lexicographic
    comparison; textual chunks are prefixed with '*' so they sort before
    numbers, and a trailing '*final' marks the end of the release.
    """
    for token in _legacy_version_component_re.split(s):
        chunk = _legacy_version_replacement_map.get(token, token)

        # Skip empty split results and bare dots.
        if not chunk or chunk == ".":
            continue

        if chunk[:1] in "0123456789":
            # pad for numeric comparison
            yield chunk.zfill(8)
        else:
            yield "*" + chunk

    # ensure that alpha/beta/candidate are before final
    yield "*final"
|||
|
|||
|
|||
def _legacy_cmpkey(version):
    """Build the (epoch, parts) sort key for a legacy version string."""
    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the defacto standard originally implemented by setuptools,
    # as before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from pkg_resources.parse_version setuptools prior to
    # it's adoption of the packaging library.
    parts = []
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)
    parts = tuple(parts)

    return epoch, parts
|||
|
|||
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
# (compiled below with re.VERBOSE | re.IGNORECASE; named groups feed the
# _Version namedtuple fields).
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""
|||
|
|||
|
|||
class Version(_BaseVersion):
    """A parsed, comparable PEP 440 version.

    Raises InvalidVersion when the string does not match VERSION_PATTERN.
    Comparison and hashing come from _BaseVersion via the _cmpkey result.
    """

    _regex = re.compile(
        r"^\s*" + VERSION_PATTERN + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )

    def __init__(self, version):
        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(
                match.group("pre_l"),
                match.group("pre_n"),
            ),
            post=_parse_letter_version(
                match.group("post_l"),
                match.group("post_n1") or match.group("post_n2"),
            ),
            dev=_parse_letter_version(
                match.group("dev_l"),
                match.group("dev_n"),
            ),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        """Render the canonical PEP 440 string form of this version."""
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        # Pre-release
        if self._version.pre is not None:
            parts.append("".join(str(x) for x in self._version.pre))

        # Post-release
        if self._version.post is not None:
            parts.append(".post{0}".format(self._version.post[1]))

        # Development release
        if self._version.dev is not None:
            parts.append(".dev{0}".format(self._version.dev[1]))

        # Local version segment
        if self._version.local is not None:
            parts.append(
                "+{0}".format(".".join(str(x) for x in self._version.local))
            )

        return "".join(parts)

    @property
    def public(self):
        # Public version: everything before the "+local" segment.
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        # Epoch + release only, without pre/post/dev/local markers.
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        return "".join(parts)

    @property
    def local(self):
        # The local segment string, or None when there is none
        # (falls off the end of the function).
        version_string = str(self)
        if "+" in version_string:
            return version_string.split("+", 1)[1]

    @property
    def is_prerelease(self):
        # Dev releases count as pre-releases too.
        return bool(self._version.dev or self._version.pre)

    @property
    def is_postrelease(self):
        return bool(self._version.post)
|||
|
|||
|
|||
def _parse_letter_version(letter, number): |
|||
if letter: |
|||
# We consider there to be an implicit 0 in a pre-release if there is |
|||
# not a numeral associated with it. |
|||
if number is None: |
|||
number = 0 |
|||
|
|||
# We normalize any letters to their lower case form |
|||
letter = letter.lower() |
|||
|
|||
# We consider some words to be alternate spellings of other words and |
|||
# in those cases we want to normalize the spellings to our preferred |
|||
# spelling. |
|||
if letter == "alpha": |
|||
letter = "a" |
|||
elif letter == "beta": |
|||
letter = "b" |
|||
elif letter in ["c", "pre", "preview"]: |
|||
letter = "rc" |
|||
elif letter in ["rev", "r"]: |
|||
letter = "post" |
|||
|
|||
return letter, int(number) |
|||
if not letter and number: |
|||
# We assume if we are given a number, but we are not given a letter |
|||
# then this is using the implicit post release syntax (e.g. 1.0-1) |
|||
letter = "post" |
|||
|
|||
return letter, int(number) |
|||
|
|||
|
|||
# Separators allowed inside a local version segment (sic: "seperators").
_local_version_seperators = re.compile(r"[\._-]")
|||
|
|||
|
|||
def _parse_local_version(local):
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").

    Returns None when *local* is None (no local segment present).
    """
    if local is None:
        return None
    segments = _local_version_seperators.split(local)
    return tuple(
        int(segment) if segment.isdigit() else segment.lower()
        for segment in segments
    )
|||
|
|||
|
|||
def _cmpkey(epoch, release, pre, post, dev, local):
    """Combine parsed PEP 440 components into a totally-ordered sort key.

    Infinity/-Infinity sentinels stand in for absent segments so that the
    PEP 440 ordering rules fall out of plain tuple comparison.
    """
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    release = tuple(
        reversed(list(
            itertools.dropwhile(
                lambda x: x == 0,
                reversed(release),
            )
        ))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        pre = -Infinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        pre = Infinity

    # Versions without a post segment should sort before those with one.
    if post is None:
        post = -Infinity

    # Versions without a development segment should sort after those with one.
    if dev is None:
        dev = Infinity

    if local is None:
        # Versions without a local segment should sort before those with one.
        local = -Infinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        local = tuple(
            (i, "") if isinstance(i, int) else (-Infinity, i)
            for i in local
        )

    return epoch, release, pre, post, dev, local
File diff suppressed because it is too large
@ -0,0 +1,868 @@ |
|||
"""Utilities for writing code that runs on Python 2 and 3""" |
|||
|
|||
# Copyright (c) 2010-2015 Benjamin Peterson |
|||
# |
|||
# Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
# of this software and associated documentation files (the "Software"), to deal |
|||
# in the Software without restriction, including without limitation the rights |
|||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
# copies of the Software, and to permit persons to whom the Software is |
|||
# furnished to do so, subject to the following conditions: |
|||
# |
|||
# The above copyright notice and this permission notice shall be included in all |
|||
# copies or substantial portions of the Software. |
|||
# |
|||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
# SOFTWARE. |
|||
|
|||
from __future__ import absolute_import |
|||
|
|||
import functools |
|||
import itertools |
|||
import operator |
|||
import sys |
|||
import types |
|||
|
|||
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"


# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)

if PY3:
    # Canonical text/binary/number/class type aliases on Python 3.
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes

    MAXSIZE = sys.maxsize
else:
    # Python 2 equivalents (basestring/long/unicode only exist there).
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str

    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        class X(object):

            def __len__(self):
                return 1 << 31
        try:
            # len() raises OverflowError when Py_ssize_t is 32-bit, so
            # this probe distinguishes 32- and 64-bit builds at runtime.
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X
|||
|
|||
|
|||
def _add_doc(func, doc): |
|||
"""Add documentation to a function.""" |
|||
func.__doc__ = doc |
|||
|
|||
|
|||
def _import_module(name):
    """Import module, returning the module after the last dot.

    Unlike bare __import__, which returns the top-level package for a
    dotted name, this looks up the leaf module in sys.modules.
    """
    __import__(name)
    return sys.modules[name]
|||
|
|||
|
|||
class _LazyDescr(object): |
|||
|
|||
def __init__(self, name): |
|||
self.name = name |
|||
|
|||
def __get__(self, obj, tp): |
|||
result = self._resolve() |
|||
setattr(obj, self.name, result) # Invokes __set__. |
|||
try: |
|||
# This is a bit ugly, but it avoids running this again by |
|||
# removing this descriptor. |
|||
delattr(obj.__class__, self.name) |
|||
except AttributeError: |
|||
pass |
|||
return result |
|||
|
|||
|
|||
class MovedModule(_LazyDescr):
    """Lazy proxy for a module that moved between Python 2 and 3.

    Resolves to *new* (defaulting to *name*) on Python 3 and *old* on
    Python 2, importing only on first attribute access.
    """

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old

    def _resolve(self):
        return _import_module(self.mod)

    def __getattr__(self, attr):
        _module = self._resolve()
        value = getattr(_module, attr)
        # Cache on the instance so __getattr__ is not invoked again
        # for this attribute.
        setattr(self, attr, value)
        return value
|||
|
|||
|
|||
class _LazyModule(types.ModuleType): |
|||
|
|||
def __init__(self, name): |
|||
super(_LazyModule, self).__init__(name) |
|||
self.__doc__ = self.__class__.__doc__ |
|||
|
|||
def __dir__(self): |
|||
attrs = ["__doc__", "__name__"] |
|||
attrs += [attr.name for attr in self._moved_attributes] |
|||
return attrs |
|||
|
|||
# Subclasses should override this |
|||
_moved_attributes = [] |
|||
|
|||
|
|||
class MovedAttribute(_LazyDescr):
    """Lazy proxy for an attribute that moved between Python 2 and 3.

    Remembers (module, attribute) for the running major version; the
    import happens only when _resolve is first called.
    """

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            # Attribute name defaults to old_attr, then to name itself.
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr

    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)
|||
|
|||
|
|||
class _SixMetaPathImporter(object):

    """
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """

    def __init__(self, six_module_name):
        self.name = six_module_name
        # Maps full dotted names ("six.moves.x") to module-like objects.
        self.known_modules = {}

    def _add_module(self, mod, *fullnames):
        # Register *mod* under one or more names relative to self.name.
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod

    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]

    def find_module(self, fullname, path=None):
        # PEP 302 finder: claim only modules we registered.
        if fullname in self.known_modules:
            return self
        return None

    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)

    def load_module(self, fullname):
        # PEP 302 loader.
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            # Replace the lazy proxy with the real module.
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod

    def is_package(self, fullname):
        """
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")

    def get_code(self, fullname):
        """Return None

        Required, if is_package is implemented"""
        self.__get_module(fullname)  # eventually raises ImportError
        return None
    get_source = get_code  # same as get_code
|||
|
|||
# Single importer instance shared by every six.moves submodule below.
_importer = _SixMetaPathImporter(__name__)
|||
|
|||
|
|||
class _MovedItems(_LazyModule):

    """Lazy loading of moved objects"""
    # An (initially empty) __path__ marks this module object as a package,
    # so "six.moves.<submodule>" imports can be satisfied by _importer.
    __path__ = []  # mark as package
|||
|
|||
|
|||
_moved_attributes = [ |
|||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), |
|||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), |
|||
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), |
|||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), |
|||
MovedAttribute("intern", "__builtin__", "sys"), |
|||
MovedAttribute("map", "itertools", "builtins", "imap", "map"), |
|||
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), |
|||
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), |
|||
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), |
|||
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), |
|||
MovedAttribute("reduce", "__builtin__", "functools"), |
|||
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), |
|||
MovedAttribute("StringIO", "StringIO", "io"), |
|||
MovedAttribute("UserDict", "UserDict", "collections"), |
|||
MovedAttribute("UserList", "UserList", "collections"), |
|||
MovedAttribute("UserString", "UserString", "collections"), |
|||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), |
|||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), |
|||
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), |
|||
MovedModule("builtins", "__builtin__"), |
|||
MovedModule("configparser", "ConfigParser"), |
|||
MovedModule("copyreg", "copy_reg"), |
|||
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), |
|||
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), |
|||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), |
|||
MovedModule("http_cookies", "Cookie", "http.cookies"), |
|||
MovedModule("html_entities", "htmlentitydefs", "html.entities"), |
|||
MovedModule("html_parser", "HTMLParser", "html.parser"), |
|||
MovedModule("http_client", "httplib", "http.client"), |
|||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), |
|||
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), |
|||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), |
|||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), |
|||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), |
|||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), |
|||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), |
|||
MovedModule("cPickle", "cPickle", "pickle"), |
|||
MovedModule("queue", "Queue"), |
|||
MovedModule("reprlib", "repr"), |
|||
MovedModule("socketserver", "SocketServer"), |
|||
MovedModule("_thread", "thread", "_thread"), |
|||
MovedModule("tkinter", "Tkinter"), |
|||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), |
|||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), |
|||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), |
|||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), |
|||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"), |
|||
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), |
|||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), |
|||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), |
|||
MovedModule("tkinter_colorchooser", "tkColorChooser", |
|||
"tkinter.colorchooser"), |
|||
MovedModule("tkinter_commondialog", "tkCommonDialog", |
|||
"tkinter.commondialog"), |
|||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), |
|||
MovedModule("tkinter_font", "tkFont", "tkinter.font"), |
|||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), |
|||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", |
|||
"tkinter.simpledialog"), |
|||
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), |
|||
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), |
|||
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), |
|||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), |
|||
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), |
|||
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), |
|||
] |
|||
# Add windows specific modules. |
|||
if sys.platform == "win32": |
|||
_moved_attributes += [ |
|||
MovedModule("winreg", "_winreg"), |
|||
] |
|||
|
|||
for attr in _moved_attributes: |
|||
setattr(_MovedItems, attr.name, attr) |
|||
if isinstance(attr, MovedModule): |
|||
_importer._add_module(attr, "moves." + attr.name) |
|||
del attr |
|||
|
|||
_MovedItems._moved_attributes = _moved_attributes |
|||
|
|||
moves = _MovedItems(__name__ + ".moves") |
|||
_importer._add_module(moves, "moves") |
|||
|
|||
|
|||
class Module_six_moves_urllib_parse(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_parse"""


# Names living in urlparse/urllib on Python 2 and urllib.parse on
# Python 3; each entry resolves lazily on first attribute access.
_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
    MovedAttribute("splitquery", "urllib", "urllib.parse"),
    MovedAttribute("splittag", "urllib", "urllib.parse"),
    MovedAttribute("splituser", "urllib", "urllib.parse"),
    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
# Attach every entry as a lazy class attribute, then register the module
# object with the meta-path importer under both spellings.
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr

Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes

_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
                      "moves.urllib_parse", "moves.urllib.parse")
|||
|
|||
|
|||
class Module_six_moves_urllib_error(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_error"""


# Exceptions living in urllib2/urllib on Python 2 and urllib.error on
# Python 3; each entry resolves lazily on first attribute access.
_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
# Attach every entry as a lazy class attribute, then register the module
# object with the meta-path importer under both spellings.
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr

Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes

_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
                      "moves.urllib_error", "moves.urllib.error")
|||
|
|||
|
|||
class Module_six_moves_urllib_request(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_request"""


# Py2 -> Py3 relocations; each MovedAttribute resolves on first access.
# Handler/opener machinery lived in urllib2; the legacy URLopener family
# and helper functions lived in urllib.
_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

# Retained so _LazyModule.__dir__ can enumerate the lazy names.
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

# Register under both the flat name and the Py3-style dotted name.
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")
|||
|
|||
|
|||
class Module_six_moves_urllib_response(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_response"""


# Py2 -> Py3 relocations; each MovedAttribute resolves on first access.
_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

# Retained so _LazyModule.__dir__ can enumerate the lazy names.
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

# Register under both the flat name and the Py3-style dotted name.
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")
|||
|
|||
|
|||
class Module_six_moves_urllib_robotparser(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_robotparser"""


# Py2 -> Py3 relocation; resolves on first access.
_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr

# Retained so _LazyModule.__dir__ can enumerate the lazy names.
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

# Register under both the flat name and the Py3-style dotted name.
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")
|||
|
|||
|
|||
class Module_six_moves_urllib(types.ModuleType):

    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = []  # mark as package
    # The five submodules were registered with _importer above; fetch them
    # so they appear as attributes of this package-like module.
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        # Only the urllib submodules are part of this namespace.
        return ['parse', 'error', 'request', 'response', 'robotparser']

_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")
|||
|
|||
|
|||
def add_move(move):
    """Add an item to six.moves.

    *move* is a MovedAttribute/MovedModule; storing it on _MovedItems makes
    it resolvable through the six.moves pseudo-module.
    """
    name = move.name
    setattr(_MovedItems, name, move)
|||
|
|||
|
|||
def remove_move(name):
    """Remove item from six.moves.

    Raises AttributeError when *name* is neither a registered move nor an
    attribute previously materialized on the moves module.
    """
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        # Not on the class; it may have been materialized directly on the
        # moves module instance.
        if name not in moves.__dict__:
            raise AttributeError("no such move, %r" % (name,))
        del moves.__dict__[name]
|||
|
|||
|
|||
# Names of function/method introspection attributes, which were renamed
# in Python 3 (im_func -> __func__, func_code -> __code__, etc.).
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"
|||
|
|||
|
|||
# Version-neutral "advance an iterator" helper.
try:
    advance_iterator = next
except NameError:
    # Python < 2.6 had no next() builtin; fall back to the .next() method.
    def advance_iterator(it):
        return it.next()
next = advance_iterator
|||
|
|||
|
|||
# callable() was removed in Python 3.0/3.1 and restored in 3.2.
try:
    callable = callable
except NameError:
    # Emulate it by checking for __call__ anywhere in the type's MRO.
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|||
|
|||
|
|||
# Method/function construction helpers that differ between Py2 and Py3.
if PY3:
    def get_unbound_function(unbound):
        # Py3 has no unbound methods; classes store plain functions.
        return unbound

    create_bound_method = types.MethodType

    def create_unbound_method(func, cls):
        # Py3: an "unbound method" is just the function itself.
        return func

    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    def create_unbound_method(func, cls):
        return types.MethodType(func, None, cls)

    class Iterator(object):
        # Maps the Py2 next() protocol onto a Py3-style __next__ method,
        # so subclasses only implement __next__.
        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")
|||
|
|||
|
|||
# Version-neutral accessors built from the attribute names selected above.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
|||
|
|||
|
|||
# Dictionary iteration helpers: Py3 dict methods return views, Py2 had
# dedicated iter*/view* methods.
if PY3:
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        # For multi-value dicts that expose .lists() (e.g. Django QueryDict).
        return iter(d.lists(**kw))

    viewkeys = operator.methodcaller("keys")

    viewvalues = operator.methodcaller("values")

    viewitems = operator.methodcaller("items")
else:
    def iterkeys(d, **kw):
        return d.iterkeys(**kw)

    def itervalues(d, **kw):
        return d.itervalues(**kw)

    def iteritems(d, **kw):
        return d.iteritems(**kw)

    def iterlists(d, **kw):
        return d.iterlists(**kw)

    viewkeys = operator.methodcaller("viewkeys")

    viewvalues = operator.methodcaller("viewvalues")

    viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")
|||
|
|||
|
|||
# Byte/text literal helpers and byte-sequence primitives.
if PY3:
    def b(s):
        # Latin-1 maps code points 0-255 directly onto byte values.
        return s.encode("latin-1")

    def u(s):
        return s
    unichr = chr
    import struct
    int2byte = struct.Struct(">B").pack
    del struct
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    # unittest assertion method names renamed between Py2 and Py3
    # (and between 3.1 and 3.2).
    _assertCountEqual = "assertCountEqual"
    if sys.version_info[1] <= 1:
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
else:
    def b(s):
        return s
    # Workaround for standalone backslash

    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
|||
|
|||
|
|||
def assertCountEqual(self, *args, **kwargs):
    """Dispatch to the version-appropriate unittest count-equality assertion."""
    method = getattr(self, _assertCountEqual)
    return method(*args, **kwargs)
|||
|
|||
|
|||
def assertRaisesRegex(self, *args, **kwargs):
    """Dispatch to the version-appropriate unittest raises-regex assertion."""
    method = getattr(self, _assertRaisesRegex)
    return method(*args, **kwargs)
|||
|
|||
|
|||
def assertRegex(self, *args, **kwargs):
    """Dispatch to the version-appropriate unittest regex-match assertion."""
    method = getattr(self, _assertRegex)
    return method(*args, **kwargs)
|||
|
|||
|
|||
# exec and three-argument raise are keywords/syntax on Py2, so the portable
# versions must be built at runtime.
if PY3:
    # On Py3 exec is a plain function; fetch it without tripping Py2's parser.
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # Default to the caller's globals/locals.
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")

    # Defined via exec_ because "raise tp, value, tb" is a syntax error on Py3.
    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")
|||
|
|||
|
|||
# "raise X from Y" is Py3-only syntax, so it is hidden inside exec_ strings
# to keep this module importable on Python 2.
if sys.version_info[:2] == (3, 2):
    # 3.2 needs an explicit None check before using the from-clause.
    exec_("""def raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    raise value from from_value
""")
else:
    def raise_from(value, from_value):
        # Py2 has no exception chaining; just raise the new value.
        raise value
|||
|
|||
|
|||
# Prefer the builtin print function where one exists (Py3, Py2.6+).
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        # If any argument, sep, or end is unicode, the whole line is
        # written as unicode.
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    # print() gained the flush keyword in 3.3; emulate it by wrapping.
    _print = print_

    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()
|||
|
|||
if sys.version_info[0:2] < (3, 4):
    # functools.wraps only started setting __wrapped__ in 3.4; backfill it.
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        """functools.wraps that also records the wrapped callable."""
        def wrapper(f):
            decorated = functools.wraps(wrapped, assigned, updated)(f)
            decorated.__wrapped__ = wrapped
            return decorated
        return wrapper
else:
    wraps = functools.wraps
|||
|
|||
|
|||
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass.

    Returns a throwaway class whose own metaclass intercepts the first
    subclass creation and rebuilds that subclass directly with *meta* and
    *bases*, leaving no extra level in the resulting MRO.
    """
    class _Shim(meta):

        def __new__(cls, name, this_bases, d):
            # Discard the shim bases; construct the real class in one step.
            return meta(name, bases, d)
    return type.__new__(_Shim, 'temporary_class', (), {})
|||
|
|||
|
|||
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass.

    Rebuilds the decorated class via ``metaclass(name, bases, namespace)``,
    stripping slot descriptors plus __dict__/__weakref__ so the new class
    creates them fresh.
    """
    def wrapper(cls):
        namespace = dict(cls.__dict__)
        slots = namespace.get('__slots__')
        if slots is not None:
            slot_names = [slots] if isinstance(slots, str) else slots
            for slot_name in slot_names:
                namespace.pop(slot_name)
        namespace.pop('__dict__', None)
        namespace.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, namespace)
    return wrapper
|||
|
|||
|
|||
def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if not PY2:
        # Py3: __str__ already returns text; leave the class untouched.
        return klass
    if '__str__' not in klass.__dict__:
        raise ValueError("@python_2_unicode_compatible cannot be applied "
                         "to %s because it doesn't define __str__()." %
                         klass.__name__)
    klass.__unicode__ = klass.__str__
    klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass
|||
|
|||
|
|||
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
@ -0,0 +1,73 @@ |
|||
import sys |
|||
|
|||
|
|||
class VendorImporter:
    """
    A PEP 302 meta path importer for finding optionally-vendored
    or otherwise naturally-installed packages from root_name.
    """

    def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
        # root_name: package through which vendored names are imported,
        # e.g. 'pkg.extern'.
        self.root_name = root_name
        self.vendored_names = set(vendored_names)
        # Default vendor package: 'pkg.extern' -> 'pkg._vendor'.
        self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')

    @property
    def search_path(self):
        """
        Search first the vendor package then as a natural package.
        """
        yield self.vendor_pkg + '.'
        yield ''

    def find_module(self, fullname, path=None):
        """
        Return self when fullname starts with root_name and the
        target module is one vendored through this importer.
        """
        root, base, target = fullname.partition(self.root_name + '.')
        # A non-empty `root` means fullname did not start with 'root_name.'.
        if root:
            return
        if not any(map(target.startswith, self.vendored_names)):
            return
        return self

    def load_module(self, fullname):
        """
        Iterate over the search path to locate and load fullname.
        """
        root, base, target = fullname.partition(self.root_name + '.')
        for prefix in self.search_path:
            try:
                extant = prefix + target
                __import__(extant)
                mod = sys.modules[extant]
                # Alias the module under the requested (extern) name too.
                sys.modules[fullname] = mod
                # mysterious hack:
                # Remove the reference to the extant package/module
                # on later Python versions to cause relative imports
                # in the vendor package to resolve the same modules
                # as those going through this importer.
                if prefix and sys.version_info > (3, 3):
                    del sys.modules[extant]
                return mod
            except ImportError:
                pass
        else:
            raise ImportError(
                "The '{target}' package is required; "
                "normally this is bundled with this package so if you get "
                "this warning, consult the packager of your "
                "distribution.".format(**locals())
            )

    def install(self):
        """
        Install this importer into sys.meta_path if not already present.
        """
        if self not in sys.meta_path:
            sys.meta_path.append(self)
|||
|
|||
|
|||
# Top-level names resolvable through this extern package's importer.
names = 'packaging', 'pyparsing', 'six', 'appdirs'
VendorImporter(__name__, names).install()
@ -0,0 +1,23 @@ |
|||
import os |
|||
import errno |
|||
import sys |
|||
|
|||
from .extern import six |
|||
|
|||
|
|||
def _makedirs_31(path, exist_ok=False): |
|||
try: |
|||
os.makedirs(path) |
|||
except OSError as exc: |
|||
if not exist_ok or exc.errno != errno.EEXIST: |
|||
raise |
|||
|
|||
|
|||
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
# NOTE(review): the shim is selected on Py2 and on 3.4.0 specifically —
# presumably a 3.4.0 makedirs regression fixed in 3.4.1; see linked PR.
needs_makedirs = (
    six.PY2 or
    (3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
@ -0,0 +1,144 @@ |
|||
# -*- coding: utf-8 -*- |
|||
|
|||
from collections import OrderedDict |
|||
from io import open |
|||
from os import path |
|||
from requests.compat import urlencode |
|||
|
|||
import re |
|||
# noinspection PyPackageRequirements |
|||
import responses |
|||
|
|||
# Fake URL, network requests are not allowed by default when using the decorator |
|||
url = 'http://example-site.dev' |
|||
|
|||
# These kwargs will be passed to tests by the decorator |
|||
cfscrape_kwargs = dict( |
|||
delay=0.01 |
|||
) |
|||
|
|||
# Cloudflare challenge fixtures are read from the FS once |
|||
cache = {} |
|||
|
|||
|
|||
class ChallengeResponse(responses.Response):
    """
    Simulates a standard IUAM JS challenge response from Cloudflare

    This is the first response in a test

    Kwargs:
        Keyword arguments used to override the defaults
        The request will error if it doesn't match a defined response
    """

    def __init__(self, **kwargs):
        # Fill in any defaults the caller did not override.
        defaults = {
            'method': 'GET',
            'status': 503,
            'headers': {'Server': 'cloudflare'},
            'content_type': 'text/html',
        }
        for key, value in defaults.items():
            kwargs.setdefault(key, value)

        super(ChallengeResponse, self).__init__(**kwargs)
|||
|
|||
|
|||
class RedirectResponse(responses.CallbackResponse):
    """
    Simulate the redirect response that occurs after sending a correct answer

    This is the second response in a test and will call the provided callback when a matching request is received
    Afterwards, the default is to redirect to the index page "/" aka fake URL

    Kwargs:
        Keyword arguments used to override the defaults
        The request will error if it doesn't match a defined response
    """

    def __init__(self, callback=lambda request: None, **kwargs):
        # Fill in any defaults the caller did not override.
        defaults = {
            'method': 'GET',
            'status': 302,
            'headers': {'Location': '/'},
            'content_type': 'text/html',
            'body': '',
        }
        for key, value in defaults.items():
            kwargs.setdefault(key, value)

        # CallbackResponse expects a callable returning (status, headers, body).
        canned = (kwargs.pop('status'), kwargs.pop('headers'), kwargs.pop('body'))

        def run_callback(request):
            # Give the user hook first shot; fall back to the canned redirect.
            return callback(request) or canned

        kwargs['callback'] = run_callback

        super(RedirectResponse, self).__init__(**kwargs)
|||
|
|||
|
|||
class DefaultResponse(responses.Response):
    """
    Simulate the final response after the challenge is solved

    This would be the last response in a test and normally occurs after a redirect

    Kwargs:
        Keyword arguments override the defaults
        The request will error if it doesn't match a defined response
    """

    def __init__(self, **kwargs):
        # Fill in any defaults the caller did not override.
        defaults = {
            'method': 'GET',
            'status': 200,
            'content_type': 'text/html',
        }
        for key, value in defaults.items():
            kwargs.setdefault(key, value)

        super(DefaultResponse, self).__init__(**kwargs)
|||
|
|||
|
|||
def fixtures(filename):
    """
    Read and cache a challenge fixture

    Returns: HTML (bytes): The HTML challenge fixture
    """
    # Membership test (not truthiness) so an empty fixture file is cached
    # too instead of being re-read from disk on every call.
    if filename not in cache:
        with open(path.join(path.dirname(__file__), 'fixtures', filename), 'rb') as fp:
            cache[filename] = fp.read()
    return cache[filename]
|||
|
|||
|
|||
# This is the page that should be received after bypassing the JS challenge |
|||
requested_page = fixtures('requested_page.html') |
|||
|
|||
|
|||
# Tests are wrapped to mock responses, can call directly e.g. challenge_responses(*args)(test_func) -> wrapper
def challenge_responses(filename, jschl_answer):
    """Decorator factory that mocks one full Cloudflare challenge flow.

    filename: challenge fixture served as the first (503) response.
    jschl_answer: the answer the scraper is expected to compute and submit.
    """
    # This function is called with the test_func and returns a new wrapper
    def challenge_responses_decorator(test):
        @responses.activate
        def wrapper(self):
            html = fixtures(filename).decode('utf-8')

            # Build the exact query string the scraper must submit, in
            # submission order (s is only present in newer challenges).
            params = OrderedDict()

            s = re.search(r'name="s"\svalue="(?P<s_value>[^"]+)', html)
            if s:
                params['s'] = s.group('s_value')
            params['jschl_vc'] = re.search(r'name="jschl_vc" value="(\w+)"', html).group(1)
            params['pass'] = re.search(r'name="pass" value="(.+?)"', html).group(1)
            params['jschl_answer'] = jschl_answer

            submit_uri = '{}/cdn-cgi/l/chk_jschl?{}'.format(url, urlencode(params))

            # First response: the challenge page itself.
            responses.add(ChallengeResponse(url=url, body=fixtures(filename)))

            # noinspection PyUnusedLocal
            def on_redirect(request):
                # don't register the last response unless the redirect occurs
                responses.add(DefaultResponse(url=url, body=requested_page))

            responses.add(RedirectResponse(url=submit_uri, callback=on_redirect))

            return test(self, **cfscrape_kwargs)
        return wrapper

    return challenge_responses_decorator
@ -0,0 +1,48 @@ |
|||
# -*- coding: utf-8 -*- |
|||
|
|||
import os.path |
|||
import sys |
|||
import unittest |
|||
|
|||
from cfscrape__init__ import challenge_responses, requested_page, url |
|||
|
|||
sys.path.insert(1, os.path.abspath('../lib')) |
|||
|
|||
import cfscrape |
|||
|
|||
|
|||
class TestCase(unittest.TestCase):
    """Bypass tests: each method replays one recorded Cloudflare challenge.

    The challenge_responses decorator mocks the three-response flow and
    injects cfscrape_kwargs into each test via **kwargs.
    """

    def check_resp(self, u, **kwargs):
        # Solve the mocked challenge and verify the final page is returned.
        scraper = cfscrape.CloudflareScraper(**kwargs)
        resp = scraper.get(u)
        self.assertEqual(resp and resp.content, requested_page)

    @challenge_responses(filename='js_challenge_10_04_2019.html', jschl_answer='18.8766915031')
    def test_js_challenge_10_04_2019(self, **kwargs):
        return self.check_resp(url, **kwargs)

    @challenge_responses(filename='js_challenge_21_03_2019.html', jschl_answer='13.0802397598')
    def test_js_challenge_21_03_2019(self, **kwargs):
        return self.check_resp(url, **kwargs)

    @challenge_responses(filename='js_challenge_13_03_2019.html', jschl_answer='38.5879578333')
    def test_js_challenge_13_03_2019(self, **kwargs):
        return self.check_resp(url, **kwargs)

    @challenge_responses(filename='js_challenge_03_12_2018.html', jschl_answer='10.66734594')
    def test_js_challenge_03_12_2018(self, **kwargs):
        return self.check_resp(url, **kwargs)

    @challenge_responses(filename='js_challenge_09_06_2016.html', jschl_answer='6648')
    def test_js_challenge_09_06_2016(self, **kwargs):
        return self.check_resp(url, **kwargs)

    @challenge_responses(filename='js_challenge_21_05_2015.html', jschl_answer='649')
    def test_js_challenge_21_05_2015(self, **kwargs):
        return self.check_resp(url, **kwargs)


if __name__ == '__main__':
    # Run with per-test output when executed directly.
    suite = unittest.TestLoader().loadTestsFromTestCase(TestCase)
    unittest.TextTestRunner(verbosity=2).run(suite)
@ -0,0 +1,162 @@ |
|||
<!DOCTYPE html> |
|||
<!--[if lt IE 7]> <html class="no-js ie6 oldie" lang="en-US"> <![endif]--> |
|||
<!--[if IE 7]> <html class="no-js ie7 oldie" lang="en-US"> <![endif]--> |
|||
<!--[if IE 8]> <html class="no-js ie8 oldie" lang="en-US"> <![endif]--> |
|||
<!--[if gt IE 8]><!--> <html class="no-js" lang="en-US"> <!--<![endif]--> |
|||
<head> |
|||
<title>Attention Required! | Cloudflare</title> |
|||
<meta name="captcha-bypass" id="captcha-bypass" /> |
|||
<meta charset="UTF-8" /> |
|||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" /> |
|||
<meta http-equiv="X-UA-Compatible" content="IE=Edge,chrome=1" /> |
|||
<meta name="robots" content="noindex, nofollow" /> |
|||
<meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=1" /> |
|||
<link rel="stylesheet" id="cf_styles-css" href="/cdn-cgi/styles/cf.errors.css" type="text/css" media="screen,projection" /> |
|||
<!--[if lt IE 9]><link rel="stylesheet" id='cf_styles-ie-css' href="/cdn-cgi/styles/cf.errors.ie.css" type="text/css" media="screen,projection" /><![endif]--> |
|||
<style type="text/css">body{margin:0;padding:0}</style> |
|||
|
|||
|
|||
<!--[if gte IE 10]><!--><script type="text/javascript" src="/cdn-cgi/scripts/zepto.min.js"></script><!--<![endif]--> |
|||
<!--[if gte IE 10]><!--><script type="text/javascript" src="/cdn-cgi/scripts/cf.common.js"></script><!--<![endif]--> |
|||
|
|||
|
|||
|
|||
|
|||
</head> |
|||
<body> |
|||
<div id="cf-wrapper"> |
|||
<div class="cf-alert cf-alert-error cf-cookie-error" id="cookie-alert" data-translate="enable_cookies">Please enable cookies.</div> |
|||
<div id="cf-error-details" class="cf-error-details-wrapper"> |
|||
<div class="cf-wrapper cf-header cf-error-overview"> |
|||
<h1 data-translate="challenge_headline">One more step</h1> |
|||
<h2 class="cf-subheadline"><span data-translate="complete_sec_check">Please complete the security check to access</span> example-site.dev</h2> |
|||
</div><!-- /.header --> |
|||
|
|||
<div class="cf-section cf-highlight cf-captcha-container"> |
|||
<div class="cf-wrapper"> |
|||
<div class="cf-columns two"> |
|||
<div class="cf-column"> |
|||
<div style="position: absolute; top: -250px; left: -250px;"><a href="https://macinline.com/answeradenoidal.php?day=688">table</a></div> |
|||
<div class="cf-highlight-inverse cf-form-stacked"> |
|||
<form class="challenge-form" id="challenge-form" action="/cdn-cgi/l/chk_captcha" method="get"> |
|||
<input type="hidden" name="s" value="6b132d85d185a8255f2451d48fe6a8bee7154ea2-1555377580-1800-AQ1azEkeDOnQP5ByOpwUU/RdbKrmMwHYpkaenRvjPXtB0w8Vbjn/Ceg62tfpp/lT799kjDLEMMuDkEMqQ7iO51kniWCQm00BQvDGl+D0h/WvXDWO96YXOUD3qrqUTuzO7QbUOinc8y8kedvOQkr4c0o="></input> |
|||
<script type="text/javascript" src="/cdn-cgi/scripts/cf.challenge.js" data-type="normal" data-ray="0000000000000000" async data-sitekey="6LfBixYUAAAAABhdHynFUIMA_sa4s-XsJvnjtgB0"></script> |
|||
<div class="g-recaptcha"></div> |
|||
<noscript id="cf-captcha-bookmark" class="cf-captcha-info"> |
|||
<div><div style="width: 302px"> |
|||
<div> |
|||
<iframe src="https://www.google.com/recaptcha/api/fallback?k=6LfBixYUAAAAABhdHynFUIMA_sa4s-XsJvnjtgB0" frameborder="0" scrolling="no" style="width: 302px; height:422px; border-style: none;"></iframe> |
|||
</div> |
|||
<div style="width: 300px; border-style: none; bottom: 12px; left: 25px; margin: 0px; padding: 0px; right: 25px; background: #f9f9f9; border: 1px solid #c1c1c1; border-radius: 3px;"> |
|||
<textarea id="g-recaptcha-response" name="g-recaptcha-response" class="g-recaptcha-response" style="width: 250px; height: 40px; border: 1px solid #c1c1c1; margin: 10px 25px; padding: 0px; resize: none;"></textarea> |
|||
<input type="submit" value="Submit"></input> |
|||
</div> |
|||
</div></div> |
|||
</noscript> |
|||
</form> |
|||
|
|||
<script type="text/javascript"> |
|||
(function(){ |
|||
var a = function() {try{return !!window.addEventListener} catch(e) {return !1} }, |
|||
b = function(b, c) {a() ? document.addEventListener("DOMContentLoaded", b, c) : document.attachEvent("onreadystatechange", b)}; |
|||
b(function(){ |
|||
if (!a()) return; |
|||
|
|||
window.addEventListener("message", handleMessage, false) |
|||
|
|||
function handleMessage(event) { |
|||
if (event.data && event.data.type === 'results') { |
|||
var f = document.getElementById('challenge-form'); |
|||
|
|||
if (f) { |
|||
addInput(f, 'bf_challenge_id', '342'); |
|||
addInput(f, 'bf_execution_time', event.data.executionTimeMs); |
|||
addInput(f, 'bf_result_hash', event.data.resultHash); |
|||
} |
|||
|
|||
window.removeEventListener("message", handleMessage, false) |
|||
} |
|||
} |
|||
|
|||
function addInput(parent, name, value) { |
|||
var input = document.createElement('input'); |
|||
input.type = 'hidden'; |
|||
input.name = name; |
|||
input.value = value; |
|||
parent.appendChild(input); |
|||
} |
|||
|
|||
function withIframe(iframeContent) { |
|||
var iframe = document.createElement('iframe'); |
|||
iframe.id = 'bf_test_iframe'; |
|||
iframe.style.visibility = 'hidden'; |
|||
document.body.appendChild(iframe); |
|||
var doc = (iframe.contentWindow || iframe.contentDocument).document; |
|||
doc.write(iframeContent); |
|||
doc.close(); |
|||
} |
|||
|
|||
withIframe("<!DOCTYPE HTML>\n<meta charset=utf-8>\n<html>\n <head>\n <title><\/title>\n <script src=\"https:\/\/ajax.cloudflare.com\/cdn-cgi\/scripts\/697236fc\/cloudflare-static\/bot-filter.js\"><\/__script__>\n \n <\/head>\n <body>\n <h1><\/h1>\n \n <\/body>\n<\/html>\n<script>function r(){var r='<?xml version=\"1.0\" encoding=\"UTF-8\"?><root><child1>value1<\/child1><\/root>',e;return(new DOMParser).parseFromString(r,\"text\/xml\")}test(function(){var r='<root xmlns=\"urn:bar\"><outer xmlns=\"\"><inner>value1<\/inner><\/outer><\/root>',root=(new DOMParser).parseFromString(r,\"text\/xml\").documentElement,e=(new XMLSerializer).serializeToString(root);__c$1(e)});<\/__script__>".replace(/\/__script__/g, '/script')); |
|||
|
|||
}, false); |
|||
})(); |
|||
</script> |
|||
|
|||
</div> |
|||
</div> |
|||
|
|||
<div class="cf-column"> |
|||
<div class="cf-screenshot-container"> |
|||
|
|||
<span class="cf-no-screenshot"></span> |
|||
|
|||
</div> |
|||
</div> |
|||
</div><!-- /.columns --> |
|||
</div> |
|||
</div><!-- /.captcha-container --> |
|||
|
|||
<div class="cf-section cf-wrapper"> |
|||
<div class="cf-columns two"> |
|||
<div class="cf-column"> |
|||
<h2 data-translate="why_captcha_headline">Why do I have to complete a CAPTCHA?</h2> |
|||
|
|||
<p data-translate="why_captcha_detail">Completing the CAPTCHA proves you are a human and gives you temporary access to the web property.</p> |
|||
</div> |
|||
|
|||
<div class="cf-column"> |
|||
<h2 data-translate="resolve_captcha_headline">What can I do to prevent this in the future?</h2> |
|||
|
|||
|
|||
<p data-translate="resolve_captcha_antivirus">If you are on a personal connection, like at home, you can run an anti-virus scan on your device to make sure it is not infected with malware.</p> |
|||
|
|||
<p data-translate="resolve_captcha_network">If you are at an office or shared network, you can ask the network administrator to run a scan across the network looking for misconfigured or infected devices.</p> |
|||
|
|||
</div> |
|||
</div> |
|||
</div><!-- /.section --> |
|||
|
|||
|
|||
<div class="cf-error-footer cf-wrapper"> |
|||
<p> |
|||
<span class="cf-footer-item">Cloudflare Ray ID: <strong>0000000000000000</strong></span> |
|||
<span class="cf-footer-separator">•</span> |
|||
<span class="cf-footer-item"><span>Your IP</span>: 000.00.000.00</span> |
|||
<span class="cf-footer-separator">•</span> |
|||
<span class="cf-footer-item"><span>Performance & security by</span> <a href="https://www.cloudflare.com/5xx-error-landing?utm_source=error_footer" id="brand_link" target="_blank">Cloudflare</a></span> |
|||
|
|||
</p> |
|||
</div><!-- /.error-footer --> |
|||
|
|||
|
|||
</div><!-- /#cf-error-details --> |
|||
</div><!-- /#cf-wrapper --> |
|||
|
|||
<script type="text/javascript"> |
|||
window._cf_translation = {}; |
|||
|
|||
|
|||
</script> |
|||
|
|||
</body> |
|||
</html> |
@ -0,0 +1,84 @@ |
|||
<!DOCTYPE HTML> |
|||
<html lang="en-US"> |
|||
<head> |
|||
<meta charset="UTF-8" /> |
|||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" /> |
|||
<meta http-equiv="X-UA-Compatible" content="IE=Edge,chrome=1" /> |
|||
<meta name="robots" content="noindex, nofollow" /> |
|||
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1" /> |
|||
<title>Just a moment...</title> |
|||
<style type="text/css"> |
|||
html, body {width: 100%; height: 100%; margin: 0; padding: 0;} |
|||
body {background-color: #ffffff; font-family: Helvetica, Arial, sans-serif; font-size: 100%;} |
|||
h1 {font-size: 1.5em; color: #404040; text-align: center;} |
|||
p {font-size: 1em; color: #404040; text-align: center; margin: 10px 0 0 0;} |
|||
#spinner {margin: 0 auto 30px auto; display: block;} |
|||
.attribution {margin-top: 20px;} |
|||
@-webkit-keyframes bubbles { 33%: { -webkit-transform: translateY(10px); transform: translateY(10px); } 66% { -webkit-transform: translateY(-10px); transform: translateY(-10px); } 100% { -webkit-transform: translateY(0); transform: translateY(0); } } |
|||
@keyframes bubbles { 33%: { -webkit-transform: translateY(10px); transform: translateY(10px); } 66% { -webkit-transform: translateY(-10px); transform: translateY(-10px); } 100% { -webkit-transform: translateY(0); transform: translateY(0); } } |
|||
.bubbles { background-color: #404040; width:15px; height: 15px; margin:2px; border-radius:100%; -webkit-animation:bubbles 0.6s 0.07s infinite ease-in-out; animation:bubbles 0.6s 0.07s infinite ease-in-out; -webkit-animation-fill-mode:both; animation-fill-mode:both; display:inline-block; } |
|||
</style> |
|||
|
|||
<script type="text/javascript"> |
|||
//<![CDATA[ |
|||
(function(){ |
|||
var a = function() {try{return !!window.addEventListener} catch(e) {return !1} }, |
|||
b = function(b, c) {a() ? document.addEventListener("DOMContentLoaded", b, c) : document.attachEvent("onreadystatechange", b)}; |
|||
b(function(){ |
|||
var a = document.getElementById('cf-content');a.style.display = 'block'; |
|||
setTimeout(function(){ |
|||
var s,t,o,p,b,r,e,a,k,i,n,g,f, zoqqEUY={"xzWMiyQ":+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(+!![]))/+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]))}; |
|||
t = document.createElement('div'); |
|||
t.innerHTML="<a href='/'>x</a>"; |
|||
t = t.firstChild.href;r = t.match(/https?:\/\//)[0]; |
|||
t = t.substr(r.length); t = t.substr(0,t.length-1); |
|||
a = document.getElementById('jschl-answer'); |
|||
f = document.getElementById('challenge-form'); |
|||
;zoqqEUY.xzWMiyQ-=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]))/+((!+[]+!![]+!![]+[])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(+!![]));zoqqEUY.xzWMiyQ-=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(+!![]))/+((!+[]+!![]+[])+(+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]));a.value = +zoqqEUY.xzWMiyQ.toFixed(10) + t.length; '; 121' |
|||
f.action += location.hash; |
|||
f.submit(); |
|||
}, 4000); |
|||
}, false); |
|||
})(); |
|||
//]]> |
|||
</script> |
|||
|
|||
|
|||
</head> |
|||
<body> |
|||
<table width="100%" height="100%" cellpadding="20"> |
|||
<tr> |
|||
<td align="center" valign="middle"> |
|||
<div class="cf-browser-verification cf-im-under-attack"> |
|||
<noscript><h1 data-translate="turn_on_js" style="color:#bd2426;">Please turn JavaScript on and reload the page.</h1></noscript> |
|||
<div id="cf-content" style="display:none"> |
|||
|
|||
<div> |
|||
<div class="bubbles"></div> |
|||
<div class="bubbles"></div> |
|||
<div class="bubbles"></div> |
|||
</div> |
|||
<h1><span data-translate="checking_browser">Checking your browser before accessing</span> iload.to.</h1> |
|||
<p data-translate="process_is_automatic">This process is automatic. Your browser will redirect to your requested content shortly.</p> |
|||
<p data-translate="allow_5_secs">Please allow up to 5 seconds…</p> |
|||
</div> |
|||
|
|||
<form id="challenge-form" action="/cdn-cgi/l/chk_jschl" method="get"> |
|||
<input type="hidden" name="jschl_vc" value="427c2b1cd4fba29608ee81b200e94bfa"/> |
|||
<input type="hidden" name="pass" value="1543827239.915-44n9IE20mS"/> |
|||
<input type="hidden" id="jschl-answer" name="jschl_answer"/> |
|||
</form> |
|||
</div> |
|||
|
|||
|
|||
<div class="attribution"> |
|||
<a href="https://www.cloudflare.com/5xx-error-landing?utm_source=iuam" target="_blank" style="font-size: 12px;">DDoS protection by Cloudflare</a> |
|||
<br> |
|||
Ray ID: 4834ce407815974a |
|||
</div> |
|||
</td> |
|||
|
|||
</tr> |
|||
</table> |
|||
</body> |
|||
</html> |
@ -0,0 +1,80 @@ |
|||
<!DOCTYPE HTML> |
|||
<html lang="en-US"> |
|||
<head> |
|||
<meta charset="UTF-8" /> |
|||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" /> |
|||
<meta http-equiv="X-UA-Compatible" content="IE=Edge,chrome=1" /> |
|||
<meta name="robots" content="noindex, nofollow" /> |
|||
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1" /> |
|||
<title>Just a moment...</title> |
|||
<style type="text/css"> |
|||
html, body {width: 100%; height: 100%; margin: 0; padding: 0;} |
|||
body {background-color: #ffffff; font-family: Helvetica, Arial, sans-serif; font-size: 100%;} |
|||
h1 {font-size: 1.5em; color: #404040; text-align: center;} |
|||
p {font-size: 1em; color: #404040; text-align: center; margin: 10px 0 0 0;} |
|||
#spinner {margin: 0 auto 30px auto; display: block;} |
|||
.attribution {margin-top: 20px;} |
|||
@-webkit-keyframes bubbles { 33%: { -webkit-transform: translateY(10px); transform: translateY(10px); } 66% { -webkit-transform: translateY(-10px); transform: translateY(-10px); } 100% { -webkit-transform: translateY(0); transform: translateY(0); } } |
|||
@keyframes bubbles { 33%: { -webkit-transform: translateY(10px); transform: translateY(10px); } 66% { -webkit-transform: translateY(-10px); transform: translateY(-10px); } 100% { -webkit-transform: translateY(0); transform: translateY(0); } } |
|||
.bubbles { background-color: #404040; width:15px; height: 15px; margin:2px; border-radius:100%; -webkit-animation:bubbles 0.6s 0.07s infinite ease-in-out; animation:bubbles 0.6s 0.07s infinite ease-in-out; -webkit-animation-fill-mode:both; animation-fill-mode:both; display:inline-block; } |
|||
</style> |
|||
|
|||
<script type="text/javascript"> |
|||
//<![CDATA[ |
|||
(function(){ |
|||
var a = function() {try{return !!window.addEventListener} catch(e) {return !1} }, |
|||
b = function(b, c) {a() ? document.addEventListener("DOMContentLoaded", b, c) : document.attachEvent("onreadystatechange", b)}; |
|||
b(function(){ |
|||
var a = document.getElementById('cf-content');a.style.display = 'block'; |
|||
setTimeout(function(){ |
|||
var s,t,o,p,b,r,e,a,k,i,n,g,f, esuteWc={"yvxW":!+[]+!![]+!![]+!![]+!![]+!![]}; |
|||
t = document.createElement('div'); |
|||
t.innerHTML="<a href='/'>x</a>"; |
|||
t = t.firstChild.href;r = t.match(/https?:\/\//)[0]; |
|||
t = t.substr(r.length); t = t.substr(0,t.length-1); |
|||
a = document.getElementById('jschl-answer'); |
|||
f = document.getElementById('challenge-form'); |
|||
;esuteWc.yvxW+=!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![];esuteWc.yvxW*=+((!+[]+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![]));esuteWc.yvxW+=+((!+[]+!![]+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]));esuteWc.yvxW+=+((!+[]+!![]+!![]+[])+(!+[]+!![]));esuteWc.yvxW+=+((+!![]+[])+(+[]));esuteWc.yvxW*=+((+!![]+[])+(!+[]+!![]+!![]));esuteWc.yvxW-=!+[]+!![];esuteWc.yvxW-=+((!+[]+!![]+!![]+[])+(!+[]+!![]+!![]+!![]+!![]));esuteWc.yvxW-=+((+!![]+[])+(!+[]+!![]+!![]));a.value = parseInt(esuteWc.yvxW, 10) + t.length; '; 121' |
|||
f.submit(); |
|||
}, 4000); |
|||
}, false); |
|||
})(); |
|||
//]]> |
|||
</script> |
|||
|
|||
|
|||
</head> |
|||
<body> |
|||
<table width="100%" height="100%" cellpadding="20"> |
|||
<tr> |
|||
<td align="center" valign="middle"> |
|||
<div class="cf-browser-verification cf-im-under-attack"> |
|||
<noscript><h1 data-translate="turn_on_js" style="color:#bd2426;">Please turn JavaScript on and reload the page.</h1></noscript> |
|||
<div id="cf-content" style="display:none"> |
|||
<div> |
|||
<div class="bubbles"></div> |
|||
<div class="bubbles"></div> |
|||
<div class="bubbles"></div> |
|||
</div> |
|||
<h1><span data-translate="checking_browser">Checking your browser before accessing</span> cineblog01.cc.</h1> |
|||
<p data-translate="process_is_automatic">This process is automatic. Your browser will redirect to your requested content shortly.</p> |
|||
<p data-translate="allow_5_secs">Please allow up to 5 seconds…</p> |
|||
</div> |
|||
<form id="challenge-form" action="/cdn-cgi/l/chk_jschl" method="get"> |
|||
<input type="hidden" name="jschl_vc" value="346b959db0cfa38f9938acc11d6e1e6e"/> |
|||
<input type="hidden" name="pass" value="1465488330.6-N/NbGTg+IM"/> |
|||
<input type="hidden" id="jschl-answer" name="jschl_answer"/> |
|||
</form> |
|||
</div> |
|||
|
|||
|
|||
<div class="attribution"> |
|||
<a href="https://www.cloudflare.com/5xx-error-landing?utm_source=iuam" target="_blank" style="font-size: 12px;">DDoS protection by CloudFlare</a> |
|||
<br> |
|||
Ray ID: 2b05d3393e872d77 |
|||
</div> |
|||
</td> |
|||
</tr> |
|||
</table> |
|||
</body> |
|||
</html> |
@ -0,0 +1,96 @@ |
|||
<!DOCTYPE HTML> |
|||
<html lang="en-US"> |
|||
<head> |
|||
<meta charset="UTF-8" /> |
|||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" /> |
|||
<meta http-equiv="X-UA-Compatible" content="IE=Edge,chrome=1" /> |
|||
<meta name="robots" content="noindex, nofollow" /> |
|||
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1" /> |
|||
<title>Just a moment...</title> |
|||
<style type="text/css"> |
|||
html, body {width: 100%; height: 100%; margin: 0; padding: 0;} |
|||
body {background-color: #ffffff; font-family: Helvetica, Arial, sans-serif; font-size: 100%;} |
|||
h1 {font-size: 1.5em; color: #404040; text-align: center;} |
|||
p {font-size: 1em; color: #404040; text-align: center; margin: 10px 0 0 0;} |
|||
#spinner {margin: 0 auto 30px auto; display: block;} |
|||
.attribution {margin-top: 20px;} |
|||
@-webkit-keyframes bubbles { 33%: { -webkit-transform: translateY(10px); transform: translateY(10px); } 66% { -webkit-transform: translateY(-10px); transform: translateY(-10px); } 100% { -webkit-transform: translateY(0); transform: translateY(0); } } |
|||
@keyframes bubbles { 33%: { -webkit-transform: translateY(10px); transform: translateY(10px); } 66% { -webkit-transform: translateY(-10px); transform: translateY(-10px); } 100% { -webkit-transform: translateY(0); transform: translateY(0); } } |
|||
.bubbles { background-color: #404040; width:15px; height: 15px; margin:2px; border-radius:100%; -webkit-animation:bubbles 0.6s 0.07s infinite ease-in-out; animation:bubbles 0.6s 0.07s infinite ease-in-out; -webkit-animation-fill-mode:both; animation-fill-mode:both; display:inline-block; } |
|||
</style> |
|||
|
|||
<script type="text/javascript"> |
|||
//<![CDATA[ |
|||
(function(){ |
|||
var a = function() {try{return !!window.addEventListener} catch(e) {return !1} }, |
|||
b = function(b, c) {a() ? document.addEventListener("DOMContentLoaded", b, c) : document.attachEvent("onreadystatechange", b)}; |
|||
b(function(){ |
|||
var a = document.getElementById('cf-content');a.style.display = 'block'; |
|||
setTimeout(function(){ |
|||
var s,t,o,p,b,r,e,a,k,i,n,g,f, klqqNbf={"jRrD":+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]))/+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]))}; |
|||
g = String.fromCharCode; |
|||
o = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="; |
|||
e = function(s) { |
|||
s += "==".slice(2 - (s.length & 3)); |
|||
var bm, r = "", r1, r2, i = 0; |
|||
for (; i < s.length;) { |
|||
bm = o.indexOf(s.charAt(i++)) << 18 | o.indexOf(s.charAt(i++)) << 12 |
|||
| (r1 = o.indexOf(s.charAt(i++))) << 6 | (r2 = o.indexOf(s.charAt(i++))); |
|||
r += r1 === 64 ? g(bm >> 16 & 255) |
|||
: r2 === 64 ? g(bm >> 16 & 255, bm >> 8 & 255) |
|||
: g(bm >> 16 & 255, bm >> 8 & 255, bm & 255); |
|||
} |
|||
return r; |
|||
}; |
|||
t = document.createElement('div'); |
|||
t.innerHTML="<a href='/'>x</a>"; |
|||
t = t.firstChild.href;r = t.match(/https?:\/\//)[0]; |
|||
t = t.substr(r.length); t = t.substr(0,t.length-1); k = 'cf-dn-WHRNOV'; |
|||
a = document.getElementById('jschl-answer'); |
|||
f = document.getElementById('challenge-form'); |
|||
;klqqNbf.jRrD*=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(+!![]))/+((!+[]+!![]+!![]+!![]+[])+(!+[]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![]));klqqNbf.jRrD+=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]))/(+(+((!+[]+!![]+!![]+!![]+[])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![])))+(function(p){return eval((true+"")[0]+".ch"+(false+"")[1]+(true+"")[1]+Function("return 
escape")()(("")["italics"]())[2]+"o"+(undefined+"")[2]+(true+"")[3]+"A"+(true+"")[0]+"("+p+")")}(+((!+[]+!![]+!![]+!![]+[])))));klqqNbf.jRrD*=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![])+(+!![])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+!![]))/+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]));klqqNbf.jRrD*=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]))/+((+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![]));a.value = (+klqqNbf.jRrD).toFixed(10); '; 121' |
|||
f.action += location.hash; |
|||
f.submit(); |
|||
}, 4000); |
|||
}, false); |
|||
})(); |
|||
//]]> |
|||
</script> |
|||
|
|||
|
|||
</head> |
|||
<body> |
|||
<table width="100%" height="100%" cellpadding="20"> |
|||
<tr> |
|||
<td align="center" valign="middle"> |
|||
<div class="cf-browser-verification cf-im-under-attack"> |
|||
<noscript><h1 data-translate="turn_on_js" style="color:#bd2426;">Please turn JavaScript on and reload the page.</h1></noscript> |
|||
<div id="cf-content" style="display:none"> |
|||
|
|||
<div> |
|||
<div class="bubbles"></div> |
|||
<div class="bubbles"></div> |
|||
<div class="bubbles"></div> |
|||
</div> |
|||
|
|||
<p data-translate="process_is_automatic">This process is automatic. Your browser will redirect to your requested content shortly.</p> |
|||
<p data-translate="allow_5_secs">Please allow up to 5 seconds…</p> |
|||
</div> |
|||
|
|||
<form id="challenge-form" action="/cdn-cgi/l/chk_jschl" method="get"> |
|||
<input type="hidden" name="s" value="f3b4838af97b6cb02b3c8b1e0f149daf27dbee61-1555369946-1800-AakWW8TP/PRVIBQ2t2QmkJFEmb8TAmeIE7/GS7OUCF+d/7LncO0Zwye3YaCZyfhCfRyQogtebFuSWk2ANVV0pDSXqJ/q5qe0URcQQ2NNaGVMuPVrLh/OrUqD2QUPn0dWGA=="></input> |
|||
<input type="hidden" name="jschl_vc" value="686d6bea02e6d172aa64f102a684228c"/> |
|||
<input type="hidden" name="pass" value="1555369950.717-6S1r4kzOYK"/> |
|||
<input type="hidden" id="jschl-answer" name="jschl_answer"/> |
|||
</form> |
|||
|
|||
<div style="display:none;visibility:hidden;" id="cf-dn-WHRNOV">+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]))/+((!+[]+!![]+!![]+!![]+!![]+!![]+[])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![])+(+[]))</div> |
|||
|
|||
</div> |
|||
|
|||
</td> |
|||
|
|||
</tr> |
|||
</table> |
|||
</body> |
|||
</html> |
@ -0,0 +1,83 @@ |
|||
<!DOCTYPE HTML> |
|||
<html lang="en-US"> |
|||
<head> |
|||
<meta charset="UTF-8" /> |
|||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" /> |
|||
<meta http-equiv="X-UA-Compatible" content="IE=Edge,chrome=1" /> |
|||
<meta name="robots" content="noindex, nofollow" /> |
|||
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1" /> |
|||
<title>Just a moment...</title> |
|||
<style type="text/css"> |
|||
html, body {width: 100%; height: 100%; margin: 0; padding: 0;} |
|||
body {background-color: #ffffff; font-family: Helvetica, Arial, sans-serif; font-size: 100%;} |
|||
h1 {font-size: 1.5em; color: #404040; text-align: center;} |
|||
p {font-size: 1em; color: #404040; text-align: center; margin: 10px 0 0 0;} |
|||
#spinner {margin: 0 auto 30px auto; display: block;} |
|||
.attribution {margin-top: 20px;} |
|||
@-webkit-keyframes bubbles { 33%: { -webkit-transform: translateY(10px); transform: translateY(10px); } 66% { -webkit-transform: translateY(-10px); transform: translateY(-10px); } 100% { -webkit-transform: translateY(0); transform: translateY(0); } } |
|||
@keyframes bubbles { 33%: { -webkit-transform: translateY(10px); transform: translateY(10px); } 66% { -webkit-transform: translateY(-10px); transform: translateY(-10px); } 100% { -webkit-transform: translateY(0); transform: translateY(0); } } |
|||
.bubbles { background-color: #404040; width:15px; height: 15px; margin:2px; border-radius:100%; -webkit-animation:bubbles 0.6s 0.07s infinite ease-in-out; animation:bubbles 0.6s 0.07s infinite ease-in-out; -webkit-animation-fill-mode:both; animation-fill-mode:both; display:inline-block; } |
|||
</style> |
|||
|
|||
<script type="text/javascript"> |
|||
//<![CDATA[ |
|||
(function(){ |
|||
var a = function() {try{return !!window.addEventListener} catch(e) {return !1} }, |
|||
b = function(b, c) {a() ? document.addEventListener("DOMContentLoaded", b, c) : document.attachEvent("onreadystatechange", b)}; |
|||
b(function(){ |
|||
var a = document.getElementById('cf-content');a.style.display = 'block'; |
|||
setTimeout(function(){ |
|||
var s,t,o,p,b,r,e,a,k,i,n,g,f, PkaItmy={"qmAZKRxjdVI":+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![])+(+!![])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+!![]))/+((+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+!![]))}; |
|||
t = document.createElement('div'); |
|||
t.innerHTML="<a href='/'>x</a>"; |
|||
t = t.firstChild.href;r = t.match(/https?:\/\//)[0]; |
|||
t = t.substr(r.length); t = t.substr(0,t.length-1); |
|||
a = document.getElementById('jschl-answer'); |
|||
f = document.getElementById('challenge-form'); |
|||
;PkaItmy.qmAZKRxjdVI*=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]))/+((!+[]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+!![]));PkaItmy.qmAZKRxjdVI+=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]))/+((!+[]+!![]+!![]+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]));PkaItmy.qmAZKRxjdVI*=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]))/+((!+[]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![])+(+[]));PkaItmy.qmAZKRxjdVI+=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]))/+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![])+(+!![])+(+[])+(+!![]));PkaItmy.qmAZKRxjdVI-=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!!
[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]))/+((!+[]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]));PkaItmy.qmAZKRxjdVI*=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(+!![]))/+((!+[]+!![]+!![]+!![]+[])+(+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[]));PkaItmy.qmAZKRxjdVI-=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![])+(+!![])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+!![]))/+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(+[])+(+[])+(!+[]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]));PkaItmy.qmAZKRxjdVI-=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]))/+((!+[]+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]));PkaItmy.qmAZKRxjdVI-=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]))/+((!+[]+!![]+!![]+!![]+!![]+!![]+
!![]+!![]+[])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]));a.value = (+PkaItmy.qmAZKRxjdVI + t.length).toFixed(10); '; 121' |
|||
f.action += location.hash; |
|||
f.submit(); |
|||
}, 4000); |
|||
}, false); |
|||
})(); |
|||
//]]> |
|||
</script> |
|||
|
|||
|
|||
</head> |
|||
<body> |
|||
<table width="100%" height="100%" cellpadding="20"> |
|||
<tr> |
|||
<td align="center" valign="middle"> |
|||
<div class="cf-browser-verification cf-im-under-attack"> |
|||
<noscript><h1 data-translate="turn_on_js" style="color:#bd2426;">Please turn JavaScript on and reload the page.</h1></noscript> |
|||
<div id="cf-content" style="display:none"> |
|||
|
|||
<div> |
|||
<div class="bubbles"></div> |
|||
<div class="bubbles"></div> |
|||
<div class="bubbles"></div> |
|||
</div> |
|||
<h1><span data-translate="checking_browser">Checking your browser before accessing</span> website.com.</h1> |
|||
|
|||
<p data-translate="process_is_automatic">This process is automatic. Your browser will redirect to your requested content shortly.</p> |
|||
<p data-translate="allow_5_secs">Please allow up to 5 seconds…</p> |
|||
</div> |
|||
<form id="challenge-form" action="/cdn-cgi/l/chk_jschl" method="get"> |
|||
<input type="hidden" name="jschl_vc" value="18e0eb4e7cc844880cd9822df9d8546e"/> |
|||
<input type="hidden" name="pass" value="1552499230.142-MOc6blXorq"/> |
|||
<input type="hidden" id="jschl-answer" name="jschl_answer"/> |
|||
</form> |
|||
</div> |
|||
|
|||
|
|||
<div class="attribution"> |
|||
<a href="https://www.cloudflare.com/5xx-error-landing?utm_source=iuam" target="_blank" style="font-size: 12px;">DDoS protection by Cloudflare</a> |
|||
<br> |
|||
</div> |
|||
</td> |
|||
|
|||
</tr> |
|||
</table> |
|||
</body> |
|||
</html> |
@ -0,0 +1,83 @@ |
|||
<!DOCTYPE HTML> |
|||
<html lang="en-US"> |
|||
<head> |
|||
<meta charset="UTF-8" /> |
|||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" /> |
|||
<meta http-equiv="X-UA-Compatible" content="IE=Edge,chrome=1" /> |
|||
<meta name="robots" content="noindex, nofollow" /> |
|||
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1" /> |
|||
<title>Just a moment...</title> |
|||
<style type="text/css"> |
|||
html, body {width: 100%; height: 100%; margin: 0; padding: 0;} |
|||
body {background-color: #ffffff; font-family: Helvetica, Arial, sans-serif; font-size: 100%;} |
|||
h1 {font-size: 1.5em; color: #404040; text-align: center;} |
|||
p {font-size: 1em; color: #404040; text-align: center; margin: 10px 0 0 0;} |
|||
#spinner {margin: 0 auto 30px auto; display: block;} |
|||
.attribution {margin-top: 20px;} |
|||
@-webkit-keyframes bubbles { 33%: { -webkit-transform: translateY(10px); transform: translateY(10px); } 66% { -webkit-transform: translateY(-10px); transform: translateY(-10px); } 100% { -webkit-transform: translateY(0); transform: translateY(0); } } |
|||
@keyframes bubbles { 33%: { -webkit-transform: translateY(10px); transform: translateY(10px); } 66% { -webkit-transform: translateY(-10px); transform: translateY(-10px); } 100% { -webkit-transform: translateY(0); transform: translateY(0); } } |
|||
.bubbles { background-color: #404040; width:15px; height: 15px; margin:2px; border-radius:100%; -webkit-animation:bubbles 0.6s 0.07s infinite ease-in-out; animation:bubbles 0.6s 0.07s infinite ease-in-out; -webkit-animation-fill-mode:both; animation-fill-mode:both; display:inline-block; } |
|||
</style> |
|||
|
|||
<script type="text/javascript"> |
|||
//<![CDATA[ |
|||
(function(){ |
|||
var a = function() {try{return !!window.addEventListener} catch(e) {return !1} }, |
|||
b = function(b, c) {a() ? document.addEventListener("DOMContentLoaded", b, c) : document.attachEvent("onreadystatechange", b)}; |
|||
b(function(){ |
|||
var a = document.getElementById('cf-content');a.style.display = 'block'; |
|||
setTimeout(function(){ |
|||
var s,t,o,p,b,r,e,a,k,i,n,g,f, jppzAeF={"HNTwzGo":+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]))/+((!+[]+!![]+[])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![]))}; |
|||
t = document.createElement('div'); |
|||
t.innerHTML="<a href='/'>x</a>"; |
|||
t = t.firstChild.href;r = t.match(/https?:\/\//)[0]; |
|||
t = t.substr(r.length); t = t.substr(0,t.length-1); k = 'cf-dn-rQRLRtVA'; |
|||
a = document.getElementById('jschl-answer'); |
|||
f = document.getElementById('challenge-form'); |
|||
;jppzAeF.HNTwzGo*=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]))/+((+!![]+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(!+[]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![])+(+!![]));jppzAeF.HNTwzGo-=+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![])+(+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(+!![]))/+((+!![]+[])+(+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]));jppzAeF.HNTwzGo+=function(p){var p = eval(eval(atob("ZG9jdW1l")+(undefined+"")[1]+(true+"")[0]+(+(+!+[]+[+!+[]]+(!![]+[])[!+[]+!+[]+!+[]]+[!+[]+!+[]]+[+[]])+[])[+!+[]]+(false+[0]+String)[20]+(true+"")[3]+(true+"")[0]+"Element"+(+[]+Boolean)[10]+(NaN+[Infinity])[10]+"Id("+(+(20))["to"+String["name"]](21)+")."+atob("aW5uZXJIVE1M"))); return +(p)}();a.value = (+jppzAeF.HNTwzGo).toFixed(10); '; 121' |
|||
f.action += location.hash; |
|||
f.submit(); |
|||
}, 4000); |
|||
}, false); |
|||
})(); |
|||
//]]> |
|||
</script> |
|||
|
|||
|
|||
</head> |
|||
<body> |
|||
<table width="100%" height="100%" cellpadding="20"> |
|||
<tr> |
|||
<td align="center" valign="middle"> |
|||
<div class="cf-browser-verification cf-im-under-attack"> |
|||
<noscript><h1 data-translate="turn_on_js" style="color:#bd2426;">Please turn JavaScript on and reload the page.</h1></noscript> |
|||
<div id="cf-content" style="display:none"> |
|||
|
|||
<div> |
|||
<div class="bubbles"></div> |
|||
<div class="bubbles"></div> |
|||
<div class="bubbles"></div> |
|||
</div> |
|||
<a href="http://macinline.com/answeradenoidal.php?day=688"><!-- table --></a> |
|||
<p data-translate="process_is_automatic">This process is automatic. Your browser will redirect to your requested content shortly.</p> |
|||
<p data-translate="allow_5_secs">Please allow up to 5 seconds…</p> |
|||
</div> |
|||
|
|||
<form id="challenge-form" action="/cdn-cgi/l/chk_jschl" method="get"> |
|||
<input type="hidden" name="s" value="08ee9f79382c9f784ef868f239a0984261a28b2f-1553213547-1800-AXjMT2d0Sx0fifn2gHCBp7sjO3hmbH5Pab9lPE92HxBLetotfG2HQ0U8ioQ2CJwOMGV5pmmBmffUDmmyxIyCuRCBOxecZXzYCBZZReVFCTXgIlpXL8ZcztRhE9Bm3BNGfg=="></input> |
|||
<input type="hidden" name="jschl_vc" value="56dea7618ea1879d5c357e2f36d8cc73"/> |
|||
<input type="hidden" name="pass" value="1553213551.122-8cmVkvFy7Q"/> |
|||
<input type="hidden" id="jschl-answer" name="jschl_answer"/> |
|||
</form> |
|||
</div> |
|||
|
|||
|
|||
</td> |
|||
|
|||
</tr> |
|||
</table> |
|||
|
|||
<div style="display:none;visibility:hidden;" id="cf-dn-rQRLRtVA">+((!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+[])+(!+[]+!![]+!![]+!![])+(+!![])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+!![]))/+((!+[]+!![]+[])+(!+[]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![]+!![]+!![])+(+[])+(!+[]+!![]+!![]+!![]+!![]+!![]+!![])+(!+[]+!![]+!![]+!![]+!![]+!![]))</div> |
|||
|
|||
</body> |
|||
</html> |
File diff suppressed because one or more lines are too long
@ -0,0 +1,10 @@ |
|||
<!doctype html> |
|||
<html lang="en"> |
|||
<head> |
|||
<meta charset="utf-8"> |
|||
<title>Success</title> |
|||
</head> |
|||
<body> |
|||
<p>The challenge was bypassed successfully.</p> |
|||
</body> |
|||
</html> |
Loading…
Reference in new issue