
Merge branch 'develop' into redesign

pull/5180/head
Ruud, 10 years ago
commit a7cd242e16
37 changed files:

  1. couchpotato/core/media/_base/providers/torrent/rarbg.py (184)
  2. libs/pyasn1/__init__.py (4)
  3. libs/pyasn1/codec/ber/decoder.py (153)
  4. libs/pyasn1/codec/ber/encoder.py (168)
  5. libs/pyasn1/codec/cer/encoder.py (61)
  6. libs/pyasn1/codec/der/decoder.py (4)
  7. libs/pyasn1/codec/der/encoder.py (12)
  8. libs/pyasn1/compat/binary.py (10)
  9. libs/pyasn1/compat/iterfunc.py (10)
  10. libs/pyasn1/debug.py (65)
  11. libs/pyasn1/type/base.py (78)
  12. libs/pyasn1/type/char.py (17)
  13. libs/pyasn1/type/namedtype.py (31)
  14. libs/pyasn1/type/namedval.py (12)
  15. libs/pyasn1/type/tag.py (10)
  16. libs/pyasn1/type/tagmap.py (18)
  17. libs/pyasn1/type/univ.py (230)
  18. libs/pyasn1/type/useful.py (5)
  19. libs/requests/__init__.py (6)
  20. libs/requests/adapters.py (3)
  21. libs/requests/api.py (5)
  22. libs/requests/auth.py (5)
  23. libs/requests/compat.py (2)
  24. libs/requests/cookies.py (27)
  25. libs/requests/models.py (90)
  26. libs/requests/packages/__init__.py (106)
  27. libs/requests/packages/urllib3/__init__.py (9)
  28. libs/requests/packages/urllib3/_collections.py (45)
  29. libs/requests/packages/urllib3/connection.py (2)
  30. libs/requests/packages/urllib3/connectionpool.py (1)
  31. libs/requests/packages/urllib3/contrib/pyopenssl.py (23)
  32. libs/requests/packages/urllib3/exceptions.py (5)
  33. libs/requests/packages/urllib3/response.py (159)
  34. libs/requests/packages/urllib3/util/ssl_.py (38)
  35. libs/requests/packages/urllib3/util/url.py (2)
  36. libs/requests/sessions.py (14)
  37. libs/requests/utils.py (7)

couchpotato/core/media/_base/providers/torrent/rarbg.py (184)

@ -1,16 +1,16 @@
import re
import traceback
import random
import time
from datetime import datetime
from couchpotato import fireEvent
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.variable import tryInt, getIdentifier
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider
import requests
log = CPLog(__name__)
tokenreceived = 0
rarbgtoken = 0
class Base(TorrentMagnetProvider):
@ -20,130 +20,57 @@ class Base(TorrentMagnetProvider):
'search': 'https://torrentapi.org/pubapi_v2.php?token=%s&mode=search&search_imdb=%s&min_seeders=%s&min_leechers'
'=%s&ranked=%s&category=movies&format=json_extended&app_id=couchpotato',
}
http_time_between_calls = 2 # Seconds
user_agent = 'CouchPotato/1.0'
@staticmethod
def find_info(filename):
# CODEC #
codec = 'x264'
v = re.search("(?i)(x265|h265|h\.265)", filename)
if v:
codec = 'x265'
v = re.search("(?i)(xvid)", filename)
if v:
codec = 'xvid'
# RESOLUTION #
resolution = 'SD'
a = re.search("(?i)(720p)", filename)
if a:
resolution = '720p'
a = re.search("(?i)(1080p)", filename)
if a:
resolution = '1080p'
a = re.search("(?i)(2160p)", filename)
if a:
resolution = '2160p'
# SOURCE #
source = 'HD-Rip'
s = re.search("(?i)(WEB-DL|WEB_DL|WEB\.DL)", filename)
if s:
source = 'WEB-DL'
s = re.search("(?i)(WEBRIP)", filename)
if s:
source = 'WEBRIP'
s = re.search("(?i)(DVDR|DVDRip|DVD-Rip)", filename)
if s:
source = 'DVD-R'
s = re.search("(?i)(BRRIP|BDRIP|BluRay)", filename)
if s:
source = 'BR-Rip'
s = re.search("(?i)BluRay(.*)REMUX", filename)
if s:
source = 'BluRay-Remux'
s = re.search("(?i)BluRay(.*)\.(AVC|VC-1)\.", filename)
if s:
source = 'BluRay-Full'
return_info = [codec, resolution, source]
return return_info
@staticmethod
def get_token(self):
global tokenreceived
global rarbgtoken
now = int(time.time())
if (rarbgtoken == 0) or (tokenreceived == 0) or (now > (tokenreceived+(15*60))):
log.debug("RARBG: Getting Rarbg token")
tokendata = self.getJsonData(self.urls['token'])
if tokendata:
try:
tokenreceived = int(time.time())
rarbgtoken = tokendata['token']
log.debug("RARBG: GOT TOKEN: %s", rarbgtoken)
except RuntimeError:
log.error('RARBG: Failed getting token from Rarbg')
rarbgtoken = 0
# return token
http_time_between_calls = 2 # Seconds
_token = 0
def _search(self, movie, quality, results):
hasresults = 0
curryear = datetime.now().year
self.get_token(self)
movieid = getIdentifier(movie)
try:
movieyear = movie['info']['year']
except:
log.error("RARBG: Couldn't get movie year")
log.error('RARBG: Couldn\'t get movie year')
movieyear = 0
if (rarbgtoken != 0) and (movieyear == 0 or movieyear <= curryear):
data = self.getJsonData(self.urls['search'] % (rarbgtoken, movieid, self.conf('min_seeders'),
self.conf('min_leechers'), self.conf('ranked_only')))
self.getToken()
if (self._token != 0) and (movieyear == 0 or movieyear <= curryear):
data = self.getJsonData(self.urls['search'] % (self._token, movieid, self.conf('min_seeders'),
self.conf('min_leechers'), self.conf('ranked_only')), headers = self.getRequestHeaders())
if data:
if 'error_code' in data:
if data['error'] == 'No results found':
log.debug("RARBG: No results returned from Rarbg")
log.debug('RARBG: No results returned from Rarbg')
else:
if data['error_code'] == 10:
log.error(data['error'], movieid)
else:
log.error("RARBG: There is an error in the returned JSON: %s", data['error'])
log.error('RARBG: There is an error in the returned JSON: %s', data['error'])
else:
hasresults = 1
try:
if hasresults:
for result in data['torrent_results']:
name = result['title']
titlesplit = re.split("-", name)
titlesplit = re.split('-', name)
releasegroup = titlesplit[len(titlesplit)-1]
xtrainfo = self.find_info(name)
encoding = xtrainfo[0]
resolution = xtrainfo[1]
# source = xtrainfo[2]
pubdate = result['pubdate'] # .strip(" +0000")
pubdate = result['pubdate'] # .strip(' +0000')
try:
pubdate = datetime.strptime(pubdate, '%Y-%m-%d %H:%M:%S +0000')
now = datetime.utcnow()
age = (now - pubdate).days
except ValueError:
log.debug("RARBG: Bad pubdate")
log.debug('RARBG: Bad pubdate')
age = 0
torrentscore = self.conf('extra_score')
@ -170,6 +97,79 @@ class Base(TorrentMagnetProvider):
except RuntimeError:
log.error('RARBG: Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
def getToken(self):
tokendata = self.getJsonData(self.urls['token'], cache_timeout = 900, headers = self.getRequestHeaders())
if tokendata:
try:
token = tokendata['token']
if self._token != token:
log.debug('RARBG: GOT TOKEN: %s', token)
self._token = token
except:
log.error('RARBG: Failed getting token from Rarbg: %s', traceback.format_exc())
self._token = 0
def getRequestHeaders(self):
return {
'User-Agent': fireEvent('app.version', single = True)
}
@staticmethod
def find_info(filename):
# CODEC #
codec = 'x264'
v = re.search('(?i)(x265|h265|h\.265)', filename)
if v:
codec = 'x265'
v = re.search('(?i)(xvid)', filename)
if v:
codec = 'xvid'
# RESOLUTION #
resolution = 'SD'
a = re.search('(?i)(720p)', filename)
if a:
resolution = '720p'
a = re.search('(?i)(1080p)', filename)
if a:
resolution = '1080p'
a = re.search('(?i)(2160p)', filename)
if a:
resolution = '2160p'
# SOURCE #
source = 'HD-Rip'
s = re.search('(?i)(WEB-DL|WEB_DL|WEB\.DL)', filename)
if s:
source = 'WEB-DL'
s = re.search('(?i)(WEBRIP)', filename)
if s:
source = 'WEBRIP'
s = re.search('(?i)(DVDR|DVDRip|DVD-Rip)', filename)
if s:
source = 'DVD-R'
s = re.search('(?i)(BRRIP|BDRIP|BluRay)', filename)
if s:
source = 'BR-Rip'
s = re.search('(?i)BluRay(.*)REMUX', filename)
if s:
source = 'BluRay-Remux'
s = re.search('(?i)BluRay(.*)\.(AVC|VC-1)\.', filename)
if s:
source = 'BluRay-Full'
return_info = [codec, resolution, source]
return return_info
config = [{
'name': 'rarbg',
'groups': [
@ -209,7 +209,7 @@ config = [{
'type': 'int',
'default': 10,
'description': 'Minium amount of seeders the release must have.',
},
},
{
'name': 'min_leechers',
'advanced': True,
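
The rewritten provider above drops the module-level token globals in favour of an instance _token refreshed through getJsonData(..., cache_timeout = 900) plus per-request headers carrying the app version. As a rough standalone sketch of that caching pattern (the RarbgTokenCache class, the 900-second window and the exact token query string below are illustrative; only torrentapi.org and the couchpotato app_id appear in the diff):

    import time
    import requests

    # Placeholder token endpoint -- only torrentapi.org and app_id=couchpotato
    # are taken from the diff above; the exact query string is assumed.
    TOKEN_URL = 'https://torrentapi.org/pubapi_v2.php?get_token=get_token&app_id=couchpotato'

    class RarbgTokenCache(object):
        """Illustrative stand-in for getJsonData(..., cache_timeout = 900):
        fetch a token lazily and keep it for a 15-minute window."""

        def __init__(self, user_agent, ttl=900):
            self._headers = {'User-Agent': user_agent}
            self._ttl = ttl
            self._token = None
            self._fetched_at = 0

        def get(self):
            now = time.time()
            if not self._token or now - self._fetched_at > self._ttl:
                resp = requests.get(TOKEN_URL, headers=self._headers, timeout=10)
                resp.raise_for_status()
                self._token = resp.json().get('token')
                self._fetched_at = now
            return self._token

    # Usage: RarbgTokenCache('CouchPotato/1.0').get()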

libs/pyasn1/__init__.py (4)

@ -1,8 +1,8 @@
import sys
# http://www.python.org/dev/peps/pep-0396/
__version__ = '0.1.7'
__version__ = '0.1.8'
if sys.version_info[:2] < (2, 4):
raise RuntimeError('PyASN1 requires Python 2.4 or later')
raise RuntimeError('PyASN1 requires Python 2.4 or later')

libs/pyasn1/codec/ber/decoder.py (153)

@ -1,7 +1,7 @@
# BER decoder
from pyasn1.type import tag, base, univ, char, useful, tagmap
from pyasn1.type import tag, univ, char, useful, tagmap
from pyasn1.codec.ber import eoo
from pyasn1.compat.octets import oct2int, octs2ints, isOctetsType
from pyasn1.compat.octets import oct2int, isOctetsType
from pyasn1 import debug, error
class AbstractDecoder:
@ -11,14 +11,14 @@ class AbstractDecoder:
raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,))
def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
length, state, decodeFun, substrateFun):
length, state, decodeFun, substrateFun):
raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,))
class AbstractSimpleDecoder(AbstractDecoder):
tagFormats = (tag.tagFormatSimple,)
def _createComponent(self, asn1Spec, tagSet, value=None):
if tagSet[0][1] not in self.tagFormats:
raise error.PyAsn1Error('Invalid tag format %r for %r' % (tagSet[0], self.protoComponent,))
raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType()))
if asn1Spec is None:
return self.protoComponent.clone(value, tagSet)
elif value is None:
@ -30,17 +30,12 @@ class AbstractConstructedDecoder(AbstractDecoder):
tagFormats = (tag.tagFormatConstructed,)
def _createComponent(self, asn1Spec, tagSet, value=None):
if tagSet[0][1] not in self.tagFormats:
raise error.PyAsn1Error('Invalid tag format %r for %r' % (tagSet[0], self.protoComponent,))
raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType()))
if asn1Spec is None:
return self.protoComponent.clone(tagSet)
else:
return asn1Spec.clone()
class EndOfOctetsDecoder(AbstractSimpleDecoder):
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
length, state, decodeFun, substrateFun):
return eoo.endOfOctets, substrate[length:]
class ExplicitTagDecoder(AbstractSimpleDecoder):
protoComponent = univ.Any('')
tagFormats = (tag.tagFormatConstructed,)
@ -63,7 +58,7 @@ class ExplicitTagDecoder(AbstractSimpleDecoder):
substrate, length
)
value, substrate = decodeFun(substrate, asn1Spec, tagSet, length)
terminator, substrate = decodeFun(substrate)
terminator, substrate = decodeFun(substrate, allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(terminator) and \
terminator == eoo.endOfOctets:
return value, substrate
@ -129,14 +124,14 @@ class BitStringDecoder(AbstractSimpleDecoder):
'Trailing bits overflow %s' % trailingBits
)
head = head[1:]
lsb = p = 0; l = len(head)-1; b = ()
lsb = p = 0; l = len(head)-1; b = []
while p <= l:
if p == l:
lsb = trailingBits
j = 7
o = oct2int(head[p])
while j >= lsb:
b = b + ((o>>j)&0x01,)
b.append((o>>j)&0x01)
j = j - 1
p = p + 1
return self._createComponent(asn1Spec, tagSet, b), tail
@ -144,7 +139,7 @@ class BitStringDecoder(AbstractSimpleDecoder):
if substrateFun:
return substrateFun(r, substrate, length)
while head:
component, head = decodeFun(head)
component, head = decodeFun(head, self.protoComponent)
r = r + component
return r, tail
@ -154,7 +149,8 @@ class BitStringDecoder(AbstractSimpleDecoder):
if substrateFun:
return substrateFun(r, substrate, length)
while substrate:
component, substrate = decodeFun(substrate)
component, substrate = decodeFun(substrate, self.protoComponent,
allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(component) and \
component == eoo.endOfOctets:
break
@ -177,7 +173,7 @@ class OctetStringDecoder(AbstractSimpleDecoder):
if substrateFun:
return substrateFun(r, substrate, length)
while head:
component, head = decodeFun(head)
component, head = decodeFun(head, self.protoComponent)
r = r + component
return r, tail
@ -187,7 +183,8 @@ class OctetStringDecoder(AbstractSimpleDecoder):
if substrateFun:
return substrateFun(r, substrate, length)
while substrate:
component, substrate = decodeFun(substrate)
component, substrate = decodeFun(substrate, self.protoComponent,
allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(component) and \
component == eoo.endOfOctets:
break
@ -216,20 +213,14 @@ class ObjectIdentifierDecoder(AbstractSimpleDecoder):
if not head:
raise error.PyAsn1Error('Empty substrate')
# Get the first subid
subId = oct2int(head[0])
oid = divmod(subId, 40)
index = 1
oid = ()
index = 0
substrateLen = len(head)
while index < substrateLen:
subId = oct2int(head[index])
index = index + 1
if subId == 128:
# ASN.1 spec forbids leading zeros (0x80) in sub-ID OID
# encoding, tolerating it opens a vulnerability.
# See http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf page 7
raise error.PyAsn1Error('Invalid leading 0x80 in sub-OID')
index += 1
if subId < 128:
oid = oid + (subId,)
elif subId > 128:
# Construct subid from a number of octets
nextSubId = subId
@ -239,11 +230,27 @@ class ObjectIdentifierDecoder(AbstractSimpleDecoder):
if index >= substrateLen:
raise error.SubstrateUnderrunError(
'Short substrate for sub-OID past %s' % (oid,)
)
)
nextSubId = oct2int(head[index])
index = index + 1
subId = (subId << 7) + nextSubId
oid = oid + (subId,)
index += 1
oid = oid + ((subId << 7) + nextSubId,)
elif subId == 128:
# ASN.1 spec forbids leading zeros (0x80) in OID
# encoding, tolerating it opens a vulnerability. See
# http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf
# page 7
raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding')
# Decode two leading arcs
if 0 <= oid[0] <= 39:
oid = (0,) + oid
elif 40 <= oid[0] <= 79:
oid = (1, oid[0]-40) + oid[1:]
elif oid[0] >= 80:
oid = (2, oid[0]-80) + oid[1:]
else:
raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0])
return self._createComponent(asn1Spec, tagSet, oid), tail
class RealDecoder(AbstractSimpleDecoder):
@ -254,10 +261,13 @@ class RealDecoder(AbstractSimpleDecoder):
if not head:
return self._createComponent(asn1Spec, tagSet, 0.0), tail
fo = oct2int(head[0]); head = head[1:]
if fo & 0x80: # binary enoding
if fo & 0x80: # binary encoding
if not head:
raise error.PyAsn1Error("Incomplete floating-point value")
n = (fo & 0x03) + 1
if n == 4:
n = oct2int(head[0])
head = head[1:]
eo, head = head[:n], head[n:]
if not eo or not head:
raise error.PyAsn1Error('Real exponent screwed')
@ -266,6 +276,13 @@ class RealDecoder(AbstractSimpleDecoder):
e <<= 8
e |= oct2int(eo[0])
eo = eo[1:]
b = fo >> 4 & 0x03 # base bits
if b > 2:
raise error.PyAsn1Error('Illegal Real base')
if b == 1: # encbase = 8
e *= 3
elif b == 2: # encbase = 16
e *= 4
p = 0
while head: # value
p <<= 8
@ -273,10 +290,14 @@ class RealDecoder(AbstractSimpleDecoder):
head = head[1:]
if fo & 0x40: # sign bit
p = -p
sf = fo >> 2 & 0x03 # scale bits
p *= 2**sf
value = (p, 2, e)
elif fo & 0x40: # infinite value
value = fo & 0x01 and '-inf' or 'inf'
elif fo & 0xc0 == 0: # character encoding
if not head:
raise error.PyAsn1Error("Incomplete floating-point value")
try:
if fo & 0x3 == 0x1: # NR1
value = (int(head), 10, 0)
@ -336,7 +357,7 @@ class SequenceDecoder(AbstractConstructedDecoder):
idx = 0
while substrate:
asn1Spec = self._getComponentTagMap(r, idx)
component, substrate = decodeFun(substrate, asn1Spec)
component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(component) and \
component == eoo.endOfOctets:
break
@ -378,7 +399,7 @@ class SequenceOfDecoder(AbstractConstructedDecoder):
asn1Spec = r.getComponentType()
idx = 0
while substrate:
component, substrate = decodeFun(substrate, asn1Spec)
component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(component) and \
component == eoo.endOfOctets:
break
@ -437,7 +458,8 @@ class ChoiceDecoder(AbstractConstructedDecoder):
return substrateFun(r, substrate, length)
if r.getTagSet() == tagSet: # explicitly tagged Choice
component, substrate = decodeFun(substrate, r.getComponentTagMap())
eooMarker, substrate = decodeFun(substrate) # eat up EOO marker
# eat up EOO marker
eooMarker, substrate = decodeFun(substrate, allowEoo=True)
if not eoo.endOfOctets.isSameTypeWith(eooMarker) or \
eooMarker != eoo.endOfOctets:
raise error.PyAsn1Error('No EOO seen before substrate ends')
@ -485,7 +507,7 @@ class AnyDecoder(AbstractSimpleDecoder):
if substrateFun:
return substrateFun(r, substrate, length)
while substrate:
component, substrate = decodeFun(substrate, asn1Spec)
component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(component) and \
component == eoo.endOfOctets:
break
@ -521,13 +543,14 @@ class BMPStringDecoder(OctetStringDecoder):
protoComponent = char.BMPString()
# "useful" types
class ObjectDescriptorDecoder(OctetStringDecoder):
protoComponent = useful.ObjectDescriptor()
class GeneralizedTimeDecoder(OctetStringDecoder):
protoComponent = useful.GeneralizedTime()
class UTCTimeDecoder(OctetStringDecoder):
protoComponent = useful.UTCTime()
tagMap = {
eoo.endOfOctets.tagSet: EndOfOctetsDecoder(),
univ.Integer.tagSet: IntegerDecoder(),
univ.Boolean.tagSet: BooleanDecoder(),
univ.BitString.tagSet: BitStringDecoder(),
@ -552,9 +575,10 @@ tagMap = {
char.UniversalString.tagSet: UniversalStringDecoder(),
char.BMPString.tagSet: BMPStringDecoder(),
# useful types
useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(),
useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(),
useful.UTCTime.tagSet: UTCTimeDecoder()
}
}
# Type-to-codec map for ambiguous ASN.1 types
typeMap = {
@ -564,7 +588,7 @@ typeMap = {
univ.SequenceOf.typeId: SequenceOfDecoder(),
univ.Choice.typeId: ChoiceDecoder(),
univ.Any.typeId: AnyDecoder()
}
}
( stDecodeTag, stDecodeLength, stGetValueDecoder, stGetValueDecoderByAsn1Spec,
stGetValueDecoderByTag, stTryAsExplicitTag, stDecodeValue,
@ -574,23 +598,22 @@ class Decoder:
defaultErrorState = stErrorCondition
# defaultErrorState = stDumpRawValue
defaultRawDecoder = AnyDecoder()
supportIndefLength = True
def __init__(self, tagMap, typeMap={}):
self.__tagMap = tagMap
self.__typeMap = typeMap
self.__endOfOctetsTagSet = eoo.endOfOctets.getTagSet()
# Tag & TagSet objects caches
self.__tagCache = {}
self.__tagSetCache = {}
def __call__(self, substrate, asn1Spec=None, tagSet=None,
length=None, state=stDecodeTag, recursiveFlag=1,
substrateFun=None):
substrateFun=None, allowEoo=False):
if debug.logger & debug.flagDecoder:
debug.logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate)))
fullSubstrate = substrate
while state != stStop:
if state == stDecodeTag:
# Decode tag
if not substrate:
raise error.SubstrateUnderrunError(
'Short octet stream on tag decoding'
@ -598,13 +621,25 @@ class Decoder:
if not isOctetsType(substrate) and \
not isinstance(substrate, univ.OctetString):
raise error.PyAsn1Error('Bad octet stream type')
# Decode tag
firstOctet = substrate[0]
substrate = substrate[1:]
if firstOctet in self.__tagCache:
lastTag = self.__tagCache[firstOctet]
else:
t = oct2int(firstOctet)
# Look for end-of-octets sentinel
if t == 0:
if substrate and oct2int(substrate[0]) == 0:
if allowEoo and self.supportIndefLength:
debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets sentinel found')
value, substrate = eoo.endOfOctets, substrate[1:]
state = stStop
continue
else:
raise error.PyAsn1Error('Unexpected end-of-contents sentinel')
else:
raise error.PyAsn1Error('Zero tag encountered')
tagClass = t&0xC0
tagFormat = t&0x20
tagId = t&0x1F
@ -622,7 +657,7 @@ class Decoder:
break
lastTag = tag.Tag(
tagClass=tagClass, tagFormat=tagFormat, tagId=tagId
)
)
if tagId < 31:
# cache short tags
self.__tagCache[firstOctet] = lastTag
@ -637,13 +672,13 @@ class Decoder:
else:
tagSet = lastTag + tagSet
state = stDecodeLength
debug.logger and debug.logger & debug.flagDecoder and debug.logger('tag decoded into %r, decoding length' % tagSet)
debug.logger and debug.logger & debug.flagDecoder and debug.logger('tag decoded into %s, decoding length' % tagSet)
if state == stDecodeLength:
# Decode length
if not substrate:
raise error.SubstrateUnderrunError(
'Short octet stream on length decoding'
)
raise error.SubstrateUnderrunError(
'Short octet stream on length decoding'
)
firstOctet = oct2int(substrate[0])
if firstOctet == 128:
size = 1
@ -670,6 +705,8 @@ class Decoder:
raise error.SubstrateUnderrunError(
'%d-octet short' % (length - len(substrate))
)
if length == -1 and not self.supportIndefLength:
error.PyAsn1Error('Indefinite length encoding not supported by this codec')
state = stGetValueDecoder
debug.logger and debug.logger & debug.flagDecoder and debug.logger('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])))
if state == stGetValueDecoder:
@ -722,12 +759,12 @@ class Decoder:
if debug.logger and debug.logger & debug.flagDecoder:
debug.logger('candidate ASN.1 spec is a map of:')
for t, v in asn1Spec.getPosMap().items():
debug.logger(' %r -> %s' % (t, v.__class__.__name__))
debug.logger(' %s -> %s' % (t, v.__class__.__name__))
if asn1Spec.getNegMap():
debug.logger('but neither of: ')
for i in asn1Spec.getNegMap().items():
debug.logger(' %r -> %s' % (t, v.__class__.__name__))
debug.logger('new candidate ASN.1 spec is %s, chosen by %r' % (__chosenSpec is None and '<none>' or __chosenSpec.__class__.__name__, tagSet))
for t, v in asn1Spec.getNegMap().items():
debug.logger(' %s -> %s' % (t, v.__class__.__name__))
debug.logger('new candidate ASN.1 spec is %s, chosen by %s' % (__chosenSpec is None and '<none>' or __chosenSpec.prettyPrintType(), tagSet))
else:
__chosenSpec = asn1Spec
debug.logger and debug.logger & debug.flagDecoder and debug.logger('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__)
@ -745,7 +782,7 @@ class Decoder:
elif baseTagSet in self.__tagMap:
# base type or tagged subtype
concreteDecoder = self.__tagMap[baseTagSet]
debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %r' % (baseTagSet,))
debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %s' % (baseTagSet,))
else:
concreteDecoder = None
if concreteDecoder:
@ -753,10 +790,6 @@ class Decoder:
state = stDecodeValue
else:
state = stTryAsExplicitTag
elif tagSet == self.__endOfOctetsTagSet:
concreteDecoder = self.__tagMap[tagSet]
state = stDecodeValue
debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets found')
else:
concreteDecoder = None
state = stTryAsExplicitTag
@ -795,8 +828,8 @@ class Decoder:
debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, value.prettyPrint(), substrate and debug.hexdump(substrate) or '<none>'))
if state == stErrorCondition:
raise error.PyAsn1Error(
'%r not in asn1Spec: %r' % (tagSet, asn1Spec)
)
'%s not in asn1Spec: %s' % (tagSet, asn1Spec)
)
if debug.logger and debug.logger & debug.flagDecoder:
debug.scope.pop()
debug.logger('decoder left scope %s, call completed' % debug.scope)
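
Much of the decoder churn above replaces the dedicated EndOfOctetsDecoder with an allowEoo flag that is passed only where the BER grammar actually permits an end-of-octets sentinel, plus a supportIndefLength switch. A minimal round trip through the indefinite-length path, assuming the pyasn1 BER codec bundled here:

    from pyasn1.type import univ
    from pyasn1.codec.ber import encoder, decoder

    # Encode an OCTET STRING in indefinite-length form: 4-byte chunks
    # terminated by the two-zero end-of-octets sentinel.
    substrate = encoder.encode(univ.OctetString('Quick brown fox'),
                               defMode=False, maxChunkSize=4)

    # The decoder only honours the 0x00 0x00 sentinel where allowEoo is
    # passed internally; a stray sentinel elsewhere now raises PyAsn1Error.
    value, rest = decoder.decode(substrate)
    print(value, rest)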

libs/pyasn1/codec/ber/encoder.py (168)

@ -114,13 +114,17 @@ class IntegerEncoder(AbstractItemEncoder):
class BitStringEncoder(AbstractItemEncoder):
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
if not maxChunkSize or len(value) <= maxChunkSize*8:
r = {}; l = len(value); p = 0; j = 7
while p < l:
i, j = divmod(p, 8)
r[i] = r.get(i,0) | value[p]<<(7-j)
p = p + 1
keys = list(r); keys.sort()
return int2oct(7-j) + ints2octs([r[k] for k in keys]), 0
out_len = (len(value) + 7) // 8
out_list = out_len * [0]
j = 7
i = -1
for val in value:
j += 1
if j == 8:
i += 1
j = 0
out_list[i] = out_list[i] | val << (7-j)
return int2oct(7-j) + ints2octs(out_list), 0
else:
pos = 0; substrate = null
while 1:
@ -156,47 +160,98 @@ class ObjectIdentifierEncoder(AbstractItemEncoder):
precomputedValues = {
(1, 3, 6, 1, 2): (43, 6, 1, 2),
(1, 3, 6, 1, 4): (43, 6, 1, 4)
}
}
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
oid = value.asTuple()
if oid[:5] in self.precomputedValues:
octets = self.precomputedValues[oid[:5]]
index = 5
oid = oid[5:]
else:
if len(oid) < 2:
raise error.PyAsn1Error('Short OID %s' % (value,))
octets = ()
# Build the first twos
if oid[0] > 6 or oid[1] > 39 or oid[0] == 6 and oid[1] > 15:
if oid[0] == 0 and 0 <= oid[1] <= 39:
oid = (oid[1],) + oid[2:]
elif oid[0] == 1 and 0 <= oid[1] <= 39:
oid = (oid[1] + 40,) + oid[2:]
elif oid[0] == 2:
oid = (oid[1] + 80,) + oid[2:]
else:
raise error.PyAsn1Error(
'Initial sub-ID overflow %s in OID %s' % (oid[:2], value)
'Impossible initial arcs %s at %s' % (oid[:2], value)
)
octets = (oid[0] * 40 + oid[1],)
index = 2
# Cycle through subids
for subid in oid[index:]:
if subid > -1 and subid < 128:
# Cycle through subIds
for subId in oid:
if subId > -1 and subId < 128:
# Optimize for the common case
octets = octets + (subid & 0x7f,)
elif subid < 0 or subid > 0xFFFFFFFF:
octets = octets + (subId & 0x7f,)
elif subId < 0:
raise error.PyAsn1Error(
'SubId overflow %s in %s' % (subid, value)
)
'Negative OID arc %s at %s' % (subId, value)
)
else:
# Pack large Sub-Object IDs
res = (subid & 0x7f,)
subid = subid >> 7
while subid > 0:
res = (0x80 | (subid & 0x7f),) + res
subid = subid >> 7
res = (subId & 0x7f,)
subId = subId >> 7
while subId > 0:
res = (0x80 | (subId & 0x7f),) + res
subId = subId >> 7
# Add packed Sub-Object ID to resulted Object ID
octets += res
return ints2octs(octets), 0
class RealEncoder(AbstractItemEncoder):
supportIndefLenMode = 0
binEncBase = 2 # set to None to choose encoding base automatically
def _dropFloatingPoint(self, m, encbase, e):
ms, es = 1, 1
if m < 0:
ms = -1 # mantissa sign
if e < 0:
es = -1 # exponenta sign
m *= ms
if encbase == 8:
m = m*2**(abs(e) % 3 * es)
e = abs(e) // 3 * es
elif encbase == 16:
m = m*2**(abs(e) % 4 * es)
e = abs(e) // 4 * es
while 1:
if int(m) != m:
m *= encbase
e -= 1
continue
break
return ms, int(m), encbase, e
def _chooseEncBase(self, value):
m, b, e = value
base = [2, 8, 16]
if value.binEncBase in base:
return self._dropFloatingPoint(m, value.binEncBase, e)
elif self.binEncBase in base:
return self._dropFloatingPoint(m, self.binEncBase, e)
# auto choosing base 2/8/16
mantissa = [m, m, m]
exponenta = [e, e, e]
encbase = 2
e = float('inf')
for i in range(3):
sign, mantissa[i], base[i], exponenta[i] = \
self._dropFloatingPoint(mantissa[i], base[i], exponenta[i])
if abs(exponenta[i]) < abs(e) or \
(abs(exponenta[i]) == abs(e) and mantissa[i] < m):
e = exponenta[i]
m = int(mantissa[i])
encbase = base[i]
return sign, m, encbase, e
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
if value.isPlusInfinity():
return int2oct(0x40), 0
@ -208,22 +263,43 @@ class RealEncoder(AbstractItemEncoder):
if b == 10:
return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), 0
elif b == 2:
fo = 0x80 # binary enoding
if m < 0:
fo = fo | 0x40 # sign bit
m = -m
while int(m) != m: # drop floating point
m *= 2
e -= 1
while m & 0x1 == 0: # mantissa normalization
fo = 0x80 # binary encoding
ms, m, encbase, e = self._chooseEncBase(value)
if ms < 0: # mantissa sign
fo = fo | 0x40 # sign bit
# exponenta & mantissa normalization
if encbase == 2:
while m & 0x1 == 0:
m >>= 1
e += 1
elif encbase == 8:
while m & 0x7 == 0:
m >>= 3
e += 1
fo |= 0x10
else: # encbase = 16
while m & 0xf == 0:
m >>= 4
e += 1
fo |= 0x20
sf = 0 # scale factor
while m & 0x1 == 0:
m >>= 1
e += 1
sf += 1
if sf > 3:
raise error.PyAsn1Error('Scale factor overflow') # bug if raised
fo |= sf << 2
eo = null
while e not in (0, -1):
eo = int2oct(e&0xff) + eo
e >>= 8
if e == 0 and eo and oct2int(eo[0]) & 0x80:
eo = int2oct(0) + eo
if e == 0 or e == -1:
eo = int2oct(e&0xff)
else:
while e not in (0, -1):
eo = int2oct(e&0xff) + eo
e >>= 8
if e == 0 and eo and oct2int(eo[0]) & 0x80:
eo = int2oct(0) + eo
if e == -1 and eo and not (oct2int(eo[0]) & 0x80):
eo = int2oct(0xff) + eo
n = len(eo)
if n > 0xff:
raise error.PyAsn1Error('Real exponent overflow')
@ -235,7 +311,7 @@ class RealEncoder(AbstractItemEncoder):
fo |= 2
else:
fo |= 3
eo = int2oct(n//0xff+1) + eo
eo = int2oct(n&0xff) + eo
po = null
while m:
po = int2oct(m&0xff) + po
@ -308,6 +384,7 @@ tagMap = {
char.UniversalString.tagSet: OctetStringEncoder(),
char.BMPString.tagSet: OctetStringEncoder(),
# useful types
useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
useful.GeneralizedTime.tagSet: OctetStringEncoder(),
useful.UTCTime.tagSet: OctetStringEncoder()
}
@ -323,12 +400,15 @@ typeMap = {
}
class Encoder:
supportIndefLength = True
def __init__(self, tagMap, typeMap={}):
self.__tagMap = tagMap
self.__typeMap = typeMap
def __call__(self, value, defMode=1, maxChunkSize=0):
debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.__class__.__name__, value.prettyPrint()))
def __call__(self, value, defMode=True, maxChunkSize=0):
if not defMode and not self.supportIndefLength:
raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.prettyPrintType(), value.prettyPrint()))
tagSet = value.getTagSet()
if len(tagSet) > 1:
concreteEncoder = explicitlyTaggedItemEncoder
@ -343,7 +423,7 @@ class Encoder:
concreteEncoder = self.__tagMap[tagSet]
else:
raise Error('No encoder for %s' % (value,))
debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %r' % (concreteEncoder.__class__.__name__, tagSet))
debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet))
substrate = concreteEncoder.encode(
self, value, defMode, maxChunkSize
)
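
The ObjectIdentifierEncoder rewrite above normalizes the two leading arcs (0.x stays x, 1.x becomes x+40, 2.x becomes x+80) before packing every sub-ID base-128 with continuation bits. A self-contained sketch of that packing, independent of pyasn1's own classes:

    import binascii

    def encode_oid_arcs(arcs):
        """Sketch of the arc packing done by ObjectIdentifierEncoder: fold the
        two leading arcs into one sub-ID, then emit each sub-ID base-128 with
        the high bit set on every octet except the last."""
        first, second = arcs[0], arcs[1]
        if first > 2 or (first < 2 and not 0 <= second <= 39):
            raise ValueError('Impossible initial arcs %s' % (arcs[:2],))
        subids = (first * 40 + second,) + tuple(arcs[2:])
        octets = []
        for subid in subids:
            chunk = [subid & 0x7f]
            subid >>= 7
            while subid:
                chunk.insert(0, 0x80 | (subid & 0x7f))
                subid >>= 7
            octets.extend(chunk)
        return bytes(bytearray(octets))

    print(binascii.hexlify(encode_oid_arcs((1, 3, 6, 1, 2, 1))))   # 2b06010201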

libs/pyasn1/codec/cer/encoder.py (61)

@ -1,7 +1,9 @@
# CER encoder
from pyasn1.type import univ
from pyasn1.type import useful
from pyasn1.codec.ber import encoder
from pyasn1.compat.octets import int2oct, null
from pyasn1.compat.octets import int2oct, str2octs, null
from pyasn1 import error
class BooleanEncoder(encoder.IntegerEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
@ -15,18 +17,56 @@ class BitStringEncoder(encoder.BitStringEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
return encoder.BitStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
)
class OctetStringEncoder(encoder.OctetStringEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
)
class RealEncoder(encoder.RealEncoder):
def _chooseEncBase(self, value):
m, b, e = value
return self._dropFloatingPoint(m, b, e)
# specialized RealEncoder here
# specialized GeneralStringEncoder here
# specialized GeneralizedTimeEncoder here
# specialized UTCTimeEncoder here
class GeneralizedTimeEncoder(OctetStringEncoder):
zchar = str2octs('Z')
pluschar = str2octs('+')
minuschar = str2octs('-')
zero = str2octs('0')
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
octets = client.asOctets()
# This breaks too many existing data items
# if '.' not in octets:
# raise error.PyAsn1Error('Format must include fraction of second: %r' % octets)
if len(octets) < 15:
raise error.PyAsn1Error('Bad UTC time length: %r' % octets)
if self.pluschar in octets or self.minuschar in octets:
raise error.PyAsn1Error('Must be UTC time: %r' % octets)
if octets[-1] != self.zchar[0]:
raise error.PyAsn1Error('Missing timezone specifier: %r' % octets)
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class UTCTimeEncoder(encoder.OctetStringEncoder):
zchar = str2octs('Z')
pluschar = str2octs('+')
minuschar = str2octs('-')
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
octets = client.asOctets()
if self.pluschar in octets or self.minuschar in octets:
raise error.PyAsn1Error('Must be UTC time: %r' % octets)
if octets and octets[-1] != self.zchar[0]:
client = client.clone(octets + self.zchar)
if len(client) != 13:
raise error.PyAsn1Error('Bad UTC time length: %r' % client)
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class SetOfEncoder(encoder.SequenceOfEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
@ -69,17 +109,20 @@ tagMap.update({
univ.Boolean.tagSet: BooleanEncoder(),
univ.BitString.tagSet: BitStringEncoder(),
univ.OctetString.tagSet: OctetStringEncoder(),
univ.Real.tagSet: RealEncoder(),
useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
useful.UTCTime.tagSet: UTCTimeEncoder(),
univ.SetOf().tagSet: SetOfEncoder() # conflcts with Set
})
})
typeMap = encoder.typeMap.copy()
typeMap.update({
univ.Set.typeId: SetOfEncoder(),
univ.SetOf.typeId: SetOfEncoder()
})
})
class Encoder(encoder.Encoder):
def __call__(self, client, defMode=0, maxChunkSize=0):
def __call__(self, client, defMode=False, maxChunkSize=0):
return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
encode = Encoder(tagMap, typeMap)
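
The new CER time encoders reject values that are not plain UTC strings. A small sketch of how that surfaces to a caller, assuming the bundled pyasn1 CER codec; the timestamps are arbitrary examples:

    from pyasn1.type import useful
    from pyasn1.codec.cer import encoder as cer_encoder
    from pyasn1 import error

    # 15 characters, ends in 'Z', no local-time offset: passes the new checks.
    print(cer_encoder.encode(useful.GeneralizedTime('20150501120000Z')))

    # An explicit '+hhmm' offset is rejected outright.
    try:
        cer_encoder.encode(useful.GeneralizedTime('20150501120000+0200'))
    except error.PyAsn1Error as exc:
        print(exc)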

libs/pyasn1/codec/der/decoder.py (4)

@ -1,9 +1,9 @@
# DER decoder
from pyasn1.type import univ
from pyasn1.codec.cer import decoder
tagMap = decoder.tagMap
typeMap = decoder.typeMap
Decoder = decoder.Decoder
class Decoder(decoder.Decoder):
supportIndefLength = False
decode = Decoder(tagMap, typeMap)

libs/pyasn1/codec/der/encoder.py (12)

@ -1,6 +1,7 @@
# DER encoder
from pyasn1.type import univ
from pyasn1.codec.cer import encoder
from pyasn1 import error
class SetOfEncoder(encoder.SetOfEncoder):
def _cmpSetComponents(self, c1, c2):
@ -12,17 +13,20 @@ class SetOfEncoder(encoder.SetOfEncoder):
tagMap = encoder.tagMap.copy()
tagMap.update({
# Overload CER encodrs with BER ones (a bit hackerish XXX)
# Overload CER encoders with BER ones (a bit hackerish XXX)
univ.BitString.tagSet: encoder.encoder.BitStringEncoder(),
univ.OctetString.tagSet: encoder.encoder.OctetStringEncoder(),
# Set & SetOf have same tags
univ.SetOf().tagSet: SetOfEncoder()
})
})
typeMap = encoder.typeMap
class Encoder(encoder.Encoder):
def __call__(self, client, defMode=1, maxChunkSize=0):
supportIndefLength = False
def __call__(self, client, defMode=True, maxChunkSize=0):
if not defMode:
raise error.PyAsn1Error('DER forbids indefinite length mode')
return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
encode = Encoder(tagMap, typeMap)
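
With supportIndefLength now a per-codec flag, the DER encoder keeps its definite-length-only behaviour explicit. A short sketch, assuming the bundled pyasn1 DER codec:

    from pyasn1.type import univ
    from pyasn1.codec.der import encoder as der_encoder
    from pyasn1 import error

    value = univ.OctetString('xyz')

    # DER output is always definite-length...
    print(der_encoder.encode(value))

    # ...so asking for indefinite-length mode is rejected up front.
    try:
        der_encoder.encode(value, defMode=False)
    except error.PyAsn1Error as exc:
        print(exc)   # DER forbids indefinite length mode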

libs/pyasn1/compat/binary.py (10)

@ -0,0 +1,10 @@
from sys import version_info
if version_info[0:2] < (2, 6):
def bin(x):
if x <= 1:
return '0b'+str(x)
else:
return bin(x>>1) + str(x&1)
else:
bin = bin

libs/pyasn1/compat/iterfunc.py (10)

@ -0,0 +1,10 @@
from sys import version_info
if version_info[0] <= 2 and version_info[1] <= 4:
def all(iterable):
for element in iterable:
if not element:
return False
return True
else:
all = all

libs/pyasn1/debug.py (65)

@ -1,4 +1,5 @@
import sys
import time
import logging
from pyasn1.compat.octets import octs2ints
from pyasn1 import error
from pyasn1 import __version__
@ -14,23 +15,67 @@ flagMap = {
'all': flagAll
}
class Printer:
def __init__(self, logger=None, handler=None, formatter=None):
if logger is None:
logger = logging.getLogger('pyasn1')
logger.setLevel(logging.DEBUG)
if handler is None:
handler = logging.StreamHandler()
if formatter is None:
formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
self.__logger = logger
def __call__(self, msg): self.__logger.debug(msg)
def __str__(self): return '<python built-in logging>'
if hasattr(logging, 'NullHandler'):
NullHandler = logging.NullHandler
else:
# Python 2.6 and older
class NullHandler(logging.Handler):
def emit(self, record):
pass
class Debug:
defaultPrinter = sys.stderr.write
def __init__(self, *flags):
defaultPrinter = None
def __init__(self, *flags, **options):
self._flags = flagNone
self._printer = self.defaultPrinter
if options.get('printer') is not None:
self._printer = options.get('printer')
elif self.defaultPrinter is not None:
self._printer = self.defaultPrinter
if 'loggerName' in options:
# route our logs to parent logger
self._printer = Printer(
logger=logging.getLogger(options['loggerName']),
handler=NullHandler()
)
else:
self._printer = Printer()
self('running pyasn1 version %s' % __version__)
for f in flags:
if f not in flagMap:
raise error.PyAsn1Error('bad debug flag %s' % (f,))
self._flags = self._flags | flagMap[f]
self('debug category \'%s\' enabled' % f)
inverse = f and f[0] in ('!', '~')
if inverse:
f = f[1:]
try:
if inverse:
self._flags &= ~flagMap[f]
else:
self._flags |= flagMap[f]
except KeyError:
raise error.PyAsn1Error('bad debug flag %s' % f)
self('debug category \'%s\' %s' % (f, inverse and 'disabled' or 'enabled'))
def __str__(self):
return 'logger %s, flags %x' % (self._printer, self._flags)
def __call__(self, msg):
self._printer('DBG: %s\n' % msg)
self._printer(msg)
def __and__(self, flag):
return self._flags & flag
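
The debug module now routes its trace output through the standard logging package and understands '!flag'/'~flag' for switching categories back off. A hedged sketch of wiring that up; it assumes pyasn1's module-level setLogger() hook (not shown in the excerpt above) and uses a made-up logger name:

    import logging
    from pyasn1 import debug

    logging.basicConfig(level=logging.DEBUG)

    # Route pyasn1 tracing into an application logger ('myapp.pyasn1' is a
    # made-up name); '!encoder' shows the new inverse-flag syntax.
    debug.setLogger(debug.Debug('all', '!encoder', loggerName='myapp.pyasn1'))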

libs/pyasn1/type/base.py (78)

@ -1,13 +1,13 @@
# Base classes for ASN.1 types
import sys
from pyasn1.type import constraint, tagmap
from pyasn1.type import constraint, tagmap, tag
from pyasn1 import error
class Asn1Item: pass
class Asn1ItemBase(Asn1Item):
# Set of tags for this ASN.1 type
tagSet = ()
tagSet = tag.TagSet()
# A list of constraint.Constraint instances for checking values
subtypeSpec = constraint.ConstraintsIntersection()
@ -38,22 +38,28 @@ class Asn1ItemBase(Asn1Item):
def getEffectiveTagSet(self): return self._tagSet # used by untagged types
def getTagMap(self): return tagmap.TagMap({self._tagSet: self})
def isSameTypeWith(self, other):
def isSameTypeWith(self, other, matchTags=True, matchConstraints=True):
return self is other or \
self._tagSet == other.getTagSet() and \
self._subtypeSpec == other.getSubtypeSpec()
def isSuperTypeOf(self, other):
(not matchTags or \
self._tagSet == other.getTagSet()) and \
(not matchConstraints or \
self._subtypeSpec==other.getSubtypeSpec())
def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True):
"""Returns true if argument is a ASN1 subtype of ourselves"""
return self._tagSet.isSuperTagSetOf(other.getTagSet()) and \
self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec())
return (not matchTags or \
self._tagSet.isSuperTagSetOf(other.getTagSet())) and \
(not matchConstraints or \
(self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec())))
class __NoValue:
class NoValue:
def __getattr__(self, attr):
raise error.PyAsn1Error('No value for %s()' % attr)
def __getitem__(self, i):
raise error.PyAsn1Error('No value')
def __repr__(self): return '%s()' % self.__class__.__name__
noValue = __NoValue()
noValue = NoValue()
# Base class for "simple" ASN.1 objects. These are immutable.
class AbstractSimpleAsn1Item(Asn1ItemBase):
@ -72,10 +78,15 @@ class AbstractSimpleAsn1Item(Asn1ItemBase):
self._len = None
def __repr__(self):
if self._value is noValue:
return self.__class__.__name__ + '()'
else:
return self.__class__.__name__ + '(%s)' % (self.prettyOut(self._value),)
r = []
if self._value is not self.defaultValue:
r.append(self.prettyOut(self._value))
if self._tagSet is not self.tagSet:
r.append('tagSet=%r' % (self._tagSet,))
if self._subtypeSpec is not self.subtypeSpec:
r.append('subtypeSpec=%r' % (self._subtypeSpec,))
return '%s(%s)' % (self.__class__.__name__, ', '.join(r))
def __str__(self): return str(self._value)
def __eq__(self, other):
return self is other and True or self._value == other
@ -90,6 +101,9 @@ class AbstractSimpleAsn1Item(Asn1ItemBase):
def __bool__(self): return bool(self._value)
def __hash__(self): return self.__hashedValue
def hasValue(self):
return not isinstance(self._value, NoValue)
def clone(self, value=None, tagSet=None, subtypeSpec=None):
if value is None and tagSet is None and subtypeSpec is None:
return self
@ -121,14 +135,17 @@ class AbstractSimpleAsn1Item(Asn1ItemBase):
def prettyOut(self, value): return str(value)
def prettyPrint(self, scope=0):
if self._value is noValue:
return '<no value>'
else:
if self.hasValue():
return self.prettyOut(self._value)
else:
return '<no value>'
# XXX Compatibility stub
def prettyPrinter(self, scope=0): return self.prettyPrint(scope)
def prettyPrintType(self, scope=0):
return '%s -> %s' % (self.getTagSet(), self.__class__.__name__)
#
# Constructed types:
# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice
@ -166,13 +183,16 @@ class AbstractConstructedAsn1Item(Asn1ItemBase):
self._componentValuesSet = 0
def __repr__(self):
r = self.__class__.__name__ + '()'
for idx in range(len(self._componentValues)):
if self._componentValues[idx] is None:
continue
r = r + '.setComponentByPosition(%s, %r)' % (
idx, self._componentValues[idx]
)
r = []
if self._componentType is not self.componentType:
r.append('componentType=%r' % (self._componentType,))
if self._tagSet is not self.tagSet:
r.append('tagSet=%r' % (self._tagSet,))
if self._subtypeSpec is not self.subtypeSpec:
r.append('subtypeSpec=%r' % (self._subtypeSpec,))
r = '%s(%s)' % (self.__class__.__name__, ', '.join(r))
if self._componentValues:
r += '.setComponents(%s)' % ', '.join([repr(x) for x in self._componentValues])
return r
def __eq__(self, other):
@ -235,8 +255,17 @@ class AbstractConstructedAsn1Item(Asn1ItemBase):
def setComponentByPosition(self, idx, value, verifyConstraints=True):
raise error.PyAsn1Error('Method not implemented')
def setComponents(self, *args, **kwargs):
for idx in range(len(args)):
self[idx] = args[idx]
for k in kwargs:
self[k] = kwargs[k]
return self
def getComponentType(self): return self._componentType
def setDefaultComponents(self): pass
def __getitem__(self, idx): return self.getComponentByPosition(idx)
def __setitem__(self, idx, value): self.setComponentByPosition(idx, value)
@ -246,4 +275,3 @@ class AbstractConstructedAsn1Item(Asn1ItemBase):
self._componentValues = []
self._componentValuesSet = 0
def setDefaultComponents(self): pass
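
isSameTypeWith() and isSuperTypeOf() above gain matchTags/matchConstraints switches, and hasValue() distinguishes schema objects from initialized values. A brief illustration against the bundled pyasn1 types (Python 3 prints):

    from pyasn1.type import univ, constraint

    bounded = univ.Integer(5, subtypeSpec=constraint.ValueRangeConstraint(0, 10))

    # Same tag, different constraints: the keyword arguments pick which half
    # of the comparison matters.
    print(univ.Integer().isSameTypeWith(bounded))                          # False
    print(univ.Integer().isSameTypeWith(bounded, matchConstraints=False))  # True

    # hasValue() tells a bare schema object from an initialized value.
    print(univ.Integer().hasValue(), univ.Integer(1).hasValue())           # False True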

libs/pyasn1/type/char.py (17)

@ -1,12 +1,6 @@
# ASN.1 "character string" types
from pyasn1.type import univ, tag
class UTF8String(univ.OctetString):
tagSet = univ.OctetString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
)
encoding = "utf-8"
class NumericString(univ.OctetString):
tagSet = univ.OctetString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18)
@ -21,7 +15,8 @@ class TeletexString(univ.OctetString):
tagSet = univ.OctetString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20)
)
class T61String(TeletexString): pass
class VideotexString(univ.OctetString):
tagSet = univ.OctetString.tagSet.tagImplicitly(
@ -43,6 +38,8 @@ class VisibleString(univ.OctetString):
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26)
)
class ISO646String(VisibleString): pass
class GeneralString(univ.OctetString):
tagSet = univ.OctetString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27)
@ -59,3 +56,9 @@ class BMPString(univ.OctetString):
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30)
)
encoding = "utf-16-be"
class UTF8String(univ.OctetString):
tagSet = univ.OctetString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
)
encoding = "utf-8"

libs/pyasn1/type/namedtype.py (31)

@ -8,9 +8,17 @@ class NamedType:
isDefaulted = 0
def __init__(self, name, t):
self.__name = name; self.__type = t
def __repr__(self): return '%s(%s, %s)' % (
def __repr__(self): return '%s(%r, %r)' % (
self.__class__.__name__, self.__name, self.__type
)
def __eq__(self, other): return tuple(self) == tuple(other)
def __ne__(self, other): return tuple(self) != tuple(other)
def __lt__(self, other): return tuple(self) < tuple(other)
def __le__(self, other): return tuple(self) <= tuple(other)
def __gt__(self, other): return tuple(self) > tuple(other)
def __ge__(self, other): return tuple(self) >= tuple(other)
def __hash__(self): return hash(tuple(self))
def getType(self): return self.__type
def getName(self): return self.__name
def __getitem__(self, idx):
@ -33,11 +41,18 @@ class NamedTypes:
self.__ambigiousTypes = {}
def __repr__(self):
r = '%s(' % self.__class__.__name__
for n in self.__namedTypes:
r = r + '%r, ' % (n,)
return r + ')'
return '%s(%s)' % (
self.__class__.__name__,
', '.join([ repr(x) for x in self.__namedTypes ])
)
def __eq__(self, other): return tuple(self) == tuple(other)
def __ne__(self, other): return tuple(self) != tuple(other)
def __lt__(self, other): return tuple(self) < tuple(other)
def __le__(self, other): return tuple(self) <= tuple(other)
def __gt__(self, other): return tuple(self) > tuple(other)
def __ge__(self, other): return tuple(self) >= tuple(other)
def __hash__(self): return hash(tuple(self))
def __getitem__(self, idx): return self.__namedTypes[idx]
if sys.version_info[0] <= 2:
@ -45,7 +60,9 @@ class NamedTypes:
else:
def __bool__(self): return bool(self.__namedTypesLen)
def __len__(self): return self.__namedTypesLen
def clone(self): return self.__class__(*self.__namedTypes)
def getTypeByPosition(self, idx):
if idx < 0 or idx >= self.__namedTypesLen:
raise error.PyAsn1Error('Type position out of range')

libs/pyasn1/type/namedval.py (12)

@ -22,7 +22,19 @@ class NamedValues:
self.valToNameIdx[val] = name
self.namedValues = self.namedValues + ((name, val),)
automaticVal = automaticVal + 1
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, ', '.join([repr(x) for x in self.namedValues]))
def __str__(self): return str(self.namedValues)
def __eq__(self, other): return tuple(self) == tuple(other)
def __ne__(self, other): return tuple(self) != tuple(other)
def __lt__(self, other): return tuple(self) < tuple(other)
def __le__(self, other): return tuple(self) <= tuple(other)
def __gt__(self, other): return tuple(self) > tuple(other)
def __ge__(self, other): return tuple(self) >= tuple(other)
def __hash__(self): return hash(tuple(self))
def getName(self, value):
if value in self.valToNameIdx:

libs/pyasn1/type/tag.py (10)

@ -24,6 +24,9 @@ class Tag:
self.uniq = (tagClass, tagId)
self.__hashedUniqTag = hash(self.uniq)
def __str__(self):
return '[%s:%s:%s]' % self.__tag
def __repr__(self):
return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % (
(self.__class__.__name__,) + self.__tag
@ -62,11 +65,14 @@ class TagSet:
_uniq = _uniq + t.uniq
self.uniq = _uniq
self.__lenOfSuperTags = len(superTags)
def __str__(self):
return self.__superTags and '+'.join([str(x) for x in self.__superTags]) or '[untagged]'
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
', '.join([repr(x) for x in self.__superTags])
'(), ' + ', '.join([repr(x) for x in self.__superTags])
)
def __add__(self, superTag):

libs/pyasn1/type/tagmap.py (18)

@ -21,9 +21,23 @@ class TagMap:
raise KeyError()
def __repr__(self):
s = '%r/%r' % (self.__posMap, self.__negMap)
s = self.__class__.__name__ + '('
if self.__posMap:
s = s + 'posMap=%r, ' % (self.__posMap,)
if self.__negMap:
s = s + 'negMap=%r, ' % (self.__negMap,)
if self.__defType is not None:
s = s + '/%r' % (self.__defType,)
s = s + 'defType=%r' % (self.__defType,)
return s + ')'
def __str__(self):
s = self.__class__.__name__ + ':\n'
if self.__posMap:
s = s + 'posMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__posMap.values()])
if self.__negMap:
s = s + 'negMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__negMap.values()])
if self.__defType is not None:
s = s + 'defType:\n%s, ' % self.__defType.prettyPrintType()
return s
def clone(self, parentType, tagMap, uniq=False):

libs/pyasn1/type/univ.py (230)

@ -1,5 +1,5 @@
# ASN.1 "universal" data types
import operator, sys
import operator, sys, math
from pyasn1.type import base, tag, constraint, namedtype, namedval, tagmap
from pyasn1.codec.ber import eoo
from pyasn1.compat import octets
@ -22,6 +22,12 @@ class Integer(base.AbstractSimpleAsn1Item):
self, value, tagSet, subtypeSpec
)
def __repr__(self):
if self.__namedValues is not self.namedValues:
return '%s, %r)' % (base.AbstractSimpleAsn1Item.__repr__(self)[:-1], self.__namedValues)
else:
return base.AbstractSimpleAsn1Item.__repr__(self)
def __and__(self, value): return self.clone(self._value & value)
def __rand__(self, value): return self.clone(value & self._value)
def __or__(self, value): return self.clone(self._value | value)
@ -57,8 +63,21 @@ class Integer(base.AbstractSimpleAsn1Item):
if sys.version_info[0] <= 2:
def __long__(self): return long(self._value)
def __float__(self): return float(self._value)
def __abs__(self): return abs(self._value)
def __abs__(self): return self.clone(abs(self._value))
def __index__(self): return int(self._value)
def __pos__(self): return self.clone(+self._value)
def __neg__(self): return self.clone(-self._value)
def __invert__(self): return self.clone(~self._value)
def __round__(self, n=0):
r = round(self._value, n)
if n:
return self.clone(r)
else:
return r
def __floor__(self): return math.floor(self._value)
def __ceil__(self): return math.ceil(self._value)
if sys.version_info[0:2] > (2, 5):
def __trunc__(self): return self.clone(math.trunc(self._value))
def __lt__(self, value): return self._value < value
def __le__(self, value): return self._value <= value
@ -73,7 +92,7 @@ class Integer(base.AbstractSimpleAsn1Item):
return int(value)
except:
raise error.PyAsn1Error(
'Can\'t coerce %s into integer: %s' % (value, sys.exc_info()[1])
'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1])
)
r = self.__namedValues.getValue(value)
if r is not None:
@ -82,7 +101,7 @@ class Integer(base.AbstractSimpleAsn1Item):
return int(value)
except:
raise error.PyAsn1Error(
'Can\'t coerce %s into integer: %s' % (value, sys.exc_info()[1])
'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1])
)
def prettyOut(self, value):
@ -260,6 +279,15 @@ class BitString(base.AbstractSimpleAsn1Item):
def prettyOut(self, value):
return '\"\'%s\'B\"' % ''.join([str(x) for x in value])
try:
all
except NameError: # Python 2.4
def all(iterable):
for element in iterable:
if not element:
return False
return True
class OctetString(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04)
@ -280,7 +308,7 @@ class OctetString(base.AbstractSimpleAsn1Item):
value = self.defaultHexValue
if value is None or value is base.noValue:
value = self.defaultBinValue
self.__intValue = None
self.__asNumbersCache = None
base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec)
def clone(self, value=None, tagSet=None, subtypeSpec=None,
@ -304,19 +332,33 @@ class OctetString(base.AbstractSimpleAsn1Item):
def prettyIn(self, value):
if isinstance(value, str):
return value
elif isinstance(value, unicode):
try:
return value.encode(self._encoding)
except (LookupError, UnicodeEncodeError):
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
)
elif isinstance(value, (tuple, list)):
try:
return ''.join([ chr(x) for x in value ])
except ValueError:
raise error.PyAsn1Error(
'Bad OctetString initializer \'%s\'' % (value,)
)
)
else:
return str(value)
else:
def prettyIn(self, value):
if isinstance(value, bytes):
return value
elif isinstance(value, str):
try:
return value.encode(self._encoding)
except UnicodeEncodeError:
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
)
elif isinstance(value, OctetString):
return value.asOctets()
elif isinstance(value, (tuple, list, map)):
@ -325,14 +367,14 @@ class OctetString(base.AbstractSimpleAsn1Item):
except ValueError:
raise error.PyAsn1Error(
'Bad OctetString initializer \'%s\'' % (value,)
)
)
else:
try:
return str(value).encode(self._encoding)
except UnicodeEncodeError:
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
)
)
def fromBinaryString(self, value):
@ -369,21 +411,33 @@ class OctetString(base.AbstractSimpleAsn1Item):
def prettyOut(self, value):
if sys.version_info[0] <= 2:
numbers = tuple([ ord(x) for x in value ])
numbers = tuple(( ord(x) for x in value ))
else:
numbers = tuple(value)
if [ x for x in numbers if x < 32 or x > 126 ]:
return '0x' + ''.join([ '%.2x' % x for x in numbers ])
else:
if all(x >= 32 and x <= 126 for x in numbers):
return str(value)
else:
return '0x' + ''.join(( '%.2x' % x for x in numbers ))
def __repr__(self):
if self._value is base.noValue:
return self.__class__.__name__ + '()'
if [ x for x in self.asNumbers() if x < 32 or x > 126 ]:
return self.__class__.__name__ + '(hexValue=\'' + ''.join([ '%.2x' % x for x in self.asNumbers() ])+'\')'
else:
return self.__class__.__name__ + '(\'' + self.prettyOut(self._value) + '\')'
r = []
doHex = False
if self._value is not self.defaultValue:
for x in self.asNumbers():
if x < 32 or x > 126:
doHex = True
break
if not doHex:
r.append('%r' % (self._value,))
if self._tagSet is not self.tagSet:
r.append('tagSet=%r' % (self._tagSet,))
if self._subtypeSpec is not self.subtypeSpec:
r.append('subtypeSpec=%r' % (self._subtypeSpec,))
if self.encoding is not self._encoding:
r.append('encoding=%r' % (self._encoding,))
if doHex:
r.append('hexValue=%r' % ''.join([ '%.2x' % x for x in self.asNumbers() ]))
return '%s(%s)' % (self.__class__.__name__, ', '.join(r))
if sys.version_info[0] <= 2:
def __str__(self): return str(self._value)
@ -391,17 +445,17 @@ class OctetString(base.AbstractSimpleAsn1Item):
return self._value.decode(self._encoding, 'ignore')
def asOctets(self): return self._value
def asNumbers(self):
if self.__intValue is None:
self.__intValue = tuple([ ord(x) for x in self._value ])
return self.__intValue
if self.__asNumbersCache is None:
self.__asNumbersCache = tuple([ ord(x) for x in self._value ])
return self.__asNumbersCache
else:
def __str__(self): return self._value.decode(self._encoding, 'ignore')
def __bytes__(self): return self._value
def asOctets(self): return self._value
def asNumbers(self):
if self.__intValue is None:
self.__intValue = tuple(self._value)
return self.__intValue
if self.__asNumbersCache is None:
self.__asNumbersCache = tuple(self._value)
return self.__asNumbersCache
# Immutable sequence object protocol
@ -419,7 +473,9 @@ class OctetString(base.AbstractSimpleAsn1Item):
def __radd__(self, value): return self.clone(self.prettyIn(value) + self._value)
def __mul__(self, value): return self.clone(self._value * value)
def __rmul__(self, value): return self * value
def __int__(self): return int(self._value)
def __float__(self): return float(self._value)
class Null(OctetString):
defaultValue = ''.encode() # This is tightly constrained
tagSet = baseTagSet = tag.initTagSet(
@ -430,7 +486,9 @@ class Null(OctetString):
if sys.version_info[0] <= 2:
intTypes = (int, long)
else:
intTypes = int
intTypes = (int,)
numericTypes = intTypes + (float,)
class ObjectIdentifier(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
@ -456,7 +514,9 @@ class ObjectIdentifier(base.AbstractSimpleAsn1Item):
return self._value[i]
def __str__(self): return self.prettyPrint()
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self.prettyPrint())
def index(self, suboid): return self._value.index(suboid)
def isPrefixOf(self, value):
@ -504,6 +564,7 @@ class ObjectIdentifier(base.AbstractSimpleAsn1Item):
def prettyOut(self, value): return '.'.join([ str(x) for x in value ])
class Real(base.AbstractSimpleAsn1Item):
binEncBase = None # binEncBase = 16 is recommended for large numbers
try:
_plusInf = float('inf')
_minusInf = float('-inf')
@ -526,11 +587,13 @@ class Real(base.AbstractSimpleAsn1Item):
def prettyIn(self, value):
if isinstance(value, tuple) and len(value) == 3:
for d in value:
if not isinstance(d, intTypes):
raise error.PyAsn1Error(
'Lame Real value syntax: %s' % (value,)
)
if not isinstance(value[0], numericTypes) or \
not isinstance(value[1], intTypes) or \
not isinstance(value[2], intTypes):
raise error.PyAsn1Error('Lame Real value syntax: %s' % (value,))
if isinstance(value[0], float) and \
self._inf and value[0] in self._inf:
return value[0]
if value[1] not in (2, 10):
raise error.PyAsn1Error(
'Prohibited base for Real value: %s' % (value[1],)
@ -540,7 +603,14 @@ class Real(base.AbstractSimpleAsn1Item):
return value
elif isinstance(value, intTypes):
return self.__normalizeBase10((value, 10, 0))
elif isinstance(value, float):
elif isinstance(value, (str, float)):
if isinstance(value, str):
try:
value = float(value)
except ValueError:
raise error.PyAsn1Error(
'Bad real value syntax: %s' % (value,)
)
if self._inf and value in self._inf:
return value
else:
@ -551,11 +621,6 @@ class Real(base.AbstractSimpleAsn1Item):
return self.__normalizeBase10((int(value), 10, e))
elif isinstance(value, Real):
return tuple(value)
elif isinstance(value, str): # handle infinite literal
try:
return float(value)
except ValueError:
pass
raise error.PyAsn1Error(
'Bad real value syntax: %s' % (value,)
)
@ -566,6 +631,12 @@ class Real(base.AbstractSimpleAsn1Item):
else:
return str(value)
def prettyPrint(self, scope=0):
if self.isInfinity():
return self.prettyOut(self._value)
else:
return str(float(self))
def isPlusInfinity(self): return self._value == self._plusInf
def isMinusInfinity(self): return self._value == self._minusInf
def isInfinity(self): return self._value in self._inf
@ -601,8 +672,20 @@ class Real(base.AbstractSimpleAsn1Item):
else:
return float(
self._value[0] * pow(self._value[1], self._value[2])
)
def __abs__(self): return abs(float(self))
)
def __abs__(self): return self.clone(abs(float(self)))
def __pos__(self): return self.clone(+float(self))
def __neg__(self): return self.clone(-float(self))
def __round__(self, n=0):
r = round(float(self), n)
if n:
return self.clone(r)
else:
return r
def __floor__(self): return self.clone(math.floor(float(self)))
def __ceil__(self): return self.clone(math.ceil(float(self)))
if sys.version_info[0:2] > (2, 5):
def __trunc__(self): return self.clone(math.trunc(float(self)))
def __lt__(self, value): return float(self) < value
def __le__(self, value): return float(self) <= value
@ -636,6 +719,7 @@ class SetOf(base.AbstractConstructedAsn1Item):
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
)
typeId = 1
strictConstraints = False
def _cloneComponentValues(self, myClone, cloneValueFlag):
idx = 0; l = len(self._componentValues)
@ -651,9 +735,14 @@ class SetOf(base.AbstractConstructedAsn1Item):
idx = idx + 1
def _verifyComponent(self, idx, value):
if self._componentType is not None and \
not self._componentType.isSuperTypeOf(value):
raise error.PyAsn1Error('Component type error %s' % (value,))
t = self._componentType
if t is None:
return
if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints):
raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t))
if self.strictConstraints and \
not t.isSuperTypeOf(value, matchTags=False):
raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t))
def getComponentByPosition(self, idx): return self._componentValues[idx]
def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
@ -698,6 +787,14 @@ class SetOf(base.AbstractConstructedAsn1Item):
r = r + self._componentValues[idx].prettyPrint(scope)
return r
def prettyPrintType(self, scope=0):
scope = scope + 1
r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__)
if self._componentType is not None:
r = r + ' '*scope
r = r + self._componentType.prettyPrintType(scope)
return r + '\n' + ' '*(scope-1) + '}'
class SequenceOf(SetOf):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
@ -706,15 +803,15 @@ class SequenceOf(SetOf):
class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
componentType = namedtype.NamedTypes()
strictConstraints = False
def __init__(self, componentType=None, tagSet=None,
subtypeSpec=None, sizeSpec=None):
if componentType is None:
componentType = self.componentType
base.AbstractConstructedAsn1Item.__init__(
self, componentType, tagSet, subtypeSpec, sizeSpec
)
if self._componentType is None:
self._componentTypeLen = 0
else:
self._componentTypeLen = len(self._componentType)
self, componentType.clone(), tagSet, subtypeSpec, sizeSpec
)
self._componentTypeLen = len(self._componentType)
def __getitem__(self, idx):
if isinstance(idx, str):
@ -747,8 +844,11 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
'Component type error out of range'
)
t = self._componentType[idx].getType()
if not t.isSuperTypeOf(value):
raise error.PyAsn1Error('Component type error %r vs %r' % (t, value))
if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints):
raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t))
if self.strictConstraints and \
not t.isSuperTypeOf(value, matchTags=False):
raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t))
def getComponentByName(self, name):
return self.getComponentByPosition(
@ -756,9 +856,8 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
)
def setComponentByName(self, name, value=None, verifyConstraints=True):
return self.setComponentByPosition(
self._componentType.getPositionByName(name), value,
verifyConstraints
)
self._componentType.getPositionByName(name),value,verifyConstraints
)
def getComponentByPosition(self, idx):
try:
@ -767,7 +866,11 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
if idx < self._componentTypeLen:
return
raise
def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
def setComponentByPosition(self, idx, value=None,
verifyConstraints=True,
exactTypes=False,
matchTags=True,
matchConstraints=True):
l = len(self._componentValues)
if idx >= l:
self._componentValues = self._componentValues + (idx-l+1)*[None]
@ -834,6 +937,17 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
)
return r
def prettyPrintType(self, scope=0):
scope = scope + 1
r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__)
for idx in range(len(self.componentType)):
r = r + ' '*scope
r = r + '"%s"' % self.componentType.getNameByPosition(idx)
r = '%s = %s\n' % (
r, self._componentType.getTypeByPosition(idx).prettyPrintType(scope)
)
return r + '\n' + ' '*(scope-1) + '}'
class Sequence(SequenceAndSetBase):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
@ -877,16 +991,16 @@ class Set(SequenceAndSetBase):
if t.getTagSet():
return self.setComponentByPosition(
idx, value, verifyConstraints
)
)
else:
t = self.setComponentByPosition(idx).getComponentByPosition(idx)
return t.setComponentByType(
tagSet, value, innerFlag, verifyConstraints
)
)
else: # set outer component by inner tagSet
return self.setComponentByPosition(
idx, value, verifyConstraints
)
)
def getComponentTagMap(self):
if self._componentType:
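The univ.py hunks above teach pyasn1's Real type to accept string literals in prettyIn() and add __round__/__floor__/__ceil__/__trunc__ plus a cloning __abs__. A minimal sketch of the new behaviour, assuming the vendored copy under libs/pyasn1 is importable as pyasn1:

from pyasn1.type import univ

r = univ.Real('1.5')                       # string input is now coerced via float()
print(float(r))                            # 1.5
print(float(abs(r)))                       # __abs__ returns a cloned Real, still 1.5
print(univ.Real('inf').isPlusInfinity())   # infinite literals map to the +inf sentinel (True)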

5
libs/pyasn1/type/useful.py

@ -1,6 +1,11 @@
# ASN.1 "useful" types
from pyasn1.type import char, tag
class ObjectDescriptor(char.GraphicString):
tagSet = char.GraphicString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7)
)
class GeneralizedTime(char.VisibleString):
tagSet = char.VisibleString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24)

6
libs/requests/__init__.py

@ -6,7 +6,7 @@
# /
"""
requests HTTP library
Requests HTTP library
~~~~~~~~~~~~~~~~~~~~~
Requests is an HTTP library, written in Python, for human beings. Basic GET
@ -42,8 +42,8 @@ is at <http://python-requests.org>.
"""
__title__ = 'requests'
__version__ = '2.6.0'
__build__ = 0x020503
__version__ = '2.7.0'
__build__ = 0x020700
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015 Kenneth Reitz'

3
libs/requests/adapters.py

@ -407,9 +407,6 @@ class HTTPAdapter(BaseAdapter):
# Then, reraise so that we can handle the actual exception.
low_conn.close()
raise
else:
# All is well, return the connection to the pool.
conn._put_conn(low_conn)
except (ProtocolError, socket.error) as err:
raise ConnectionError(err, request=request)

5
libs/requests/api.py

@ -55,17 +55,18 @@ def request(method, url, **kwargs):
return response
def get(url, **kwargs):
def get(url, params=None, **kwargs):
"""Sends a GET request.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
return request('get', url, **kwargs)
return request('get', url, params=params, **kwargs)
def options(url, **kwargs):

5
libs/requests/auth.py

@ -103,7 +103,8 @@ class HTTPDigestAuth(AuthBase):
# XXX not implemented yet
entdig = None
p_parsed = urlparse(url)
path = p_parsed.path
#: path is request-uri defined in RFC 2616 which should not be empty
path = p_parsed.path or "/"
if p_parsed.query:
path += '?' + p_parsed.query
@ -178,7 +179,7 @@ class HTTPDigestAuth(AuthBase):
# Consume content and release the original connection
# to allow our new request to reuse the same one.
r.content
r.raw.release_conn()
r.close()
prep = r.request.copy()
extract_cookies_to_jar(prep._cookies, r.request, r.raw)
prep.prepare_cookies(prep._cookies)
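The digest-auth hunk above substitutes "/" when the parsed path is empty, since the request-URI used in the digest A2 value must not be empty per RFC 2616. A hedged sketch of the same fallback in isolation; request_uri here is a hypothetical helper, not part of requests:

from requests.compat import urlparse

def request_uri(url):
    # Mirror the fallback HTTPDigestAuth now applies: an empty path becomes "/".
    parsed = urlparse(url)
    path = parsed.path or '/'
    if parsed.query:
        path += '?' + parsed.query
    return path

print(request_uri('http://example.com'))        # "/"
print(request_uri('http://example.com/a?b=1'))  # "/a?b=1"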

2
libs/requests/compat.py

@ -4,7 +4,7 @@
pythoncompat
"""
from .packages import chardet
import chardet
import sys

27
libs/requests/cookies.py

@ -6,6 +6,7 @@ Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""
import copy
import time
import collections
from .compat import cookielib, urlparse, urlunparse, Morsel
@ -302,7 +303,7 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
"""Updates this jar with cookies from another CookieJar or dict-like"""
if isinstance(other, cookielib.CookieJar):
for cookie in other:
self.set_cookie(cookie)
self.set_cookie(copy.copy(cookie))
else:
super(RequestsCookieJar, self).update(other)
@ -359,6 +360,21 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return new_cj
def _copy_cookie_jar(jar):
if jar is None:
return None
if hasattr(jar, 'copy'):
# We're dealing with an instance of RequestsCookieJar
return jar.copy()
# We're dealing with a generic CookieJar instance
new_jar = copy.copy(jar)
new_jar.clear()
for cookie in jar:
new_jar.set_cookie(copy.copy(cookie))
return new_jar
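_copy_cookie_jar() above copies each cookie individually so a prepared request no longer shares Cookie objects with the session jar. A small illustration of the same idea against the standard cookie-jar API; copy_jar is a hypothetical stand-in for the private helper:

import copy
try:
    from http.cookiejar import CookieJar  # Python 3
except ImportError:
    from cookielib import CookieJar       # Python 2

def copy_jar(jar):
    # Shallow-copy the jar, clear it, then re-add a copy of every cookie.
    if jar is None:
        return None
    new_jar = copy.copy(jar)
    new_jar.clear()
    for cookie in jar:
        new_jar.set_cookie(copy.copy(cookie))
    return new_jar

print(len(copy_jar(CookieJar())))  # 0, but the returned jar is an independent object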
def create_cookie(name, value, **kwargs):
"""Make a cookie from underspecified parameters.
@ -399,11 +415,14 @@ def morsel_to_cookie(morsel):
expires = None
if morsel['max-age']:
expires = time.time() + morsel['max-age']
try:
expires = int(time.time() + int(morsel['max-age']))
except ValueError:
raise TypeError('max-age: %s must be integer' % morsel['max-age'])
elif morsel['expires']:
time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
expires = time.mktime(
time.strptime(morsel['expires'], time_template)) - time.timezone
expires = int(time.mktime(
time.strptime(morsel['expires'], time_template)) - time.timezone)
return create_cookie(
comment=morsel['comment'],
comment_url=bool(morsel['comment']),

90
libs/requests/models.py

@ -15,7 +15,7 @@ from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
@ -30,7 +30,8 @@ from .utils import (
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, json, builtin_str, basestring)
is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
@ -42,12 +43,11 @@ REDIRECT_STATI = (
codes.temporary_redirect, # 307
codes.permanent_redirect, # 308
)
DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
json_dumps = json.dumps
class RequestEncodingMixin(object):
@property
@ -149,8 +149,7 @@ class RequestEncodingMixin(object):
else:
fdata = fp.read()
rf = RequestField(name=k, data=fdata,
filename=fn, headers=fh)
rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
rf.make_multipart(content_type=ft)
new_fields.append(rf)
@ -207,17 +206,8 @@ class Request(RequestHooksMixin):
<PreparedRequest [GET]>
"""
def __init__(self,
method=None,
url=None,
headers=None,
files=None,
data=None,
params=None,
auth=None,
cookies=None,
hooks=None,
json=None):
def __init__(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
# Default empty dicts for dict params.
data = [] if data is None else data
@ -296,8 +286,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.hooks = default_hooks()
def prepare(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None,
json=None):
data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
"""Prepares the entire request with the given parameters."""
self.prepare_method(method)
@ -306,6 +295,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.prepare_cookies(cookies)
self.prepare_body(data, files, json)
self.prepare_auth(auth, url)
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
@ -320,7 +310,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
p.method = self.method
p.url = self.url
p.headers = self.headers.copy() if self.headers is not None else None
p._cookies = self._cookies.copy() if self._cookies is not None else None
p._cookies = _copy_cookie_jar(self._cookies)
p.body = self.body
p.hooks = self.hooks
return p
@ -357,8 +347,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise InvalidURL(*e.args)
if not scheme:
raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
"Perhaps you meant http://{0}?".format(url))
error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
error = error.format(to_native_string(url, 'utf8'))
raise MissingSchema(error)
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
@ -424,7 +416,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if json is not None:
content_type = 'application/json'
body = json_dumps(json)
body = complexjson.dumps(json)
is_stream = all([
hasattr(data, '__iter__'),
@ -501,7 +493,15 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data."""
"""Prepares the given HTTP cookie data.
This function eventually generates a ``Cookie`` header from the
given cookies using cookielib. Due to cookielib's design, the header
will not be regenerated if it already exists, meaning this function
can only be called once for the life of the
:class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
header is removed beforehand."""
if isinstance(cookies, cookielib.CookieJar):
self._cookies = cookies
@ -514,6 +514,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
# hooks can be passed as None to the prepare method and to this
# method. To prevent iterating over None, simply use an empty list
# if hooks is False-y
hooks = hooks or []
for event in hooks:
self.register_hook(event, hooks[event])
@ -524,16 +528,8 @@ class Response(object):
"""
__attrs__ = [
'_content',
'status_code',
'headers',
'url',
'history',
'encoding',
'reason',
'cookies',
'elapsed',
'request',
'_content', 'status_code', 'headers', 'url', 'history',
'encoding', 'reason', 'cookies', 'elapsed', 'request'
]
def __init__(self):
@ -573,7 +569,11 @@ class Response(object):
self.cookies = cookiejar_from_dict({})
#: The amount of time elapsed between sending the request
#: and the arrival of the response (as a timedelta)
#: and the arrival of the response (as a timedelta).
#: This property specifically measures the time taken between sending
#: the first byte of the request and finishing parsing the headers. It
#: is therefore unaffected by consuming the response content or the
#: value of the ``stream`` keyword argument.
self.elapsed = datetime.timedelta(0)
#: The :class:`PreparedRequest <PreparedRequest>` object to which this
@ -649,9 +649,10 @@ class Response(object):
If decode_unicode is True, content will be decoded using the best
available encoding based on the response.
"""
def generate():
try:
# Special case for urllib3.
# Special case for urllib3.
if hasattr(self.raw, 'stream'):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
@ -661,7 +662,7 @@ class Response(object):
raise ContentDecodingError(e)
except ReadTimeoutError as e:
raise ConnectionError(e)
except AttributeError:
else:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
@ -792,14 +793,16 @@ class Response(object):
encoding = guess_json_utf(self.content)
if encoding is not None:
try:
return json.loads(self.content.decode(encoding), **kwargs)
return complexjson.loads(
self.content.decode(encoding), **kwargs
)
except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was*
# used.
pass
return json.loads(self.text, **kwargs)
return complexjson.loads(self.text, **kwargs)
@property
def links(self):
@ -825,10 +828,10 @@ class Response(object):
http_error_msg = ''
if 400 <= self.status_code < 500:
http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason)
http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)
elif 500 <= self.status_code < 600:
http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)
http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url)
if http_error_msg:
raise HTTPError(http_error_msg, response=self)
@ -839,4 +842,7 @@ class Response(object):
*Note: Should not normally need to be called explicitly.*
"""
if not self._content_consumed:
return self.raw.close()
return self.raw.release_conn()
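Two of the models.py changes above are visible from user code: raise_for_status() now includes the request URL in its message, and elapsed is documented as time-to-headers only. A hedged example; the httpbin URL is illustrative:

import requests

r = requests.get('https://httpbin.org/status/404')
print(r.elapsed)                 # time until the headers were parsed, regardless of stream=
try:
    r.raise_for_status()
except requests.HTTPError as err:
    print(err)                   # e.g. "404 Client Error: NOT FOUND for url: https://httpbin.org/status/404"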

106
libs/requests/packages/__init__.py

@ -1,107 +1,3 @@
"""
Copyright (c) Donald Stufft, pip, and individual contributors
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import sys
class VendorAlias(object):
def __init__(self, package_names):
self._package_names = package_names
self._vendor_name = __name__
self._vendor_pkg = self._vendor_name + "."
self._vendor_pkgs = [
self._vendor_pkg + name for name in self._package_names
]
def find_module(self, fullname, path=None):
if fullname.startswith(self._vendor_pkg):
return self
def load_module(self, name):
# Ensure that this only works for the vendored name
if not name.startswith(self._vendor_pkg):
raise ImportError(
"Cannot import %s, must be a subpackage of '%s'." % (
name, self._vendor_name,
)
)
if not (name == self._vendor_name or
any(name.startswith(pkg) for pkg in self._vendor_pkgs)):
raise ImportError(
"Cannot import %s, must be one of %s." % (
name, self._vendor_pkgs
)
)
# Check to see if we already have this item in sys.modules, if we do
# then simply return that.
if name in sys.modules:
return sys.modules[name]
# Check to see if we can import the vendor name
try:
# We do this dance here because we want to try and import this
# module without hitting a recursion error because of a bunch of
# VendorAlias instances on sys.meta_path
real_meta_path = sys.meta_path[:]
try:
sys.meta_path = [
m for m in sys.meta_path
if not isinstance(m, VendorAlias)
]
__import__(name)
module = sys.modules[name]
finally:
# Re-add any additions to sys.meta_path that were made while
# during the import we just did, otherwise things like
# requests.packages.urllib3.poolmanager will fail.
for m in sys.meta_path:
if m not in real_meta_path:
real_meta_path.append(m)
# Restore sys.meta_path with any new items.
sys.meta_path = real_meta_path
except ImportError:
# We can't import the vendor name, so we'll try to import the
# "real" name.
real_name = name[len(self._vendor_pkg):]
try:
__import__(real_name)
module = sys.modules[real_name]
except ImportError:
raise ImportError("No module named '%s'" % (name,))
# If we've gotten here we've found the module we're looking for, either
# as part of our vendored package, or as the real name, so we'll add
# it to sys.modules as the vendored name so that we don't have to do
# the lookup again.
sys.modules[name] = module
# Finally, return the loaded module
return module
sys.meta_path.append(VendorAlias(["urllib3", "chardet"]))
from . import urllib3
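With the VendorAlias import hook gone, the vendored packages are plain subpackages reached through a relative import; the usual access path keeps working:

from requests.packages import urllib3
from requests.packages.urllib3 import PoolManager

http = PoolManager()
print(urllib3.__version__)  # 1.10.4 in this commit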

9
libs/requests/packages/urllib3/__init__.py

@ -4,7 +4,7 @@ urllib3 - Thread-safe connection pooling and re-using.
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
__version__ = '1.10.2'
__version__ = '1.10.4'
from .connectionpool import (
@ -55,9 +55,12 @@ def add_stderr_logger(level=logging.DEBUG):
del NullHandler
# Set security warning to always go off by default.
import warnings
warnings.simplefilter('always', exceptions.SecurityWarning)
# SecurityWarning's always go off by default.
warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
append=True)
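The warning setup above turns SecurityWarning on unconditionally and leaves InsecurePlatformWarning at its default filter; callers who want neither can still opt out per process, e.g.:

from requests.packages import urllib3

urllib3.disable_warnings(urllib3.exceptions.InsecurePlatformWarning)
urllib3.disable_warnings(urllib3.exceptions.SecurityWarning)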
def disable_warnings(category=exceptions.HTTPWarning):
"""

45
libs/requests/packages/urllib3/_collections.py

@ -227,20 +227,20 @@ class HTTPHeaderDict(dict):
# Need to convert the tuple to list for further extension
_dict_setitem(self, key_lower, [vals[0], vals[1], val])
def extend(*args, **kwargs):
def extend(self, *args, **kwargs):
"""Generic import function for any type of header-like object.
Adapted version of MutableMapping.update in order to insert items
with self.add instead of self.__setitem__
"""
if len(args) > 2:
raise TypeError("update() takes at most 2 positional "
if len(args) > 1:
raise TypeError("extend() takes at most 1 positional "
"arguments ({} given)".format(len(args)))
elif not args:
raise TypeError("update() takes at least 1 argument (0 given)")
self = args[0]
other = args[1] if len(args) >= 2 else ()
other = args[0] if len(args) >= 1 else ()
if isinstance(other, Mapping):
if isinstance(other, HTTPHeaderDict):
for key, val in other.iteritems():
self.add(key, val)
elif isinstance(other, Mapping):
for key in other:
self.add(key, other[key])
elif hasattr(other, "keys"):
@ -304,17 +304,20 @@ class HTTPHeaderDict(dict):
return list(self.iteritems())
@classmethod
def from_httplib(cls, message, duplicates=('set-cookie',)): # Python 2
def from_httplib(cls, message): # Python 2
"""Read headers from a Python 2 httplib message object."""
ret = cls(message.items())
# ret now contains only the last header line for each duplicate.
# Importing with all duplicates would be nice, but this would
# mean to repeat most of the raw parsing already done, when the
# message object was created. Extracting only the headers of interest
# separately, the cookies, should be faster and requires less
# extra code.
for key in duplicates:
ret.discard(key)
for val in message.getheaders(key):
ret.add(key, val)
return ret
# python2.7 does not expose a proper API for exporting multiheaders
# efficiently. This function re-reads raw lines from the message
# object and extracts the multiheaders properly.
headers = []
for line in message.headers:
if line.startswith((' ', '\t')):
key, value = headers[-1]
headers[-1] = (key, value + '\r\n' + line.rstrip())
continue
key, value = line.split(':', 1)
headers.append((key, value.strip()))
return cls(headers)
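The reworked extend() and from_httplib() above keep duplicate headers such as Set-Cookie instead of collapsing them. A small sketch of the public add/extend behaviour, assuming the vendored urllib3 from this commit:

from requests.packages.urllib3._collections import HTTPHeaderDict

h = HTTPHeaderDict()
h.add('Set-Cookie', 'a=1')
h.add('Set-Cookie', 'b=2')      # duplicates are preserved, not overwritten
h.extend({'X-Extra': 'yes'})    # extend() now takes the target as a real self argument
print(h['set-cookie'])          # "a=1, b=2" in this version: values joined, lookup case-insensitive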

2
libs/requests/packages/urllib3/connection.py

@ -260,3 +260,5 @@ if ssl:
# Make a copy for testing.
UnverifiedHTTPSConnection = HTTPSConnection
HTTPSConnection = VerifiedHTTPSConnection
else:
HTTPSConnection = DummyConnection

1
libs/requests/packages/urllib3/connectionpool.py

@ -735,7 +735,6 @@ class HTTPSConnectionPool(HTTPConnectionPool):
% (self.num_connections, self.host))
if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
# Platform-specific: Python without ssl
raise SSLError("Can't connect to HTTPS URL because the SSL "
"module is not available.")

23
libs/requests/packages/urllib3/contrib/pyopenssl.py

@ -38,8 +38,6 @@ Module Variables
----------------
:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:
ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS``
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
@ -85,22 +83,7 @@ _openssl_verify = {
+ OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
# A secure default.
# Sources for more information on TLS ciphers:
#
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# The general intent is:
# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
# - prefer ECDHE over DHE for better performance,
# - prefer any AES-GCM over any AES-CBC for better performance and security,
# - use 3DES as fallback which is secure but slow,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \
"ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \
"!aNULL:!MD5:!DSS"
DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
orig_util_HAS_SNI = util.HAS_SNI
@ -299,7 +282,9 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
try:
cnx.do_handshake()
except OpenSSL.SSL.WantReadError:
select.select([sock], [], [])
rd, _, _ = select.select([sock], [], [], sock.gettimeout())
if not rd:
raise timeout('select timed out')
continue
except OpenSSL.SSL.Error as e:
raise ssl.SSLError('bad handshake', e)

5
libs/requests/packages/urllib3/exceptions.py

@ -162,3 +162,8 @@ class SystemTimeWarning(SecurityWarning):
class InsecurePlatformWarning(SecurityWarning):
"Warned when certain SSL configuration is not available on a platform."
pass
class ResponseNotChunked(ProtocolError, ValueError):
"Response needs to be chunked in order to read it as chunks."
pass

159
libs/requests/packages/urllib3/response.py

@ -1,9 +1,15 @@
try:
import http.client as httplib
except ImportError:
import httplib
import zlib
import io
from socket import timeout as SocketTimeout
from ._collections import HTTPHeaderDict
from .exceptions import ProtocolError, DecodeError, ReadTimeoutError
from .exceptions import (
ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
)
from .packages.six import string_types as basestring, binary_type, PY3
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed
@ -117,7 +123,17 @@ class HTTPResponse(io.IOBase):
if hasattr(body, 'read'):
self._fp = body
if preload_content and not self._body:
# Are we using the chunked-style of transfer encoding?
self.chunked = False
self.chunk_left = None
tr_enc = self.headers.get('transfer-encoding', '').lower()
# Don't incur the penalty of creating a list and then discarding it
encodings = (enc.strip() for enc in tr_enc.split(","))
if "chunked" in encodings:
self.chunked = True
# We certainly don't want to preload content when the response is chunked.
if not self.chunked and preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
@ -157,6 +173,35 @@ class HTTPResponse(io.IOBase):
"""
return self._fp_bytes_read
def _init_decoder(self):
"""
Set up the _decoder attribute if necessary.
"""
# Note: content-encoding value should be case-insensitive, per RFC 7230
# Section 3.2
content_encoding = self.headers.get('content-encoding', '').lower()
if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
self._decoder = _get_decoder(content_encoding)
def _decode(self, data, decode_content, flush_decoder):
"""
Decode the data passed in and potentially flush the decoder.
"""
try:
if decode_content and self._decoder:
data = self._decoder.decompress(data)
except (IOError, zlib.error) as e:
content_encoding = self.headers.get('content-encoding', '').lower()
raise DecodeError(
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding, e)
if flush_decoder and decode_content and self._decoder:
buf = self._decoder.decompress(binary_type())
data += buf + self._decoder.flush()
return data
def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
@ -178,12 +223,7 @@ class HTTPResponse(io.IOBase):
after having ``.read()`` the file object. (Overridden if ``amt`` is
set.)
"""
# Note: content-encoding value should be case-insensitive, per RFC 7230
# Section 3.2
content_encoding = self.headers.get('content-encoding', '').lower()
if self._decoder is None:
if content_encoding in self.CONTENT_DECODERS:
self._decoder = _get_decoder(content_encoding)
self._init_decoder()
if decode_content is None:
decode_content = self.decode_content
@ -232,17 +272,7 @@ class HTTPResponse(io.IOBase):
self._fp_bytes_read += len(data)
try:
if decode_content and self._decoder:
data = self._decoder.decompress(data)
except (IOError, zlib.error) as e:
raise DecodeError(
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding, e)
if flush_decoder and decode_content and self._decoder:
buf = self._decoder.decompress(binary_type())
data += buf + self._decoder.flush()
data = self._decode(data, decode_content, flush_decoder)
if cache_content:
self._body = data
@ -269,11 +299,15 @@ class HTTPResponse(io.IOBase):
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
while not is_fp_closed(self._fp):
data = self.read(amt=amt, decode_content=decode_content)
if self.chunked:
for line in self.read_chunked(amt, decode_content=decode_content):
yield line
else:
while not is_fp_closed(self._fp):
data = self.read(amt=amt, decode_content=decode_content)
if data:
yield data
if data:
yield data
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
@ -351,3 +385,82 @@ class HTTPResponse(io.IOBase):
else:
b[:len(temp)] = temp
return len(temp)
def _update_chunk_length(self):
# First, we'll figure out length of a chunk and then
# we'll try to read it from socket.
if self.chunk_left is not None:
return
line = self._fp.fp.readline()
line = line.split(b';', 1)[0]
try:
self.chunk_left = int(line, 16)
except ValueError:
# Invalid chunked protocol response, abort.
self.close()
raise httplib.IncompleteRead(line)
def _handle_chunk(self, amt):
returned_chunk = None
if amt is None:
chunk = self._fp._safe_read(self.chunk_left)
returned_chunk = chunk
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
elif amt < self.chunk_left:
value = self._fp._safe_read(amt)
self.chunk_left = self.chunk_left - amt
returned_chunk = value
elif amt == self.chunk_left:
value = self._fp._safe_read(amt)
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
returned_chunk = value
else: # amt > self.chunk_left
returned_chunk = self._fp._safe_read(self.chunk_left)
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
return returned_chunk
def read_chunked(self, amt=None, decode_content=None):
"""
Similar to :meth:`HTTPResponse.read`, but with an additional
parameter: ``decode_content``.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
self._init_decoder()
# FIXME: Rewrite this method and make it a class with a better structured logic.
if not self.chunked:
raise ResponseNotChunked("Response is not chunked. "
"Header 'transfer-encoding: chunked' is missing.")
if self._original_response and self._original_response._method.upper() == 'HEAD':
# Don't bother reading the body of a HEAD request.
# FIXME: Can we do this somehow without accessing private httplib _method?
self._original_response.close()
return
while True:
self._update_chunk_length()
if self.chunk_left == 0:
break
chunk = self._handle_chunk(amt)
yield self._decode(chunk, decode_content=decode_content,
flush_decoder=True)
# Chunk content ends with \r\n: discard it.
while True:
line = self._fp.fp.readline()
if not line:
# Some sites may not end with '\r\n'.
break
if line == b'\r\n':
break
# We read everything; close the "file".
if self._original_response:
self._original_response.close()
self.release_conn()
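read_chunked() above lets HTTPResponse.stream() yield decoded chunks directly from a chunked reply. Through requests this is exercised with stream=True and iter_content(); a hedged sketch, with an illustrative httpbin endpoint that usually answers with Transfer-Encoding: chunked:

import requests

r = requests.get('https://httpbin.org/stream/3', stream=True)
for chunk in r.iter_content(chunk_size=1024):
    # Each piece comes from HTTPResponse.stream() -> read_chunked() under the hood.
    print(len(chunk))
r.close()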

38
libs/requests/packages/urllib3/util/ssl_.py

@ -9,10 +9,10 @@ HAS_SNI = False
create_default_context = None
import errno
import ssl
import warnings
try: # Test for SSL features
import ssl
from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
from ssl import HAS_SNI # Has SNI?
except ImportError:
@ -25,14 +25,24 @@ except ImportError:
OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
OP_NO_COMPRESSION = 0x20000
try:
from ssl import _DEFAULT_CIPHERS
except ImportError:
_DEFAULT_CIPHERS = (
'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
'!eNULL:!MD5'
)
# A secure default.
# Sources for more information on TLS ciphers:
#
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# The general intent is:
# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
# - prefer ECDHE over DHE for better performance,
# - prefer any AES-GCM over any AES-CBC for better performance and security,
# - use 3DES as fallback which is secure but slow,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
DEFAULT_CIPHERS = (
'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
'!eNULL:!MD5'
)
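DEFAULT_CIPHERS above replaces the old fallback to ssl._DEFAULT_CIPHERS and is what create_urllib3_context() applies when no cipher string is given; cert_reqs now defaults inside the function so the module still imports without ssl. A hedged sketch using the vendored copy:

from requests.packages.urllib3.util.ssl_ import DEFAULT_CIPHERS, create_urllib3_context

ctx = create_urllib3_context(ciphers=DEFAULT_CIPHERS)
print(ctx.verify_mode)   # ssl.CERT_REQUIRED, applied as the in-function default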
try:
from ssl import SSLContext # Modern SSL?
@ -40,7 +50,8 @@ except ImportError:
import sys
class SSLContext(object): # Platform-specific: Python 2 & 3.1
supports_set_ciphers = sys.version_info >= (2, 7)
supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
(3, 2) <= sys.version_info)
def __init__(self, protocol_version):
self.protocol = protocol_version
@ -167,7 +178,7 @@ def resolve_ssl_version(candidate):
return candidate
def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
def create_urllib3_context(ssl_version=None, cert_reqs=None,
options=None, ciphers=None):
"""All arguments have the same meaning as ``ssl_wrap_socket``.
@ -204,6 +215,9 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
"""
context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
# Setting the default here, as we may have no ssl module on import
cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
if options is None:
options = 0
# SSLv2 is easily broken and is considered harmful and dangerous
@ -217,7 +231,7 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
context.options |= options
if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6
context.set_ciphers(ciphers or _DEFAULT_CIPHERS)
context.set_ciphers(ciphers or DEFAULT_CIPHERS)
context.verify_mode = cert_reqs
if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2

2
libs/requests/packages/urllib3/util/url.py

@ -15,6 +15,8 @@ class Url(namedtuple('Url', url_attrs)):
def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
query=None, fragment=None):
if path and not path.startswith('/'):
path = '/' + path
return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
query, fragment)
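The Url hunk above prefixes a bare path with '/' at construction time; parse_url() shows the related request_uri fallback:

from requests.packages.urllib3.util.url import Url, parse_url

print(Url(scheme='http', host='example.com', path='foo').url)  # http://example.com/foo
print(parse_url('http://example.com').request_uri)             # "/"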

14
libs/requests/sessions.py

@ -90,7 +90,7 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
class SessionRedirectMixin(object):
def resolve_redirects(self, resp, req, stream=False, timeout=None,
verify=True, cert=None, proxies=None):
verify=True, cert=None, proxies=None, **adapter_kwargs):
"""Receives a Response. Returns a generator of Responses."""
i = 0
@ -193,6 +193,7 @@ class SessionRedirectMixin(object):
cert=cert,
proxies=proxies,
allow_redirects=False,
**adapter_kwargs
)
extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
@ -560,10 +561,6 @@ class Session(SessionRedirectMixin):
# Set up variables needed for resolve_redirects and dispatching of hooks
allow_redirects = kwargs.pop('allow_redirects', True)
stream = kwargs.get('stream')
timeout = kwargs.get('timeout')
verify = kwargs.get('verify')
cert = kwargs.get('cert')
proxies = kwargs.get('proxies')
hooks = request.hooks
# Get the appropriate adapter to use
@ -591,12 +588,7 @@ class Session(SessionRedirectMixin):
extract_cookies_to_jar(self.cookies, request, r.raw)
# Redirect resolving generator.
gen = self.resolve_redirects(r, request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies)
gen = self.resolve_redirects(r, request, **kwargs)
# Resolve redirects if allowed.
history = [resp for resp in gen] if allow_redirects else []
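Because resolve_redirects() now receives the original send kwargs, settings such as timeout, verify and cert are forwarded generically to every redirect hop instead of being re-extracted one by one. A short usage sketch; the httpbin URL is illustrative:

import requests

s = requests.Session()
# timeout and the other send kwargs are forwarded to the request made after the 302 redirect.
r = s.get('https://httpbin.org/redirect/1', timeout=5)
print([resp.status_code for resp in r.history], r.status_code)  # [302] 200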

7
libs/requests/utils.py

@ -67,7 +67,7 @@ def super_len(o):
return len(o.getvalue())
def get_netrc_auth(url):
def get_netrc_auth(url, raise_errors=False):
"""Returns the Requests tuple auth for a given url from netrc."""
try:
@ -105,8 +105,9 @@ def get_netrc_auth(url):
return (_netrc[login_i], _netrc[2])
except (NetrcParseError, IOError):
# If there was a parsing error or a permissions issue reading the file,
# we'll just skip netrc auth
pass
# we'll just skip netrc auth unless explicitly asked to raise errors.
if raise_errors:
raise
# AppEngine hackiness.
except (ImportError, AttributeError):
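get_netrc_auth() gains a raise_errors flag so callers can surface ~/.netrc problems instead of silently skipping netrc auth. A hedged sketch:

from requests.utils import get_netrc_auth

print(get_netrc_auth('https://example.com'))  # default: errors swallowed, may return None

try:
    get_netrc_auth('https://example.com', raise_errors=True)
except Exception as err:                      # NetrcParseError / IOError now propagate
    print('netrc problem:', err)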
