Commit 8a2882c2 authored by Donald Stufft

Merge branch 'master' into develop

Conflicts:
	pip/__init__.py
	pip/req.py
 Changelog
 =========
 
+1.5.2 (2014-01-26)
+------------------
+
+* Upgraded the vendored ``pkg_resources`` and ``_markerlib`` to setuptools 2.1.
+* Fixed an error that prevented accessing PyPI when pyopenssl, ndg-httpsclient,
+  and pyasn1 are installed
+* Fixed an issue that caused trailing comments to be incorrectly included as
+  part of the URL in a requirements file
+
 1.5.1 (2014-01-20)
 ------------------
......
@@ -16,7 +16,7 @@ Modifications
 Markerlib and pkg_resources
 ===========================
 
-Markerlib and pkg_resources has been pulled in from setuptools 2.0.2
+Markerlib and pkg_resources has been pulled in from setuptools 2.1
 
 Note to Downstream Distributors
......
"""Package resource API """
Package resource API
-------------------- --------------------
A resource is a logical file contained within a package, or a logical A resource is a logical file contained within a package, or a logical
...@@ -1717,7 +1718,14 @@ def find_distributions(path_item, only=False): ...@@ -1717,7 +1718,14 @@ def find_distributions(path_item, only=False):
finder = _find_adapter(_distribution_finders, importer) finder = _find_adapter(_distribution_finders, importer)
return finder(importer, path_item, only) return finder(importer, path_item, only)
def find_in_zip(importer, path_item, only=False): def find_eggs_in_zip(importer, path_item, only=False):
"""
Find eggs in zip files; possibly multiple nested eggs.
"""
if importer.archive.endswith('.whl'):
# wheels are not supported with this finder
# they don't have PKG-INFO metadata, and won't ever contain eggs
return
metadata = EggMetadata(importer) metadata = EggMetadata(importer)
if metadata.has_metadata('PKG-INFO'): if metadata.has_metadata('PKG-INFO'):
yield Distribution.from_filename(path_item, metadata=metadata) yield Distribution.from_filename(path_item, metadata=metadata)
...@@ -1726,10 +1734,10 @@ def find_in_zip(importer, path_item, only=False): ...@@ -1726,10 +1734,10 @@ def find_in_zip(importer, path_item, only=False):
for subitem in metadata.resource_listdir('/'): for subitem in metadata.resource_listdir('/'):
if subitem.endswith('.egg'): if subitem.endswith('.egg'):
subpath = os.path.join(path_item, subitem) subpath = os.path.join(path_item, subitem)
for dist in find_in_zip(zipimport.zipimporter(subpath), subpath): for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
yield dist yield dist
register_finder(zipimport.zipimporter, find_in_zip) register_finder(zipimport.zipimporter, find_eggs_in_zip)
def find_nothing(importer, path_item, only=False): def find_nothing(importer, path_item, only=False):
return () return ()
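
A rough behaviour sketch of the renamed finder (the wheel filename is hypothetical, and this assumes a ``pkg_resources`` built from setuptools 2.1 or later): wheels are skipped outright, so no distributions are yielded::

    import zipimport
    import pkg_resources

    wheel = './example-1.0-py2.py3-none-any.whl'   # hypothetical file
    importer = zipimport.zipimporter(wheel)

    # find_eggs_in_zip() returns immediately for .whl archives, so the
    # generator yields nothing at all.
    print(list(pkg_resources.find_eggs_in_zip(importer, wheel)))  # -> []
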
@@ -2032,7 +2040,7 @@ class EntryPoint(object):
         list(map(working_set.add,
             working_set.resolve(self.dist.requires(self.extras),env,installer)))
 
-    #@classmethod
+    @classmethod
     def parse(cls, src, dist=None):
         """Parse a single entry point from string `src`
@@ -2064,9 +2072,7 @@ class EntryPoint(object):
         else:
             return cls(name.strip(), value.strip(), attrs, extras, dist)
 
-    parse = classmethod(parse)
-
-    #@classmethod
+    @classmethod
     def parse_group(cls, group, lines, dist=None):
         """Parse an entry point group"""
         if not MODULE(group):
@@ -2079,9 +2085,7 @@ class EntryPoint(object):
             this[ep.name]=ep
         return this
 
-    parse_group = classmethod(parse_group)
-
-    #@classmethod
+    @classmethod
     def parse_map(cls, data, dist=None):
         """Parse a map of entry point groups"""
         if isinstance(data,dict):
@@ -2100,8 +2104,6 @@ class EntryPoint(object):
             maps[group] = cls.parse_group(group, lines, dist)
         return maps
 
-    parse_map = classmethod(parse_map)
-
 def _remove_md5_fragment(location):
     if not location:
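
The EntryPoint hunks above are purely mechanical: the pre-decorator ``name = classmethod(name)`` idiom becomes ``@classmethod``. A minimal sketch of the equivalence (class names are illustrative)::

    class Legacy(object):
        def parse(cls, src):
            return (cls.__name__, src)
        parse = classmethod(parse)

    class Modern(object):
        @classmethod
        def parse(cls, src):
            return (cls.__name__, src)

    # Both spellings produce the same bound classmethod.
    assert Legacy.parse('x') == ('Legacy', 'x')
    assert Modern.parse('x') == ('Modern', 'x')
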
@@ -2128,7 +2130,7 @@ class Distribution(object):
         self.precedence = precedence
         self._provider = metadata or empty_provider
 
-    #@classmethod
+    @classmethod
     def from_location(cls,location,basename,metadata=None,**kw):
         project_name, version, py_version, platform = [None]*4
         basename, ext = os.path.splitext(basename)
@@ -2144,7 +2146,6 @@ class Distribution(object):
             location, metadata, project_name=project_name, version=version,
             py_version=py_version, platform=platform, **kw
         )
-    from_location = classmethod(from_location)
 
     hashcmp = property(
         lambda self: (
@@ -2177,16 +2178,15 @@ class Distribution(object):
     # metadata until/unless it's actually needed. (i.e., some distributions
     # may not know their name or version without loading PKG-INFO)
 
-    #@property
+    @property
     def key(self):
         try:
             return self._key
         except AttributeError:
             self._key = key = self.project_name.lower()
             return key
-    key = property(key)
 
-    #@property
+    @property
     def parsed_version(self):
         try:
             return self._parsed_version
@@ -2194,9 +2194,7 @@ class Distribution(object):
             self._parsed_version = pv = parse_version(self.version)
             return pv
 
-    parsed_version = property(parsed_version)
-
-    #@property
+    @property
     def version(self):
         try:
             return self._version
@@ -2209,9 +2207,8 @@ class Distribution(object):
                 raise ValueError(
                     "Missing 'Version:' header and/or %s file" % self.PKG_INFO, self
                 )
-    version = property(version)
 
-    #@property
+    @property
     def _dep_map(self):
         try:
             return self.__dep_map
@@ -2229,7 +2226,6 @@ class Distribution(object):
                     extra = safe_extra(extra) or None
                 dm.setdefault(extra,[]).extend(parse_requirements(reqs))
             return dm
-    _dep_map = property(_dep_map)
 
     def requires(self,extras=()):
         """List of Requirements needed for this distro if `extras` are used"""
@@ -2287,13 +2283,12 @@ class Distribution(object):
             raise AttributeError(attr)
         return getattr(self._provider, attr)
 
-    #@classmethod
+    @classmethod
     def from_filename(cls,filename,metadata=None, **kw):
         return cls.from_location(
             _normalize_cached(filename), os.path.basename(filename), metadata,
             **kw
         )
-    from_filename = classmethod(from_filename)
 
     def as_requirement(self):
         """Return a ``Requirement`` that matches this distribution exactly"""
@@ -2400,10 +2395,9 @@ class Distribution(object):
         kw.setdefault('metadata', self._provider)
         return self.__class__(**kw)
 
-    #@property
+    @property
     def extras(self):
         return [dep for dep in self._dep_map if dep]
-    extras = property(extras)
 
 class DistInfoDistribution(Distribution):
@@ -2607,7 +2601,7 @@ class Requirement:
     def __repr__(self): return "Requirement.parse(%r)" % str(self)
 
-    #@staticmethod
+    @staticmethod
     def parse(s):
         reqs = list(parse_requirements(s))
         if reqs:
@@ -2616,8 +2610,6 @@ class Requirement:
             raise ValueError("Expected only one requirement", s)
         raise ValueError("No requirements found", s)
 
-    parse = staticmethod(parse)
-
 state_machine = {
     # =><
     '<': '--T',
......
@@ -42,8 +42,8 @@ is at <http://python-requests.org>.
 """
 
 __title__ = 'requests'
-__version__ = '2.2.0'
-__build__ = 0x020200
+__version__ = '2.2.1'
+__build__ = 0x020201
 __author__ = 'Kenneth Reitz'
 __license__ = 'Apache 2.0'
 __copyright__ = 'Copyright 2014 Kenneth Reitz'
......
@@ -286,10 +286,6 @@ class HTTPAdapter(BaseAdapter):
         username, password = get_auth_from_url(proxy)
 
         if username and password:
-            # Proxy auth usernames and passwords will be urlencoded, we need
-            # to decode them.
-            username = unquote(username)
-            password = unquote(password)
             headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                              password)
......
@@ -8,9 +8,9 @@ import socket
 from socket import timeout as SocketTimeout
 
 try: # Python 3
-    from http.client import HTTPConnection, HTTPException
+    from http.client import HTTPConnection as _HTTPConnection, HTTPException
 except ImportError:
-    from httplib import HTTPConnection, HTTPException
+    from httplib import HTTPConnection as _HTTPConnection, HTTPException
 
 class DummyConnection(object):
     "Used to detect a failed ConnectionCls import."
@@ -24,9 +24,9 @@ try: # Compiled with SSL?
        pass
 
    try: # Python 3
-       from http.client import HTTPSConnection
+       from http.client import HTTPSConnection as _HTTPSConnection
    except ImportError:
-       from httplib import HTTPSConnection
+       from httplib import HTTPSConnection as _HTTPSConnection
 
    import ssl
    BaseSSLError = ssl.SSLError
@@ -45,6 +45,69 @@ from .util import (
     ssl_wrap_socket,
 )
 
+
+port_by_scheme = {
+    'http': 80,
+    'https': 443,
+}
+
+
+class HTTPConnection(_HTTPConnection, object):
+    default_port = port_by_scheme['http']
+
+    # By default, disable Nagle's Algorithm.
+    tcp_nodelay = 1
+
+    def _new_conn(self):
+        """ Establish a socket connection and set nodelay settings on it
+
+        :return: a new socket connection
+        """
+        try:
+            conn = socket.create_connection(
+                (self.host, self.port),
+                self.timeout,
+                self.source_address,
+            )
+        except AttributeError: # Python 2.6
+            conn = socket.create_connection(
+                (self.host, self.port),
+                self.timeout,
+            )
+        conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY,
+                        self.tcp_nodelay)
+
+        return conn
+
+    def _prepare_conn(self, conn):
+        self.sock = conn
+        if self._tunnel_host:
+            # TODO: Fix tunnel so it doesn't depend on self.sock state.
+            self._tunnel()
+
+    def connect(self):
+        conn = self._new_conn()
+        self._prepare_conn(conn)
+
+
+class HTTPSConnection(HTTPConnection):
+    default_port = port_by_scheme['https']
+
+    def __init__(self, host, port=None, key_file=None, cert_file=None,
+                 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+                 source_address=None):
+        try:
+            HTTPConnection.__init__(self, host, port, strict, timeout, source_address)
+        except TypeError: # Python 2.6
+            HTTPConnection.__init__(self, host, port, strict, timeout)
+        self.key_file = key_file
+        self.cert_file = cert_file
+
+    def connect(self):
+        conn = self._new_conn()
+        self._prepare_conn(conn)
+        self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
+
+
 class VerifiedHTTPSConnection(HTTPSConnection):
     """
     Based on httplib.HTTPSConnection but wraps the socket with
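
The classes added above give urllib3 its own connection layer on top of httplib. A small usage sketch (host is illustrative; this assumes a standalone urllib3 with this change, whereas pip ships it under the vendored requests package)::

    from urllib3.connection import HTTPConnection

    conn = HTTPConnection('example.com', 80, timeout=10)  # host is illustrative
    # _new_conn() sets TCP_NODELAY on the fresh socket (tcp_nodelay defaults
    # to 1); the pools below flip tcp_nodelay back to 0 when using a proxy.
    conn.request('GET', '/')
    print(conn.getresponse().status)
    conn.close()
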
@@ -73,9 +136,12 @@ class VerifiedHTTPSConnection(HTTPSConnection):
                 timeout=self.timeout,
             )
         except SocketTimeout:
             raise ConnectTimeoutError(
                 self, "Connection to %s timed out. (connect timeout=%s)" %
                 (self.host, self.timeout))
 
+        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY,
+                        self.tcp_nodelay)
+
         resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
         resolved_ssl_version = resolve_ssl_version(self.ssl_version)
@@ -107,4 +173,6 @@ class VerifiedHTTPSConnection(HTTPSConnection):
 
 if ssl:
+    # Make a copy for testing.
+    UnverifiedHTTPSConnection = HTTPSConnection
     HTTPSConnection = VerifiedHTTPSConnection

@@ -31,6 +31,7 @@ from .exceptions import (
 from .packages.ssl_match_hostname import CertificateError
 from .packages import six
 from .connection import (
+    port_by_scheme,
     DummyConnection,
     HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
     HTTPException, BaseSSLError,
@@ -51,12 +52,6 @@ log = logging.getLogger(__name__)
 
 _Default = object()
 
-port_by_scheme = {
-    'http': 80,
-    'https': 443,
-}
-
 
 ## Pool objects
 
 class ConnectionPool(object):
@@ -169,7 +164,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 
     def _new_conn(self):
         """
-        Return a fresh :class:`httplib.HTTPConnection`.
+        Return a fresh :class:`HTTPConnection`.
         """
         self.num_connections += 1
         log.info("Starting new HTTP connection (%d): %s" %
@@ -179,9 +174,14 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
         if not six.PY3: # Python 2
             extra_params['strict'] = self.strict
 
-        return self.ConnectionCls(host=self.host, port=self.port,
+        conn = self.ConnectionCls(host=self.host, port=self.port,
                                   timeout=self.timeout.connect_timeout,
                                   **extra_params)
+        if self.proxy is not None:
+            # Enable Nagle's algorithm for proxies, to avoid packet
+            # fragmentation.
+            conn.tcp_nodelay = 0
+        return conn
 
     def _get_conn(self, timeout=None):
         """
@@ -260,7 +260,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 
     def _make_request(self, conn, method, url, timeout=_Default,
                       **httplib_request_kw):
         """
-        Perform a request on a given httplib connection object taken from our
+        Perform a request on a given urllib connection object taken from our
         pool.
 
         :param conn:
@@ -517,17 +517,17 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
                 raise
 
             except (HTTPException, SocketError) as e:
-                if isinstance(e, SocketError) and self.proxy is not None:
-                    raise ProxyError('Cannot connect to proxy. '
-                                     'Socket error: %s.' % e)
-
                 # Connection broken, discard. It will be replaced next _get_conn().
                 conn = None
                 # This is necessary so we can access e below
                 err = e
 
                 if retries == 0:
-                    raise MaxRetryError(self, url, e)
+                    if isinstance(e, SocketError) and self.proxy is not None:
+                        raise ProxyError('Cannot connect to proxy. '
+                                         'Socket error: %s.' % e)
+                    else:
+                        raise MaxRetryError(self, url, e)
 
         finally:
             if release_conn:
@@ -565,7 +565,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
     When Python is compiled with the :mod:`ssl` module, then
     :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
-    instead of :class:`httplib.HTTPSConnection`.
+    instead of :class:`.HTTPSConnection`.
 
     :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
     ``assert_hostname`` and ``host`` in this order to verify connections.
@@ -652,6 +652,10 @@ class HTTPSConnectionPool(HTTPConnectionPool):
         conn = self.ConnectionCls(host=actual_host, port=actual_port,
                                   timeout=self.timeout.connect_timeout,
                                   **extra_params)
+        if self.proxy is not None:
+            # Enable Nagle's algorithm for proxies, to avoid packet
+            # fragmentation.
+            conn.tcp_nodelay = 0
 
         return self._prepare_conn(conn)
......
-'''SSL with SNI-support for Python 2.
+'''SSL with SNI_-support for Python 2.
 
 This needs the following packages installed:
@@ -18,12 +18,31 @@ your application begins using ``urllib3``, like this::
 Now you can use :mod:`urllib3` as you normally would, and it will support SNI
 when the required modules are installed.
 
+Activating this module also has the positive side effect of disabling SSL/TLS
+compression in Python 2 (see `CRIME attack`_).
+
+If you want to configure the default list of supported cipher suites, you can
+set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
+
+Module Variables
+----------------
+
+:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
+    Default: ``EECDH+ECDSA+AESGCM EECDH+aRSA+AESGCM EECDH+ECDSA+SHA256
+    EECDH+aRSA+SHA256 EECDH+aRSA+RC4 EDH+aRSA EECDH RC4 !aNULL !eNULL !LOW !3DES
+    !MD5 !EXP !PSK !SRP !DSS``
+
+.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
+.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
+
 '''
 
 from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
-from ndg.httpsclient.subj_alt_name import SubjectAltName
+from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
 import OpenSSL.SSL
 from pyasn1.codec.der import decoder as der_decoder
+from pyasn1.type import univ, constraint
 from socket import _fileobject
 import ssl
 import select
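
The expanded docstring above is the user-facing contract for this module: inject it into urllib3 before any requests are made, and optionally override the cipher list. A minimal sketch (assumes pyOpenSSL, ndg-httpsclient and pyasn1 are installed; the cipher value and URL are illustrative)::

    import urllib3
    import urllib3.contrib.pyopenssl

    # Optional: narrow the default cipher list (the value here is illustrative).
    urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST = 'EECDH+aRSA+AESGCM'

    urllib3.contrib.pyopenssl.inject_into_urllib3()

    http = urllib3.PoolManager()
    r = http.request('GET', 'https://example.com/')
    print(r.status)
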
@@ -50,6 +69,13 @@ _openssl_verify = {
         + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
 }
 
+# Default SSL/TLS cipher list.
+# Recommendation by https://community.qualys.com/blogs/securitylabs/2013/08/05/
+# configuring-apache-nginx-and-openssl-for-forward-secrecy
+DEFAULT_SSL_CIPHER_LIST = 'EECDH+ECDSA+AESGCM EECDH+aRSA+AESGCM ' + \
+        'EECDH+ECDSA+SHA256 EECDH+aRSA+SHA256 EECDH+aRSA+RC4 EDH+aRSA ' + \
+        'EECDH RC4 !aNULL !eNULL !LOW !3DES !MD5 !EXP !PSK !SRP !DSS'
+
 orig_util_HAS_SNI = util.HAS_SNI
 orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
@@ -69,6 +95,17 @@ def extract_from_urllib3():
     util.HAS_SNI = orig_util_HAS_SNI
 
+### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
+class SubjectAltName(BaseSubjectAltName):
+    '''ASN.1 implementation for subjectAltNames support'''
+
+    # There is no limit to how many SAN certificates a certificate may have,
+    # however this needs to have some limit so we'll set an arbitrarily high
+    # limit.
+    sizeSpec = univ.SequenceOf.sizeSpec + \
+        constraint.ValueSizeConstraint(1, 1024)
+
+
 ### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
 def get_subj_alt_name(peer_cert):
     # Search through extensions
@@ -330,6 +367,13 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
         except OpenSSL.SSL.Error as e:
             raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
 
+    # Disable TLS compression to mitigate CRIME attack (issue #309)
+    OP_NO_COMPRESSION = 0x20000
+    ctx.set_options(OP_NO_COMPRESSION)
+
+    # Set list of supported ciphersuites.
+    ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)
+
     cnx = OpenSSL.SSL.Connection(ctx, sock)
     cnx.set_tlsext_host_name(server_hostname)
     cnx.set_connect_state()
......
@@ -46,16 +46,15 @@ def iter_field_objects(fields):
 
 def iter_fields(fields):
     """
-    Iterate over fields.
+    .. deprecated:: 1.6
+        Iterate over fields.
 
-    .. deprecated ::
-        The addition of `~urllib3.fields.RequestField` makes this function
-        obsolete. Instead, use :func:`iter_field_objects`, which returns
-        `~urllib3.fields.RequestField` objects, instead.
+        The addition of :class:`~urllib3.fields.RequestField` makes this function
+        obsolete. Instead, use :func:`iter_field_objects`, which returns
+        :class:`~urllib3.fields.RequestField` objects.
 
     Supports list of (k, v) tuples and dicts.
     """
     if isinstance(fields, dict):
         return ((k, v) for k, v in six.iteritems(fields))
......
 # urllib3/poolmanager.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+# Copyright 2008-2014 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
 #
 # This module is part of urllib3 and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -176,7 +176,7 @@ class ProxyManager(PoolManager):
     Behaves just like :class:`PoolManager`, but sends all requests through
     the defined proxy, using the CONNECT method for HTTPS URLs.
 
-    :param poxy_url:
+    :param proxy_url:
         The URL of the proxy to be used.
 
     :param proxy_headers:
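
For context on the corrected ``proxy_url`` parameter, a brief usage sketch of ``ProxyManager`` (proxy and target URLs are illustrative)::

    import urllib3

    proxy = urllib3.ProxyManager('http://localhost:3128/')   # proxy_url
    r = proxy.request('GET', 'http://example.com/')          # sent via the proxy
    print(r.status)
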
......
@@ -620,6 +620,11 @@ if SSLContext is not None: # Python 3.2+
         """
         context = SSLContext(ssl_version)
         context.verify_mode = cert_reqs
+
+        # Disable TLS compression to mitigate CRIME attack (issue #309)
+        OP_NO_COMPRESSION = 0x20000
+        context.options |= OP_NO_COMPRESSION
+
         if ca_certs:
             try:
                 context.load_verify_locations(ca_certs)
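
The hard-coded ``0x20000`` above matches OpenSSL's ``SSL_OP_NO_COMPRESSION`` flag; the literal is used because older Pythons do not expose the constant. A quick check where the stdlib does provide it::

    import ssl

    OP_NO_COMPRESSION = 0x20000
    # ssl.OP_NO_COMPRESSION exists on Python 3.3+; older versions need the literal.
    assert getattr(ssl, 'OP_NO_COMPRESSION', OP_NO_COMPRESSION) == OP_NO_COMPRESSION
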
......
@@ -634,12 +634,14 @@ def except_on_missing_scheme(url):
 def get_auth_from_url(url):
     """Given a url with authentication components, extract them into a tuple of
     username,password."""
-    if url:
-        url = unquote(url)
-        parsed = urlparse(url)
-        return (parsed.username, parsed.password)
-    else:
-        return ('', '')
+    parsed = urlparse(url)
+
+    try:
+        auth = (unquote(parsed.username), unquote(parsed.password))
+    except (AttributeError, TypeError):
+        auth = ('', '')
+
+    return auth
 
 def to_native_string(string, encoding='ascii'):
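
A behaviour sketch of the rewritten helper (URLs are illustrative; assumes requests 2.2.1 is importable): percent-encoded proxy credentials are now decoded here rather than in ``HTTPAdapter.proxy_headers``, which is why the ``unquote`` calls were dropped from the adapter earlier in this commit::

    from requests.utils import get_auth_from_url

    print(get_auth_from_url('http://user%40corp:p%40ss@proxy.local:3128/'))
    # ('user@corp', 'p@ss')

    # No credentials: parsed.username is None, so the except branch kicks in.
    print(get_auth_from_url('http://proxy.local:3128/'))
    # ('', '')
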
......
@@ -2,4 +2,4 @@ distlib==0.1.7
 html5lib==1.0b1
 six==1.3.0
 colorama==0.2.7
-requests==2.2.0
+requests==2.2.1
@@ -26,6 +26,10 @@ def parse_requirements(filename, finder=None, comes_from=None, options=None,
     for line_number, line in enumerate(content.splitlines()):
         line_number += 1
         line = line.strip()
+
+        # Remove comments from file
+        line = re.sub(r"(^|\s)#.*$", "", line)
+
         if not line or line.startswith('#'):
             continue
         if skip_match and skip_match.search(line):
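
A behaviour sketch of the comment stripping added above (the helper name and sample lines are illustrative): a ``#`` at the start of a line or preceded by whitespace begins a comment, while an ``#egg=`` fragment glued to a URL is left alone::

    import re

    def strip_comment(line):
        return re.sub(r"(^|\s)#.*$", "", line)

    print(strip_comment("# a whole-line comment"))                    # ''
    print(strip_comment("https://example.com/foo.tar.gz # comment"))  # URL only
    print(strip_comment("https://example.com/foo.tar.gz#egg=wat"))    # unchanged
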
......
@@ -171,3 +171,44 @@ def test_req_file_parse_use_wheel(data, monkeypatch):
     for req in parse_requirements(data.reqfiles.join("supported_options.txt"), finder):
         pass
     assert finder.use_wheel
+
+
+def test_req_file_parse_comment_start_of_line(tmpdir):
+    """
+    Test parsing comments in a requirements file
+    """
+    with open(tmpdir.join("req1.txt"), "w") as fp:
+        fp.write("# Comment ")
+
+    finder = PackageFinder([], [])
+    reqs = list(parse_requirements(tmpdir.join("req1.txt"), finder))
+
+    assert not reqs
+
+
+def test_req_file_parse_comment_end_of_line_with_url(tmpdir):
+    """
+    Test parsing comments in a requirements file
+    """
+    with open(tmpdir.join("req1.txt"), "w") as fp:
+        fp.write("https://example.com/foo.tar.gz # Comment ")
+
+    finder = PackageFinder([], [])
+    reqs = list(parse_requirements(tmpdir.join("req1.txt"), finder))
+
+    assert len(reqs) == 1
+    assert reqs[0].url == "https://example.com/foo.tar.gz"
+
+
+def test_req_file_parse_egginfo_end_of_line_with_url(tmpdir):
+    """
+    Test parsing comments in a requirements file
+    """
+    with open(tmpdir.join("req1.txt"), "w") as fp:
+        fp.write("https://example.com/foo.tar.gz#egg=wat")
+
+    finder = PackageFinder([], [])
+    reqs = list(parse_requirements(tmpdir.join("req1.txt"), finder))
+
+    assert len(reqs) == 1
+    assert reqs[0].name == "wat"