repo | pull_number | instance_id | issue_numbers | base_commit | patch | test_patch | problem_statement | hints_text | created_at | PASS_TO_PASS | FAIL_TO_PASS
---|---|---|---|---|---|---|---|---|---|---|---|
urllib3/urllib3 | 1,692 | urllib3__urllib3-1692 | [
"1684"
] | 15e05b314d890949c5629c1d2ab565ed99517089 | diff --git a/src/urllib3/util/url.py b/src/urllib3/util/url.py
--- a/src/urllib3/util/url.py
+++ b/src/urllib3/util/url.py
@@ -50,7 +50,7 @@
"(?:(?:%(hex)s:){0,6}%(hex)s)?::",
]
-UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-"
+UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
@@ -63,17 +63,18 @@
BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
-SUBAUTHORITY_PAT = (u"^(?:(.*)@)?" u"(%s|%s|%s)" u"(?::([0-9]{0,5}))?$") % (
+SUBAUTHORITY_PAT = (u"^(?:(.*)@)?(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
REG_NAME_PAT,
IPV4_PAT,
IPV6_ADDRZ_PAT,
)
SUBAUTHORITY_RE = re.compile(SUBAUTHORITY_PAT, re.UNICODE | re.DOTALL)
-ZONE_ID_CHARS = set(
- "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "abcdefghijklmnopqrstuvwxyz" "0123456789._!-"
+UNRESERVED_CHARS = set(
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
)
-USERINFO_CHARS = ZONE_ID_CHARS | set("$&'()*+,;=:")
+SUB_DELIM_CHARS = set("!$&'()*+,;=")
+USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"}
PATH_CHARS = USERINFO_CHARS | {"@", "/"}
QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"}
@@ -290,7 +291,7 @@ def _normalize_host(host, scheme):
zone_id = zone_id[3:]
else:
zone_id = zone_id[1:]
- zone_id = "%" + _encode_invalid_chars(zone_id, ZONE_ID_CHARS)
+ zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS)
return host[:start].lower() + zone_id + host[end:]
else:
return host.lower()
| diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -170,6 +170,10 @@ def test_invalid_url(self, url):
"url, expected_normalized_url",
[
("HTTP://GOOGLE.COM/MAIL/", "http://google.com/MAIL/"),
+ (
+ "http://[email protected]:[email protected]/~tilde@?@",
+ "http://user%40domain.com:[email protected]/~tilde@?@",
+ ),
(
"HTTP://JeremyCline:[email protected]:8080/",
"http://JeremyCline:[email protected]:8080/",
| Add tilde to ZONE_ID_CHARS
Fixes #1683.
Recently merged #1673 ensured that URLs are percent-encoded.
This is a behavior change in that URLs with tilde characters in
them would not previously have been percent-encoded, but are now.
RFC 3986 says[1]:
For consistency, percent-encoded octets in the ranges of ALPHA
(%41-%5A and %61-%7A), DIGIT (%30-%39), hyphen (%2D), period (%2E),
underscore (%5F), or tilde (%7E) should not be created by URI
producers and, when found in a URI, should be decoded to their
corresponding unreserved characters by URI normalizers.
This suggests that urllib3 should not escape tilde characters
in URLs. The RFC describes tilde as an "unreserved" character.
Among the character classes at the top of url.py, the closest
match to the "unreserved" set seems to be ZONE_ID_CHARS. RFC6874
says[2]:
A <zone_id> SHOULD contain only ASCII characters classified as
"unreserved" for use in URIs [RFC3986].
Which suggests that it should be safe to add to that set.
[1] https://tools.ietf.org/html/rfc3986#section-2.3
[2] https://tools.ietf.org/html/rfc6874#section-2
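To illustrate the effect (a minimal standalone sketch, not urllib3's actual `_encode_invalid_chars`), percent-encoding against an unreserved set that includes `~` leaves tildes untouched:
```python
import string

# UNRESERVED mirrors the RFC 3986 unreserved characters, including "~".
UNRESERVED = set(string.ascii_letters + string.digits + "._-~")

def encode_invalid_chars(component):
    # Any character outside the allowed set becomes %XX; "~" now passes through.
    return "".join(
        ch if ch in UNRESERVED else "%{:02X}".format(ord(ch)) for ch in component
    )

print(encode_invalid_chars("~tilde"))  # '~tilde' (was '%7Etilde' before the fix)
print(encode_invalid_chars("a b"))     # 'a%20b'
```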
| # [Codecov](https://codecov.io/gh/urllib3/urllib3/pull/1684?src=pr&el=h1) Report
> Merging [#1684](https://codecov.io/gh/urllib3/urllib3/pull/1684?src=pr&el=desc) into [master](https://codecov.io/gh/urllib3/urllib3/commit/7e856c04723036934fe314c63701466e4f42d2ee?src=pr&el=desc) will **not change** coverage.
> The diff coverage is `n/a`.
```diff
@@ Coverage Diff @@
## master #1684 +/- ##
=======================================
Coverage 99.45% 99.45%
=======================================
Files 22 22
Lines 2005 2005
=======================================
Hits 1994 1994
Misses 11 11
```
| [Impacted Files](https://codecov.io/gh/urllib3/urllib3/pull/1684?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [src/urllib3/util/url.py](https://codecov.io/gh/urllib3/urllib3/pull/1684/diff?src=pr&el=tree#diff-c3JjL3VybGxpYjMvdXRpbC91cmwucHk=) | `98.98% <ø> (ø)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/urllib3/urllib3/pull/1684?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/urllib3/urllib3/pull/1684?src=pr&el=footer). Last update [7e856c0...4f65f3a](https://codecov.io/gh/urllib3/urllib3/pull/1684?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
| 2019-09-24T13:14:45Z | [] | [] |
urllib3/urllib3 | 1,732 | urllib3__urllib3-1732 | [
"1731"
] | f4b36ad045ccfbbfaaadd8e69f9b32c5d81cbd84 | diff --git a/dummyserver/handlers.py b/dummyserver/handlers.py
--- a/dummyserver/handlers.py
+++ b/dummyserver/handlers.py
@@ -229,6 +229,10 @@ def echo(self, request):
return Response(request.body)
+ def echo_uri(self, request):
+ "Echo back the requested URI"
+ return Response(request.uri)
+
def encodingrequest(self, request):
"Check for UA accepting gzip/deflate encoding"
data = b"hello, world!"
diff --git a/src/urllib3/util/url.py b/src/urllib3/util/url.py
--- a/src/urllib3/util/url.py
+++ b/src/urllib3/util/url.py
@@ -55,7 +55,7 @@
ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
-TARGET_RE = re.compile(r"^(/[^?]*)(?:\?([^#]+))?(?:#(.*))?$")
+TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")
IPV4_RE = re.compile("^" + IPV4_PAT + "$")
IPV6_RE = re.compile("^" + IPV6_PAT + "$")
@@ -325,14 +325,11 @@ def _encode_target(target):
if not target.startswith("/"):
return target
- path, query, fragment = TARGET_RE.match(target).groups()
+ path, query = TARGET_RE.match(target).groups()
target = _encode_invalid_chars(path, PATH_CHARS)
query = _encode_invalid_chars(query, QUERY_CHARS)
- fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS)
if query is not None:
target += "?" + query
- if fragment is not None:
- target += "#" + target
return target
| diff --git a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py
--- a/test/with_dummyserver/test_poolmanager.py
+++ b/test/with_dummyserver/test_poolmanager.py
@@ -333,6 +333,25 @@ def test_http_with_ca_cert_dir(self):
r = http.request("GET", "http://%s:%s/" % (self.host, self.port))
assert r.status == 200
+ @pytest.mark.parametrize(
+ ["target", "expected_target"],
+ [
+ ("/echo_uri?q=1#fragment", b"/echo_uri?q=1"),
+ ("/echo_uri?#", b"/echo_uri?"),
+ ("/echo_uri#?", b"/echo_uri"),
+ ("/echo_uri#?#", b"/echo_uri"),
+ ("/echo_uri??#", b"/echo_uri??"),
+ ("/echo_uri?%3f#", b"/echo_uri?%3F"),
+ ("/echo_uri?%3F#", b"/echo_uri?%3F"),
+ ("/echo_uri?[]", b"/echo_uri?%5B%5D"),
+ ],
+ )
+ def test_encode_http_target(self, target, expected_target):
+ with PoolManager() as http:
+ url = "http://%s:%d%s" % (self.host, self.port, target)
+ r = http.request("GET", url)
+ assert r.data == expected_target
+
@pytest.mark.skipif(not HAS_IPV6, reason="IPv6 is not supported on this system")
class TestIPv6PoolManager(IPv6HTTPDummyServerTestCase):
| AttributeError: 'NoneType' object has no attribute 'groups' on simple PoolManager.request
```
http = urllib3.PoolManager(maxsize=10)
url = "http://someaddress:32400/library/sections?"
token = "NotSharingIsSometimesCaring"
get = http.request(method='GET', url=url, headers={'X-Plex-Token': token})
```
Tried macOS Mojave with Python 3.7.4 and in Docker with Python 3.8
using urllib3 1.25.6
```
$ python3 test.py
Traceback (most recent call last):
File "test.py", line 9, in <module>
get = http.request(method='GET', url=url, headers={'X-Plex-Token': token})
File "/Users/someusername/Library/Python/3.7/lib/python/site-packages/urllib3/request.py", line 76, in request
method, url, fields=fields, headers=headers, **urlopen_kw
File "/Users/someusername/Library/Python/3.7/lib/python/site-packages/urllib3/request.py", line 97, in request_encode_url
return self.urlopen(method, url, **extra_kw)
File "/Users/someusername/Library/Python/3.7/lib/python/site-packages/urllib3/poolmanager.py", line 330, in urlopen
response = conn.urlopen(method, u.request_uri, **kw)
File "/Users/someusername/Library/Python/3.7/lib/python/site-packages/urllib3/connectionpool.py", line 615, in urlopen
url = six.ensure_str(_encode_target(url))
File "/Users/someusername/Library/Python/3.7/lib/python/site-packages/urllib3/util/url.py", line 328, in _encode_target
path, query, fragment = TARGET_RE.match(target).groups()
AttributeError: 'NoneType' object has no attribute 'groups'
```
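The trailing `?` with an empty query is what defeats the old pattern: its query group `([^#]+)` requires at least one character after the `?`, so the regex fails to match entirely and `TARGET_RE.match(target)` returns `None`. A minimal repro using the two patterns from the diff above:
```python
import re

# Old vs. fixed patterns, copied from the diff above.
OLD_TARGET_RE = re.compile(r"^(/[^?]*)(?:\?([^#]+))?(?:#(.*))?$")
NEW_TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")

target = "/library/sections?"
print(OLD_TARGET_RE.match(target))           # None -> .groups() raised AttributeError
print(NEW_TARGET_RE.match(target).groups())  # ('/library/sections', '')
```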
| 2019-11-04T14:26:34Z | [] | [] |
|
urllib3/urllib3 | 1,782 | urllib3__urllib3-1782 | [
"1780"
] | 62ef68e49edf5dabde26732a154d0e925cef7301 | diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -792,7 +792,7 @@ def geturl(self):
return self._request_url
def __iter__(self):
- buffer = [b""]
+ buffer = []
for chunk in self.stream(decode_content=True):
if b"\n" in chunk:
chunk = chunk.split(b"\n")
| diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -859,8 +859,9 @@ def test_geturl_retries(self):
@pytest.mark.parametrize(
["payload", "expected_stream"],
[
- (b"", [b""]),
+ (b"", []),
(b"\n", [b"\n"]),
+ (b"\n\n\n", [b"\n", b"\n", b"\n"]),
(b"abc\ndef", [b"abc\n", b"def"]),
(b"Hello\nworld\n\n\n!", [b"Hello\n", b"world\n", b"\n", b"\n", b"!"]),
],
| iterating a closed response improperly produces data
Consider the following script:
```
import urllib3
http = urllib3.PoolManager()
resp = http.request("GET", "https://www.python.org")
resp.close()
for d in resp:
print(repr(d))
```
With urllib3 1.25.7, this program prints `b''`. With urllib3 1.24.3, one sees:
```
Traceback (most recent call last):
File "example.py", line 6, in <module>
for d in resp:
ValueError: I/O operation on closed file.
```
The latter is in line with what I expect.
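A simplified sketch of the line-splitting loop in `HTTPResponse.__iter__` (reduced here to plain chunk lists, not the real method) shows why seeding the buffer with `b""` made an exhausted or closed stream yield one spurious empty chunk:
```python
def iter_lines(chunks):
    buffer = []  # the bug: this used to be seeded as [b""]
    for chunk in chunks:
        if b"\n" in chunk:
            parts = chunk.split(b"\n")
            yield b"".join(buffer) + parts[0] + b"\n"
            for part in parts[1:-1]:
                yield part + b"\n"
            buffer = [parts[-1]] if parts[-1] else []
        else:
            buffer.append(chunk)
    if buffer:  # with buffer = [b""], an empty stream still reached this yield
        yield b"".join(buffer)

print(list(iter_lines([])))             # [] -- previously [b'']
print(list(iter_lines([b"abc\ndef"])))  # [b'abc\n', b'def']
```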
| Great catch, the iteration behavior is new and hasn't been flexed enough. Would you be willing to submit a patch to fix this issue? | 2020-01-14T02:37:27Z | [] | [] |
urllib3/urllib3 | 1,812 | urllib3__urllib3-1812 | [
"1517"
] | 8c7a43b4a4ca0c8d36d55f132daa2a43d06fe3c4 | diff --git a/src/urllib3/contrib/pyopenssl.py b/src/urllib3/contrib/pyopenssl.py
--- a/src/urllib3/contrib/pyopenssl.py
+++ b/src/urllib3/contrib/pyopenssl.py
@@ -450,9 +450,12 @@ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
cafile = cafile.encode("utf-8")
if capath is not None:
capath = capath.encode("utf-8")
- self._ctx.load_verify_locations(cafile, capath)
- if cadata is not None:
- self._ctx.load_verify_locations(BytesIO(cadata))
+ try:
+ self._ctx.load_verify_locations(cafile, capath)
+ if cadata is not None:
+ self._ctx.load_verify_locations(BytesIO(cadata))
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("unable to load trusted certificates: %r" % e)
def load_cert_chain(self, certfile, keyfile=None, password=None):
self._ctx.use_certificate_chain_file(certfile)
| diff --git a/test/contrib/test_pyopenssl.py b/test/contrib/test_pyopenssl.py
--- a/test/contrib/test_pyopenssl.py
+++ b/test/contrib/test_pyopenssl.py
@@ -45,6 +45,7 @@ def teardown_module():
TestSNI,
TestSocketClosing,
TestClientCerts,
+ TestSSL,
)
diff --git a/test/contrib/test_securetransport.py b/test/contrib/test_securetransport.py
--- a/test/contrib/test_securetransport.py
+++ b/test/contrib/test_securetransport.py
@@ -43,6 +43,7 @@ def teardown_module():
TestSNI,
TestSocketClosing,
TestClientCerts,
+ TestSSL,
)
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -10,6 +10,7 @@
ProtocolError,
)
from urllib3.response import httplib
+from urllib3.util import ssl_wrap_socket
from urllib3.util.ssl_ import HAS_SNI
from urllib3.util import ssl_
from urllib3.util.timeout import Timeout
@@ -37,6 +38,7 @@ class MimeToolMessage(object):
from collections import OrderedDict
import os.path
from threading import Event
+import os
import select
import socket
import shutil
@@ -1387,6 +1389,13 @@ def socket_handler(listener):
pool.request("GET", "/", timeout=SHORT_TIMEOUT)
context.load_default_certs.assert_not_called()
+ def test_load_verify_locations_exception(self):
+ """
+ Ensure that load_verify_locations raises SSLError for all backends
+ """
+ with pytest.raises(SSLError):
+ ssl_wrap_socket(None, ca_certs=os.devnull)
+
class TestErrorWrapping(SocketDummyServerTestCase):
def test_bad_statusline(self):
| WIP: Ensure PyOpenSSLContext.load_verify_locations raises ssl.SSLError
Without this patch, an `OpenSSL.SSL.Error` is raised when `PyOpenSSLContext.load_verify_locations` fails:
```
$ python -c "import os; import urllib3; import urllib3.contrib.pyopenssl; urllib3.contrib.pyopenssl.inject_into_urllib3(); urllib3.util.ssl_wrap_socket(None, ca_certs=os.devnull)"
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/tmp/urllib3/local/lib/python2.7/site-packages/urllib3/util/ssl_.py", line 321, in ssl_wrap_socket
context.load_verify_locations(ca_certs, ca_cert_dir)
File "/tmp/urllib3/local/lib/python2.7/site-packages/urllib3/contrib/pyopenssl.py", line 428, in load_verify_locations
self._ctx.load_verify_locations(cafile, capath)
File "/tmp/urllib3/local/lib/python2.7/site-packages/OpenSSL/SSL.py", line 781, in load_verify_locations
_raise_current_error()
File "/tmp/urllib3/local/lib/python2.7/site-packages/OpenSSL/_util.py", line 53, in exception_from_error_queue
raise exception_type(errors)
OpenSSL.SSL.Error: []
```
With this patch, an `ssl.SSLError` is raised, in the same way [`do_handshake` errors are handled](https://github.com/pilou-/urllib3/blob/f17a64dbd052be6cdbdbd8b03f01ad0bc09a5ab3/src/urllib3/contrib/pyopenssl.py#L461-L462):
```
$ python -c "import os; import urllib3; import urllib3.contrib.pyopenssl; urllib3.contrib.pyopenssl.inject_into_urllib3(); urllib3.util.ssl_wrap_socket(None, ca_certs=os.devnull)"
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/home/pilou/src/urllib3/src/urllib3/util/ssl_.py", line 313, in ssl_wrap_socket
raise SSLError(e)
urllib3.exceptions.SSLError: ('unable to load trusted certificates: Error([('x509 certificate routines', 'X509_load_cert_crl_file', 'no certificate or crl found')],)',)
```
then:
1. this `ssl.SSLError` is caught by [`ssl_wrap_socket`](https://github.com/pilou-/urllib3/blob/f17a64dbd052be6cdbdbd8b03f01ad0bc09a5ab3/src/urllib3/util/ssl_.py#L310-L313)
2. an `urllib3.exceptions.SSLError` error is raised
* which is the same exception that the one raised when pyOpenSSL isn't available
* and which is what users of the urllib3 library expect (this can be tested with `python -c "import requests, os; requests.get('https://github.com', verify=os.devnull)"`); see the usage sketch below
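A hedged usage sketch, reusing the commands from above, showing that callers can now handle a bad trust store uniformly whichever TLS backend is injected:
```python
import os

import urllib3
import urllib3.contrib.pyopenssl

urllib3.contrib.pyopenssl.inject_into_urllib3()

try:
    # os.devnull contains no certificates, so loading the CA bundle fails.
    urllib3.util.ssl_wrap_socket(None, ca_certs=os.devnull)
except urllib3.exceptions.SSLError as e:
    print("unable to load trusted certificates:", e)
```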
| # [Codecov](https://codecov.io/gh/urllib3/urllib3/pull/1517?src=pr&el=h1) Report
> Merging [#1517](https://codecov.io/gh/urllib3/urllib3/pull/1517?src=pr&el=desc) into [master](https://codecov.io/gh/urllib3/urllib3/commit/adb358f8e06865406d1f05e581a16cbea2136fbc?src=pr&el=desc) will **increase** coverage by `0.03%`.
> The diff coverage is `n/a`.
```diff
@@ Coverage Diff @@
## master #1517 +/- ##
==========================================
+ Coverage 64.68% 64.72% +0.03%
==========================================
Files 22 22
Lines 2897 2897
==========================================
+ Hits 1874 1875 +1
+ Misses 1023 1022 -1
```
| [Impacted Files](https://codecov.io/gh/urllib3/urllib3/pull/1517?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [src/urllib3/util/ssl\_.py](https://codecov.io/gh/urllib3/urllib3/pull/1517/diff?src=pr&el=tree#diff-c3JjL3VybGxpYjMvdXRpbC9zc2xfLnB5) | `37.06% <0%> (+0.43%)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/urllib3/urllib3/pull/1517?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/urllib3/urllib3/pull/1517?src=pr&el=footer). Last update [adb358f...b065bed](https://codecov.io/gh/urllib3/urllib3/pull/1517?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
| 2020-03-12T13:53:13Z | [] | [] |
urllib3/urllib3 | 1,817 | urllib3__urllib3-1817 | [
"1808"
] | 02867340de0a8c1b15541d9da3531e8576105bea | diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -765,21 +765,6 @@ def urlopen(
**response_kw
)
- def drain_and_release_conn(response):
- try:
- # discard any remaining response body, the connection will be
- # released back to the pool once the entire response is read
- response.read()
- except (
- TimeoutError,
- HTTPException,
- SocketError,
- ProtocolError,
- BaseSSLError,
- SSLError,
- ):
- pass
-
# Handle redirect?
redirect_location = redirect and response.get_redirect_location()
if redirect_location:
@@ -790,15 +775,11 @@ def drain_and_release_conn(response):
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_redirect:
- # Drain and release the connection for this response, since
- # we're not returning it to be released manually.
- drain_and_release_conn(response)
+ response.drain_conn()
raise
return response
- # drain and return the connection to the pool before recursing
- drain_and_release_conn(response)
-
+ response.drain_conn()
retries.sleep_for_retry(response)
log.debug("Redirecting %s -> %s", url, redirect_location)
return self.urlopen(
@@ -824,15 +805,11 @@ def drain_and_release_conn(response):
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_status:
- # Drain and release the connection for this response, since
- # we're not returning it to be released manually.
- drain_and_release_conn(response)
+ response.drain_conn()
raise
return response
- # drain and return the connection to the pool before recursing
- drain_and_release_conn(response)
-
+ response.drain_conn()
retries.sleep(response)
log.debug("Retry: %s", url)
return self.urlopen(
diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py
--- a/src/urllib3/poolmanager.py
+++ b/src/urllib3/poolmanager.py
@@ -7,6 +7,7 @@
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
+
from .exceptions import (
HTTPWarning,
LocationValueError,
@@ -384,6 +385,7 @@ def urlopen(self, method, url, redirect=True, **kw):
retries = retries.increment(method, url, response=response, _pool=conn)
except MaxRetryError:
if retries.raise_on_redirect:
+ response.drain_conn()
raise
return response
@@ -391,6 +393,8 @@ def urlopen(self, method, url, redirect=True, **kw):
kw["redirect"] = redirect
log.info("Redirecting %s -> %s", url, redirect_location)
+
+ response.drain_conn()
return self.urlopen(method, redirect_location, **kw)
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -20,6 +20,7 @@
ResponseNotChunked,
IncompleteRead,
InvalidHeader,
+ HTTPError,
)
from .packages.six import string_types as basestring, PY3
from .packages.six.moves import http_client as httplib
@@ -277,6 +278,17 @@ def release_conn(self):
self._pool._put_conn(self._connection)
self._connection = None
+ def drain_conn(self):
+ """
+ Read and discard any remaining HTTP response data in the response connection.
+
+ Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
+ """
+ try:
+ self.read()
+ except (HTTPError, SocketError, BaseSSLError, HTTPException):
+ pass
+
@property
def data(self):
# For backwords-compat with earlier urllib3 0.4 and earlier.
| diff --git a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py
--- a/test/with_dummyserver/test_poolmanager.py
+++ b/test/with_dummyserver/test_poolmanager.py
@@ -115,6 +115,7 @@ def test_too_many_redirects(self):
% (self.base_url, self.base_url)
},
retries=1,
+ preload_content=False,
)
with pytest.raises(MaxRetryError):
@@ -126,8 +127,15 @@ def test_too_many_redirects(self):
% (self.base_url, self.base_url)
},
retries=Retry(total=None, redirect=1),
+ preload_content=False,
)
+ # Even with preload_content=False and raise on redirects, we reused the same
+ # connection
+ assert len(http.pools) == 1
+ pool = http.connection_from_host(self.host, self.port)
+ assert pool.num_connections == 1
+
def test_redirect_cross_host_remove_headers(self):
with PoolManager() as http:
r = http.request(
@@ -206,6 +214,15 @@ def test_redirect_cross_host_set_removed_headers(self):
assert "X-API-Secret" not in data
assert data["Authorization"] == "bar"
+ def test_redirect_without_preload_releases_connection(self):
+ with PoolManager(block=True, maxsize=2) as http:
+ r = http.request(
+ "GET", "%s/redirect" % self.base_url, preload_content=False
+ )
+ assert r._pool.num_requests == 2
+ assert r._pool.num_connections == 1
+ assert len(http.pools) == 1
+
def test_raise_on_redirect(self):
with PoolManager() as http:
r = http.request(
| urllib3 hanging inside call to PoolManager request
I have a repeatable issue with urllib3 hanging inside PoolManager request for an unusual edge case. I have repeated this issue on two separate machines.
I am using Windows 10, 64-bit (fully patched) and Python 3.8.2, 64-bit. The urllib3 version is 1.25.8 in a fresh venv environment.
The edge case is:
1. The second call to urllib3 PoolManager request hangs inside the request call and never returns
2. Streaming is used to read the response and write it to a file
3. The URL has a redirect (HTTP status 301)
The original code used concurrent.futures, but the issue is the same without it using a simple loop.
I have put together a small extract from the code I have been running for about one year. It was only when I recently started with a new use case that co-incidentally started using urls with redirects that I found this problem where each of concurrent.futures workers in my Python application would hang. Then I removed concurrent features.
After stepping through the urllib3 source code, I could not figure out the root cause.
I ask if others can reproduce the issue and then help find find the root cause.
# Code:
```
import logging
from pathlib import Path
import shutil
import urllib3
_logger = logging.getLogger(__name__)
_BUFFER_SIZE = 1024 * 1024 # buffer for downloading remote resource
url_headers = urllib3.make_headers(keep_alive=True, accept_encoding=True)
url_retries = urllib3.Retry(total=2, backoff_factor=1, status_forcelist=[500, 502, 503, 504])
url_client = urllib3.PoolManager(timeout=urllib3.Timeout(total=15.0), retries=url_retries, block=True,
headers=url_headers)
def stream_response(url: str, filepath: Path, id: int):
# Must call release_conn() after file copied but opening/writing exception is possible
rsp = None
try:
_logger.debug(f'> {id:4d} GET: {url}')
rsp = url_client.request('GET', url, preload_content=False)
_logger.debug(f'> {id:4d} resp code: {rsp.status}')
if rsp.status == 200:
_logger.debug(f'> {id:4d} writing: "{filepath.name}"')
with filepath.open('wb', buffering=_BUFFER_SIZE) as rfp:
shutil.copyfileobj(rsp, rfp, length=_BUFFER_SIZE)
except Exception as ex:
_logger.exception(f'> {id:4d} Error')
finally:
if rsp:
rsp.release_conn()
return rsp.status
logging.basicConfig(level=logging.DEBUG,
handlers=[logging.FileHandler('output.log', mode='w'), logging.StreamHandler()])
logging.captureWarnings(True)
_logger.info('Started')
items = [
['https://fwd.aws/v3mz8', Path('d1.pdf')],
['https://fwd.aws/88JKk', Path('d2.pdf')],
['https://fwd.aws/D9yDG', Path('d3.pdf')],
['https://fwd.aws/NmXza', Path('d4.pdf')],
['https://fwd.aws/NmXza', Path('d5.pdf')]
]
for id, it in enumerate(items):
stream_response(it[0], it[1], id)
```
# Output:
```
D:\Temp\Python issue>
D:\Temp\Python issue>.env\Scripts\activate
(.env) D:\Temp\Python issue>
(.env) D:\Temp\Python issue>python urllib3_locking.py
INFO:__main__:Started
DEBUG:__main__:> 0 GET: https://fwd.aws/v3mz8
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): fwd.aws:443
DEBUG:urllib3.connectionpool:https://fwd.aws:443 "GET /v3mz8 HTTP/1.1" 301 0
DEBUG:urllib3.util.retry:Incremented Retry for (url='https://fwd.aws/v3mz8'): Retry(total=2, connect=None, read=None, redirect=None, status=None)
INFO:urllib3.poolmanager:Redirecting https://fwd.aws/v3mz8 -> https://aws-quickstart.s3.amazonaws.com/quickstart-hashicorp-consul/doc/hashicorp-consul-on-the-aws-cloud.pdf
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): aws-quickstart.s3.amazonaws.com:443
DEBUG:urllib3.connectionpool:https://aws-quickstart.s3.amazonaws.com:443 "GET /quickstart-hashicorp-consul/doc/hashicorp-consul-on-the-aws-cloud.pdf HTTP/1.1" 200 1610783
DEBUG:__main__:> 0 resp code: 200
DEBUG:__main__:> 0 writing: "d1.pdf"
DEBUG:__main__:> 1 GET: https://fwd.aws/88JKk
Process finished with exit code -1
```
I have to manually terminate the application (CPU usage for the process is zero), and the output never shows the response HTTP status code.
[Python issue - 200307.zip](https://github.com/urllib3/urllib3/files/4301755/Python.issue.-.200307.zip)
| Thanks for the full example! Can you try setting `block=False` when instantiating urllib3.PoolManager to see if it helps?
Hi @pquentin
I tried setting `block=False` as you suggested, and it made no difference. The call to urllib3.PoolManager request still hangs and never returns
Well, I can reproduce your problem, and setting `block=False` (or `maxsize=10`) fixes it for me. So I still *think* my analysis below is correct, but I can't explain why the `block=False` workaround does not help for you. Are you sure you're using the exact example from your first post?
The problem lies at the interaction between blocking connection pools, redirects and streaming download.
urllib3 uses one connection pool by host, in your example there are three hosts if we include redirects: `fwd.aws`, `aws-quickstart.s3.amazonaws.com` and `aws.amazon.com`. So that's three connection pools. Each connection pool has `block=True` and `maxsize=1` (the default `maxsize`), so you can only perform one request at a time because there can be only one connection per pool.
So what's going on in your example? When you request `https://fwd.aws/v3mz8`, you:
* open a connection to `fwd.aws`, then get a redirect,
* so you open a connection to `aws-quickstart.s3.amazonaws.com`, read the file
* then `release_conn` releases the connection to `aws-quickstart.s3.amazonaws.com` to the pool for further use.
But the connection to `fwd.aws` was never released! urllib3 should have read the `fwd.aws` response and released the connection before performing the redirect. This is a bug that was fixed in connection pools in https://github.com/urllib3/urllib3/pull/1174, but unfortunately pool managers also have the bug and it wasn't fixed there. Congratulations for finding the bug. :)
And indeed, `calling drain_and_release_conn` just before the recursive call in `PoolManager.urlopen()` also allows me to run your example without problems.
Would you be interested in working on a fix? This would involve moving `drain_and_release_conn()` to src/urllib3/response.py, calling it both from connectionpool.py and poolmanager.py, and writing tests for the `PoolManager` case (by copying the connection pool tests from https://github.com/urllib3/urllib3/pull/1174).
Hi @pquentin
I now also can use the workaround `block=False`. I had run the code from within PyCharm which hangs regardless, but running the code from the command line has the same result as you (ie does not hang with `block=False`). I might have a look at this test running from VSCode out-of-interest.
I have an understanding of your explanation and I will have an indepth look at the urrlib3 code. I will see if I am up to the fix (being refactoring and new unit tests).
@tycarac That's great to hear! I'd be happy to assist if you have any difficulties. I guess the first step is to call `drain_and_release_conn()` before the recursive call/info log here:
https://github.com/urllib3/urllib3/blob/33a29c5e34ee3375cde07addeb979aba56f2ca5a/src/urllib3/poolmanager.py#L365-L369
And see if that fixes your issue locally
@pquentin
I have a fix (with unit test) ready for review. The unit test took a long time as my first test was complicated and then I found ways to simplify it.
But I am at a loss as to how to submit it. This fix has been a lot of firsts for me (first time using nox, second time using pytest, first GitHub contribution, first git pull request). I only seriously re-started programming (after a break of many years) 3 months ago. Back then I used Visual SourceSafe :-)
So could you please say if I should submit to a git branch. If so, which one?
The unit test hangs without the fix "drain_and_release_conn", but I consider it fine-by-me as a test should never fail. That OK by you?
I found it useful to put:
```
import logging
import os
logging.basicConfig(level=logging.DEBUG,
handlers=[logging.FileHandler(os.path.splitext(__file__)[0] + '.log', mode='w'), logging.StreamHandler()])
logging.captureWarnings(True)
```
at the top of the test file but it generates a log file per test file (and was not part of the original test source code). But it is useful to see the urllib3 internal logging statements.
Lots of firsts indeed! Congratulations on the progress!
For your next pull request you can try following https://guides.github.com/activities/hello-world/ but for this one can you please upload the modified files on GitHub Gist? I'll also need a name or pseudonym and possibly an email.
I'll create the pull request myself if that's okay with you.
Thanks!
[urllib3 - 200317.zip](https://github.com/urllib3/urllib3/files/4340577/urllib3.-.200317.zip)
@pquentin
I would have preferred to put the code directly into urllib3 but I see the permission problem [Permission levels](https://help.github.com/en/github/setting-up-and-managing-your-github-user-account/permission-levels-for-a-user-account-repository). Either you have a LOT of access or very limited access - even for contributors.
I put the patch (3 files) at [Patch on Gist](https://gist.github.com/tycarac/2a61d3169b6ef49ed577eb611fb807eb). Notes:
1. Without the src\urllib3\poolmanager.py change, the test suite execution hangs. I did not investigate test timeouts as I did not see timers used elsewhere and it adds complexity
2. The change to handlers.py is nice-to-have as it gives a counter for loops. I used "page" as it is a typical query key.
3. I looked for the required exceptions in the function drain_and_release_conn(). Most urllib3 errors subclass from HTTPError
4. The unit test loops 5 times - an arbitrary number greater than 3 used for default max retries and redirects
5. I did not see loops used in unit tests but it does not make the unit test more complex or add to test execution time.
Otherwise I tried to follow the style and conventions I found in the code.
> I would have preferred to put the code directly into urllib3 but I see the permission problem [Permission levels](https://help.github.com/en/github/setting-up-and-managing-your-github-user-account/permission-levels-for-a-user-account-repository). Either you have a LOT of access or very limited access - even for contributors.
Ah, I see. You need to fork the urllib3 repository in your own namespace, push your changes to a branch, and submit a pull request. It's not easy when you do that for the first time, but I think GitHub has nice documentation to do this.
Please tell me if you want to go that route, it's the best way to learn really fast but can be demotivating too. :)
I will give it a go. Be brave :-). I know enough about git to know I can make a real mess, but that's the way to learn. | 2020-03-18T10:57:10Z | [] | [] |
urllib3/urllib3 | 1,819 | urllib3__urllib3-1819 | [
"1818"
] | eee53a69e1af019da18635d6974f893308db0ada | diff --git a/dummyserver/server.py b/dummyserver/server.py
--- a/dummyserver/server.py
+++ b/dummyserver/server.py
@@ -40,6 +40,20 @@
DEFAULT_CA_KEY = os.path.join(CERTS_PATH, "cacert.key")
+def _resolves_to_ipv6(host):
+ """ Returns True if the system resolves host to an IPv6 address by default. """
+ resolves_to_ipv6 = False
+ try:
+ for res in socket.getaddrinfo(host, None, socket.AF_UNSPEC):
+ af, _, _, _, _ = res
+ if af == socket.AF_INET6:
+ resolves_to_ipv6 = True
+ except socket.gaierror:
+ pass
+
+ return resolves_to_ipv6
+
+
def _has_ipv6(host):
""" Returns True if the system can bind an IPv6 address. """
sock = None
@@ -54,7 +68,7 @@ def _has_ipv6(host):
try:
sock = socket.socket(socket.AF_INET6)
sock.bind((host, 0))
- has_ipv6 = True
+ has_ipv6 = _resolves_to_ipv6("localhost")
except Exception:
pass
| diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -104,7 +104,8 @@ def socket_handler(listener):
pool.request("GET", "/", retries=0)
except MaxRetryError: # We are violating the protocol
pass
- done_receiving.wait()
+ successful = done_receiving.wait(LONG_TIMEOUT)
+ assert successful, "Timed out waiting for connection accept"
assert (
self.host.encode("ascii") in self.buf
), "missing hostname in SSL handshake"
| TestSNI:test_hostname_in_first_request_packet hangs
My system supports IPv6 and resolves `localhost` to `127.0.0.1`.
When I run the test suite on my system, `TestSNI:test_hostname_in_first_request_packet` hangs.
This also fails about 80 more tests with `MaxRetryError`.
### Expected Result
1. The test should pass.
2. The test should never hang.
### Actual Result
The test uses `_start_server` to create a `SocketServerThread` that binds on an IPv6 address by default (if possible):
```python
HAS_IPV6_AND_DNS = _has_ipv6("localhost")
...
USE_IPV6 = HAS_IPV6_AND_DNS
...
if self.USE_IPV6:
    sock = socket.socket(socket.AF_INET6)
```
but the client connection might still resolve to an IPv4 address and use it:
```python
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    sock = None
    try:
        sock = socket.socket(af, socktype, proto)
...
if HAS_IPV6:
    family = socket.AF_UNSPEC
```
The test waits until an event is set, which never happens, because the server connection is never accepted.
### Reproduction Steps
```
$ cat noxfile-single.patch
30a31,32
> "-o", "log_cli=true",
> "-k", "test_hostname_in_first_request_packet",
$ patch noxfile.py noxfile-single.patch
patching file noxfile.py
$ nox --sessions test-2.7
nox > Running session test-2.7
nox > Creating virtual environment (virtualenv) using python2.7 in .nox/test-2-7
nox > pip install -r dev-requirements.txt
nox > pip install .[socks,secure,brotli]
nox > pip --version
pip 20.0.2 from /home/user/dev/urllib3/.nox/test-2-7/lib/python2.7/site-packages/pip (python 2.7)
nox > python --version
Python 2.7.17
nox > python -c import struct; print(struct.calcsize('P') * 8)
64
nox > python -m OpenSSL.debug
pyOpenSSL: 19.1.0
cryptography: 2.8
cffi: 1.14.0
cryptography's compiled against OpenSSL: OpenSSL 1.1.1d 10 Sep 2019
cryptography's linked OpenSSL: OpenSSL 1.1.1d 10 Sep 2019
Pythons's OpenSSL: OpenSSL 1.1.1 11 Sep 2018
Python executable: /home/user/dev/urllib3/.nox/test-2-7/bin/python
Python version: 2.7.17 (default, Nov 7 2019, 10:07:09)
[GCC 7.4.0]
Platform: linux2
sys.path: ['', '/usr/lib/python2.7', '/usr/lib/python2.7/plat-x86_64-linux-gnu', '/usr/lib/python2.7/lib-tk', '/usr/lib/python2.7/lib-old', '/usr/lib/python2.7/lib-dynload', '/home/user/dev/urllib3/.nox/test-2-7/lib/python2.7/site-packages']
nox > coverage run --parallel-mode -m pytest -o log_cli=true -k test_hostname_in_first_request_packet -r a --tb=native --no-success-flaky-report test/
==================================== test session starts ====================================
platform linux2 -- Python 2.7.17, pytest-4.6.6, py-1.8.1, pluggy-0.13.1
rootdir: /home/user/dev/urllib3, inifile: setup.cfg
plugins: flaky-3.6.1, timeout-1.3.3
collected 1342 items / 1339 deselected / 3 selected
test/contrib/test_pyopenssl.py::TestSNI::test_hostname_in_first_request_packet
[hangs]
```
### System Information
```
$ git rev-parse --short HEAD
eee53a69
$ cat /etc/hosts
127.0.0.1 localhost
127.0.1.1 ubuntu
# The following lines are desirable for IPv6 capable hosts
::1 ip6-localhost ip6-loopback
fe00::0 ip6-localnet
ff00::0 ip6-mcastprefix
ff02::1 ip6-allnodes
ff02::2 ip6-allrouters
```
### Fix
I can add a timeout to the test, but that's not a solution to the root problem.
I suggest that `SocketServerThread` treat a system where `localhost` does not resolve to `::1` as having an insane IPv6 configuration and use the current IPv4 fallback mechanism.
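A standalone check of how the system resolves `localhost` — essentially what the new `_resolves_to_ipv6` helper in the diff above does:
```python
import socket

# Print each address family "localhost" resolves to. On the reporter's system
# only IPv4 entries appear, so a server bound to an IPv6 address is never
# reached by a client connecting to "localhost".
for family, _, _, _, sockaddr in socket.getaddrinfo("localhost", None, socket.AF_UNSPEC):
    print("IPv6" if family == socket.AF_INET6 else "IPv4", sockaddr[0])
```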
| 2020-03-18T15:55:02Z | [] | [] |
|
urllib3/urllib3 | 1,824 | urllib3__urllib3-1824 | [
"1746"
] | 84073f9a469669dc98aa03ca084c12aded9d6ba8 | diff --git a/src/urllib3/util/retry.py b/src/urllib3/util/retry.py
--- a/src/urllib3/util/retry.py
+++ b/src/urllib3/util/retry.py
@@ -54,8 +54,7 @@ class Retry(object):
Total number of retries to allow. Takes precedence over other counts.
Set to ``None`` to remove this constraint and fall back on other
- counts. It's a good idea to set this to some sensibly-high value to
- account for unexpected edge cases and avoid infinite retry loops.
+ counts.
Set to ``0`` to fail on the first retry.
@@ -96,6 +95,18 @@ class Retry(object):
Set to ``0`` to fail on the first retry of this type.
+ :param int other:
+ How many times to retry on other errors.
+
+ Other errors are errors that are not connect, read, redirect or status errors.
+ These errors might be raised after the request was sent to the server, so the
+ request might have side-effects.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ If ``total`` is not set, it's a good idea to set this to 0 to account
+ for unexpected edge cases and avoid infinite retry loops.
+
:param iterable method_whitelist:
Set of uppercased HTTP method verbs that we should retry on.
@@ -166,6 +177,7 @@ def __init__(
read=None,
redirect=None,
status=None,
+ other=None,
method_whitelist=DEFAULT_METHOD_WHITELIST,
status_forcelist=None,
backoff_factor=0,
@@ -180,6 +192,7 @@ def __init__(
self.connect = connect
self.read = read
self.status = status
+ self.other = other
if redirect is False or total is False:
redirect = 0
@@ -204,6 +217,7 @@ def new(self, **kw):
read=self.read,
redirect=self.redirect,
status=self.status,
+ other=self.other,
method_whitelist=self.method_whitelist,
status_forcelist=self.status_forcelist,
backoff_factor=self.backoff_factor,
@@ -348,7 +362,14 @@ def is_retry(self, method, status_code, has_retry_after=False):
def is_exhausted(self):
""" Are we out of retries? """
- retry_counts = (self.total, self.connect, self.read, self.redirect, self.status)
+ retry_counts = (
+ self.total,
+ self.connect,
+ self.read,
+ self.redirect,
+ self.status,
+ self.other,
+ )
retry_counts = list(filter(None, retry_counts))
if not retry_counts:
return False
@@ -386,6 +407,7 @@ def increment(
read = self.read
redirect = self.redirect
status_count = self.status
+ other = self.other
cause = "unknown"
status = None
redirect_location = None
@@ -404,6 +426,11 @@ def increment(
elif read is not None:
read -= 1
+ elif error:
+ # Other retry?
+ if other is not None:
+ other -= 1
+
elif response and response.get_redirect_location():
# Redirect retry?
if redirect is not None:
@@ -432,6 +459,7 @@ def increment(
read=read,
redirect=redirect,
status=status_count,
+ other=other,
history=history,
)
| diff --git a/test/test_retry.py b/test/test_retry.py
--- a/test/test_retry.py
+++ b/test/test_retry.py
@@ -13,6 +13,7 @@
MaxRetryError,
ReadTimeoutError,
ResponseError,
+ SSLError,
)
@@ -83,6 +84,7 @@ def test_retry_default(self):
assert retry.connect is None
assert retry.read is None
assert retry.redirect is None
+ assert retry.other is None
error = ConnectTimeoutError()
retry = Retry(connect=1)
@@ -97,6 +99,20 @@ def test_retry_default(self):
assert Retry(0).raise_on_redirect
assert not Retry(False).raise_on_redirect
+ def test_retry_other(self):
+ """ If an unexpected error is raised, should retry other times """
+ other_error = SSLError()
+ retry = Retry(connect=1)
+ retry = retry.increment(error=other_error)
+ retry = retry.increment(error=other_error)
+ assert not retry.is_exhausted()
+
+ retry = Retry(other=1)
+ retry = retry.increment(error=other_error)
+ with pytest.raises(MaxRetryError) as e:
+ retry.increment(error=other_error)
+ assert e.value.reason == other_error
+
def test_retry_read_zero(self):
""" No second chances on read timeouts, by default """
error = ReadTimeoutError(None, "/", "read timed out")
| Add `other` counter to Retry for fully fine-grained retry configuration
```python
from urllib3 import PoolManager
from urllib3.util.retry import Retry
from urllib3.util.timeout import Timeout
def main(**kwargs):
url = 'https://78.155.216.172'
mgr = PoolManager()
retry_opts = {
'total': None,
'redirect': 3,
'raise_on_redirect': True,
'connect': 1,
'read': 1,
}
res = mgr.request(
'GET',
url,
retries=Retry(
**retry_opts,
),
timeout=Timeout(
connect=3,
read=5,
),
)
```
Retry option `connect=1` is ignored. urllib3 retries the connection again and again.
Console output:
```
Starting new HTTPS connection (1): 78.155.216.172:443
Incremented Retry for (url='/'): Retry(total=None, connect=1, read=1, redirect=3, status=None)
Retrying (Retry(total=None, connect=1, read=1, redirect=3, status=None)) after connection broken by 'SSLError(SSLError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:645)'),)': /
Starting new HTTPS connection (2): 78.155.216.172:443
Incremented Retry for (url='/'): Retry(total=None, connect=1, read=1, redirect=3, status=None)
Retrying (Retry(total=None, connect=1, read=1, redirect=3, status=None)) after connection broken by 'SSLError(SSLError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:645)'),)': /
Starting new HTTPS connection (3): 78.155.216.172:443
Incremented Retry for (url='/'): Retry(total=None, connect=1, read=1, redirect=3, status=None)
Retrying (Retry(total=None, connect=1, read=1, redirect=3, status=None)) after connection broken by 'SSLError(SSLError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:645)'),)': /
Starting new HTTPS connection (4): 78.155.216.172:443
Incremented Retry for (url='/'): Retry(total=None, connect=1, read=1, redirect=3, status=None)
Retrying (Retry(total=None, connect=1, read=1, redirect=3, status=None)) after connection broken by 'SSLError(SSLError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:645)'),)': /
Starting new HTTPS connection (5): 78.155.216.172:443
Incremented Retry for (url='/'): Retry(total=None, connect=1, read=1, redirect=3, status=None)
Retrying (Retry(total=None, connect=1, read=1, redirect=3, status=None)) after connection broken by 'SSLError(SSLError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:645)'),)': /
Starting new HTTPS connection (6): 78.155.216.172:443
... AND SO ON
```
| The exception raised here is an [`SSLError`](https://urllib3.readthedocs.io/en/latest/reference/index.html#urllib3.exceptions.SSLError), which subclasses `HTTPError`. In other words, it's not considered to be a connection error, and only `total` can be used here to avoid infinite retries.
The [docs about `total`](https://urllib3.readthedocs.io/en/latest/reference/urllib3.util.html#urllib3.util.retry.Retry) say:
> Set to None to remove this constraint and fall back on other counts. It’s a good idea to set this to some sensibly-high value to account for unexpected edge cases and avoid infinite retry loops.
Your `SSLError` is such an "unexpected edge-case", and it's not covered by other counts. So you need to define a "sensibly-high value" here. If you set `total` to `6` (redirect + connect + read + 1), then your other values will take precedence, but urllib3 won't retry more than six times.
Sorry, using timeouts correctly is hard, but this issue is not a bug in urllib3, so I'll close this issue for now. Feel free to provide more information, if there's a bug somewhere we'll be happy to reopen.
@pquentin
Docs say "total (int) – Total number of retries to allow. *Takes precedence over other counts*."
You say "So you need to define a "sensibly-high value" here. If you set total to 6 (redirect + connect + read + 1), then your *other values will take precedence*"
Are not these mutually exclusive statements?
Well, anyway, it is a bug in the design of the retrying mechanism because I can't:
* set total to a low value like zero
* AND set redirect to a higher value like 3. In this case total will take precedence (according to the source code and a simple test).
In other words, I do not need a sensibly-high value for edge cases, I need zero. But I can't use zero because it will ruin the other retry counters.
> Are not these mutually exclusive statements?
No, because "other counts" does not include all possible exceptions, while "total" does.
> Well, anyway, it is a bug in the design of the retrying mechanism because I can't [...]
I guess one option would be to add an "other" count, to make sure that all edge cases are covered. You would then set both `total` and `other` to zero. Would that work for you?
Well, if you mean this `other` counter will work for any error not covered by (connect + read + redirect) then, yes, it would work.
In that case I will set `other` to zero and `total` to `None`
Yes, this is what I mean. And yes, set `total` to `None`, not zero. Okay, let's reopen this now that it's a feature request. :) | 2020-03-19T17:26:48Z | [] | [] |
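The resulting `other` counter (see the diff above) makes the reporter's configuration expressible without a sensibly-high `total`:
```python
import urllib3
from urllib3.util.retry import Retry

# other=0 fails fast on "unexpected" errors (like the SSLError here),
# while the per-category counters keep their own budgets.
retries = Retry(total=None, connect=1, read=1, redirect=3, other=0)

mgr = urllib3.PoolManager()
# mgr.urlopen("GET", "https://78.155.216.172/", retries=retries)
# -> raises MaxRetryError after the first SSLError instead of looping forever
```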
urllib3/urllib3 | 1,828 | urllib3__urllib3-1828 | [
"1790"
] | 8e22d0fff948fec2a7efa61b15f4fff86a5e6d7f | diff --git a/dummyserver/handlers.py b/dummyserver/handlers.py
--- a/dummyserver/handlers.py
+++ b/dummyserver/handlers.py
@@ -93,7 +93,7 @@ def _call_method(self):
if not path.startswith("/"):
path = urlsplit(path).path
- target = path[1:].replace("/", "_")
+ target = path[1:].split("/", 1)[0]
method = getattr(self, target, self.index)
resp = method(req)
diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -65,6 +65,11 @@ class ConnectionPool(object):
"""
Base class for all connection pools, such as
:class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
+
+ .. note::
+ ConnectionPool.urlopen() does not normalize or percent-encode target URIs
+ which is useful if your target server doesn't support percent-encoded
+ target URIs.
"""
scheme = None
| diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -787,6 +787,12 @@ def test_mixed_case_hostname(self):
response = pool.request("GET", "http://LoCaLhOsT:%d/" % self.port)
assert response.status == 200
+ def test_preserves_path_dot_segments(self):
+ """ ConnectionPool preserves dot segments in the URI """
+ with HTTPConnectionPool(self.host, self.port) as pool:
+ response = pool.request("GET", "/echo_uri/seg0/../seg2")
+ assert response.data == b"/echo_uri/seg0/../seg2"
+
class TestRetry(HTTPDummyServerTestCase):
def test_max_retry(self):
| Issue with Parsing URIs - Breaks Security Tools when testing for Path Traversal
Hi all,
I have noticed an odd behaviour in the requests module, which uses urllib3. I inspected it via regression testing and found that the root cause of the issue is a change that was introduced in urllib3.
```python
import requests
requests.get("http://127.0.0.1/../../../../doing/certain/check")
```
This should typically send a request to the path `/../../../../doing/certain/check`. Instead, it's requesting `/doing/certain/check`, modifying the specified input.
This breaks a lot of security tools that are performing fuzzing for path traversal vulnerabilities in web-applications.
The only solution for current tools is to revert to `urllib3==1.24.3`.
This may be related: https://github.com/urllib3/urllib3/commit/0aa3e24fcd75f1bb59ab159e9f8adb44055b2271#diff-26a37c674beb46c0ae8f77d76946e054
Can we work on fixing this issue?
| Yeah I agree we should keep the previous behavior. My thoughts are that we should only do path normalization on redirects. Are you able to potentially provide a PR to fix this issue?
Hi @sethmlarson
Thanks for your response.
I have spent more time this morning finding the root cause. The normalization happens in `urllib3/util/url.py`. It was introduced in this commit https://github.com/urllib3/urllib3/commit/5b047b645f5f93900d5e2fc31230848c25eb1f5f#diff-c289049c6f3c9d7397378a8be7e700c6.
I have made a PR that can be found here: https://github.com/urllib3/urllib3/pull/1792
Unfortunately, special-casing the normalization so that it only happens on redirects sounds difficult. I'm not familiar with the urllib3 codebase, but this PR should be a fix for the base behaviour.
Thanks again!
I'm sorry that urllib3 broke your workflow, and I would also like to apologize for [ruining your weekend](https://twitter.com/mazen160/status/1220738021626208256).
I'm not an Information Security Specialist like you are and am not the lead maintainer, but I'd like to step back for a moment. It seems to me that the fact that we were able to fix this vulnerability for all our users is actually a good thing! And I'm sure you'll appreciate that lowering our security in the name of security is quite ironic.
Are there other ways to check for this vulnerability? Maybe using a lower-level tool like [http.client](https://docs.python.org/3/library/http.client.html) would be more appropriate here?
Hi @pquentin !
Thanks for your response. It was fun debugging this issue, actually :)
This issue does not lower the security of urllib3 by any means. It could be a direct security issue if urllib3 were a server, but a client library for sending HTTP requests should not be affected by how `../../` in URLs is normalized before sending.
I'm currently writing a wrapper around the function responsible for this test in the framework, but a global fix for this issue would be nicer for everyone :)
Everyone relies on urllib3 and the requests module to send user-supplied requests as supplied, with minimal normalization only where it's really needed, as that's the expected behaviour.
For example, filtering/blocking requests with CRLF in user-supplied input makes sense. Blocking schemes that can cause further damage makes sense. Etc...
However, normalizing `../../` does not protect against an attack vector, and removing this normalization case does not introduce one.
Thanks for the friendly tone, @mazen160! It's appreciated as it makes a huge difference for maintainers and can only make the discussion more constructive.
I agree that the path traversal vulnerability is a server vulnerability, not a client vulnerability. There are some cases where as an attacker you won't control the client code, but can ultimately control the URL, so if the client normalizes the URL, then you can no longer exploit the vulnerability. It's not very likely, but defense in depth suggests that we should still do this.
Another argument here is that RFC 3986 says that clients should [remove dot segments](https://tools.ietf.org/html/rfc3986#section-6.2.2.3) and even provides a [reference implementation](https://tools.ietf.org/html/rfc3986#section-5.2.4). Chrome and curl do this, and I think Firefox does it too (but am not sure). However curl provides an escape hatch, which we can't do easily as a library. :/
Hi @pquentin! Thanks!
I agree with your approach; if there is a way to have an option like curl's, that would be excellent.
I have also found another project that was discussing the same issue here. They also provided a different solution using the requests module.
- https://github.com/trustedsec/cve-2019-19781/blob/master/citrixmash.py#L140
- https://github.com/trustedsec/cve-2019-19781/issues/13
I wrote a wrapper around urllib.request to overcome the issue for these specific requests.
Feel free to choose the best decision to make from your side :)
Oh, setting the URL directly in a prepared request is a great find. It works because it bypasses the parse at the requests level and then at the urllib3 level (since requests uses connection pools directly, not pool managers).
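For reference, a hedged sketch of that prepared-request approach (overwriting the URL after requests has already parsed and normalized it):
```python
import requests

session = requests.Session()
prepared = session.prepare_request(requests.Request("GET", "http://127.0.0.1"))
# Overwrite the already-normalized URL so the dot segments are sent as-is:
prepared.url = "http://127.0.0.1/../../../../doing/certain/check"
response = session.send(prepared)
```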
It's equivalent to doing this with urllib3:
```
import urllib3
pool = urllib3.HTTPConnectionPool("localhost", 8000)
r = pool.urlopen("GET", "/../../../../doing/certain/check")
print(r.status)
```
(Can be tested using this server: `python3 -m http.server`.)
Since we have three possible workarounds, maybe we should close this and the associated pull request! @sethmlarson What do you think?
I think we should be good with those alternatives. Can we add a simple test case to ensure we preserve the non-normalizing behavior of using a ConnectionPool?
Might be worth documenting it as an escape hatch as well?
Sounds good to me. @mazen160 Is this something you'd be interested in working on? That is, testing that ConnectionPool does not normalize URLs, and add it to the documentation. I don't know how hard it would be to write the test, but we'd be happy to help | 2020-03-23T13:38:41Z | [] | [] |
urllib3/urllib3 | 1,830 | urllib3__urllib3-1830 | [
"1744"
] | 02867340de0a8c1b15541d9da3531e8576105bea | diff --git a/src/urllib3/exceptions.py b/src/urllib3/exceptions.py
--- a/src/urllib3/exceptions.py
+++ b/src/urllib3/exceptions.py
@@ -45,7 +45,10 @@ class SSLError(HTTPError):
class ProxyError(HTTPError):
"Raised when the connection to a proxy fails."
- pass
+
+ def __init__(self, message, error, *args):
+ super(ProxyError, self).__init__(message, error, *args)
+ self.original_error = error
class DecodeError(HTTPError):
diff --git a/src/urllib3/util/retry.py b/src/urllib3/util/retry.py
--- a/src/urllib3/util/retry.py
+++ b/src/urllib3/util/retry.py
@@ -13,6 +13,7 @@
ReadTimeoutError,
ResponseError,
InvalidHeader,
+ ProxyError,
)
from ..packages import six
@@ -306,6 +307,8 @@ def _is_connection_error(self, err):
""" Errors when we're fairly sure that the server did not receive the
request, so it should be safe to retry.
"""
+ if isinstance(err, ProxyError):
+ err = err.original_error
return isinstance(err, ConnectTimeoutError)
def _is_read_error(self, err):
| diff --git a/test/test_proxymanager.py b/test/test_proxymanager.py
--- a/test/test_proxymanager.py
+++ b/test/test_proxymanager.py
@@ -1,7 +1,14 @@
import pytest
+from .port_helpers import find_unused_port
from urllib3.poolmanager import ProxyManager
from urllib3.util.url import parse_url
+from urllib3.util.retry import Retry
+from urllib3.exceptions import (
+ MaxRetryError,
+ ProxyError,
+ NewConnectionError,
+)
class TestProxyManager(object):
@@ -57,3 +64,17 @@ def test_proxy_tunnel(self):
with ProxyManager("https://proxy:8080") as p:
assert p._proxy_requires_url_absolute_form(http_url)
assert p._proxy_requires_url_absolute_form(https_url)
+
+ def test_proxy_connect_retry(self):
+ retry = Retry(total=None, connect=False)
+ with find_unused_port() as port:
+ with ProxyManager("http://localhost:{}".format(port)) as p:
+ with pytest.raises(ProxyError) as ei:
+ p.urlopen("HEAD", url="http://localhost/", retries=retry)
+ assert isinstance(ei.value.original_error, NewConnectionError)
+
+ retry = Retry(total=None, connect=2)
+ with ProxyManager("http://localhost:{}".format(port)) as p:
+ with pytest.raises(MaxRetryError) as ei:
+ p.urlopen("HEAD", url="http://localhost/", retries=retry)
+ assert isinstance(ei.value.reason.original_error, NewConnectionError)
| PoolManager and ProxyManager process retries differently
urllib3 version: 1.25.6
I want to do zero retries and fail on the first error. I use `Retry(total=None, connect=False)`, which must fail on the first error according to the documentation.
Using PoolManager I get NewConnectionError as expected:
```python
from urllib3 import PoolManager
from urllib3.util.retry import Retry
mgr = PoolManager()
mgr.urlopen(
'GET',
url='http://localhost:90/',
retries=Retry(total=None, connect=False),
)
```
Error:
```
Traceback (most recent call last):
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connection.py", line 157, in _new_conn
(self._dns_host, self.port), self.timeout, **extra_kw
File "/tmp/z5/lib/python3.5/site-packages/urllib3/util/connection.py", line 84, in create_connection
raise err
File "/tmp/z5/lib/python3.5/site-packages/urllib3/util/connection.py", line 74, in create_connection
sock.connect(sa)
ConnectionRefusedError: [Errno 111] Connection refused
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "test.py", line 9, in <module>
retries=Retry(total=None, connect=False),
File "/tmp/z5/lib/python3.5/site-packages/urllib3/poolmanager.py", line 330, in urlopen
response = conn.urlopen(method, u.request_uri, **kw)
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connectionpool.py", line 720, in urlopen
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
File "/tmp/z5/lib/python3.5/site-packages/urllib3/util/retry.py", line 393, in increment
raise six.reraise(type(error), error, _stacktrace)
File "/tmp/z5/lib/python3.5/site-packages/urllib3/packages/six.py", line 735, in reraise
raise value
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connectionpool.py", line 672, in urlopen
chunked=chunked,
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connectionpool.py", line 387, in _make_request
conn.request(method, url, **httplib_request_kw)
File "/usr/lib/python3.5/http/client.py", line 1107, in request
self._send_request(method, url, body, headers)
File "/usr/lib/python3.5/http/client.py", line 1152, in _send_request
self.endheaders(body)
File "/usr/lib/python3.5/http/client.py", line 1103, in endheaders
self._send_output(message_body)
File "/usr/lib/python3.5/http/client.py", line 934, in _send_output
self.send(msg)
File "/usr/lib/python3.5/http/client.py", line 877, in send
self.connect()
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connection.py", line 184, in connect
conn = self._new_conn()
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connection.py", line 169, in _new_conn
self, "Failed to establish a new connection: %s" % e
urllib3.exceptions.NewConnectionError: <urllib3.connection.HTTPConnection object at 0x7f41c4e512e8>: Failed to establish a new connection: [Errno 111] Connection refused
```
Using ProxyManager I get a recursion error:
```python
from urllib3 import ProxyManager
from urllib3.util.retry import Retry
mgr = ProxyManager('http://localhost:91')
mgr.urlopen(
'HEAD',
url='http://localhost:90/',
retries=Retry(total=None, connect=False),
)
```
Error:
```
Traceback (most recent call last):
File "test.py", line 20, in <module>
retries=Retry(total=None, connect=False),
File "/tmp/z5/lib/python3.5/site-packages/urllib3/poolmanager.py", line 466, in urlopen
return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
File "/tmp/z5/lib/python3.5/site-packages/urllib3/poolmanager.py", line 328, in urlopen
response = conn.urlopen(method, url, **kw)
.......... LOT OF SAME LINES ...........
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connectionpool.py", line 762, in urlopen
**response_kw
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connectionpool.py", line 762, in urlopen
**response_kw
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connectionpool.py", line 762, in urlopen
**response_kw
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connectionpool.py", line 762, in urlopen
**response_kw
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connectionpool.py", line 672, in urlopen
chunked=chunked,
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connectionpool.py", line 387, in _make_request
conn.request(method, url, **httplib_request_kw)
File "/usr/lib/python3.5/http/client.py", line 1107, in request
self._send_request(method, url, body, headers)
File "/usr/lib/python3.5/http/client.py", line 1152, in _send_request
self.endheaders(body)
File "/usr/lib/python3.5/http/client.py", line 1103, in endheaders
self._send_output(message_body)
File "/usr/lib/python3.5/http/client.py", line 934, in _send_output
self.send(msg)
File "/usr/lib/python3.5/http/client.py", line 877, in send
self.connect()
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connection.py", line 184, in connect
conn = self._new_conn()
File "/tmp/z5/lib/python3.5/site-packages/urllib3/connection.py", line 157, in _new_conn
(self._dns_host, self.port), self.timeout, **extra_kw
File "/tmp/z5/lib/python3.5/site-packages/urllib3/util/connection.py", line 61, in create_connection
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
File "/usr/lib/python3.5/socket.py", line 735, in getaddrinfo
addrlist.append((_intenum_converter(af, AddressFamily),
File "/usr/lib/python3.5/socket.py", line 93, in _intenum_converter
return enum_klass(value)
File "/usr/lib/python3.5/enum.py", line 241, in __call__
return cls.__new__(cls, value)
File "/usr/lib/python3.5/enum.py", line 463, in __new__
if type(value) is cls:
RecursionError: maximum recursion depth exceeded while calling a Python object
```
| Huh, definitely seems like a bug. Will require some more digging!
Maybe it is related to https://github.com/urllib3/urllib3/issues/1746? I have no idea, just linking my issues together :D
No, it's not related :) What's happening is that the NewConnectionError is wrapped in a `ProxyError` when we're connecting to a proxy:
https://github.com/urllib3/urllib3/blob/37ba61a8b8120cbd866d057eaa3936f4b140dee0/src/urllib3/connectionpool.py#L714-L715
But the retries code expects to see a `ConnectTimeoutError`:
https://github.com/urllib3/urllib3/blob/37ba61a8b8120cbd866d057eaa3936f4b140dee0/src/urllib3/util/retry.py#L305-L309
And indeed `NewConnectionError` is a subclass of `ConnectTimeoutError`:
https://github.com/urllib3/urllib3/blob/37ba61a8b8120cbd866d057eaa3936f4b140dee0/src/urllib3/exceptions.py#L123-L125
----
The fix that comes to mind is to modify the `_is_connection_error` and `_is_read_error` functions to peek inside the `ProxyError` exception. I don't think such a fix would cause more problems?
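A minimal sketch of that idea, which is what the patch above ends up doing once `ProxyError` carries the original error:

```python
from urllib3.exceptions import ConnectTimeoutError, ProxyError

def _is_connection_error(self, err):
    """Errors where the server never received the request, so retrying is safe."""
    if isinstance(err, ProxyError):
        # Classify the underlying error raised while connecting to the proxy.
        err = err.original_error
    return isinstance(err, ConnectTimeoutError)
```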
@pquentin Yep, I think that's a suitable solution! :)
@lorien Is this something that you would like to work on?
@pquentin Yeah, I can do it. | 2020-03-28T21:26:18Z | [] | [] |
urllib3/urllib3 | 1,866 | urllib3__urllib3-1866 | [
"1758"
] | 898a16d09e4a6d9dbe10134a49b89eedfe8dae7f | diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -698,9 +698,11 @@ def urlopen(
# Everything went great!
clean_exit = True
- except queue.Empty:
- # Timed out by queue.
- raise EmptyPoolError(self, "No pool connections are available.")
+ except EmptyPoolError:
+ # Didn't get a connection from the pool, no need to clean up
+ clean_exit = True
+ release_this_conn = False
+ raise
except (
TimeoutError,
| diff --git a/test/test_connectionpool.py b/test/test_connectionpool.py
--- a/test/test_connectionpool.py
+++ b/test/test_connectionpool.py
@@ -2,6 +2,7 @@
import ssl
import pytest
+from mock import Mock
from urllib3.connectionpool import (
connection_from_url,
@@ -279,7 +280,6 @@ def _test(exception, expect, reason=None):
# Make sure that all of the exceptions return the connection
# to the pool
- _test(Empty, EmptyPoolError)
_test(BaseSSLError, MaxRetryError, SSLError)
_test(CertificateError, MaxRetryError, SSLError)
@@ -292,6 +292,15 @@ def _test(exception, expect, reason=None):
pool.request("GET", "/", retries=1, pool_timeout=SHORT_TIMEOUT)
assert pool.pool.qsize() == POOL_SIZE
+ def test_empty_does_not_put_conn(self):
+ """Do not put None back in the pool if the pool was empty"""
+
+ with HTTPConnectionPool(host="localhost", maxsize=1, block=True) as pool:
+ pool._get_conn = Mock(side_effect=EmptyPoolError(pool, "Pool is empty"))
+ pool._put_conn = Mock(side_effect=AssertionError("Unexpected _put_conn"))
+ with pytest.raises(EmptyPoolError):
+ pool.request("GET", "/")
+
def test_assert_same_host(self):
with connection_from_url("http://google.com:80") as c:
with pytest.raises(HostChangedError):
| Established connections are replaced by None values
Hello!
When the pool is in blocking mode and there are no connections in the pool, getting a connection from the pool times out.
https://github.com/urllib3/urllib3/pull/1759
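A hedged sketch of how the bug can be observed (hypothetical local server; uses the private `_get_conn` helper to drain the pool):

```python
import urllib3
from urllib3.exceptions import EmptyPoolError

pool = urllib3.HTTPConnectionPool("localhost", 8000, maxsize=1, block=True)
conn = pool._get_conn()  # check out the only connection; the pool is now empty
try:
    pool.request("GET", "/", pool_timeout=0.1)  # times out waiting for a connection
except EmptyPoolError:
    pass
# Before the fix, urlopen()'s cleanup put `None` back into the pool here, so
# qsize() prints 1 (a None slot) and the checked-out connection would later be
# discarded when returned; after the fix it prints 0.
print(pool.pool.qsize())
```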
| 2020-04-26T20:34:00Z | [] | [] |
|
urllib3/urllib3 | 1,872 | urllib3__urllib3-1872 | [
"1871"
] | 46fc29d5108553a58cc14d28e73443e864be76c2 | diff --git a/src/urllib3/exceptions.py b/src/urllib3/exceptions.py
--- a/src/urllib3/exceptions.py
+++ b/src/urllib3/exceptions.py
@@ -153,6 +153,16 @@ def __init__(self, location):
self.location = location
+class URLSchemeUnknown(LocationValueError):
+ "Raised when a URL input has an unsupported scheme."
+
+ def __init__(self, scheme):
+ message = "Not supported URL scheme %s" % scheme
+ super(URLSchemeUnknown, self).__init__(message)
+
+ self.scheme = scheme
+
+
class ResponseError(HTTPError):
"Used as a container for an error reason supplied in a MaxRetryError."
GENERIC_ERROR = "too many error responses"
@@ -236,7 +246,7 @@ class InvalidHeader(HTTPError):
pass
-class ProxySchemeUnknown(AssertionError, ValueError):
+class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
"ProxyManager does not support the supplied scheme"
# TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py
--- a/src/urllib3/poolmanager.py
+++ b/src/urllib3/poolmanager.py
@@ -14,6 +14,7 @@
MaxRetryError,
ProxySchemeUnknown,
ProxySchemeUnsupported,
+ URLSchemeUnknown,
)
from .packages import six
from .packages.six.moves.urllib.parse import urljoin
@@ -255,7 +256,9 @@ def connection_from_context(self, request_context):
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
scheme = request_context["scheme"].lower()
- pool_key_constructor = self.key_fn_by_scheme[scheme]
+ pool_key_constructor = self.key_fn_by_scheme.get(scheme)
+ if not pool_key_constructor:
+ raise URLSchemeUnknown(scheme)
pool_key = pool_key_constructor(request_context)
return self.connection_from_pool_key(pool_key, request_context=request_context)
| diff --git a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py
--- a/test/with_dummyserver/test_poolmanager.py
+++ b/test/with_dummyserver/test_poolmanager.py
@@ -6,7 +6,7 @@
from dummyserver.testcase import HTTPDummyServerTestCase, IPv6HTTPDummyServerTestCase
from urllib3.poolmanager import PoolManager
from urllib3.connectionpool import port_by_scheme
-from urllib3.exceptions import MaxRetryError
+from urllib3.exceptions import MaxRetryError, URLSchemeUnknown
from urllib3.util.retry import Retry
from test import LONG_TIMEOUT
@@ -223,6 +223,29 @@ def test_redirect_without_preload_releases_connection(self):
assert r._pool.num_connections == 1
assert len(http.pools) == 1
+ def test_unknown_scheme(self):
+ with PoolManager() as http:
+ unknown_scheme = "unknown"
+ unknown_scheme_url = "%s://host" % unknown_scheme
+ with pytest.raises(URLSchemeUnknown) as e:
+ r = http.request("GET", unknown_scheme_url)
+ assert e.value.scheme == unknown_scheme
+ r = http.request(
+ "GET",
+ "%s/redirect" % self.base_url,
+ fields={"target": unknown_scheme_url},
+ redirect=False,
+ )
+ assert r.status == 303
+ assert r.headers.get("Location") == unknown_scheme_url
+ with pytest.raises(URLSchemeUnknown) as e:
+ r = http.request(
+ "GET",
+ "%s/redirect" % self.base_url,
+ fields={"target": unknown_scheme_url},
+ )
+ assert e.value.scheme == unknown_scheme
+
def test_raise_on_redirect(self):
with PoolManager() as http:
r = http.request(
| KeyError can be triggered by a server
I noticed a behavior in urllib3 where a server can trigger an unexpected KeyError exception.
This happens if a server causes a redirect to a protocol that is not http or https (can also be a bogus protocol).
To reproduce you need a server that creates a simple redirect, e.g. with a python CGI something like this:
```
#!/usr/bin/python3
print("location: a://b")
print("")
```
Now do a request with urllib3's poolmanager, e.g.:
```
import urllib3
pool = urllib3.PoolManager()
try:
    r = pool.request("GET", 'http://localhost/')
except (ConnectionRefusedError, ConnectionResetError,
urllib3.exceptions.HTTPError):
pass
```
A similar issue has already been reported in issue #872. However it was closed without a fix and the discussion there indicates that this was due to the fact that one expects programmers not to provide invalid input.
This scenario here is different as the bug can be triggered by a server, which is not necessarily under control or trusted by the application. Causing a KeyError is a very unexpected thing for an HTTP library, so I can imagine that there are situations where this can be used to crash applications with an unexpected / unhandled exception.
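With a fix like the patch above, the bogus redirect target surfaces as a typed exception instead of a bare `KeyError`. A sketch of handling it (note that `URLSchemeUnknown` must be caught before `HTTPError`, which it subclasses):

```python
import urllib3
from urllib3.exceptions import HTTPError, URLSchemeUnknown

pool = urllib3.PoolManager()
try:
    r = pool.request("GET", "http://localhost/")
except URLSchemeUnknown as e:  # e.g. the server redirected to "a://b"
    print("server redirected to unsupported scheme:", e.scheme)
except HTTPError:
    pass
```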
| 2020-05-05T22:51:36Z | [] | [] |
|
urllib3/urllib3 | 1,888 | urllib3__urllib3-1888 | [
"1516"
] | 86fb90fdafee0077f4b90cbb2b9c3be99d4a1991 | diff --git a/src/urllib3/exceptions.py b/src/urllib3/exceptions.py
--- a/src/urllib3/exceptions.py
+++ b/src/urllib3/exceptions.py
@@ -231,6 +231,23 @@ def __repr__(self):
)
+class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
+ """Invalid chunk length in a chunked response."""
+
+ def __init__(self, response, length):
+ super(InvalidChunkLength, self).__init__(
+ response.tell(), response.length_remaining
+ )
+ self.response = response
+ self.length = length
+
+ def __repr__(self):
+ return "InvalidChunkLength(got length %r, %i bytes read)" % (
+ self.length,
+ self.partial,
+ )
+
+
class InvalidHeader(HTTPError):
"The header provided was somehow invalid."
pass
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -19,11 +19,11 @@
ReadTimeoutError,
ResponseNotChunked,
IncompleteRead,
+ InvalidChunkLength,
InvalidHeader,
HTTPError,
)
from .packages.six import string_types as basestring, PY3
-from .packages.six.moves import http_client as httplib
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed, is_response_to_head
@@ -697,7 +697,7 @@ def _update_chunk_length(self):
except ValueError:
# Invalid chunked protocol response, abort.
self.close()
- raise httplib.IncompleteRead(line)
+ raise InvalidChunkLength(self, line)
def _handle_chunk(self, amt):
returned_chunk = None
| diff --git a/test/test_connectionpool.py b/test/test_connectionpool.py
--- a/test/test_connectionpool.py
+++ b/test/test_connectionpool.py
@@ -10,8 +10,9 @@
HTTPConnectionPool,
HTTPSConnectionPool,
)
-from urllib3.response import httplib, HTTPResponse
+from urllib3.response import HTTPResponse
from urllib3.util.timeout import Timeout
+from urllib3.packages.six.moves import http_client as httplib
from urllib3.packages.six.moves.http_client import HTTPException
from urllib3.packages.six.moves.queue import Empty
from urllib3.packages.ssl_match_hostname import CertificateError
diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -16,6 +16,9 @@
ResponseNotChunked,
ProtocolError,
InvalidHeader,
+ httplib_IncompleteRead,
+ IncompleteRead,
+ InvalidChunkLength,
)
from urllib3.packages.six.moves import http_client as httplib
from urllib3.util.retry import Retry, RequestHistory
@@ -758,9 +761,44 @@ def test_read_not_chunked_response_as_chunks(self):
with pytest.raises(ResponseNotChunked):
next(r)
- def test_invalid_chunks(self):
+ def test_buggy_incomplete_read(self):
+ # Simulate buggy versions of Python (<2.7.4)
+ # See http://bugs.python.org/issue16298
+ content_length = 1337
+ fp = BytesIO(b"")
+ resp = HTTPResponse(
+ fp,
+ headers={"content-length": str(content_length)},
+ preload_content=False,
+ enforce_content_length=True,
+ )
+ with pytest.raises(ProtocolError) as ctx:
+ resp.read(3)
+
+ orig_ex = ctx.value.args[1]
+ assert isinstance(orig_ex, IncompleteRead)
+ assert orig_ex.partial == 0
+ assert orig_ex.expected == content_length
+
+ def test_incomplete_chunk(self):
+ stream = [b"foooo", b"bbbbaaaaar"]
+ fp = MockChunkedIncompleteRead(stream)
+ r = httplib.HTTPResponse(MockSock)
+ r.fp = fp
+ r.chunked = True
+ r.chunk_left = None
+ resp = HTTPResponse(
+ r, preload_content=False, headers={"transfer-encoding": "chunked"}
+ )
+ with pytest.raises(ProtocolError) as ctx:
+ next(resp.read_chunked())
+
+ orig_ex = ctx.value.args[1]
+ assert isinstance(orig_ex, httplib_IncompleteRead)
+
+ def test_invalid_chunk_length(self):
stream = [b"foooo", b"bbbbaaaaar"]
- fp = MockChunkedInvalidEncoding(stream)
+ fp = MockChunkedInvalidChunkLength(stream)
r = httplib.HTTPResponse(MockSock)
r.fp = fp
r.chunked = True
@@ -768,9 +806,13 @@ def test_invalid_chunks(self):
resp = HTTPResponse(
r, preload_content=False, headers={"transfer-encoding": "chunked"}
)
- with pytest.raises(ProtocolError):
+ with pytest.raises(ProtocolError) as ctx:
next(resp.read_chunked())
+ orig_ex = ctx.value.args[1]
+ assert isinstance(orig_ex, InvalidChunkLength)
+ assert orig_ex.length == six.b(fp.BAD_LENGTH_LINE)
+
def test_chunked_response_without_crlf_on_end(self):
stream = [b"foo", b"bar", b"baz"]
fp = MockChunkedEncodingWithoutCRLFOnEnd(stream)
@@ -971,9 +1013,16 @@ def close(self):
self.closed = True
-class MockChunkedInvalidEncoding(MockChunkedEncodingResponse):
+class MockChunkedIncompleteRead(MockChunkedEncodingResponse):
+ def _encode_chunk(self, chunk):
+ return "9999\r\n%s\r\n" % chunk.decode()
+
+
+class MockChunkedInvalidChunkLength(MockChunkedEncodingResponse):
+ BAD_LENGTH_LINE = "ZZZ\r\n"
+
def _encode_chunk(self, chunk):
- return "ZZZ\r\n%s\r\n" % chunk.decode()
+ return "%s%s\r\n" % (self.BAD_LENGTH_LINE, chunk.decode())
class MockChunkedEncodingWithoutCRLFOnEnd(MockChunkedEncodingResponse):
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -9,7 +9,7 @@
SSLError,
ProtocolError,
)
-from urllib3.response import httplib
+from urllib3.packages.six.moves import http_client as httplib
from urllib3.util import ssl_wrap_socket
from urllib3.util.ssl_ import HAS_SNI
from urllib3.util import ssl_
| Getting exception in _update_chunk_length
Hi,
While requesting a particular URL, I came across this error
```python
File "/root/shivam/python3_env/lib/python3.5/site-packages/urllib3/response.py", line 601, in _update_chunk_length
self.chunk_left = int(line, 16)
ValueError: invalid literal for int() with base 16: b''
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/root/shivam/python3_env/lib/python3.5/site-packages/urllib3/response.py", line 360, in _error_catcher
yield
File "/root/shivam/python3_env/lib/python3.5/site-packages/urllib3/response.py", line 666, in read_chunked
self._update_chunk_length()
File "/root/shivam/python3_env/lib/python3.5/site-packages/urllib3/response.py", line 605, in _update_chunk_length
raise httplib.IncompleteRead(line)
http.client.IncompleteRead: IncompleteRead(0 bytes read)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/root/shivam/python3_env/lib/python3.5/site-packages/requests/models.py", line 750, in generate
for chunk in self.raw.stream(chunk_size, decode_content=True):
File "/root/shivam/python3_env/lib/python3.5/site-packages/urllib3/response.py", line 490, in stream
for line in self.read_chunked(amt, decode_content=decode_content):
File "/root/shivam/python3_env/lib/python3.5/site-packages/urllib3/response.py", line 694, in read_chunked
self._original_response.close()
File "/usr/lib/python3.5/contextlib.py", line 77, in __exit__
self.gen.throw(type, value, traceback)
File "/root/shivam/python3_env/lib/python3.5/site-packages/urllib3/response.py", line 378, in _error_catcher
raise ProtocolError('Connection broken: %r' % e, e)
urllib3.exceptions.ProtocolError: ('Connection broken: IncompleteRead(0 bytes read)', IncompleteRead(0 bytes read))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "parse_dumped_data.py", line 87, in <module>
Parse(f, entity)
File "parse_dumped_data.py", line 17, in __init__
self.parse_records()
File "parse_dumped_data.py", line 32, in parse_records
data_fields = self.get_data(record.get('data'))
File "parse_dumped_data.py", line 50, in get_data
data['image_url'] = self.get_image_url(data.get('image'), _id)
File "parse_dumped_data.py", line 64, in get_image_url
resp = requests.get(url)
File "/root/shivam/python3_env/lib/python3.5/site-packages/requests/api.py", line 75, in get
return request('get', url, params=params, **kwargs)
File "/root/shivam/python3_env/lib/python3.5/site-packages/requests/api.py", line 60, in request
return session.request(method=method, url=url, **kwargs)
File "/root/shivam/python3_env/lib/python3.5/site-packages/requests/sessions.py", line 533, in request
resp = self.send(prep, **send_kwargs)
File "/root/shivam/python3_env/lib/python3.5/site-packages/requests/sessions.py", line 686, in send
r.content
File "/root/shivam/python3_env/lib/python3.5/site-packages/requests/models.py", line 828, in content
self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''
File "/root/shivam/python3_env/lib/python3.5/site-packages/requests/models.py", line 753, in generate
raise ChunkedEncodingError(e)
requests.exceptions.ChunkedEncodingError: ('Connection broken: IncompleteRead(0 bytes read)', IncompleteRead(0 bytes read))
```
This was reported in the requests module [here](https://github.com/requests/requests/issues/4248).
I fixed it as follows:
```python
def _update_chunk_length(self):
# First, we'll figure out length of a chunk and then
# we'll try to read it from socket.
if self.chunk_left is not None:
return
line = self._fp.fp.readline()
line = line.split(b';', 1)[0]
try:
if len(line) == 0:
self.chunk_left = 0
else:
self.chunk_left = int(line, 16)
except ValueError:
# Invalid chunked protocol response, abort.
self.close()
raise httplib.IncompleteRead(line)
```
or as a one-liner:
```python
def _update_chunk_length(self):
# First, we'll figure out length of a chunk and then
# we'll try to read it from socket.
if self.chunk_left is not None:
return
line = self._fp.fp.readline()
line = line.split(b';', 1)[0]
    line = line if len(line) > 0 else b"0"  # added this line
try:
self.chunk_left = int(line, 16)
except ValueError:
# Invalid chunked protocol response, abort.
self.close()
raise httplib.IncompleteRead(line)
```
Is it worth opening a PR for this?
| Is this URL publicly available? I'd like to see the exact HTTP response.
Yes, I faced this issue while requesting a URL for a Wikidata entry; I'm not sure what the exact URL was because this happened after the ~94000th iteration.
The URL construction was like - https://commons.wikimedia.org/wiki/File:<image_name>.jpg
Example URL - https://commons.wikimedia.org/wiki/File:Belfast_City_Hall_2.jpg
If you could get the exact URL where this happens that'd be great. The URL you've given as an example doesn't use `Transfer-Encoding: chunked` so it shouldn't hit this logic. Below is what I'm seeing when hitting this URL:
```python
>>> import urllib3
>>> p = urllib3.PoolManager()
>>> r = p.request('GET', 'https://commons.wikimedia.org/wiki/File:Belfast_City_Hall_2.jpg', preload_content=False)
>>> [x for x in r.read_chunked()]
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\Anaconda3\lib\site-packages\urllib3\response.py", line 647, in read_chunked
"Response is not chunked. "
urllib3.exceptions.ResponseNotChunked: Response is not chunked. Header 'transfer-encoding: chunked' is missing.
```
I'm having this problem as well, but my webpage isn't public and its contents are sensitive. On a request that succeeds (e.g. from Chrome), the response headers look like this:
> HTTP/1.1 200 OK
> Date: Sat, 02 Feb 2019 14:22:35 GMT
> Server: BarracudaServer.com (Posix)
> Content-Type: text/html; charset=utf-8
> Cache-Control: no-store, no-cache, must-revalidate, max-age=0
> Transfer-Encoding: chunked
> Keep-Alive: Keep-Alive
> X-Frame-Options: SAMEORIGIN
> Strict-Transport-Security: max-age=60000; includeSubDomains
but unfortunately I can't see the chunks in Chrome and it's a TLS request so sniffing on the wire is hard.
Not being able to see the chunks, I won't opine on whether this is "really a bug."
Is there a way you can get curl to show the chunks? You can remove all the content; we just need the boundaries and how much data is between them. If you've got a reliable reproducer we would love to see it. :)
I stepped through urllib3 during the read. The server was returning a 500. The response headers said it was chunked but it wasn't -- I believe the body was empty.
So in my case this wasn't a bug in urllib3; the server definitely sent a spec-violating response. I'm just going to leave this here to remind other folks that an "incomplete read" may be because the data was never written.
Could this be caught in urllib3 and re-raised with a "The server indicated a chunked response. Did the server send a non-chunked response or no chunks at all?" wrapper exception? ValueError is less than ideal to indicate a protocol error.
In my case and issue 4248 above, I'm going to guess that the body was empty; 4248 had another response on the connection while @shivam05011996 and I did not.
I'd suggest that the wrapper exception contain the HTTP response code since that might help diagnose a broken server.
> I'd suggest that the wrapper exception contain the HTTP response code since that might help diagnose a broken server.
I was thinking about this as well. In the original post, we clearly have a response we could attach. Perhaps `read_chunked` could include `self` in the exception to make it easier?
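For what it's worth, the patch above does attach the response: `InvalidChunkLength` keeps a reference to it. A hedged sketch of what a caller could then do with it (assumes `resp` is a streaming `HTTPResponse`):

```python
from urllib3.exceptions import InvalidChunkLength, ProtocolError

try:
    body = resp.read()
except ProtocolError as e:
    cause = e.args[1] if len(e.args) > 1 else None
    if isinstance(cause, InvalidChunkLength):
        # The exception carries the response, so status and headers are
        # available for diagnosing the broken server.
        print(cause.response.status, cause.response.headers, cause.length)
    raise
```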
We ran into a similar issue; it turned out to be a server-side problem where the server could not properly compress and chunk at the same time. As a workaround, we pass `Accept-Encoding: identity` with the request headers. Leaving this here for those who have the same issue and want to bypass the error.
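For example (hypothetical endpoint; this just asks the server not to compress):

```python
import requests

# Identity encoding sidesteps the server's broken compress-and-chunk path.
r = requests.get("https://example.com/data",
                 headers={"Accept-Encoding": "identity"})
```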
I am able to reproduce the same bug with this code:
```python
import requests
requests.get('https://www.telecreditobcp.com/tlcnp/index.do')
```
I think this is a bug on the server side, but maybe urllib3 could do a better job of working around it, as other libraries/applications do (the same URL works fine in a web browser like Chrome or Firefox).
This is the traceback I get when it fails:
```
Traceback (most recent call last):
  File "/usr/lib/python3.7/site-packages/urllib3/response.py", line 603, in _update_chunk_length
    self.chunk_left = int(line, 16)
ValueError: invalid literal for int() with base 16: b'HTTP/1.1 200 OK\r\n'

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/lib/python3.7/site-packages/urllib3/response.py", line 362, in _error_catcher
    yield
  File "/usr/lib/python3.7/site-packages/urllib3/response.py", line 668, in read_chunked
    self._update_chunk_length()
  File "/usr/lib/python3.7/site-packages/urllib3/response.py", line 607, in _update_chunk_length
    raise httplib.IncompleteRead(line)
http.client.IncompleteRead: IncompleteRead(17 bytes read)

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/lib/python3.7/site-packages/requests/models.py", line 750, in generate
    for chunk in self.raw.stream(chunk_size, decode_content=True):
  File "/usr/lib/python3.7/site-packages/urllib3/response.py", line 492, in stream
    for line in self.read_chunked(amt, decode_content=decode_content):
  File "/usr/lib/python3.7/site-packages/urllib3/response.py", line 696, in read_chunked
    self._original_response.close()
  File "/usr/lib64/python3.7/contextlib.py", line 130, in __exit__
    self.gen.throw(type, value, traceback)
  File "/usr/lib/python3.7/site-packages/urllib3/response.py", line 380, in _error_catcher
    raise ProtocolError('Connection broken: %r' % e, e)
urllib3.exceptions.ProtocolError: ('Connection broken: IncompleteRead(17 bytes read)', IncompleteRead(17 bytes read))

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "/usr/lib/python3.7/site-packages/requests/api.py", line 75, in get
    return request('get', url, params=params, **kwargs)
  File "/usr/lib/python3.7/site-packages/requests/api.py", line 60, in request
    return session.request(method=method, url=url, **kwargs)
  File "/usr/lib/python3.7/site-packages/requests/sessions.py", line 533, in request
    resp = self.send(prep, **send_kwargs)
  File "/usr/lib/python3.7/site-packages/requests/sessions.py", line 686, in send
    r.content
  File "/usr/lib/python3.7/site-packages/requests/models.py", line 828, in content
    self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''
  File "/usr/lib/python3.7/site-packages/requests/models.py", line 753, in generate
    raise ChunkedEncodingError(e)
requests.exceptions.ChunkedEncodingError: ('Connection broken: IncompleteRead(17 bytes read)', IncompleteRead(17 bytes read))
```
As far as I could see, the server (in the example above) sends a chunked transfer, but instead of properly sending the final 0-length chunk it starts returning the response status line again. That's where urllib3 raises an exception, when it tries to decode a chunk length from the status line. Some insights I could find so far:
* the same request works in a browser without complaint (I tried sending the same headers as the browser, but didn't see a difference for the sample code reproducing the bug)
* sometimes the request works perfectly with the sample code for a while, and then stops working again. Since our local side has not really changed, I assume there is some load balancing and we are sometimes served by another server with a slightly different software version.
* the following code makes it deliver part of the content, but still not the full content:

  ```python
  r = requests.get('https://www.telecreditobcp.com/tlcnp/index.do', stream=True)
  for line in r.iter_lines():
      print(line)
  ```
* the bug in my local environment happens with Python 3.6.8, 3.7.3 and 2.7.16 on Linux, all with the same kernel (5.1.16-300.fc30.x86_64).
* I have tried Ubuntu and Fedora; both seem to fail the same way.
* I have a server in AWS running Linux with Python 3.5.2, and it always works, no bugs.
* I've been told the bug does not seem reproducible in a Windows environment with any version.
* I've been told enabling traffic through a VPN makes it work in a Linux environment that used to fail.
* the same bug seems to happen with curl:

  ```
  curl -X GET https://www.telecreditobcp.com/tlcnp/index.do
  curl: (56) Illegal or missing hexadecimal sequence in chunked-encoding
  ```
Some more insights:
- browsers like Chrome or Firefox always work; one difference is that they use TLS 1.3, while requests/urllib3 uses TLS 1.2 (I tried to force TLS 1.3 in the Python client as well, without success)
- found that old versions of the kernel (4.x) and openssl (1.0) work, while newer versions of the kernel and openssl don't, but I haven't yet
- analyzing the Wireshark traffic was not very helpful as it is encrypted
- when it fails, it looks like the client keeps receiving the response from the beginning again and again, non-stop (starting with the `HTTP/1.1 200 OK` headers), instead of receiving the rest of the page... This can be reproduced with the openssl command line directly (so I should probably post it there):
  ```
  openssl s_client -connect www.telecreditobcp.com:443 -servername www.telecreditobcp.com
  ```
  then send this:
  ```
  GET /tlcnp/index.do HTTP/1.1
  Host: www.telecreditobcp.com
  Connection: keep-alive
  Cache-Control: max-age=0
  Upgrade-Insecure-Requests: 1
  User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36
  Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3
  Accept-Encoding: gzip, deflate, br
  Accept-Language: en-US,en;q=0.9,es-419;q=0.8,es;q=0.7
  ```
OK, I confirmed this is a bug in OpenSSL 1.1 versions; OpenSSL 1.0 works.
Will file the bug there, sorry for the noise!
@mbatle Thank you for digging deeper into this issue; if nothing more is required, please go ahead and close this issue.
@mbatle I guess I should be watching https://github.com/openssl/openssl/issues/9360?
I got exactly the same bug in an HTTP request, so I think it's not about openssl.
This exception is raised when urllib3 is expecting a next chunk from the server but the server doesn't provide a validly encoded chunk and instead provides something else.

AFAIK the cause of this is never that urllib3 is *erroneously* expecting a next chunk, or that its chunk parsing is buggy. (If such a cause does exist, I haven't encountered it on any of the related issues.)

Instead there are various other causes: an openssl bug, a bug in the server itself, etc. Using a proxy (such as a VPN, but any proxy would do) often causes the bug to go away, since the proxy re-encodes the chunks.

There is a bug in urllib3 though: this exception is very unhelpfully named, and also it's arguable that server protocol errors shouldn't raise an exception in the client code but instead signal some other type of error.
I get the same exception here with the header `Transfer-Encoding: chunked`,
on a Debian 10 with:
requests.__version__ = 2.21.0
urllib3.__version__ = 1.24.1 (from dist-packages)
I removed the dist-package and installed the site-package (1.25.9): same exception.
Finally I added a line as described in #4248.
I cannot give you the URL, it is a private server.
I can just tell that it is on the cometd interface of the Logitech Media Server.
I ran into the same issue with yet another private service.
In my case the issue is that the server sends "Transfer-Encoding: chunked" in a 204 NO CONTENT response to a PUT request. The server then follows RFC7230 section 3.3.3 point 1, and does not send any message body - in particular it does not send even the chunk length, which urllib3 expects to receive. The RFC seems to be somewhat ambiguous, and the urllib3 behaviour is understandable in view of section 3.3.3 point 3.
curl used to have the same issue, but it was fixed in this commit: [http: don't parse body-related headers bodyless responses](https://github.com/curl/curl/commit/2e5ceb3934a7bc5422c5a3a18daafa1b1af02090)
Should urllib3 follow the same logic as curl here, and ignore the header fields Content-Encoding, Content-Length, Content-Range, Last-Modified and Transfer-Encoding whenever the response is supposed to be bodyless? | 2020-06-09T17:08:56Z | [] | [] |
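A hedged sketch of the curl-style rule referenced here, i.e. when body-framing headers must be ignored per RFC 7230 section 3.3.3 (illustrative helper, not urllib3 API):

```python
def response_may_have_body(method, status):
    # HEAD responses, 1xx informational, 204 No Content and 304 Not Modified
    # never carry a message body, so Transfer-Encoding/Content-Length on
    # them must not be used to try to read one.
    if method.upper() == "HEAD":
        return False
    if 100 <= status < 200 or status in (204, 304):
        return False
    return True
```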
urllib3/urllib3 | 1,894 | urllib3__urllib3-1894 | [
"1892"
] | a5a45dc36f3821e97bb9d6f2b4cd438a3f518af3 | diff --git a/dummyserver/handlers.py b/dummyserver/handlers.py
--- a/dummyserver/handlers.py
+++ b/dummyserver/handlers.py
@@ -116,6 +116,11 @@ def certificate(self, request):
subject = dict((k, v) for (k, v) in [y for z in cert["subject"] for y in z])
return Response(json.dumps(subject))
+ def alpn_protocol(self, request):
+ """Return the selected ALPN protocol."""
+ proto = request.connection.stream.socket.selected_alpn_protocol()
+ return Response(proto.encode("utf8") if proto is not None else u"")
+
def source_address(self, request):
"""Return the requester's IP address."""
return Response(request.remote_ip)
diff --git a/dummyserver/server.py b/dummyserver/server.py
--- a/dummyserver/server.py
+++ b/dummyserver/server.py
@@ -15,6 +15,7 @@
from datetime import datetime
from urllib3.exceptions import HTTPWarning
+from urllib3.util import resolve_cert_reqs, resolve_ssl_version, ALPN_PROTOCOLS
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
@@ -33,6 +34,7 @@
"keyfile": os.path.join(CERTS_PATH, "server.key"),
"cert_reqs": ssl.CERT_OPTIONAL,
"ca_certs": os.path.join(CERTS_PATH, "cacert.pem"),
+ "alpn_protocols": ALPN_PROTOCOLS,
}
DEFAULT_CA = os.path.join(CERTS_PATH, "cacert.pem")
DEFAULT_CA_KEY = os.path.join(CERTS_PATH, "cacert.key")
@@ -133,6 +135,39 @@ def run(self):
self.server = self._start_server()
+def ssl_options_to_context(
+ keyfile=None,
+ certfile=None,
+ server_side=None,
+ cert_reqs=None,
+ ssl_version=None,
+ ca_certs=None,
+ do_handshake_on_connect=None,
+ suppress_ragged_eofs=None,
+ ciphers=None,
+ alpn_protocols=None,
+):
+ """Return an equivalent SSLContext based on ssl.wrap_socket args."""
+ ssl_version = resolve_ssl_version(ssl_version)
+ cert_none = resolve_cert_reqs("CERT_NONE")
+ if cert_reqs is None:
+ cert_reqs = cert_none
+ else:
+ cert_reqs = resolve_cert_reqs(cert_reqs)
+
+ ctx = ssl.SSLContext(ssl_version)
+ ctx.load_cert_chain(certfile, keyfile)
+ ctx.verify_mode = cert_reqs
+ if ctx.verify_mode != cert_none:
+ ctx.load_verify_locations(cafile=ca_certs)
+ if alpn_protocols and hasattr(ctx, "set_alpn_protocols"):
+ try:
+ ctx.set_alpn_protocols(alpn_protocols)
+ except NotImplementedError:
+ pass
+ return ctx
+
+
def run_tornado_app(app, io_loop, certs, scheme, host):
assert io_loop == tornado.ioloop.IOLoop.current()
@@ -141,7 +176,11 @@ def run_tornado_app(app, io_loop, certs, scheme, host):
app.last_req = datetime(1970, 1, 1)
if scheme == "https":
- http_server = tornado.httpserver.HTTPServer(app, ssl_options=certs)
+ if sys.version_info < (2, 7, 9):
+ ssl_opts = certs
+ else:
+ ssl_opts = ssl_options_to_context(**certs)
+ http_server = tornado.httpserver.HTTPServer(app, ssl_options=ssl_opts)
else:
http_server = tornado.httpserver.HTTPServer(app)
diff --git a/src/urllib3/contrib/_securetransport/bindings.py b/src/urllib3/contrib/_securetransport/bindings.py
--- a/src/urllib3/contrib/_securetransport/bindings.py
+++ b/src/urllib3/contrib/_securetransport/bindings.py
@@ -276,6 +276,13 @@
Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMax.restype = OSStatus
+ try:
+ Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
+ Security.SSLSetALPNProtocols.restype = OSStatus
+ except AttributeError:
+ # Supported only in 10.12+
+ pass
+
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
diff --git a/src/urllib3/contrib/_securetransport/low_level.py b/src/urllib3/contrib/_securetransport/low_level.py
--- a/src/urllib3/contrib/_securetransport/low_level.py
+++ b/src/urllib3/contrib/_securetransport/low_level.py
@@ -56,6 +56,49 @@ def _cf_dictionary_from_tuples(tuples):
)
+def _cfstr(py_bstr):
+ """
+ Given a Python binary data, create a CFString.
+ The string must be CFReleased by the caller.
+ """
+ c_str = ctypes.c_char_p(py_bstr)
+ cf_str = CoreFoundation.CFStringCreateWithCString(
+ CoreFoundation.kCFAllocatorDefault, c_str, CFConst.kCFStringEncodingUTF8,
+ )
+ return cf_str
+
+
+def _create_cfstring_array(lst):
+ """
+ Given a list of Python binary data, create an associated CFMutableArray.
+ The array must be CFReleased by the caller.
+
+ Raises an ssl.SSLError on failure.
+ """
+ cf_arr = None
+ try:
+ cf_arr = CoreFoundation.CFArrayCreateMutable(
+ CoreFoundation.kCFAllocatorDefault,
+ 0,
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+ )
+ if not cf_arr:
+ raise MemoryError("Unable to allocate memory!")
+ for item in lst:
+ cf_str = _cfstr(item)
+ if not cf_str:
+ raise MemoryError("Unable to allocate memory!")
+ try:
+ CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
+ finally:
+ CoreFoundation.CFRelease(cf_str)
+ except BaseException as e:
+ if cf_arr:
+ CoreFoundation.CFRelease(cf_arr)
+ raise ssl.SSLError("Unable to allocate array: %s" % (e,))
+ return cf_arr
+
+
def _cf_string_to_unicode(value):
"""
Creates a Unicode string from a CFString object. Used entirely for error
diff --git a/src/urllib3/contrib/pyopenssl.py b/src/urllib3/contrib/pyopenssl.py
--- a/src/urllib3/contrib/pyopenssl.py
+++ b/src/urllib3/contrib/pyopenssl.py
@@ -465,6 +465,10 @@ def load_cert_chain(self, certfile, keyfile=None, password=None):
self._ctx.set_passwd_cb(lambda *_: password)
self._ctx.use_privatekey_file(keyfile or certfile)
+ def set_alpn_protocols(self, protocols):
+ protocols = [six.ensure_binary(p) for p in protocols]
+ return self._ctx.set_alpn_protos(protocols)
+
def wrap_socket(
self,
sock,
diff --git a/src/urllib3/contrib/securetransport.py b/src/urllib3/contrib/securetransport.py
--- a/src/urllib3/contrib/securetransport.py
+++ b/src/urllib3/contrib/securetransport.py
@@ -56,6 +56,7 @@
import errno
import os.path
import shutil
+import six
import socket
import ssl
import threading
@@ -68,6 +69,7 @@
_cert_array_from_pem,
_temporary_keychain,
_load_client_cert_chain,
+ _create_cfstring_array,
)
try: # Platform-specific: Python 2
@@ -374,6 +376,19 @@ def _set_ciphers(self):
)
_assert_no_error(result)
+ def _set_alpn_protocols(self, protocols):
+ """
+ Sets up the ALPN protocols on the context.
+ """
+ if not protocols:
+ return
+ protocols_arr = _create_cfstring_array(protocols)
+ try:
+ result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
+ _assert_no_error(result)
+ finally:
+ CoreFoundation.CFRelease(protocols_arr)
+
def _custom_validate(self, verify, trust_bundle):
"""
Called when we have set custom validation. We do this in two cases:
@@ -441,6 +456,7 @@ def handshake(
client_cert,
client_key,
client_key_passphrase,
+ alpn_protocols,
):
"""
Actually performs the TLS handshake. This is run automatically by
@@ -481,6 +497,9 @@ def handshake(
# Setup the ciphers.
self._set_ciphers()
+ # Setup the ALPN protocols.
+ self._set_alpn_protocols(alpn_protocols)
+
# Set the minimum and maximum TLS versions.
result = Security.SSLSetProtocolVersionMin(self.context, min_version)
_assert_no_error(result)
@@ -754,6 +773,7 @@ def __init__(self, protocol):
self._client_cert = None
self._client_key = None
self._client_key_passphrase = None
+ self._alpn_protocols = None
@property
def check_hostname(self):
@@ -831,6 +851,18 @@ def load_cert_chain(self, certfile, keyfile=None, password=None):
self._client_key = keyfile
self._client_cert_passphrase = password
+ def set_alpn_protocols(self, protocols):
+ """
+ Sets the ALPN protocols that will later be set on the context.
+
+ Raises a NotImplementedError if ALPN is not supported.
+ """
+ if not hasattr(Security, "SSLSetALPNProtocols"):
+ raise NotImplementedError(
+ "SecureTransport supports ALPN only in macOS 10.12+"
+ )
+ self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
+
def wrap_socket(
self,
sock,
@@ -860,5 +892,6 @@ def wrap_socket(
self._client_cert,
self._client_key,
self._client_key_passphrase,
+ self._alpn_protocols,
)
return wrapped_socket
diff --git a/src/urllib3/util/__init__.py b/src/urllib3/util/__init__.py
--- a/src/urllib3/util/__init__.py
+++ b/src/urllib3/util/__init__.py
@@ -14,6 +14,7 @@
resolve_ssl_version,
ssl_wrap_socket,
PROTOCOL_TLS,
+ ALPN_PROTOCOLS,
)
from .timeout import current_time, Timeout
@@ -27,6 +28,7 @@
"IS_SECURETRANSPORT",
"SSLContext",
"PROTOCOL_TLS",
+ "ALPN_PROTOCOLS",
"Retry",
"Timeout",
"Url",
diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -17,6 +17,7 @@
HAS_SNI = False
IS_PYOPENSSL = False
IS_SECURETRANSPORT = False
+ALPN_PROTOCOLS = ["http/1.1"]
# Maps the length of a digest to a possible hash function producing this digest
HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
@@ -373,6 +374,12 @@ def ssl_wrap_socket(
else:
context.load_cert_chain(certfile, keyfile, key_password)
+ try:
+ if hasattr(context, "set_alpn_protocols"):
+ context.set_alpn_protocols(ALPN_PROTOCOLS)
+ except NotImplementedError:
+ pass
+
# If we detect server_hostname is an IP address then the SNI
# extension should not be used according to RFC3546 Section 3.1
# We shouldn't warn the user if SNI isn't available but we would
| diff --git a/test/__init__.py b/test/__init__.py
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -17,6 +17,7 @@
from urllib3.exceptions import HTTPWarning
from urllib3.packages import six
from urllib3.util import ssl_
+from urllib3 import util
# We need a host that will not immediately close the connection with a TCP
# Reset.
@@ -56,6 +57,19 @@ def _can_resolve(host):
return False
+def has_alpn(ctx_cls=None):
+ """ Detect if ALPN support is enabled. """
+ ctx_cls = ctx_cls or util.SSLContext
+ ctx = ctx_cls(protocol=ssl_.PROTOCOL_TLS)
+ try:
+ if hasattr(ctx, "set_alpn_protocols"):
+ ctx.set_alpn_protocols(ssl_.ALPN_PROTOCOLS)
+ return True
+ except NotImplementedError:
+ pass
+ return False
+
+
# Some systems might not resolve "localhost." correctly.
# See https://github.com/urllib3/urllib3/issues/1809 and
# https://github.com/urllib3/urllib3/pull/1475#issuecomment-440788064.
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -49,6 +49,7 @@
from urllib3.packages import six
from urllib3.util.timeout import Timeout
import urllib3.util as util
+from .. import has_alpn
# Retry failed tests
pytestmark = pytest.mark.flaky
@@ -717,6 +718,15 @@ def test_sslkeylogfile(self, tmpdir, monkeypatch):
% str(keylog_file)
)
+ def test_alpn_default(self):
+ """Default ALPN protocols are sent by default."""
+ if not has_alpn() or not has_alpn(ssl.SSLContext):
+ pytest.skip("ALPN-support not available")
+ with HTTPSConnectionPool(self.host, self.port, ca_certs=DEFAULT_CA) as pool:
+ r = pool.request("GET", "/alpn_protocol", retries=0)
+ assert r.status == 200
+ assert r.data.decode("utf-8") == util.ALPN_PROTOCOLS[0]
+
@requiresTLSv1()
class TestHTTPS_TLSv1(TestHTTPS):
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -26,7 +26,7 @@
encrypt_key_pem,
)
-from .. import onlyPy3, LogRecorder
+from .. import onlyPy3, LogRecorder, has_alpn
try:
from mimetools import Message as MimeToolMessage
@@ -102,7 +102,7 @@ def socket_handler(listener):
sock.close()
self._start_server(socket_handler)
- with HTTPConnectionPool(self.host, self.port) as pool:
+ with HTTPSConnectionPool(self.host, self.port) as pool:
try:
pool.request("GET", "/", retries=0)
except MaxRetryError: # We are violating the protocol
@@ -114,6 +114,35 @@ def socket_handler(listener):
), "missing hostname in SSL handshake"
+class TestALPN(SocketDummyServerTestCase):
+ def test_alpn_protocol_in_first_request_packet(self):
+ if not has_alpn():
+ pytest.skip("ALPN-support not available")
+
+ done_receiving = Event()
+ self.buf = b""
+
+ def socket_handler(listener):
+ sock = listener.accept()[0]
+
+ self.buf = sock.recv(65536) # We only accept one packet
+ done_receiving.set() # let the test know it can proceed
+ sock.close()
+
+ self._start_server(socket_handler)
+ with HTTPSConnectionPool(self.host, self.port) as pool:
+ try:
+ pool.request("GET", "/", retries=0)
+ except MaxRetryError: # We are violating the protocol
+ pass
+ successful = done_receiving.wait(LONG_TIMEOUT)
+ assert successful, "Timed out waiting for connection accept"
+ for protocol in util.ALPN_PROTOCOLS:
+ assert (
+ protocol.encode("ascii") in self.buf
+ ), "missing ALPN protocol in SSL handshake"
+
+
class TestClientCerts(SocketDummyServerTestCase):
"""
Tests for client certificate support.
| urllib3 should send http/1.1 ALPN extension by default
Some servers have started to require the ALPN extension and forcefully shut down HTTPS connections when a client does not indicate the HTTP protocol version in the ALPN extension. The fix is a trivial one-liner. Daniel told me that curl has been sending ALPN for years without any negative side effects.
See https://bugs.python.org/issue40968 for CPython bug
See https://github.com/python/cpython/pull/20959 for CPython PR
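For reference, the client-side change amounts to calling `set_alpn_protocols`; a minimal standalone sketch with the stdlib `ssl` module:

```python
import socket
import ssl

ctx = ssl.create_default_context()
ctx.set_alpn_protocols(["http/1.1"])  # advertise HTTP/1.1 in the TLS handshake

with socket.create_connection(("example.com", 443)) as sock:
    with ctx.wrap_socket(sock, server_hostname="example.com") as tls:
        print(tls.selected_alpn_protocol())  # "http/1.1" if the server agreed
```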
| Thanks for opening this! I agree we should implement this. | 2020-06-25T15:44:30Z | [] | [] |
urllib3/urllib3 | 1,903 | urllib3__urllib3-1903 | [
"1902"
] | a5a45dc36f3821e97bb9d6f2b4cd438a3f518af3 | diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -375,14 +375,13 @@ def ssl_wrap_socket(
# If we detect server_hostname is an IP address then the SNI
# extension should not be used according to RFC3546 Section 3.1
- # We shouldn't warn the user if SNI isn't available but we would
- # not be using SNI anyways due to IP address for server_hostname.
- if (
- server_hostname is not None and not is_ipaddress(server_hostname)
- ) or IS_SECURETRANSPORT:
- if HAS_SNI and server_hostname is not None:
- return context.wrap_socket(sock, server_hostname=server_hostname)
-
+ use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
+ # SecureTransport uses server_hostname in certificate verification.
+ send_sni = (use_sni_hostname and HAS_SNI) or (
+ IS_SECURETRANSPORT and server_hostname
+ )
+ # Do not warn the user if server_hostname is an invalid SNI hostname.
+ if not HAS_SNI and use_sni_hostname:
warnings.warn(
"An HTTPS request has been made, but the SNI (Server Name "
"Indication) extension to TLS is not available on this platform. "
@@ -394,7 +393,11 @@ def ssl_wrap_socket(
SNIMissingWarning,
)
- return context.wrap_socket(sock)
+ if send_sni:
+ ssl_sock = context.wrap_socket(sock, server_hostname=server_hostname)
+ else:
+ ssl_sock = context.wrap_socket(sock)
+ return ssl_sock
def is_ipaddress(hostname):
| diff --git a/test/contrib/test_pyopenssl.py b/test/contrib/test_pyopenssl.py
--- a/test/contrib/test_pyopenssl.py
+++ b/test/contrib/test_pyopenssl.py
@@ -30,6 +30,7 @@ def teardown_module():
pass
+from ..test_util import TestUtilSSL # noqa: E402, F401
from ..with_dummyserver.test_https import ( # noqa: E402, F401
TestHTTPS,
TestHTTPS_TLSv1,
diff --git a/test/contrib/test_securetransport.py b/test/contrib/test_securetransport.py
--- a/test/contrib/test_securetransport.py
+++ b/test/contrib/test_securetransport.py
@@ -29,6 +29,8 @@ def teardown_module():
pass
+from ..test_util import TestUtilSSL # noqa: E402, F401
+
# SecureTransport does not support TLSv1.3
# https://github.com/urllib3/urllib3/issues/1674
from ..with_dummyserver.test_https import ( # noqa: E402, F401
diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -10,7 +10,7 @@
from mock import patch, Mock
import pytest
-from urllib3 import add_stderr_logger, disable_warnings
+from urllib3 import add_stderr_logger, disable_warnings, util
from urllib3.util.request import make_headers, rewind_body, _FAILEDTELL
from urllib3.util.response import assert_header_parsing
from urllib3.util.timeout import Timeout
@@ -29,7 +29,7 @@
UnrewindableBodyError,
)
from urllib3.util.connection import allowed_gai_family, _has_ipv6
-from urllib3.util import is_fp_closed, ssl_
+from urllib3.util import is_fp_closed
from urllib3.packages import six
from . import clear_warnings
@@ -666,31 +666,6 @@ def test_timeout_elapsed(self, current_time):
current_time.return_value = TIMEOUT_EPOCH + 37
assert timeout.get_connect_duration() == 37
- @pytest.mark.parametrize(
- "candidate, requirements",
- [
- (None, ssl.CERT_REQUIRED),
- (ssl.CERT_NONE, ssl.CERT_NONE),
- (ssl.CERT_REQUIRED, ssl.CERT_REQUIRED),
- ("REQUIRED", ssl.CERT_REQUIRED),
- ("CERT_REQUIRED", ssl.CERT_REQUIRED),
- ],
- )
- def test_resolve_cert_reqs(self, candidate, requirements):
- assert resolve_cert_reqs(candidate) == requirements
-
- @pytest.mark.parametrize(
- "candidate, version",
- [
- (ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1),
- ("PROTOCOL_TLSv1", ssl.PROTOCOL_TLSv1),
- ("TLSv1", ssl.PROTOCOL_TLSv1),
- (ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23),
- ],
- )
- def test_resolve_ssl_version(self, candidate, version):
- assert resolve_ssl_version(candidate) == version
-
def test_is_fp_closed_object_supports_closed(self):
class ClosedFile(object):
@property
@@ -722,72 +697,6 @@ class NotReallyAFile(object):
with pytest.raises(ValueError):
is_fp_closed(NotReallyAFile())
- def test_ssl_wrap_socket_loads_the_cert_chain(self):
- socket = object()
- mock_context = Mock()
- ssl_wrap_socket(
- ssl_context=mock_context, sock=socket, certfile="/path/to/certfile"
- )
-
- mock_context.load_cert_chain.assert_called_once_with("/path/to/certfile", None)
-
- @patch("urllib3.util.ssl_.create_urllib3_context")
- def test_ssl_wrap_socket_creates_new_context(self, create_urllib3_context):
- socket = object()
- ssl_wrap_socket(sock=socket, cert_reqs="CERT_REQUIRED")
-
- create_urllib3_context.assert_called_once_with(
- None, "CERT_REQUIRED", ciphers=None
- )
-
- def test_ssl_wrap_socket_loads_verify_locations(self):
- socket = object()
- mock_context = Mock()
- ssl_wrap_socket(ssl_context=mock_context, ca_certs="/path/to/pem", sock=socket)
- mock_context.load_verify_locations.assert_called_once_with(
- "/path/to/pem", None, None
- )
-
- def test_ssl_wrap_socket_loads_certificate_directories(self):
- socket = object()
- mock_context = Mock()
- ssl_wrap_socket(
- ssl_context=mock_context, ca_cert_dir="/path/to/pems", sock=socket
- )
- mock_context.load_verify_locations.assert_called_once_with(
- None, "/path/to/pems", None
- )
-
- def test_ssl_wrap_socket_loads_certificate_data(self):
- socket = object()
- mock_context = Mock()
- ssl_wrap_socket(
- ssl_context=mock_context, ca_cert_data="TOTALLY PEM DATA", sock=socket
- )
- mock_context.load_verify_locations.assert_called_once_with(
- None, None, "TOTALLY PEM DATA"
- )
-
- def test_ssl_wrap_socket_with_no_sni_warns(self):
- socket = object()
- mock_context = Mock()
- # Ugly preservation of original value
- HAS_SNI = ssl_.HAS_SNI
- ssl_.HAS_SNI = False
- try:
- with patch("warnings.warn") as warn:
- ssl_wrap_socket(
- ssl_context=mock_context,
- sock=socket,
- server_hostname="www.google.com",
- )
- mock_context.wrap_socket.assert_called_once_with(socket)
- assert warn.call_count >= 1
- warnings = [call[0][1] for call in warn.call_args_list]
- assert SNIMissingWarning in warnings
- finally:
- ssl_.HAS_SNI = HAS_SNI
-
def test_const_compare_digest_fallback(self):
target = hashlib.sha256(b"abcdef").digest()
assert _const_compare_digest_backport(target, target)
@@ -838,3 +747,118 @@ def test_ip_family_ipv6_disabled(self):
def test_assert_header_parsing_throws_typeerror_with_non_headers(self, headers):
with pytest.raises(TypeError):
assert_header_parsing(headers)
+
+
+class TestUtilSSL(object):
+ """Test utils that use an SSL backend."""
+
+ @pytest.mark.parametrize(
+ "candidate, requirements",
+ [
+ (None, ssl.CERT_REQUIRED),
+ (ssl.CERT_NONE, ssl.CERT_NONE),
+ (ssl.CERT_REQUIRED, ssl.CERT_REQUIRED),
+ ("REQUIRED", ssl.CERT_REQUIRED),
+ ("CERT_REQUIRED", ssl.CERT_REQUIRED),
+ ],
+ )
+ def test_resolve_cert_reqs(self, candidate, requirements):
+ assert resolve_cert_reqs(candidate) == requirements
+
+ @pytest.mark.parametrize(
+ "candidate, version",
+ [
+ (ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1),
+ ("PROTOCOL_TLSv1", ssl.PROTOCOL_TLSv1),
+ ("TLSv1", ssl.PROTOCOL_TLSv1),
+ (ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23),
+ ],
+ )
+ def test_resolve_ssl_version(self, candidate, version):
+ assert resolve_ssl_version(candidate) == version
+
+ def test_ssl_wrap_socket_loads_the_cert_chain(self):
+ socket = object()
+ mock_context = Mock()
+ ssl_wrap_socket(
+ ssl_context=mock_context, sock=socket, certfile="/path/to/certfile"
+ )
+
+ mock_context.load_cert_chain.assert_called_once_with("/path/to/certfile", None)
+
+ @patch("urllib3.util.ssl_.create_urllib3_context")
+ def test_ssl_wrap_socket_creates_new_context(self, create_urllib3_context):
+ socket = object()
+ ssl_wrap_socket(sock=socket, cert_reqs="CERT_REQUIRED")
+
+ create_urllib3_context.assert_called_once_with(
+ None, "CERT_REQUIRED", ciphers=None
+ )
+
+ def test_ssl_wrap_socket_loads_verify_locations(self):
+ socket = object()
+ mock_context = Mock()
+ ssl_wrap_socket(ssl_context=mock_context, ca_certs="/path/to/pem", sock=socket)
+ mock_context.load_verify_locations.assert_called_once_with(
+ "/path/to/pem", None, None
+ )
+
+ def test_ssl_wrap_socket_loads_certificate_directories(self):
+ socket = object()
+ mock_context = Mock()
+ ssl_wrap_socket(
+ ssl_context=mock_context, ca_cert_dir="/path/to/pems", sock=socket
+ )
+ mock_context.load_verify_locations.assert_called_once_with(
+ None, "/path/to/pems", None
+ )
+
+ def test_ssl_wrap_socket_loads_certificate_data(self):
+ socket = object()
+ mock_context = Mock()
+ ssl_wrap_socket(
+ ssl_context=mock_context, ca_cert_data="TOTALLY PEM DATA", sock=socket
+ )
+ mock_context.load_verify_locations.assert_called_once_with(
+ None, None, "TOTALLY PEM DATA"
+ )
+
+ def _wrap_socket_and_mock_warn(self, sock, server_hostname):
+ mock_context = Mock()
+ with patch("warnings.warn") as warn:
+ ssl_wrap_socket(
+ ssl_context=mock_context, sock=sock, server_hostname=server_hostname,
+ )
+ return mock_context, warn
+
+ def test_ssl_wrap_socket_sni_hostname_use_or_warn(self):
+ """Test that either an SNI hostname is used or a warning is made."""
+ sock = object()
+ context, warn = self._wrap_socket_and_mock_warn(sock, "www.google.com")
+ if util.HAS_SNI:
+ warn.assert_not_called()
+ context.wrap_socket.assert_called_once_with(
+ sock, server_hostname="www.google.com"
+ )
+ else:
+ assert warn.call_count >= 1
+ warnings = [call[0][1] for call in warn.call_args_list]
+ assert SNIMissingWarning in warnings
+ context.wrap_socket.assert_called_once_with(sock)
+
+ def test_ssl_wrap_socket_sni_ip_address_no_warn(self):
+ """Test that a warning is not made if server_hostname is an IP address."""
+ sock = object()
+ context, warn = self._wrap_socket_and_mock_warn(sock, "8.8.8.8")
+ if util.IS_SECURETRANSPORT:
+ context.wrap_socket.assert_called_once_with(sock, server_hostname="8.8.8.8")
+ else:
+ context.wrap_socket.assert_called_once_with(sock)
+ warn.assert_not_called()
+
+ def test_ssl_wrap_socket_sni_none_no_warn(self):
+ """Test that a warning is not made if server_hostname is not given."""
+ sock = object()
+ context, warn = self._wrap_socket_and_mock_warn(sock, None)
+ context.wrap_socket.assert_called_once_with(sock)
+ warn.assert_not_called()
| Unnecessary SNI warning with SecureTransport
SNI is always enabled with the SecureTransport backend.
If a platform lacks SNI support and SNI would otherwise have been used (i.e., a valid ```server_hostname``` is given), we issue an SNI warning.
With the SecureTransport backend, if one wraps a socket with no ```server_hostname```, an SNI warning is still issued. This is inconsistent with the other backends' behavior.
To reproduce, run on macOS with and without SecureTransport injection:
```python
import socket
import ssl
import threading
# comment these 2 lines
from urllib3.contrib import securetransport
securetransport.inject_into_urllib3()
from urllib3 import util
import trustme
ca = trustme.CA()
cert = ca.issue_cert(u"localhost")
ca.cert_pem.write_to_path("/tmp/ca.pem")
cert.private_key_pem.write_to_path("/tmp/cert.key")
cert.cert_chain_pems[0].write_to_path("/tmp/cert.pem")
server_up = threading.Event()
c = socket.socket()
l = socket.socket()
def client():
c.connect(('127.0.0.1', l.getsockname()[1]))
ssl_sock = util.ssl_wrap_socket(c, ca_certs="/tmp/ca.pem")
ssl_sock.close()
def server():
l.bind(('127.0.0.1', 0))
l.listen(1)
server_up.set()
s = l.accept()[0]
ssl_sock = ssl.wrap_socket(s, server_side=True, certfile="/tmp/cert.pem", keyfile="/tmp/cert.key")
ssl_sock.close()
client_thread = threading.Thread(target=client)
server_thread = threading.Thread(target=server)
server_thread.start()
assert server_up.wait(5)
client_thread.start()
server_thread.join()
client_thread.join()
```
An SNI warning is made only when using the SecureTransport backend, because of [these lines](https://github.com/urllib3/urllib3/blob/master/src/urllib3/util/ssl_.py#L380,L386).
We pass ```server_hostname``` to SecureTransport regardless of SNI hostname compliance (i.e. even if it is an IP address) since SecureTransport uses ```server_hostname``` to validate the certificate's CN.
A side effect of this commit is that an SNI warning is also issued if ```server_hostname``` is ```None```.
I'm currently working on a better way to test SSL backends that will cover this scenario and a small refactor to the SNI logic in ```ssl_wrap_socket```.
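For illustration, the warning condition the new tests above encode can be sketched as follows (a minimal sketch; `is_ipaddress` is a helper urllib3 already uses for SNI checks, and the final refactor may differ):
```python
from urllib3.util.ssl_ import is_ipaddress

def should_warn_missing_sni(server_hostname, has_sni):
    # Warn only when SNI would actually have been used: a real hostname
    # (not None and not an IP address) on a platform without SNI support.
    return (
        server_hostname is not None
        and not is_ipaddress(server_hostname)
        and not has_sni
    )
```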
| 2020-07-13T20:56:24Z | [] | [] |
|
urllib3/urllib3 | 1,923 | urllib3__urllib3-1923 | [
"1662"
] | d560e21dea87e9a24b94ea04873a68ac1a5f03aa | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -9,6 +9,7 @@
from .packages import six
from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
from .packages.six.moves.http_client import HTTPException # noqa: F401
+from .util.proxy import create_proxy_ssl_context
try: # Compiled with SSL?
import ssl
@@ -117,6 +118,11 @@ def __init__(self, *args, **kw):
#: The socket options provided by the user. If no options are
#: provided, we use the default options.
self.socket_options = kw.pop("socket_options", self.default_socket_options)
+
+ # Proxy options provided by the user.
+ self.proxy = kw.pop("proxy", None)
+ self.proxy_config = kw.pop("proxy_config", None)
+
_HTTPConnection.__init__(self, *args, **kw)
@property
@@ -271,6 +277,7 @@ class HTTPSConnection(HTTPConnection):
ca_cert_data = None
ssl_version = None
assert_fingerprint = None
+ tls_in_tls_required = False
def __init__(
self,
@@ -335,8 +342,13 @@ def connect(self):
# Add certificate verification
conn = self._new_conn()
hostname = self.host
+ tls_in_tls = False
if self._is_using_tunnel():
+ if self.tls_in_tls_required:
+ conn = self._connect_tls_proxy(hostname, conn)
+ tls_in_tls = True
+
self.sock = conn
# Calls self._set_hostport(), so self.host is
@@ -396,6 +408,7 @@ def connect(self):
ca_cert_data=self.ca_cert_data,
server_hostname=server_hostname,
ssl_context=context,
+ tls_in_tls=tls_in_tls,
)
if self.assert_fingerprint:
@@ -428,6 +441,40 @@ def connect(self):
or self.assert_fingerprint is not None
)
+ def _connect_tls_proxy(self, hostname, conn):
+ """
+ Establish a TLS connection to the proxy using the provided SSL context.
+ """
+ proxy_config = self.proxy_config
+ ssl_context = proxy_config.ssl_context
+ if ssl_context:
+ # If the user provided a proxy context, we assume CA and client
+ # certificates have already been set
+ return ssl_wrap_socket(
+ sock=conn,
+ server_hostname=hostname,
+ ssl_context=ssl_context,
+ )
+
+ ssl_context = create_proxy_ssl_context(
+ self.ssl_version,
+ self.cert_reqs,
+ self.ca_certs,
+ self.ca_cert_dir,
+ self.ca_cert_data,
+ )
+
+ # If no cert was provided, use only the default options for server
+ # certificate validation
+ return ssl_wrap_socket(
+ sock=conn,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ ca_cert_data=self.ca_cert_data,
+ server_hostname=hostname,
+ ssl_context=ssl_context,
+ )
+
def _match_hostname(cert, asserted_hostname):
try:
diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -40,6 +40,7 @@
from .response import HTTPResponse
from .util.connection import is_connection_dropped
+from .util.proxy import connection_requires_http_tunnel
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
@@ -182,6 +183,7 @@ def __init__(
retries=None,
_proxy=None,
_proxy_headers=None,
+ _proxy_config=None,
**conn_kw
):
ConnectionPool.__init__(self, host, port)
@@ -203,6 +205,7 @@ def __init__(
self.proxy = _proxy
self.proxy_headers = _proxy_headers or {}
+ self.proxy_config = _proxy_config
# Fill the queue up so that doing get() on it will block properly
for _ in xrange(maxsize):
@@ -219,6 +222,9 @@ def __init__(
# list.
self.conn_kw.setdefault("socket_options", [])
+ self.conn_kw["proxy"] = self.proxy
+ self.conn_kw["proxy_config"] = self.proxy_config
+
def _new_conn(self):
"""
Return a fresh :class:`HTTPConnection`.
@@ -621,6 +627,10 @@ def urlopen(
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
+
+ parsed_url = parse_url(url)
+ destination_scheme = parsed_url.scheme
+
if headers is None:
headers = self.headers
@@ -638,7 +648,7 @@ def urlopen(
if url.startswith("/"):
url = six.ensure_str(_encode_target(url))
else:
- url = six.ensure_str(parse_url(url).url)
+ url = six.ensure_str(parsed_url.url)
conn = None
@@ -653,10 +663,14 @@ def urlopen(
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
+ http_tunnel_required = connection_requires_http_tunnel(
+ self.proxy, self.proxy_config, destination_scheme
+ )
+
# Merge the proxy headers. Only done when not using HTTP CONNECT. We
# have to copy the headers dict so we can safely change it without those
# changes being reflected in anyone else's copy.
- if self.scheme == "http" or (self.proxy and self.proxy.scheme == "https"):
+ if not http_tunnel_required:
headers = headers.copy()
headers.update(self.proxy_headers)
@@ -682,7 +696,7 @@ def urlopen(
is_new_proxy_conn = self.proxy is not None and not getattr(
conn, "sock", None
)
- if is_new_proxy_conn:
+ if is_new_proxy_conn and http_tunnel_required:
self._prepare_proxy(conn)
# Make the request on the httplib connection object.
@@ -946,8 +960,10 @@ def _prepare_proxy(self, conn):
improperly set Host: header to proxy's IP:port.
"""
- if self.proxy.scheme != "https":
- conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
+ conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
+
+ if self.proxy.scheme == "https":
+ conn.tls_in_tls_required = True
conn.connect()
diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py
--- a/src/urllib3/poolmanager.py
+++ b/src/urllib3/poolmanager.py
@@ -2,14 +2,12 @@
import collections
import functools
import logging
-import warnings
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .exceptions import (
- HTTPWarning,
LocationValueError,
MaxRetryError,
ProxySchemeUnknown,
@@ -21,17 +19,13 @@
from .request import RequestMethods
from .util.url import parse_url
from .util.retry import Retry
+from .util.proxy import connection_requires_http_tunnel
+from .packages.six import PY3
__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
-class InvalidProxyConfigurationWarning(HTTPWarning):
- """Raised when a user has an HTTPS proxy without enabling HTTPS proxies."""
-
- pass
-
-
log = logging.getLogger(__name__)
SSL_KEYWORDS = (
@@ -68,6 +62,7 @@ class InvalidProxyConfigurationWarning(HTTPWarning):
"key_headers", # dict
"key__proxy", # parsed proxy url
"key__proxy_headers", # dict
+ "key__proxy_config", # class
"key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples
"key__socks_options", # dict
"key_assert_hostname", # bool or string
@@ -79,6 +74,9 @@ class InvalidProxyConfigurationWarning(HTTPWarning):
#: All custom key schemes should include the fields in this key at a minimum.
PoolKey = collections.namedtuple("PoolKey", _key_fields)
+_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
+ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
+
def _default_key_normalizer(key_class, request_context):
"""
@@ -170,6 +168,7 @@ class PoolManager(RequestMethods):
"""
proxy = None
+ proxy_config = None
def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
RequestMethods.__init__(self, headers)
@@ -326,14 +325,32 @@ def _merge_pool_kwargs(self, override):
def _proxy_requires_url_absolute_form(self, parsed_url):
"""
Indicates if the proxy requires the complete destination URL in the
- request.
-
- Normally this is only needed when not using an HTTP CONNECT tunnel.
+ request. Normally this is only needed when not using an HTTP CONNECT
+ tunnel.
"""
if self.proxy is None:
return False
- return parsed_url.scheme == "http" or self.proxy.scheme == "https"
+ return not connection_requires_http_tunnel(
+ self.proxy, self.proxy_config, parsed_url.scheme
+ )
+
+ def _validate_proxy_scheme_url_selection(self, url_scheme):
+ """
+        Validates that we're not attempting to do TLS in TLS connections on
+ Python2 or with unsupported SSL implementations.
+ """
+ if self.proxy is None or url_scheme != "https":
+ return
+
+ if self.proxy.scheme != "https":
+ return
+
+ if not PY3 and not self.proxy_config.use_forwarding_for_https:
+ raise ProxySchemeUnsupported(
+ "Contacting HTTPS destinations through HTTPS proxies "
+ "'via CONNECT tunnels' is not supported in Python 2"
+ )
def urlopen(self, method, url, redirect=True, **kw):
"""
@@ -345,6 +362,8 @@ def urlopen(self, method, url, redirect=True, **kw):
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
"""
u = parse_url(url)
+ self._validate_proxy_scheme_url_selection(u.scheme)
+
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
kw["assert_same_host"] = False
@@ -415,11 +434,18 @@ class ProxyManager(PoolManager):
HTTPS/CONNECT case they are sent only once. Could be used for proxy
authentication.
- :param _allow_https_proxy_to_see_traffic:
- Allows forwarding of HTTPS requests to HTTPS proxies. The proxy will
- have visibility of all the traffic sent. ONLY USE IF YOU KNOW WHAT
- YOU'RE DOING. This flag might be removed at any time in any future
- update.
+ :param proxy_ssl_context:
+ The proxy SSL context is used to establish the TLS connection to the
+ proxy when using HTTPS proxies.
+
+ :param use_forwarding_for_https:
+ (Defaults to False) If set to True will forward requests to the HTTPS
+ proxy to be made on behalf of the client instead of creating a TLS
+ tunnel via the CONNECT method. **Enabling this flag means that request
+ and response headers and content will be visible from the HTTPS proxy**
+ whereas tunneling keeps request and response headers and content
+ private. IP address, target hostname, SNI, and port are always visible
+ to an HTTPS proxy even when this flag is disabled.
Example:
>>> proxy = urllib3.ProxyManager('http://localhost:3128/')
@@ -440,7 +466,8 @@ def __init__(
num_pools=10,
headers=None,
proxy_headers=None,
- _allow_https_proxy_to_see_traffic=False,
+ proxy_ssl_context=None,
+ use_forwarding_for_https=False,
**connection_pool_kw
):
@@ -461,11 +488,12 @@ def __init__(
self.proxy = proxy
self.proxy_headers = proxy_headers or {}
+ self.proxy_ssl_context = proxy_ssl_context
+ self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
connection_pool_kw["_proxy"] = self.proxy
connection_pool_kw["_proxy_headers"] = self.proxy_headers
-
- self.allow_insecure_proxy = _allow_https_proxy_to_see_traffic
+ connection_pool_kw["_proxy_config"] = self.proxy_config
super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
@@ -494,35 +522,13 @@ def _set_proxy_headers(self, url, headers=None):
headers_.update(headers)
return headers_
- def _validate_proxy_scheme_url_selection(self, url_scheme):
- if (
- url_scheme == "https"
- and self.proxy.scheme == "https"
- and not self.allow_insecure_proxy
- ):
- warnings.warn(
- "Your proxy configuration specified an HTTPS scheme for the proxy. "
- "Are you sure you want to use HTTPS to contact the proxy? "
- "This most likely indicates an error in your configuration."
- "If you are sure you want use HTTPS to contact the proxy, enable "
- "the _allow_https_proxy_to_see_traffic.",
- InvalidProxyConfigurationWarning,
- )
-
- raise ProxySchemeUnsupported(
- "Contacting HTTPS destinations through HTTPS proxies is not supported."
- )
-
def urlopen(self, method, url, redirect=True, **kw):
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
u = parse_url(url)
- self._validate_proxy_scheme_url_selection(u.scheme)
-
- if u.scheme == "http" or self.proxy.scheme == "https":
+ if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
# For connections using HTTP CONNECT, httplib sets the necessary
- # headers on the CONNECT to the proxy. For HTTP or when talking
- # HTTPS to the proxy, we'll definitely need to set 'Host' at the
- # very least.
+ # headers on the CONNECT to the proxy. If we're not using CONNECT,
+ # we'll definitely need to set 'Host' at the very least.
headers = kw.get("headers", self.headers)
kw["headers"] = self._set_proxy_headers(url, headers)
diff --git a/src/urllib3/util/proxy.py b/src/urllib3/util/proxy.py
new file mode 100644
--- /dev/null
+++ b/src/urllib3/util/proxy.py
@@ -0,0 +1,60 @@
+from .ssl_ import (
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ create_urllib3_context,
+)
+
+
+def connection_requires_http_tunnel(
+ proxy_url=None, proxy_config=None, destination_scheme=None
+):
+ """
+ Returns True if the connection requires an HTTP CONNECT through the proxy.
+
+ :param URL proxy_url:
+ URL of the proxy.
+ :param ProxyConfig proxy_config:
+ Proxy configuration from poolmanager.py
+ :param str destination_scheme:
+ The scheme of the destination. (i.e https, http, etc)
+ """
+ # If we're not using a proxy, no way to use a tunnel.
+ if proxy_url is None:
+ return False
+
+ # HTTP destinations never require tunneling, we always forward.
+ if destination_scheme == "http":
+ return False
+
+ # Support for forwarding with HTTPS proxies and HTTPS destinations.
+ if (
+ proxy_url.scheme == "https"
+ and proxy_config
+ and proxy_config.use_forwarding_for_https
+ ):
+ return False
+
+ # Otherwise always use a tunnel.
+ return True
+
+
+def create_proxy_ssl_context(
+ ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
+):
+ """
+ Generates a default proxy ssl context if one hasn't been provided by the
+ user.
+ """
+ ssl_context = create_urllib3_context(
+ ssl_version=resolve_ssl_version(ssl_version),
+ cert_reqs=resolve_cert_reqs(cert_reqs),
+ )
+ if (
+ not ca_certs
+ and not ca_cert_dir
+ and not ca_cert_data
+ and hasattr(ssl_context, "load_default_certs")
+ ):
+ ssl_context.load_default_certs()
+
+ return ssl_context
diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -8,11 +8,17 @@
from hashlib import md5, sha1, sha256
from .url import IPV4_RE, BRACELESS_IPV6_ADDRZ_RE
-from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
+from ..exceptions import (
+ SSLError,
+ InsecurePlatformWarning,
+ SNIMissingWarning,
+ ProxySchemeUnsupported,
+)
from ..packages import six
SSLContext = None
+SSLTransport = None
HAS_SNI = False
IS_PYOPENSSL = False
IS_SECURETRANSPORT = False
@@ -41,6 +47,7 @@ def _const_compare_digest_backport(a, b):
import ssl
from ssl import wrap_socket, CERT_REQUIRED
from ssl import HAS_SNI # Has SNI?
+ from .ssltransport import SSLTransport
except ImportError:
pass
@@ -316,6 +323,7 @@ def ssl_wrap_socket(
ca_cert_dir=None,
key_password=None,
ca_cert_data=None,
+ tls_in_tls=False,
):
"""
All arguments except for server_hostname, ssl_context, and ca_cert_dir have
@@ -337,6 +345,8 @@ def ssl_wrap_socket(
:param ca_cert_data:
Optional string containing CA certificates in PEM format suitable for
passing as the cadata parameter to SSLContext.load_verify_locations()
+ :param tls_in_tls:
+ Use SSLTransport to wrap the existing socket.
"""
context = ssl_context
if context is None:
@@ -394,9 +404,11 @@ def ssl_wrap_socket(
)
if send_sni:
- ssl_sock = context.wrap_socket(sock, server_hostname=server_hostname)
+ ssl_sock = _ssl_wrap_socket_impl(
+ sock, context, tls_in_tls, server_hostname=server_hostname
+ )
else:
- ssl_sock = context.wrap_socket(sock)
+ ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
return ssl_sock
@@ -422,3 +434,20 @@ def _is_key_file_encrypted(key_file):
return True
return False
+
+
+def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
+ if tls_in_tls:
+ if not SSLTransport:
+ # Import error, ssl is not available.
+ raise ProxySchemeUnsupported(
+ "TLS in TLS requires support for the 'ssl' module"
+ )
+
+ SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
+ return SSLTransport(sock, ssl_context, server_hostname)
+
+ if server_hostname:
+ return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
+ else:
+ return ssl_context.wrap_socket(sock)
diff --git a/src/urllib3/contrib/ssl.py b/src/urllib3/util/ssltransport.py
similarity index 87%
rename from src/urllib3/contrib/ssl.py
rename to src/urllib3/util/ssltransport.py
--- a/src/urllib3/contrib/ssl.py
+++ b/src/urllib3/util/ssltransport.py
@@ -2,6 +2,9 @@
import socket
import io
+from urllib3.exceptions import ProxySchemeUnsupported
+from urllib3.packages.six import PY3
+
SSL_BLOCKSIZE = 16384
@@ -16,6 +19,28 @@ class SSLTransport:
The class supports most of the socket API operations.
"""
+ @staticmethod
+ def _validate_ssl_context_for_tls_in_tls(ssl_context):
+ """
+ Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
+ for TLS in TLS.
+
+ The only requirement is that the ssl_context provides the 'wrap_bio'
+ methods.
+ """
+
+ if not hasattr(ssl_context, "wrap_bio"):
+ if not PY3:
+ raise ProxySchemeUnsupported(
+ "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+ "supported on Python 2"
+ )
+ else:
+ raise ProxySchemeUnsupported(
+ "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+ "available on non-native SSLContext"
+ )
+
def __init__(
self, socket, ssl_context, suppress_ragged_eofs=True, server_hostname=None
):
| diff --git a/test/__init__.py b/test/__init__.py
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -19,6 +19,11 @@
from urllib3.util import ssl_
from urllib3 import util
+try:
+ import urllib3.contrib.pyopenssl as pyopenssl
+except ImportError:
+ pyopenssl = None
+
# We need a host that will not immediately close the connection with a TCP
# Reset.
if platform.system() == "Windows":
@@ -166,6 +171,19 @@ def notBrotlipy():
)
+def onlySecureTransport(test):
+ """Runs this test when SecureTransport is in use."""
+
+ @six.wraps(test)
+ def wrapper(*args, **kwargs):
+ msg = "{name} only runs with SecureTransport".format(name=test.__name__)
+ if not ssl_.IS_SECURETRANSPORT:
+ pytest.skip(msg)
+ return test(*args, **kwargs)
+
+ return wrapper
+
+
def notSecureTransport(test):
"""Skips this test when SecureTransport is in use."""
@@ -290,6 +308,21 @@ def wrapper(*args, **kwargs):
return wrapper
+def withPyOpenSSL(test):
+ @six.wraps(test)
+ def wrapper(*args, **kwargs):
+ if not pyopenssl:
+ pytest.skip("pyopenssl not available, skipping test.")
+            return test(*args, **kwargs)
+
+        pyopenssl.inject_into_urllib3()
+        result = test(*args, **kwargs)
+        pyopenssl.extract_from_urllib3()
+        return result
+
+ return wrapper
+
+
class _ListHandler(logging.Handler):
def __init__(self):
super(_ListHandler, self).__init__()
diff --git a/test/test_poolmanager.py b/test/test_poolmanager.py
--- a/test/test_poolmanager.py
+++ b/test/test_poolmanager.py
@@ -366,3 +366,9 @@ def test_merge_pool_kwargs_invalid_key(self):
p = PoolManager(strict=True)
merged = p._merge_pool_kwargs({"invalid_key": None})
assert p.connection_pool_kw == merged
+
+ def test_pool_manager_no_url_absolute_form(self):
+ """Valides we won't send a request with absolute form without a proxy"""
+ p = PoolManager(strict=True)
+ assert p._proxy_requires_url_absolute_form("http://example.com") is False
+ assert p._proxy_requires_url_absolute_form("https://example.com") is False
diff --git a/test/test_proxymanager.py b/test/test_proxymanager.py
--- a/test/test_proxymanager.py
+++ b/test/test_proxymanager.py
@@ -62,6 +62,10 @@ def test_proxy_tunnel(self):
assert p._proxy_requires_url_absolute_form(https_url) is False
with ProxyManager("https://proxy:8080") as p:
+ assert p._proxy_requires_url_absolute_form(http_url)
+ assert p._proxy_requires_url_absolute_form(https_url) is False
+
+ with ProxyManager("https://proxy:8080", use_forwarding_for_https=True) as p:
assert p._proxy_requires_url_absolute_form(http_url)
assert p._proxy_requires_url_absolute_form(https_url)
diff --git a/test/contrib/test_ssltransport.py b/test/test_ssltransport.py
similarity index 99%
rename from test/contrib/test_ssltransport.py
rename to test/test_ssltransport.py
--- a/test/contrib/test_ssltransport.py
+++ b/test/test_ssltransport.py
@@ -4,7 +4,7 @@
DEFAULT_CA,
)
-from urllib3.contrib.ssl import SSLTransport
+from urllib3.util.ssltransport import SSLTransport
import select
import pytest
diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -28,10 +28,16 @@
SNIMissingWarning,
UnrewindableBodyError,
)
-from urllib3.util.connection import allowed_gai_family, _has_ipv6
+from urllib3.util.proxy import (
+ connection_requires_http_tunnel,
+ create_proxy_ssl_context,
+)
from urllib3.util import is_fp_closed
+from urllib3.util.connection import allowed_gai_family, _has_ipv6
from urllib3.packages import six
+from urllib3.poolmanager import ProxyConfig
+
from . import clear_warnings
from test import onlyPy3, onlyPy2, onlyBrotlipy, notBrotlipy
@@ -748,6 +754,34 @@ def test_assert_header_parsing_throws_typeerror_with_non_headers(self, headers):
with pytest.raises(TypeError):
assert_header_parsing(headers)
+ def test_connection_requires_http_tunnel_no_proxy(self):
+ assert not connection_requires_http_tunnel(
+ proxy_url=None, proxy_config=None, destination_scheme=None
+ )
+
+ def test_connection_requires_http_tunnel_http_proxy(self):
+ proxy = parse_url("http://proxy:8080")
+ proxy_config = ProxyConfig(ssl_context=None, use_forwarding_for_https=False)
+ destination_scheme = "http"
+ assert not connection_requires_http_tunnel(
+ proxy, proxy_config, destination_scheme
+ )
+
+ destination_scheme = "https"
+ assert connection_requires_http_tunnel(proxy, proxy_config, destination_scheme)
+
+ def test_connection_requires_http_tunnel_https_proxy(self):
+ proxy = parse_url("https://proxy:8443")
+ proxy_config = ProxyConfig(ssl_context=None, use_forwarding_for_https=False)
+ destination_scheme = "http"
+ assert not connection_requires_http_tunnel(
+ proxy, proxy_config, destination_scheme
+ )
+
+ def test_create_proxy_ssl_context(self):
+ ssl_context = create_proxy_ssl_context(ssl_version=None, cert_reqs=None)
+ ssl_context.verify_mode = ssl.CERT_REQUIRED
+
@onlyPy3
def test_assert_header_parsing_no_error_on_multipart(self):
from http import client
diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -22,8 +22,17 @@
ProxySchemeUnsupported,
)
from urllib3.connectionpool import connection_from_url, VerifiedHTTPSConnection
+from urllib3.util.ssl_ import create_urllib3_context
+
+from test import (
+ SHORT_TIMEOUT,
+ LONG_TIMEOUT,
+ onlyPy3,
+ onlyPy2,
+ withPyOpenSSL,
+ onlySecureTransport,
+)
-from test import SHORT_TIMEOUT, LONG_TIMEOUT
# Retry failed tests
pytestmark = pytest.mark.flaky
@@ -63,21 +72,75 @@ def test_basic_proxy(self):
r = http.request("GET", "%s/" % self.https_url)
assert r.status == 200
+ @onlyPy3
def test_https_proxy(self):
+ with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https:
+ r = https.request("GET", "%s/" % self.https_url)
+ assert r.status == 200
+
+ r = https.request("GET", "%s/" % self.http_url)
+ assert r.status == 200
+
+ @onlyPy3
+ def test_https_proxy_with_proxy_ssl_context(self):
+ proxy_ssl_context = create_urllib3_context()
+ proxy_ssl_context.load_verify_locations(DEFAULT_CA)
+ with proxy_from_url(
+ self.https_proxy_url,
+ proxy_ssl_context=proxy_ssl_context,
+ ca_certs=DEFAULT_CA,
+ ) as https:
+ r = https.request("GET", "%s/" % self.https_url)
+ assert r.status == 200
+
+ r = https.request("GET", "%s/" % self.http_url)
+ assert r.status == 200
+
+ @onlyPy2
+ def test_https_proxy_not_supported(self):
+ with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https:
+ r = https.request("GET", "%s/" % self.http_url)
+ assert r.status == 200
+
+ with pytest.raises(ProxySchemeUnsupported) as excinfo:
+ https.request("GET", "%s/" % self.https_url)
+
+ assert "is not supported in Python 2" in str(excinfo.value)
+
+ @withPyOpenSSL
+ @onlyPy3
+ def test_https_proxy_pyopenssl_not_supported(self):
+ with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https:
+ r = https.request("GET", "%s/" % self.http_url)
+ assert r.status == 200
+
+ with pytest.raises(ProxySchemeUnsupported) as excinfo:
+ https.request("GET", "%s/" % self.https_url)
+
+ assert "isn't available on non-native SSLContext" in str(excinfo.value)
+
+ @onlySecureTransport
+ @onlyPy3
+ def test_https_proxy_securetransport_not_supported(self):
with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https:
r = https.request("GET", "%s/" % self.http_url)
assert r.status == 200
- with pytest.raises(ProxySchemeUnsupported):
+ with pytest.raises(ProxySchemeUnsupported) as excinfo:
https.request("GET", "%s/" % self.https_url)
+ assert "isn't available on non-native SSLContext" in str(excinfo.value)
+
+ def test_https_proxy_forwarding_for_https(self):
with proxy_from_url(
self.https_proxy_url,
ca_certs=DEFAULT_CA,
- _allow_https_proxy_to_see_traffic=True,
+ use_forwarding_for_https=True,
) as https:
r = https.request("GET", "%s/" % self.http_url)
- https.request("GET", "%s/" % self.https_url)
+ assert r.status == 200
+
+ r = https.request("GET", "%s/" % self.https_url)
assert r.status == 200
def test_nagle_proxy(self):
@@ -302,6 +365,7 @@ def test_headers(self):
self.https_port,
)
+ @onlyPy3
def test_https_headers(self):
with proxy_from_url(
self.https_proxy_url,
@@ -328,19 +392,34 @@ def test_https_headers(self):
self.http_port,
)
- with pytest.raises(ProxySchemeUnsupported):
- http.request_encode_url("GET", "%s/headers" % self.https_url)
-
- r = http.request_encode_url(
- "GET", "%s/headers" % self.http_url, headers={"Baz": "quux"}
+ r = http.request_encode_body(
+ "GET", "%s/headers" % self.https_url, headers={"Baz": "quux"}
)
returned_headers = json.loads(r.data.decode())
assert returned_headers.get("Foo") is None
assert returned_headers.get("Baz") == "quux"
+ assert returned_headers.get("Hickory") is None
+ assert returned_headers.get("Host") == "%s:%s" % (
+ self.https_host,
+ self.https_port,
+ )
+
+ def test_https_headers_forwarding_for_https(self):
+ with proxy_from_url(
+ self.https_proxy_url,
+ headers={"Foo": "bar"},
+ proxy_headers={"Hickory": "dickory"},
+ ca_certs=DEFAULT_CA,
+ use_forwarding_for_https=True,
+ ) as http:
+
+ r = http.request_encode_url("GET", "%s/headers" % self.https_url)
+ returned_headers = json.loads(r.data.decode())
+ assert returned_headers.get("Foo") == "bar"
assert returned_headers.get("Hickory") == "dickory"
assert returned_headers.get("Host") == "%s:%s" % (
- self.http_host,
- self.http_port,
+ self.https_host,
+ self.https_port,
)
def test_headerdict(self):
| urllib3 does not allow using an HTTPS proxy
Hi,
When I try to use an HTTPS proxy, urllib3 tries to create an HTTPS tunnel by calling CONNECT. But my HTTPS proxy does not support that and resets the connection after receiving it.
If I prevent the `self._prepare_proxy(conn)` call, everything works fine, but I am not sure that my fix does not break something.
[changes](/SeyfSV/urllib3/commit/ec7c6390d92cdee36ece3b6f54ca0d0d88bf7ced)
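For context, the patch above adds a `use_forwarding_for_https` flag to `ProxyManager` that covers exactly this forwarding-proxy case; a minimal usage sketch (the proxy URL is the one from this report):
```python
import urllib3

# Forward HTTPS requests through the proxy instead of tunneling via CONNECT.
proxy = urllib3.ProxyManager(
    "https://proxy-ssl.antizapret.prostovpn.org:3143",
    use_forwarding_for_https=True,
)
r = proxy.request("GET", "https://www.linkedin.com")
print(r.status)
```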
| The standard way to connect to an HTTPS URL over an HTTP proxy is via CONNECT. Do you know why your proxy isn't following this standard? What does your proxy support?
I did not say that I connect to an HTTPS URL :) I try to connect through an HTTPS proxy.
Actually, it is not my proxy; it is a public proxy at https://proxy-ssl.antizapret.prostovpn.org:3143. The response headers indicate it is Squid 4.2.
I had read that an HTTPS proxy and an HTTP tunnel (CONNECT method) are different things, and as I understand it, urllib3 always tries to use a tunnel, with no way to switch it to plain forwarding.
I also tried curl and did not see a CONNECT method in the traffic:
`curl -x 'https://proxy-ssl.antizapret.prostovpn.org:3143' http://www.linkedin.com -v`
Ah, your proxy is doing forwarding, not tunneling. I'm not sure whether we support proxies that do forwarding, or the reasons we might not. Might require some investigation into what httplib supports for proxies. | 2020-08-18T00:35:24Z | [] | [] |
urllib3/urllib3 | 1,932 | urllib3__urllib3-1932 | [
"1896"
] | 2bd28931bbb58a885fb4de175739b59a481cabc8 | diff --git a/src/urllib3/util/retry.py b/src/urllib3/util/retry.py
--- a/src/urllib3/util/retry.py
+++ b/src/urllib3/util/retry.py
@@ -266,10 +266,10 @@ def parse_retry_after(self, retry_after):
if re.match(r"^\s*[0-9]+\s*$", retry_after):
seconds = int(retry_after)
else:
- retry_date_tuple = email.utils.parsedate(retry_after)
+ retry_date_tuple = email.utils.parsedate_tz(retry_after)
if retry_date_tuple is None:
raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
- retry_date = time.mktime(retry_date_tuple)
+ retry_date = email.utils.mktime_tz(retry_date_tuple)
seconds = retry_date - time.time()
if seconds < 0:
| diff --git a/test/test_retry.py b/test/test_retry.py
--- a/test/test_retry.py
+++ b/test/test_retry.py
@@ -1,7 +1,5 @@
-import datetime
import mock
import pytest
-import time
from urllib3.response import HTTPResponse
from urllib3.packages import six
@@ -313,6 +311,7 @@ def test_respect_retry_after_header_propagated(self, respect_retry_after_header)
new_retry = retry.new()
assert new_retry.respect_retry_after_header == respect_retry_after_header
+ @pytest.mark.freeze_time("2019-06-03 11:00:00", tz_offset=0)
@pytest.mark.parametrize(
"retry_after_header,respect_retry_after_header,sleep_duration",
[
@@ -326,6 +325,11 @@ def test_respect_retry_after_header_propagated(self, respect_retry_after_header)
("Mon, 3 Jun 2019 11:00:00 UTC", True, None),
# Won't sleep due to current time reached + not respecting header
("Mon, 3 Jun 2019 11:00:00 UTC", False, None),
+ # Handle all the formats in RFC 7231 Section 7.1.1.1
+ ("Mon, 03 Jun 2019 11:30:12 GMT", True, 1812),
+ ("Monday, 03-Jun-19 11:30:12 GMT", True, 1812),
+ # Assume that datetimes without a timezone are in UTC per RFC 7231
+ ("Mon Jun 3 11:30:12 2019", True, 1812),
],
)
def test_respect_retry_after_header_sleep(
@@ -333,17 +337,7 @@ def test_respect_retry_after_header_sleep(
):
retry = Retry(respect_retry_after_header=respect_retry_after_header)
- # Date header syntax can specify an absolute date; compare this to the
- # time in the parametrized inputs above.
- current_time = mock.MagicMock(
- return_value=time.mktime(
- datetime.datetime(year=2019, month=6, day=3, hour=11).timetuple()
- )
- )
-
- with mock.patch("time.sleep") as sleep_mock, mock.patch(
- "time.time", current_time
- ):
+ with mock.patch("time.sleep") as sleep_mock:
# for the default behavior, it must be in RETRY_AFTER_STATUS_CODES
response = HTTPResponse(
status=503, headers={"Retry-After": retry_after_header}
| parse_retry_after() in util/retry.py computes seconds incorrectly when the Retry-After value is an http-date and the local time is not in the GMT timezone
For the given Retry-After value `Thu, 25 Jun 2020 21:30:12 GMT`, with the local time being `Thu, 25 Jun 2020 17:28:12 EST`, parse_retry_after() will return `14564.671981096268`. This value is incorrect, and should be `~120`.
parse_retry_after() doesn't correctly handle the time difference between time zones.
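For comparison, a timezone-aware parse (the approach the patch above adopts) yields the expected value; a minimal sketch using the header from this report:
```python
import time
import email.utils

retry_after = "Thu, 25 Jun 2020 21:30:12 GMT"
# parsedate_tz/mktime_tz honor the GMT suffix instead of assuming local time
retry_date = email.utils.mktime_tz(email.utils.parsedate_tz(retry_after))
seconds = max(0, retry_date - time.time())  # ~120 at the local time above
```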
| Here's a MRE that pulls out the code of parse_retry_after().
```
import re
import time
import email.utils
import datetime
def parse_retry_after(retry_after):
"""
Pulled from retry.py and minimally modified to change exception raised.
"""
# Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
if re.match(r"^\s*[0-9]+\s*$", retry_after):
seconds = int(retry_after)
else:
retry_date_tuple = email.utils.parsedate(retry_after)
if retry_date_tuple is None:
raise RuntimeError('Invalid header!')
retry_date = time.mktime(retry_date_tuple)
seconds = retry_date - time.time()
if seconds < 0:
seconds = 0
return seconds
if __name__ == '__main__':
# Create a Retry-After value (http-date) that's 180 seconds after now. Note this will only
# demonstrate the issue when the local timezone is *not* GMT.
nowutc = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(seconds=180)
retryafter = nowutc.strftime('%a, %d %b %Y %H:%M:%S GMT ')
print(f'got {parse_retry_after(retryafter)} for {retryafter}, '
f'but should be {nowutc.timestamp() - time.time()}')
```
Thanks for opening this, will look at it later. Going to summon the timezone wizard @pganssle for assistance making sense of this.
This seems to fix the issue, but I've not done nearly enough testing or tried any Python version other than 3.6.8:
```
def parse_retry_after(self, retry_after):
"""Fix TZ conversion"""
# Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
if re.match(r"^\s*[0-9]+\s*$", retry_after):
seconds = int(retry_after)
else:
retry_date_tuple = email.utils.parsedate_tz(retry_after)
if retry_date_tuple is None:
raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
retry_date = email.utils.mktime_tz(retry_date_tuple)
seconds = retry_date - time.time()
if seconds < 0:
seconds = 0
return seconds
```
@cblades-tc Would you be willing to submit a patch with a previously failing test case?
I think the `parsedate_tz` and `mktime_tz` is probably the right thing to do here. It looks like the current logic ignores time zone offsets, and assumes that *all* RFC 2822 datetimes are in system local time (which seems like a not great default for a wire format, TBH).
That said, I do not know how people currently use this, but it *may* be a breaking change to the semantics of unspecified time zones (which could legitimately be said to reflect local time):
```python
>>> from time import mktime
>>> from email.utils import parsedate, parsedate_tz, mktime_tz
>>> from datetime import datetime
>>> print(dt := datetime.fromtimestamp(1593160100))
2020-06-26 04:28:20
>>> (dt_str := dt.strftime("%a, %d %b %Y %H:%M:%S"))
'Fri, 26 Jun 2020 04:28:20'
>>> (ts := mktime(parsedate(dt_str)))
1593160100.0
>>> (ts_tz := mktime_tz(parsedate_tz(dt_str)))
1593145700
>>> print(datetime.fromtimestamp(ts))
2020-06-26 04:28:20
>>> print(datetime.fromtimestamp(ts_tz))
2020-06-26 00:28:20
```
(Note: the above was executed on a computer set to EDT. You won't see the problem on a computer with a system time set to any zone with an offset of 0).
From what I can tell, the output of `parsedate_tz` is no different between a string that is explicitly marked as GMT / UTC and one that has no time zone specified. If you care about maintaining the old behavior for timezone-unspecified strings, you may have to actively detect non-UTC time zones being parsed with time zone of 0.
There are a good number of ways to specify UTC in [RFC 2822](https://tools.ietf.org/html/rfc2822#section-3.3) — the main ways are, I think `0000` and `+0000` (`-0000` is actually supposed to refer to local time¹), and then there's also the [obsolete methods](https://tools.ietf.org/html/rfc2822#section-4.3): `GMT` `UT` and "Z" and "z". You'll probably want to carefully look at the test cases for `email.utils.parsedate_tz` to see if I've missed anything, but if not, then I think this might work (assuming it doesn't allow for random whitespace at the end or something):
```python
def parse_rfc2822_datetime_to_timestamp(dt_str):
time_t = email.utils.parsedate_tz(dt_str)
if time_t is None:
raise WhateverException()
if time_t[-1] == 0 and dt_str.endswith("-0000") or not dt_str.endswith(
("+0000", "0000", "GMT", "UT", "Z", "z")):
naive_time_t = email.utils.parsedate(dt_str)
return time.mktime(naive_time_t)
return email.utils.mktime_tz(time_t)
```
I don't know how much efficiency matters to you here, there are almost certainly many optimizations you can do to make the "check if it's UTC" part more efficient.
¹RFC 2822 is sorta vague about how `-0000` should be treated, but it seems like they are saying that it should be treated identically to "no offset specified":
> Though "-0000" also indicates Universal Time, it is used to indicate that the time was generated on a system that may be in a local time zone other than Universal Time and therefore indicates that the date-time contains no information about the local time zone.
Thanks for all this information @pganssle, I'm thinking the direction we should head is exactly in line with the example you provided. Maybe we can use a regex for the "check if it's UTC".
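A sketch of such a check, limited to the explicit-UTC spellings enumerated above (an assumption, not the final implementation):
```python
import re

# "+0000"/"0000" plus the obsolete "GMT", "UT" and "Z"/"z" forms from RFC 2822
EXPLICIT_UTC_RE = re.compile(r"(?:\+?0000|GMT|UT|[Zz])\s*$")
```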
If it simplifies things, I believe https://tools.ietf.org/html/rfc7231#section-7.1.1.1 suggests that http-dates should _always_ be in UTC.
> An HTTP-date value represents time as an instance of Coordinated Universal Time (UTC).
That's good to know. Since most dates will be GMT we can optimize for that in our UTC TZ detection.
> That's good to know. Since most dates will be GMT we can optimize for that in our UTC TZ detection.
I think the more important sentence is this one:
> values in the asctime format are assumed to be in UTC
That's what `parsedate_tz` already does, so it seems you may be able to use it without changing.
The real question is whether you want to follow the spec or try and keep the old behavior. I think everyone will agree that it's a bug if `'Fri, 26 Jun 2020 04:28:20 GMT'` is parsed as representing local time, but local time is at least a reasonable choice for `'Fri, 26 Jun 2020 04:28:20'`, so you may want to maintain backwards compatibility there. Still — given that RFC 7231¹ explicitly says that the second one should be interpreted as UTC, there's a very reasonable case to be made that "no offset supplied" is just another manifestation of the same bug.
¹Assuming that RFC 7231 is the one that applies here.
Oh, another thing to mention is that if RFC 7231 actually *is* the spec that applies here, it allows only a subset of datetime formats currently parsed by this function:
```
An example of the preferred format is
Sun, 06 Nov 1994 08:49:37 GMT ; IMF-fixdate
Examples of the two obsolete formats are
Sunday, 06-Nov-94 08:49:37 GMT ; obsolete RFC 850 format
Sun Nov 6 08:49:37 1994 ; ANSI C's asctime() format
```
I don't know how strict you want to be with `urllib3` but if you're trying to implement RFC 7231, you probably want to throw an exception if something other than one of those formats is found. Might be too late to do that without breaking the world, though. 😅
@pganssle Since this issue is only being reported now, after existing for over 4 years, I can safely assume there are very few users relying on the "old" (buggy) behavior. Let's assume UTC the same way that `parsedate_tz` does and not worry about the old behavior.
And yeah RFC 7231 is the spec to apply, so maybe we can be stricter about `Retry-After` dates that we throw into `parsedate_tz()`. I wonder what formats it accepts outside of those three, maybe some research to do. | 2020-08-23T22:09:51Z | [] | [] |
urllib3/urllib3 | 1,935 | urllib3__urllib3-1935 | [
"1934"
] | d455be9463b547b4a6efb3b6de52abfe2b62e3e8 | diff --git a/src/urllib3/util/retry.py b/src/urllib3/util/retry.py
--- a/src/urllib3/util/retry.py
+++ b/src/urllib3/util/retry.py
@@ -269,6 +269,13 @@ def parse_retry_after(self, retry_after):
retry_date_tuple = email.utils.parsedate_tz(retry_after)
if retry_date_tuple is None:
raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
+ if retry_date_tuple[9] is None: # Python 2
+ # Assume UTC if no timezone was specified
+ # On Python2.7, parsedate_tz returns None for a timezone offset
+                # instead of 0 if no timezone is given, whereas mktime_tz treats
+ # a None timezone offset as local time.
+ retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
+
retry_date = email.utils.mktime_tz(retry_date_tuple)
seconds = retry_date - time.time()
| diff --git a/test/conftest.py b/test/conftest.py
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -8,6 +8,8 @@
import trustme
from tornado import web, ioloop
+from .tz_stub import stub_timezone_ctx
+
from dummyserver.handlers import TestingApp
from dummyserver.server import run_tornado_app
from dummyserver.server import HAS_IPV6
@@ -96,3 +98,12 @@ def ipv6_san_server(tmp_path_factory):
with run_server_in_thread("https", "::1", tmpdir, ca, server_cert) as cfg:
yield cfg
+
+
[email protected]_fixture
+def stub_timezone(request):
+ """
+ A pytest fixture that runs the test with a stub timezone.
+ """
+ with stub_timezone_ctx(request.param):
+ yield
diff --git a/test/test_retry.py b/test/test_retry.py
--- a/test/test_retry.py
+++ b/test/test_retry.py
@@ -332,6 +332,16 @@ def test_respect_retry_after_header_propagated(self, respect_retry_after_header)
("Mon Jun 3 11:30:12 2019", True, 1812),
],
)
+ @pytest.mark.parametrize(
+ "stub_timezone",
+ [
+ "UTC",
+ "Asia/Jerusalem",
+ None,
+ ],
+ indirect=True,
+ )
+ @pytest.mark.usefixtures("stub_timezone")
def test_respect_retry_after_header_sleep(
self, retry_after_header, respect_retry_after_header, sleep_duration
):
diff --git a/test/tz_stub.py b/test/tz_stub.py
new file mode 100644
--- /dev/null
+++ b/test/tz_stub.py
@@ -0,0 +1,39 @@
+from contextlib import contextmanager
+import time
+import datetime
+import os
+import pytest
+from dateutil import tz
+
+
+@contextmanager
+def stub_timezone_ctx(tzname):
+ """
+ Switch to a locally-known timezone specified by `tzname`.
+ On exit, restore the previous timezone.
+ If `tzname` is `None`, do nothing.
+ """
+ if tzname is None:
+ yield
+ return
+
+ # Only supported on Unix
+ if not hasattr(time, "tzset"):
+ pytest.skip("Timezone patching is not supported")
+
+ # Make sure the new timezone exists, at least in dateutil
+ new_tz = tz.gettz(tzname)
+ if new_tz is None:
+ raise ValueError("Invalid timezone specified: %r" % (tzname,))
+
+ # Get the current timezone
+ local_tz = tz.tzlocal()
+ if local_tz is None:
+ raise EnvironmentError("Cannot determine current timezone")
+ old_tzname = datetime.datetime.now(local_tz).tzname()
+
+ os.environ["TZ"] = tzname
+ time.tzset()
+ yield
+ os.environ["TZ"] = old_tzname
+ time.tzset()
| Incorrect handling of Retry-After (2.7)
`email.utils.parsedate_tz` behaves differently on Python 2.7 and 3 if the input string has no timezone. On [python3](https://docs.python.org/3/library/email.utils.html#email.utils.parsedate_tz), the last item of the return value is `0`, whereas on [python2.7](https://docs.python.org/2/library/email.utils.html#email.utils.parsedate_tz) it is `None`.
Since `email.utils.mktime_tz` assumes a ```None``` timezone to be the local timezone, the resulting date can be wrong when calculating the `Retry-After` timeout from a value without a timezone.
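A minimal sketch of the difference (`tup[9]` is the timezone offset in the 10-tuple returned by `parsedate_tz`):
```python
import email.utils

tup = email.utils.parsedate_tz("Mon Jun  3 11:30:12 2019")
# Python 3: tup[9] == 0     -> mktime_tz() interprets the date as UTC
# Python 2: tup[9] is None  -> mktime_tz() falls back to *local* time
```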
To reproduce run the following test on a python2.7 machine with timezone != UTC:
`test/test_retry.py::TestRetry::test_respect_retry_after_header_sleep[Mon Jun 3 11:30:12 2019-True-1812]`
(Luckily, `freezegun` doesn't hide the issue since it freezes only the time using `tz_offset` but it doesn't patch the timezone.)
| 2020-08-25T19:40:46Z | [] | [] |
|
urllib3/urllib3 | 1,939 | urllib3__urllib3-1939 | [
"1764"
] | 24e540fb1ee8e127d7d3a354c6fa50c22e61ed11 | diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -22,6 +22,7 @@
InvalidChunkLength,
InvalidHeader,
HTTPError,
+ SSLError,
)
from .packages.six import string_types as basestring, PY3
from .connection import HTTPException, BaseSSLError
@@ -443,9 +444,8 @@ def _error_catcher(self):
except BaseSSLError as e:
# FIXME: Is there a better way to differentiate between SSLErrors?
if "read operation timed out" not in str(e): # Defensive:
- # This shouldn't happen but just in case we're missing an edge
- # case, let's avoid swallowing SSL errors.
- raise
+ # SSL errors related to framing/MAC get wrapped and reraised here
+ raise SSLError(e)
raise ReadTimeoutError(self._pool, None, "Read timed out.")
| diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -1,7 +1,9 @@
# -*- coding: utf-8 -*-
+import contextlib
import re
import socket
+import ssl
import zlib
from io import BytesIO, BufferedReader, TextIOWrapper
@@ -19,6 +21,7 @@
httplib_IncompleteRead,
IncompleteRead,
InvalidChunkLength,
+ SSLError,
)
from urllib3.packages.six.moves import http_client as httplib
from urllib3.util.retry import Retry, RequestHistory
@@ -936,6 +939,30 @@ def stream():
assert b"foo\nbar" == data
+ def test_non_timeout_ssl_error_on_read(self):
+ mac_error = ssl.SSLError(
+ "SSL routines", "ssl3_get_record", "decryption failed or bad record mac"
+ )
+
+ @contextlib.contextmanager
+ def make_bad_mac_fp():
+ fp = BytesIO(b"")
+ with mock.patch.object(fp, "read") as fp_read:
+ # mac/decryption error
+ fp_read.side_effect = mac_error
+ yield fp
+
+ with make_bad_mac_fp() as fp:
+ with pytest.raises(SSLError) as e:
+ HTTPResponse(fp)
+ assert e.value.args[0] == mac_error
+
+ with make_bad_mac_fp() as fp:
+ resp = HTTPResponse(fp, preload_content=False)
+ with pytest.raises(SSLError) as e:
+ resp.read()
+ assert e.value.args[0] == mac_error
+
class MockChunkedEncodingResponse(object):
def __init__(self, content):
| SSLError
urllib3 1.25.6
```
Traceback (most recent call last):
File "/home/web/web/yandex/.env/lib/python3.5/site-packages/ioweb/network_service.py", line 177, in thread_network
req, res, error, raise_network_error=False
File "/home/web/web/yandex/.env/lib/python3.5/site-packages/ioweb/transport.py", line 310, in prepare_response
self.read_with_timeout(req, res)
File "/home/web/web/yandex/.env/lib/python3.5/site-packages/ioweb/transport.py", line 266, in read_with_timeout
chunk = self.urllib3_response.read(chunk_size)
File "/home/web/web/yandex/.env/lib/python3.5/site-packages/urllib3/response.py", line 507, in read
data = self._fp.read(amt) if not fp_closed else b""
File "/usr/lib/python3.5/http/client.py", line 448, in read
n = self.readinto(b)
File "/usr/lib/python3.5/http/client.py", line 478, in readinto
return self._readinto_chunked(b)
File "/usr/lib/python3.5/http/client.py", line 573, in _readinto_chunked
chunk_left = self._get_chunk_left()
File "/usr/lib/python3.5/http/client.py", line 541, in _get_chunk_left
chunk_left = self._read_next_chunk_size()
File "/usr/lib/python3.5/http/client.py", line 501, in _read_next_chunk_size
line = self.fp.readline(_MAXLINE + 1)
File "/usr/lib/python3.5/socket.py", line 576, in readinto
return self._sock.recv_into(b)
File "/home/web/web/yandex/.env/lib/python3.5/site-packages/urllib3/contrib/pyopenssl.py", line 332, in recv_into
raise ssl.SSLError("read error: %r" % e)
ssl.SSLError: ("read error: Error([('SSL routines', 'ssl3_get_record', 'decryption failed or bad record mac')],)",)
```
Is this OK?
Shouldn't it be `exceptions.SSLError`, or something like that derived from the urllib3 package?
| Hi @lorien!
Can you please submit a simple reproducing script?
I've seen this error as well. Building a server that produces this message reliably sounds like a pain, but it should be clear from the stack trace that `urllib3` is allowing an SSLError to escape even though [lower level exceptions should be wrapped](https://urllib3.readthedocs.io/en/latest/user-guide.html#errors-exceptions).
Looking at the code here: https://github.com/urllib3/urllib3/blob/9971e27e83a891ba7b832fa9e5d2f04bbcb1e65f/src/urllib3/response.py#L437 it seems like if there is any SSLError besides a read timeout, it will end up hitting user code.
@christopher-hesse I believe you're correct, we should be wrapping the `BaseSSLError` into a `urllib3.exceptions.SSLError` in line with the `_error_catcher()` contract of wrapping low-level Python exceptions into urllib3 exceptions.
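With that wrapping in place, callers could catch urllib3's exception rather than a raw `ssl.SSLError`; a minimal sketch:
```python
import urllib3
from urllib3.exceptions import SSLError  # urllib3's wrapper, not ssl.SSLError

http = urllib3.PoolManager()
try:
    http.request("GET", "https://example.com/")
except SSLError as exc:
    # read-time MAC/record failures surface here once they are wrapped
    print("TLS read failed:", exc)
```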
It looks like `urllib3.exceptions.SSLError` says `Raised when SSL certificate fails in an HTTPS connection.` so it might be worth changing that documentation, or else special-casing this particular error with `if "decryption failed or bad record mac" in str(e): raise ProtocolError(...)` | 2020-08-28T20:51:37Z | [] | [] |
urllib3/urllib3 | 1,977 | urllib3__urllib3-1977 | [
"1976"
] | 3308d655a563e0b72e3856c4a4cb06ce2f4f0e8d | diff --git a/src/urllib3/contrib/_securetransport/low_level.py b/src/urllib3/contrib/_securetransport/low_level.py
--- a/src/urllib3/contrib/_securetransport/low_level.py
+++ b/src/urllib3/contrib/_securetransport/low_level.py
@@ -13,6 +13,7 @@
import os
import re
import ssl
+import struct
import tempfile
from .bindings import CFConst, CoreFoundation, Security
@@ -370,3 +371,26 @@ def _load_client_cert_chain(keychain, *paths):
finally:
for obj in itertools.chain(identities, certificates):
CoreFoundation.CFRelease(obj)
+
+
+TLS_PROTOCOL_VERSIONS = {
+ "SSLv2": (0, 2),
+ "SSLv3": (3, 0),
+ "TLSv1": (3, 1),
+ "TLSv1.1": (3, 2),
+ "TLSv1.2": (3, 3),
+}
+
+
+def _build_tls_unknown_ca_alert(version):
+ """
+ Builds a TLS alert record for an unknown CA.
+ """
+ ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
+ severity_fatal = 0x02
+ description_unknown_ca = 0x30
+ msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
+ msg_len = len(msg)
+ record_type_alert = 0x15
+ record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
+ return record
diff --git a/src/urllib3/contrib/securetransport.py b/src/urllib3/contrib/securetransport.py
--- a/src/urllib3/contrib/securetransport.py
+++ b/src/urllib3/contrib/securetransport.py
@@ -60,6 +60,7 @@
import shutil
import socket
import ssl
+import struct
import threading
import weakref
@@ -69,6 +70,7 @@
from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
from ._securetransport.low_level import (
_assert_no_error,
+ _build_tls_unknown_ca_alert,
_cert_array_from_pem,
_create_cfstring_array,
_load_client_cert_chain,
@@ -397,11 +399,37 @@ def _custom_validate(self, verify, trust_bundle):
Called when we have set custom validation. We do this in two cases:
first, when cert validation is entirely disabled; and second, when
using a custom trust DB.
+ Raises an SSLError if the connection is not trusted.
"""
# If we disabled cert validation, just say: cool.
if not verify:
return
+ successes = (
+ SecurityConst.kSecTrustResultUnspecified,
+ SecurityConst.kSecTrustResultProceed,
+ )
+ try:
+ trust_result = self._evaluate_trust(trust_bundle)
+ if trust_result in successes:
+ return
+ reason = "error code: %d" % (trust_result,)
+ except Exception as e:
+ # Do not trust on error
+ reason = "exception: %r" % (e,)
+
+ # SecureTransport does not send an alert nor shuts down the connection.
+ rec = _build_tls_unknown_ca_alert(self.version())
+ self.socket.sendall(rec)
+ # close the connection immediately
+ # l_onoff = 1, activate linger
+        # l_linger = 0, linger for 0 seconds
+ opts = struct.pack("ii", 1, 0)
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
+ self.close()
+ raise ssl.SSLError("certificate verify failed, %s" % reason)
+
+ def _evaluate_trust(self, trust_bundle):
# We want data in memory, so load it up.
if os.path.isfile(trust_bundle):
with open(trust_bundle, "rb") as f:
@@ -439,15 +467,7 @@ def _custom_validate(self, verify, trust_bundle):
if cert_array is not None:
CoreFoundation.CFRelease(cert_array)
- # Ok, now we can look at what the result was.
- successes = (
- SecurityConst.kSecTrustResultUnspecified,
- SecurityConst.kSecTrustResultProceed,
- )
- if trust_result.value not in successes:
- raise ssl.SSLError(
- "certificate verify failed, error code: %d" % trust_result.value
- )
+ return trust_result.value
def handshake(
self,
| diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -477,29 +477,31 @@ def test_assert_fingerprint_sha256(self):
https_pool.request("GET", "/")
def test_assert_invalid_fingerprint(self):
+ def _test_request(pool):
+ with pytest.raises(MaxRetryError) as cm:
+ pool.request("GET", "/", retries=0)
+ assert isinstance(cm.value.reason, SSLError)
+ return cm.value.reason
+
with HTTPSConnectionPool(
- "127.0.0.1", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
+ self.host, self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
) as https_pool:
+
https_pool.assert_fingerprint = (
"AA:AA:AA:AA:AA:AAAA:AA:AAAA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA"
)
-
- def _test_request(pool):
- with pytest.raises(MaxRetryError) as cm:
- pool.request("GET", "/", retries=0)
- assert isinstance(cm.value.reason, SSLError)
-
- _test_request(https_pool)
- https_pool._get_conn()
+ e = _test_request(https_pool)
+ assert "Fingerprints did not match." in str(e)
# Uneven length
https_pool.assert_fingerprint = "AA:A"
- _test_request(https_pool)
- https_pool._get_conn()
+ e = _test_request(https_pool)
+ assert "Fingerprint of invalid length:" in str(e)
# Invalid length
https_pool.assert_fingerprint = "AA"
- _test_request(https_pool)
+ e = _test_request(https_pool)
+ assert "Fingerprint of invalid length:" in str(e)
def test_verify_none_and_bad_fingerprint(self):
with HTTPSConnectionPool(
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -1427,6 +1427,49 @@ def test_load_verify_locations_exception(self):
with pytest.raises(SSLError):
ssl_wrap_socket(None, ca_certs="/tmp/fake-file")
+ def test_ssl_custom_validation_failure_terminates(self, tmpdir):
+ """
+ Ensure that the underlying socket is terminated if custom validation fails.
+ """
+ server_closed = Event()
+
+ def is_closed_socket(sock):
+ try:
+ sock.settimeout(SHORT_TIMEOUT) # Python 3
+ sock.recv(1) # Python 2
+ except (OSError, socket.error):
+ return True
+ return False
+
+ def socket_handler(listener):
+ sock = listener.accept()[0]
+ try:
+ _ = ssl.wrap_socket(
+ sock,
+ server_side=True,
+ keyfile=DEFAULT_CERTS["keyfile"],
+ certfile=DEFAULT_CERTS["certfile"],
+ ca_certs=DEFAULT_CA,
+ )
+ except ssl.SSLError as e:
+ assert "alert unknown ca" in str(e)
+ if is_closed_socket(sock):
+ server_closed.set()
+
+ self._start_server(socket_handler)
+
+ # client uses a different ca
+ other_ca = trustme.CA()
+ other_ca_path = str(tmpdir / "ca.pem")
+ other_ca.cert_pem.write_to_path(other_ca_path)
+
+ with HTTPSConnectionPool(
+ self.host, self.port, cert_reqs="REQUIRED", ca_certs=other_ca_path
+ ) as pool:
+ with pytest.raises(SSLError):
+ pool.request("GET", "/", retries=False, timeout=LONG_TIMEOUT)
+ assert server_closed.wait(LONG_TIMEOUT), "The socket was not terminated"
+
class TestErrorWrapping(SocketDummyServerTestCase):
def test_bad_statusline(self):
| SecureTransport does not close unverified connections correctly
SecureTransport does not handle SSL custom-verification failure correctly. Instead of sending an alert and terminating the socket, it leaks the client socket, which ends up hanging the server socket.
#### Expected behavior
When custom-verification is enabled, `SecureTransport` should terminate the connection.
#### Actual behavior
When `SecureTransport` fails custom validation, an `SSLError` is raised but the socket itself is not terminated:
https://github.com/urllib3/urllib3/blob/d79e82a698d51dc169ad07795812c3936b11bbc8/src/urllib3/contrib/securetransport.py#L446-L449
this leads to a `conn.close()`:
https://github.com/urllib3/urllib3/blob/d79e82a698d51dc169ad07795812c3936b11bbc8/src/urllib3/connectionpool.py#L754-L760
but `conn.sock` is still `None`; it is only assigned the socket *after* (and only if) the socket is wrapped successfully:
https://github.com/urllib3/urllib3/blob/d79e82a698d51dc169ad07795812c3936b11bbc8/src/urllib3/connection.py#L389
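For context, a minimal, hedged sketch of the abortive-close approach the patch above takes (the function name and `alert_record` bytes are illustrative, not urllib3 API):
```python
import socket
import struct

def abort_tls(sock, alert_record):
    # Send a fatal TLS alert (e.g. unknown_ca) so the peer learns why,
    # since SecureTransport itself sends nothing on validation failure.
    sock.sendall(alert_record)
    # SO_LINGER with l_onoff=1, l_linger=0 makes close() reset the
    # connection immediately instead of leaking a half-open socket.
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, struct.pack("ii", 1, 0))
    sock.close()
```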
#### Reproduction
This snippet hangs the server eventually instead of failing `NUM_CONNECTIONS` times cleanly.
```python
import logging
import socket
import ssl
import threading
import trustme
import urllib3
from urllib3.contrib.securetransport import inject_into_urllib3, extract_from_urllib3
CERT_PATH = "/tmp/ca.pem"
SSL_CTX2 = ssl.SSLContext()
NUM_CONNECTIONS = 3
def server(s):
for i in range(NUM_CONNECTIONS):
c, _ = s.accept()
try:
logging.critical("Server iter")
c = SSL_CTX2.wrap_socket(c, server_side=True)
c.recv(1024)
c.sendall(b"HTTP/1.1 404 Not Found\r\nConnection: close\r\n\r\n")
c.close()
except Exception as e:
logging.critical("Server ex: %r" % (e,))
def connect_and_fail(port):
with urllib3.HTTPSConnectionPool("localhost", port, cert_reqs="REQUIRED", ca_certs=CERT_PATH) as p:
for i in range(NUM_CONNECTIONS):
logging.critical("Client iter")
try:
p.request("GET", "/", retries=False)
except urllib3.exceptions.SSLError as e:
logging.critical("Client ex: %r" % (e,))
def main():
logging.basicConfig(level=logging.INFO)
# client's chain - ca
ca = trustme.CA()
ca.cert_pem.write_to_path(CERT_PATH)
# server's chain - ca2
ca2 = trustme.CA()
server2_cert = ca2.issue_cert(u"localhost")
server2_cert.configure_cert(SSL_CTX2)
# start an SSL server with ca chain
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
port = s.getsockname()[1]
s.listen(10)
th = threading.Thread(target=server, args=(s, ))
th.start()
# inject SecureTransport and try to connect with ca2 chain
inject_into_urllib3()
connect_and_fail(port)
extract_from_urllib3()
th.join()
```
| 2020-09-22T08:09:20Z | [] | [] |
|
urllib3/urllib3 | 2,000 | urllib3__urllib3-2000 | [
"1916"
] | 6d38f171c4921043e1ff633e2a3e9f7ea382e1d5 | diff --git a/src/urllib3/util/retry.py b/src/urllib3/util/retry.py
--- a/src/urllib3/util/retry.py
+++ b/src/urllib3/util/retry.py
@@ -5,6 +5,7 @@
from itertools import takewhile
import email
import re
+import warnings
from ..exceptions import (
ConnectTimeoutError,
@@ -27,6 +28,49 @@
)
+# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
+_Default = object()
+
+
+class _RetryMeta(type):
+ @property
+ def DEFAULT_METHOD_WHITELIST(cls):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead",
+ DeprecationWarning,
+ )
+ return cls.DEFAULT_ALLOWED_METHODS
+
+ @DEFAULT_METHOD_WHITELIST.setter
+ def DEFAULT_METHOD_WHITELIST(cls, value):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
+ DeprecationWarning,
+ )
+ cls.DEFAULT_ALLOWED_METHODS = value
+
+ @property
+ def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+ DeprecationWarning,
+ )
+ return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+
+ @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
+ def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+ DeprecationWarning,
+ )
+ cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
+
+
[email protected]_metaclass(_RetryMeta)
class Retry(object):
"""Retry configuration.
@@ -107,18 +151,23 @@ class Retry(object):
If ``total`` is not set, it's a good idea to set this to 0 to account
for unexpected edge cases and avoid infinite retry loops.
- :param iterable method_whitelist:
+ :param iterable allowed_methods:
Set of uppercased HTTP method verbs that we should retry on.
By default, we only retry on methods which are considered to be
idempotent (multiple requests with the same parameters end with the
- same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
+ same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
Set to a ``False`` value to retry on any verb.
+ .. warning::
+
+ Previously this parameter was named ``method_whitelist``, that
+ usage is deprecated in v1.26.0 and will be removed in v2.0.
+
:param iterable status_forcelist:
A set of integer HTTP status codes that we should force a retry on.
- A retry is initiated if the request method is in ``method_whitelist``
+ A retry is initiated if the request method is in ``allowed_methods``
and the response status code is in ``status_forcelist``.
By default, this is disabled with ``None``.
@@ -159,13 +208,16 @@ class Retry(object):
request.
"""
- DEFAULT_METHOD_WHITELIST = frozenset(
+ #: Default methods to be used for ``allowed_methods``
+ DEFAULT_ALLOWED_METHODS = frozenset(
["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
)
+ #: Default status codes to be used for ``status_forcelist``
RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
- DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"])
+ #: Default headers to be used for ``remove_headers_on_redirect``
+ DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
#: Maximum backoff time.
BACKOFF_MAX = 120
@@ -178,16 +230,36 @@ def __init__(
redirect=None,
status=None,
other=None,
- method_whitelist=DEFAULT_METHOD_WHITELIST,
+ allowed_methods=_Default,
status_forcelist=None,
backoff_factor=0,
raise_on_redirect=True,
raise_on_status=True,
history=None,
respect_retry_after_header=True,
- remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST,
+ remove_headers_on_redirect=_Default,
+ # TODO: Deprecated, remove in v2.0
+ method_whitelist=_Default,
):
+ if method_whitelist is not _Default:
+ if allowed_methods is not _Default:
+ raise ValueError(
+ "Using both 'allowed_methods' and "
+ "'method_whitelist' together is not allowed. "
+ "Instead only use 'allowed_methods'"
+ )
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ )
+ allowed_methods = method_whitelist
+ if allowed_methods is _Default:
+ allowed_methods = self.DEFAULT_ALLOWED_METHODS
+ if remove_headers_on_redirect is _Default:
+ remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+
self.total = total
self.connect = connect
self.read = read
@@ -200,7 +272,7 @@ def __init__(
self.redirect = redirect
self.status_forcelist = status_forcelist or set()
- self.method_whitelist = method_whitelist
+ self.allowed_methods = allowed_methods
self.backoff_factor = backoff_factor
self.raise_on_redirect = raise_on_redirect
self.raise_on_status = raise_on_status
@@ -218,7 +290,6 @@ def new(self, **kw):
redirect=self.redirect,
status=self.status,
other=self.other,
- method_whitelist=self.method_whitelist,
status_forcelist=self.status_forcelist,
backoff_factor=self.backoff_factor,
raise_on_redirect=self.raise_on_redirect,
@@ -227,6 +298,23 @@ def new(self, **kw):
remove_headers_on_redirect=self.remove_headers_on_redirect,
respect_retry_after_header=self.respect_retry_after_header,
)
+
+ # TODO: If already given in **kw we use what's given to us
+ # If not given we need to figure out what to pass. We decide
+ # based on whether our class has the 'method_whitelist' property
+ # and if so we pass the deprecated 'method_whitelist' otherwise
+ # we use 'allowed_methods'. Remove in v2.0
+ if "method_whitelist" not in kw and "allowed_methods" not in kw:
+ if "method_whitelist" in self.__dict__:
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ )
+ params["method_whitelist"] = self.allowed_methods
+ else:
+ params["allowed_methods"] = self.allowed_methods
+
params.update(kw)
return type(self)(**params)
@@ -340,15 +428,26 @@ def _is_read_error(self, err):
def _is_method_retryable(self, method):
"""Checks if a given HTTP method should be retried upon, depending if
- it is included on the method whitelist.
+ it is included in the allowed_methods
"""
- if self.method_whitelist and method.upper() not in self.method_whitelist:
- return False
+ # TODO: For now favor if the Retry implementation sets its own method_whitelist
+ # property outside of our constructor to avoid breaking custom implementations.
+ if "method_whitelist" in self.__dict__:
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ )
+ allowed_methods = self.method_whitelist
+ else:
+ allowed_methods = self.allowed_methods
+ if allowed_methods and method.upper() not in allowed_methods:
+ return False
return True
def is_retry(self, method, status_code, has_retry_after=False):
- """Is this method/status code retryable? (Based on whitelists and control
+ """Is this method/status code retryable? (Based on allowlists and control
variables such as the number of total retries to allow, whether to
respect the Retry-After header, whether this header is present, and
whether the returned status code is on the list of status codes to
@@ -448,7 +547,7 @@ def increment(
else:
# Incrementing because of a server error like a 500 in
- # status_forcelist and a the given method is in the whitelist
+ # status_forcelist and the given method is in the allowed_methods
cause = ResponseError.GENERIC_ERROR
if response and response.status:
if status_count is not None:
@@ -483,6 +582,20 @@ def __repr__(self):
"read={self.read}, redirect={self.redirect}, status={self.status})"
).format(cls=type(self), self=self)
+ def __getattr__(self, item):
+ if item == "method_whitelist":
+ # TODO: Remove this deprecated alias in v2.0
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ )
+ return self.allowed_methods
+ try:
+ return getattr(super(Retry, self), item)
+ except AttributeError:
+ return getattr(Retry, item)
+
# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
| diff --git a/test/test_retry.py b/test/test_retry.py
--- a/test/test_retry.py
+++ b/test/test_retry.py
@@ -1,5 +1,6 @@
import mock
import pytest
+import warnings
from urllib3.response import HTTPResponse
from urllib3.packages import six
@@ -15,6 +16,13 @@
)
[email protected](scope="function", autouse=True)
+def no_retry_deprecations():
+ with warnings.catch_warnings(record=True) as w:
+ yield
+ assert len([str(x.message) for x in w if "Retry" in str(x.message)]) == 0
+
+
class TestRetry(object):
def test_string(self):
""" Retry string representation looks the way we expect """
@@ -196,14 +204,14 @@ def test_status_forcelist(self):
retry = Retry(total=1, status_forcelist=["418"])
assert not retry.is_retry("GET", status_code=418)
- def test_method_whitelist_with_status_forcelist(self):
- # Falsey method_whitelist means to retry on any method.
- retry = Retry(status_forcelist=[500], method_whitelist=None)
+ def test_allowed_methods_with_status_forcelist(self):
+ # Falsey allowed_methods means to retry on any method.
+ retry = Retry(status_forcelist=[500], allowed_methods=None)
assert retry.is_retry("GET", status_code=500)
assert retry.is_retry("POST", status_code=500)
- # Criteria of method_whitelist and status_forcelist are ANDed.
- retry = Retry(status_forcelist=[500], method_whitelist=["POST"])
+ # Criteria of allowed_methods and status_forcelist are ANDed.
+ retry = Retry(status_forcelist=[500], allowed_methods=["POST"])
assert not retry.is_retry("GET", status_code=500)
assert retry.is_retry("POST", status_code=500)
@@ -251,7 +259,7 @@ def test_error_message(self):
assert str(e.value.reason) == "conntimeout"
def test_history(self):
- retry = Retry(total=10, method_whitelist=frozenset(["GET", "POST"]))
+ retry = Retry(total=10, allowed_methods=frozenset(["GET", "POST"]))
assert retry.history == tuple()
connection_error = ConnectTimeoutError("conntimeout")
retry = retry.increment("GET", "/test1", None, connection_error)
diff --git a/test/test_retry_deprecated.py b/test/test_retry_deprecated.py
new file mode 100644
--- /dev/null
+++ b/test/test_retry_deprecated.py
@@ -0,0 +1,470 @@
+# This is a copy-paste of test_retry.py with extra asserts about deprecated options. It will be removed for v2.
+import mock
+import pytest
+import warnings
+
+from urllib3.response import HTTPResponse
+from urllib3.packages import six
+from urllib3.packages.six.moves import xrange
+from urllib3.util.retry import Retry, RequestHistory
+from urllib3.exceptions import (
+ ConnectTimeoutError,
+ InvalidHeader,
+ MaxRetryError,
+ ReadTimeoutError,
+ ResponseError,
+ SSLError,
+)
+
+
+# TODO: Remove this entire file once deprecated Retry options are removed in v2.
[email protected](scope="function")
+def expect_retry_deprecation():
+ with warnings.catch_warnings(record=True) as w:
+ yield
+ assert len([str(x.message) for x in w if "Retry" in str(x.message)]) > 0
+
+
+class TestRetry(object):
+ def test_string(self):
+ """ Retry string representation looks the way we expect """
+ retry = Retry()
+ assert (
+ str(retry)
+ == "Retry(total=10, connect=None, read=None, redirect=None, status=None)"
+ )
+ for _ in range(3):
+ retry = retry.increment(method="GET")
+ assert (
+ str(retry)
+ == "Retry(total=7, connect=None, read=None, redirect=None, status=None)"
+ )
+
+ def test_retry_both_specified(self):
+ """Total can win if it's lower than the connect value"""
+ error = ConnectTimeoutError()
+ retry = Retry(connect=3, total=2)
+ retry = retry.increment(error=error)
+ retry = retry.increment(error=error)
+ with pytest.raises(MaxRetryError) as e:
+ retry.increment(error=error)
+ assert e.value.reason == error
+
+ def test_retry_higher_total_loses(self):
+ """ A lower connect timeout than the total is honored """
+ error = ConnectTimeoutError()
+ retry = Retry(connect=2, total=3)
+ retry = retry.increment(error=error)
+ retry = retry.increment(error=error)
+ with pytest.raises(MaxRetryError):
+ retry.increment(error=error)
+
+ def test_retry_higher_total_loses_vs_read(self):
+ """ A lower read timeout than the total is honored """
+ error = ReadTimeoutError(None, "/", "read timed out")
+ retry = Retry(read=2, total=3)
+ retry = retry.increment(method="GET", error=error)
+ retry = retry.increment(method="GET", error=error)
+ with pytest.raises(MaxRetryError):
+ retry.increment(method="GET", error=error)
+
+ def test_retry_total_none(self):
+ """ if Total is none, connect error should take precedence """
+ error = ConnectTimeoutError()
+ retry = Retry(connect=2, total=None)
+ retry = retry.increment(error=error)
+ retry = retry.increment(error=error)
+ with pytest.raises(MaxRetryError) as e:
+ retry.increment(error=error)
+ assert e.value.reason == error
+
+ error = ReadTimeoutError(None, "/", "read timed out")
+ retry = Retry(connect=2, total=None)
+ retry = retry.increment(method="GET", error=error)
+ retry = retry.increment(method="GET", error=error)
+ retry = retry.increment(method="GET", error=error)
+ assert not retry.is_exhausted()
+
+ def test_retry_default(self):
+ """ If no value is specified, should retry connects 3 times """
+ retry = Retry()
+ assert retry.total == 10
+ assert retry.connect is None
+ assert retry.read is None
+ assert retry.redirect is None
+ assert retry.other is None
+
+ error = ConnectTimeoutError()
+ retry = Retry(connect=1)
+ retry = retry.increment(error=error)
+ with pytest.raises(MaxRetryError):
+ retry.increment(error=error)
+
+ retry = Retry(connect=1)
+ retry = retry.increment(error=error)
+ assert not retry.is_exhausted()
+
+ assert Retry(0).raise_on_redirect
+ assert not Retry(False).raise_on_redirect
+
+ def test_retry_other(self):
+ """ If an unexpected error is raised, should retry other times """
+ other_error = SSLError()
+ retry = Retry(connect=1)
+ retry = retry.increment(error=other_error)
+ retry = retry.increment(error=other_error)
+ assert not retry.is_exhausted()
+
+ retry = Retry(other=1)
+ retry = retry.increment(error=other_error)
+ with pytest.raises(MaxRetryError) as e:
+ retry.increment(error=other_error)
+ assert e.value.reason == other_error
+
+ def test_retry_read_zero(self):
+ """ No second chances on read timeouts, by default """
+ error = ReadTimeoutError(None, "/", "read timed out")
+ retry = Retry(read=0)
+ with pytest.raises(MaxRetryError) as e:
+ retry.increment(method="GET", error=error)
+ assert e.value.reason == error
+
+ def test_status_counter(self):
+ resp = HTTPResponse(status=400)
+ retry = Retry(status=2)
+ retry = retry.increment(response=resp)
+ retry = retry.increment(response=resp)
+ with pytest.raises(MaxRetryError) as e:
+ retry.increment(response=resp)
+ assert str(e.value.reason) == ResponseError.SPECIFIC_ERROR.format(
+ status_code=400
+ )
+
+ def test_backoff(self):
+ """ Backoff is computed correctly """
+ max_backoff = Retry.BACKOFF_MAX
+
+ retry = Retry(total=100, backoff_factor=0.2)
+ assert retry.get_backoff_time() == 0 # First request
+
+ retry = retry.increment(method="GET")
+ assert retry.get_backoff_time() == 0 # First retry
+
+ retry = retry.increment(method="GET")
+ assert retry.backoff_factor == 0.2
+ assert retry.total == 98
+ assert retry.get_backoff_time() == 0.4 # Start backoff
+
+ retry = retry.increment(method="GET")
+ assert retry.get_backoff_time() == 0.8
+
+ retry = retry.increment(method="GET")
+ assert retry.get_backoff_time() == 1.6
+
+ for _ in xrange(10):
+ retry = retry.increment(method="GET")
+
+ assert retry.get_backoff_time() == max_backoff
+
+ def test_zero_backoff(self):
+ retry = Retry()
+ assert retry.get_backoff_time() == 0
+ retry = retry.increment(method="GET")
+ retry = retry.increment(method="GET")
+ assert retry.get_backoff_time() == 0
+
+ def test_backoff_reset_after_redirect(self):
+ retry = Retry(total=100, redirect=5, backoff_factor=0.2)
+ retry = retry.increment(method="GET")
+ retry = retry.increment(method="GET")
+ assert retry.get_backoff_time() == 0.4
+ redirect_response = HTTPResponse(status=302, headers={"location": "test"})
+ retry = retry.increment(method="GET", response=redirect_response)
+ assert retry.get_backoff_time() == 0
+ retry = retry.increment(method="GET")
+ retry = retry.increment(method="GET")
+ assert retry.get_backoff_time() == 0.4
+
+ def test_sleep(self):
+ # sleep a very small amount of time so our code coverage is happy
+ retry = Retry(backoff_factor=0.0001)
+ retry = retry.increment(method="GET")
+ retry = retry.increment(method="GET")
+ retry.sleep()
+
+ def test_status_forcelist(self):
+ retry = Retry(status_forcelist=xrange(500, 600))
+ assert not retry.is_retry("GET", status_code=200)
+ assert not retry.is_retry("GET", status_code=400)
+ assert retry.is_retry("GET", status_code=500)
+
+ retry = Retry(total=1, status_forcelist=[418])
+ assert not retry.is_retry("GET", status_code=400)
+ assert retry.is_retry("GET", status_code=418)
+
+ # String status codes are not matched.
+ retry = Retry(total=1, status_forcelist=["418"])
+ assert not retry.is_retry("GET", status_code=418)
+
+ def test_method_whitelist_with_status_forcelist(self, expect_retry_deprecation):
+ # Falsey method_whitelist means to retry on any method.
+ retry = Retry(status_forcelist=[500], method_whitelist=None)
+ assert retry.is_retry("GET", status_code=500)
+ assert retry.is_retry("POST", status_code=500)
+
+ # Criteria of method_whitelist and status_forcelist are ANDed.
+ retry = Retry(status_forcelist=[500], method_whitelist=["POST"])
+ assert not retry.is_retry("GET", status_code=500)
+ assert retry.is_retry("POST", status_code=500)
+
+ def test_exhausted(self):
+ assert not Retry(0).is_exhausted()
+ assert Retry(-1).is_exhausted()
+ assert Retry(1).increment(method="GET").total == 0
+
+ @pytest.mark.parametrize("total", [-1, 0])
+ def test_disabled(self, total):
+ with pytest.raises(MaxRetryError):
+ Retry(total).increment(method="GET")
+
+ def test_error_message(self):
+ retry = Retry(total=0)
+ with pytest.raises(MaxRetryError) as e:
+ retry = retry.increment(
+ method="GET", error=ReadTimeoutError(None, "/", "read timed out")
+ )
+ assert "Caused by redirect" not in str(e.value)
+ assert str(e.value.reason) == "None: read timed out"
+
+ retry = Retry(total=1)
+ with pytest.raises(MaxRetryError) as e:
+ retry = retry.increment("POST", "/")
+ retry = retry.increment("POST", "/")
+ assert "Caused by redirect" not in str(e.value)
+ assert isinstance(e.value.reason, ResponseError)
+ assert str(e.value.reason) == ResponseError.GENERIC_ERROR
+
+ retry = Retry(total=1)
+ response = HTTPResponse(status=500)
+ with pytest.raises(MaxRetryError) as e:
+ retry = retry.increment("POST", "/", response=response)
+ retry = retry.increment("POST", "/", response=response)
+ assert "Caused by redirect" not in str(e.value)
+ msg = ResponseError.SPECIFIC_ERROR.format(status_code=500)
+ assert str(e.value.reason) == msg
+
+ retry = Retry(connect=1)
+ with pytest.raises(MaxRetryError) as e:
+ retry = retry.increment(error=ConnectTimeoutError("conntimeout"))
+ retry = retry.increment(error=ConnectTimeoutError("conntimeout"))
+ assert "Caused by redirect" not in str(e.value)
+ assert str(e.value.reason) == "conntimeout"
+
+ def test_history(self, expect_retry_deprecation):
+ retry = Retry(total=10, method_whitelist=frozenset(["GET", "POST"]))
+ assert retry.history == tuple()
+ connection_error = ConnectTimeoutError("conntimeout")
+ retry = retry.increment("GET", "/test1", None, connection_error)
+ history = (RequestHistory("GET", "/test1", connection_error, None, None),)
+ assert retry.history == history
+
+ read_error = ReadTimeoutError(None, "/test2", "read timed out")
+ retry = retry.increment("POST", "/test2", None, read_error)
+ history = (
+ RequestHistory("GET", "/test1", connection_error, None, None),
+ RequestHistory("POST", "/test2", read_error, None, None),
+ )
+ assert retry.history == history
+
+ response = HTTPResponse(status=500)
+ retry = retry.increment("GET", "/test3", response, None)
+ history = (
+ RequestHistory("GET", "/test1", connection_error, None, None),
+ RequestHistory("POST", "/test2", read_error, None, None),
+ RequestHistory("GET", "/test3", None, 500, None),
+ )
+ assert retry.history == history
+
+ def test_retry_method_not_in_whitelist(self):
+ error = ReadTimeoutError(None, "/", "read timed out")
+ retry = Retry()
+ with pytest.raises(ReadTimeoutError):
+ retry.increment(method="POST", error=error)
+
+ def test_retry_default_remove_headers_on_redirect(self):
+ retry = Retry()
+
+ assert list(retry.remove_headers_on_redirect) == ["authorization"]
+
+ def test_retry_set_remove_headers_on_redirect(self):
+ retry = Retry(remove_headers_on_redirect=["X-API-Secret"])
+
+ assert list(retry.remove_headers_on_redirect) == ["x-api-secret"]
+
+ @pytest.mark.parametrize("value", ["-1", "+1", "1.0", six.u("\xb2")]) # \xb2 = ^2
+ def test_parse_retry_after_invalid(self, value):
+ retry = Retry()
+ with pytest.raises(InvalidHeader):
+ retry.parse_retry_after(value)
+
+ @pytest.mark.parametrize(
+ "value, expected", [("0", 0), ("1000", 1000), ("\t42 ", 42)]
+ )
+ def test_parse_retry_after(self, value, expected):
+ retry = Retry()
+ assert retry.parse_retry_after(value) == expected
+
+ @pytest.mark.parametrize("respect_retry_after_header", [True, False])
+ def test_respect_retry_after_header_propagated(self, respect_retry_after_header):
+
+ retry = Retry(respect_retry_after_header=respect_retry_after_header)
+ new_retry = retry.new()
+ assert new_retry.respect_retry_after_header == respect_retry_after_header
+
+ @pytest.mark.freeze_time("2019-06-03 11:00:00", tz_offset=0)
+ @pytest.mark.parametrize(
+ "retry_after_header,respect_retry_after_header,sleep_duration",
+ [
+ ("3600", True, 3600),
+ ("3600", False, None),
+ # Will sleep due to header is 1 hour in future
+ ("Mon, 3 Jun 2019 12:00:00 UTC", True, 3600),
+ # Won't sleep due to not respecting header
+ ("Mon, 3 Jun 2019 12:00:00 UTC", False, None),
+ # Won't sleep due to current time reached
+ ("Mon, 3 Jun 2019 11:00:00 UTC", True, None),
+ # Won't sleep due to current time reached + not respecting header
+ ("Mon, 3 Jun 2019 11:00:00 UTC", False, None),
+ # Handle all the formats in RFC 7231 Section 7.1.1.1
+ ("Mon, 03 Jun 2019 11:30:12 GMT", True, 1812),
+ ("Monday, 03-Jun-19 11:30:12 GMT", True, 1812),
+ # Assume that datetimes without a timezone are in UTC per RFC 7231
+ ("Mon Jun 3 11:30:12 2019", True, 1812),
+ ],
+ )
+ @pytest.mark.parametrize(
+ "stub_timezone",
+ [
+ "UTC",
+ "Asia/Jerusalem",
+ None,
+ ],
+ indirect=True,
+ )
+ @pytest.mark.usefixtures("stub_timezone")
+ def test_respect_retry_after_header_sleep(
+ self, retry_after_header, respect_retry_after_header, sleep_duration
+ ):
+ retry = Retry(respect_retry_after_header=respect_retry_after_header)
+
+ with mock.patch("time.sleep") as sleep_mock:
+ # for the default behavior, it must be in RETRY_AFTER_STATUS_CODES
+ response = HTTPResponse(
+ status=503, headers={"Retry-After": retry_after_header}
+ )
+
+ retry.sleep(response)
+
+ # The expected behavior is that we'll only sleep if respecting
+ # this header (since we won't have any backoff sleep attempts)
+ if respect_retry_after_header and sleep_duration is not None:
+ sleep_mock.assert_called_with(sleep_duration)
+ else:
+ sleep_mock.assert_not_called()
+
+
+class TestRetryDeprecations(object):
+ def test_cls_get_default_method_whitelist(self, expect_retry_deprecation):
+ assert Retry.DEFAULT_ALLOWED_METHODS == Retry.DEFAULT_METHOD_WHITELIST
+
+ def test_cls_get_default_redirect_headers_blacklist(self, expect_retry_deprecation):
+ assert (
+ Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+ == Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST
+ )
+
+ def test_cls_set_default_method_whitelist(self, expect_retry_deprecation):
+ old_setting = Retry.DEFAULT_METHOD_WHITELIST
+ try:
+ Retry.DEFAULT_METHOD_WHITELIST = {"GET"}
+ retry = Retry()
+ assert retry.DEFAULT_ALLOWED_METHODS == {"GET"}
+ assert retry.DEFAULT_METHOD_WHITELIST == {"GET"}
+ assert retry.allowed_methods == {"GET"}
+ assert retry.method_whitelist == {"GET"}
+
+ # Test that the default can be overridden both ways
+ retry = Retry(allowed_methods={"GET", "POST"})
+ assert retry.DEFAULT_ALLOWED_METHODS == {"GET"}
+ assert retry.DEFAULT_METHOD_WHITELIST == {"GET"}
+ assert retry.allowed_methods == {"GET", "POST"}
+ assert retry.method_whitelist == {"GET", "POST"}
+
+ retry = Retry(method_whitelist={"POST"})
+ assert retry.DEFAULT_ALLOWED_METHODS == {"GET"}
+ assert retry.DEFAULT_METHOD_WHITELIST == {"GET"}
+ assert retry.allowed_methods == {"POST"}
+ assert retry.method_whitelist == {"POST"}
+ finally:
+ Retry.DEFAULT_METHOD_WHITELIST = old_setting
+ assert Retry.DEFAULT_ALLOWED_METHODS == old_setting
+
+ def test_cls_set_default_redirect_headers_blacklist(self, expect_retry_deprecation):
+ old_setting = Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST
+ try:
+ Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST = {"test"}
+ retry = Retry()
+ assert retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT == {"test"}
+ assert retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST == {"test"}
+ assert retry.remove_headers_on_redirect == {"test"}
+ assert retry.remove_headers_on_redirect == {"test"}
+
+ retry = Retry(remove_headers_on_redirect={"test2"})
+ assert retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT == {"test"}
+ assert retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST == {"test"}
+ assert retry.remove_headers_on_redirect == {"test2"}
+ assert retry.remove_headers_on_redirect == {"test2"}
+ finally:
+ Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST = old_setting
+ assert Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST == old_setting
+
+ @pytest.mark.parametrize(
+ "options", [(None, None), ({"GET"}, None), (None, {"GET"}), ({"GET"}, {"GET"})]
+ )
+ def test_retry_allowed_methods_and_method_whitelist_error(self, options):
+ with pytest.raises(ValueError) as e:
+ Retry(allowed_methods=options[0], method_whitelist=options[1])
+ assert str(e.value) == (
+ "Using both 'allowed_methods' and 'method_whitelist' together "
+ "is not allowed. Instead only use 'allowed_methods'"
+ )
+
+ def test_retry_subclass_that_sets_method_whitelist(self, expect_retry_deprecation):
+ class SubclassRetry(Retry):
+ def __init__(self, **kwargs):
+ if "allowed_methods" in kwargs:
+ raise AssertionError(
+ "This subclass likely doesn't use 'allowed_methods'"
+ )
+
+ super(SubclassRetry, self).__init__(**kwargs)
+
+            # Since we're setting 'method_whitelist' we get fallbacks
+ # within Retry.new() and Retry._is_method_retryable()
+ # to use 'method_whitelist' instead of 'allowed_methods'
+ self.method_whitelist = self.method_whitelist | {"POST"}
+
+ retry = SubclassRetry()
+ assert retry.method_whitelist == Retry.DEFAULT_ALLOWED_METHODS | {"POST"}
+ assert retry.new(read=0).method_whitelist == retry.method_whitelist
+ assert retry._is_method_retryable("POST")
+ assert not retry._is_method_retryable("CONNECT")
+
+ assert retry.new(method_whitelist={"GET"}).method_whitelist == {"GET", "POST"}
+
+ # urllib3 doesn't do this during normal operation
+ # so we don't want users passing in 'allowed_methods'
+ # when their subclass doesn't support the option yet.
+ with pytest.raises(AssertionError) as e:
+ retry.new(allowed_methods={"GET"})
+ assert str(e.value) == "This subclass likely doesn't use 'allowed_methods'"
| Replace method_whitelist and DEFAULT_REDIRECT_HEADERS_BLACKLIST parameter names for the Retry method
Ref: https://urllib3.readthedocs.io/en/latest/reference/urllib3.util.html
Request to replace the following parameters in the `Retry` module with neutral terms that do not carry any slavery connotation.
`method_whitelist`
`DEFAULT_REDIRECT_HEADERS_BLACKLIST`
Many thanks.
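For reference, a hedged sketch of what call sites would look like before and after the rename (`allowed_methods` being the replacement name the patch above introduces):
```python
from urllib3.util.retry import Retry

# Deprecated spelling (emits a DeprecationWarning in v1.26, removed in v2.0):
retry = Retry(status_forcelist=[500], method_whitelist=["POST"])

# Replacement spelling:
retry = Retry(status_forcelist=[500], allowed_methods=["POST"])
```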
| Yep, this is on my to-do list. Gotta jump through a few hoops to make it backwards compatible but also not appear in the documentation.
Black is associated with evil in many cultures simply because black is the color of darkness; in the dark you cannot see anything and will not notice an enemy or predator creeping up, so darkness became associated with fear and danger. That is why expressions like black humor, black market, black metal, black sheep and black list appeared. These expressions have been around for a long time and have nothing to do with racism or anything of that kind.
However, in Japanese culture black is the color of age and experience, which is why the belt of the highest rank is black. These are just the traditions of different nations, and removing expressions like 'black list', 'white list', etc. from the code is absolutely useless; it will not solve the problem.
I'd already set my mind to doing this before this issue was raised. Unless another collaborator objects we'll be making this change.
I'm all for doing it | 2020-09-25T18:43:57Z | [] | [] |
urllib3/urllib3 | 2,002 | urllib3__urllib3-2002 | [
"1990"
] | 6d38f171c4921043e1ff633e2a3e9f7ea382e1d5 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -398,6 +398,23 @@ def connect(self):
ssl_context=context,
)
+ # If we're using all defaults and the connection
+ # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
+ # for the host.
+ if (
+ default_ssl_context
+ and self.ssl_version is None
+ and hasattr(self.sock, "version")
+ and self.sock.version() in {"TLSv1", "TLSv1.1"}
+ ):
+ warnings.warn(
+ "Negotiating TLSv1/TLSv1.1 by default is deprecated "
+ "and will be disabled in urllib3 v2.0.0. Connecting to "
+ "'%s' with '%s' can be enabled by explicitly opting-in "
+ "with 'ssl_version'" % (self.host, self.sock.version()),
+ DeprecationWarning,
+ )
+
if self.assert_fingerprint:
assert_fingerprint(
self.sock.getpeercert(binary_form=True), self.assert_fingerprint
| diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -87,6 +87,9 @@
class TestHTTPS(HTTPSDummyServerTestCase):
tls_protocol_name = None
+ def tls_protocol_deprecated(self):
+ return self.tls_protocol_name in {"TLSv1", "TLSv1.1"}
+
@classmethod
def setup_class(cls):
super(TestHTTPS, cls).setup_class()
@@ -213,26 +216,25 @@ def test_verified(self):
conn = https_pool._new_conn()
assert conn.__class__ == VerifiedHTTPSConnection
- with mock.patch("warnings.warn") as warn:
+ with warnings.catch_warnings(record=True) as w:
r = https_pool.request("GET", "/")
assert r.status == 200
- # Modern versions of Python, or systems using PyOpenSSL, don't
- # emit warnings.
- if (
- sys.version_info >= (2, 7, 9)
- or util.IS_PYOPENSSL
- or util.IS_SECURETRANSPORT
- ):
- assert not warn.called, warn.call_args_list
- else:
- assert warn.called
- if util.HAS_SNI:
- call = warn.call_args_list[0]
- else:
- call = warn.call_args_list[1]
- error = call[0][1]
- assert error == InsecurePlatformWarning
+ # If we're using a deprecated TLS version we can remove 'DeprecationWarning'
+ if self.tls_protocol_deprecated():
+ w = [x for x in w if x.category != DeprecationWarning]
+
+ # Modern versions of Python, or systems using PyOpenSSL, don't
+ # emit warnings.
+ if (
+ sys.version_info >= (2, 7, 9)
+ or util.IS_PYOPENSSL
+ or util.IS_SECURETRANSPORT
+ ):
+ assert w == []
+ else:
+ assert len(w) > 1
+ assert any(x.category == InsecureRequestWarning for x in w)
def test_verified_with_context(self):
ctx = util.ssl_.create_urllib3_context(cert_reqs=ssl.CERT_REQUIRED)
@@ -306,10 +308,15 @@ def test_ca_dir_verified(self, tmpdir):
conn = https_pool._new_conn()
assert conn.__class__ == VerifiedHTTPSConnection
- with mock.patch("warnings.warn") as warn:
+ with warnings.catch_warnings(record=True) as w:
r = https_pool.request("GET", "/")
assert r.status == 200
- assert not warn.called, warn.call_args_list
+
+ # If we're using a deprecated TLS version we can remove 'DeprecationWarning'
+ if self.tls_protocol_deprecated():
+ w = [x for x in w if x.category != DeprecationWarning]
+
+ assert w == []
def test_invalid_common_name(self):
with HTTPSConnectionPool(
@@ -391,6 +398,11 @@ def test_ssl_unverified_with_ca_certs(self):
# the unverified warning. Older systems may also emit other
# warnings, which we want to ignore here.
calls = warn.call_args_list
+
+ # If we're using a deprecated TLS version we can remove 'DeprecationWarning'
+ if self.tls_protocol_deprecated():
+ calls = [call for call in calls if call[0][1] != DeprecationWarning]
+
if (
sys.version_info >= (2, 7, 9)
or util.IS_PYOPENSSL
@@ -665,7 +677,13 @@ def _request_without_resource_warnings(self, method, url):
) as https_pool:
https_pool.request(method, url)
- return [x for x in w if not isinstance(x.message, ResourceWarning)]
+ w = [x for x in w if not isinstance(x.message, ResourceWarning)]
+
+ # If we're using a deprecated TLS version we can remove 'DeprecationWarning'
+ if self.tls_protocol_deprecated():
+ w = [x for x in w if x.category != DeprecationWarning]
+
+ return w
def test_set_ssl_version_to_tls_version(self):
if self.tls_protocol_name is None:
@@ -699,6 +717,68 @@ def test_tls_protocol_name_of_socket(self):
finally:
conn.close()
+ def test_default_tls_version_deprecations(self):
+ if self.tls_protocol_name is None:
+ pytest.skip("Skipping base test class")
+
+ with HTTPSConnectionPool(
+ self.host, self.port, ca_certs=DEFAULT_CA
+ ) as https_pool:
+ conn = https_pool._get_conn()
+ try:
+ with warnings.catch_warnings(record=True) as w:
+ conn.connect()
+ if not hasattr(conn.sock, "version"):
+ pytest.skip("SSLSocket.version() not available")
+ finally:
+ conn.close()
+
+ if self.tls_protocol_deprecated():
+ assert len(w) == 1
+ assert str(w[0].message) == (
+ "Negotiating TLSv1/TLSv1.1 by default is deprecated "
+ "and will be disabled in urllib3 v2.0.0. Connecting to "
+ "'%s' with '%s' can be enabled by explicitly opting-in "
+ "with 'ssl_version'" % (self.host, self.tls_protocol_name)
+ )
+ else:
+ assert w == []
+
+ def test_no_tls_version_deprecation_with_ssl_version(self):
+ if self.tls_protocol_name is None:
+ pytest.skip("Skipping base test class")
+
+ with HTTPSConnectionPool(
+ self.host, self.port, ca_certs=DEFAULT_CA, ssl_version=util.PROTOCOL_TLS
+ ) as https_pool:
+ conn = https_pool._get_conn()
+ try:
+ with warnings.catch_warnings(record=True) as w:
+ conn.connect()
+ finally:
+ conn.close()
+
+ assert w == []
+
+ def test_no_tls_version_deprecation_with_ssl_context(self):
+ if self.tls_protocol_name is None:
+ pytest.skip("Skipping base test class")
+
+ with HTTPSConnectionPool(
+ self.host,
+ self.port,
+ ca_certs=DEFAULT_CA,
+ ssl_context=util.ssl_.create_urllib3_context(),
+ ) as https_pool:
+ conn = https_pool._get_conn()
+ try:
+ with warnings.catch_warnings(record=True) as w:
+ conn.connect()
+ finally:
+ conn.close()
+
+ assert w == []
+
@pytest.mark.skipif(sys.version_info < (3, 8), reason="requires python 3.8+")
def test_sslkeylogfile(self, tmpdir, monkeypatch):
if not hasattr(util.SSLContext, "keylog_filename"):
| [v1.26] Deprecate TLS <1.2 on v1.26.x
If we detect "TLSv1" or "TLSv1.1" from any `SSLSocket.version()` we should emit a `DeprecationWarning` that includes the host.
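A minimal sketch of the proposed check, assuming a connected `SSLSocket` named `sock` and that the caller supplied no explicit `ssl_version` or `ssl_context` (the patch above wires this into `HTTPSConnection.connect()`):
```python
import warnings

if hasattr(sock, "version") and sock.version() in {"TLSv1", "TLSv1.1"}:
    warnings.warn(
        "Negotiating TLSv1/TLSv1.1 by default is deprecated and "
        "will be disabled in urllib3 v2.0.0.",
        DeprecationWarning,
    )
```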
| 2020-09-27T22:50:12Z | [] | [] |
|
urllib3/urllib3 | 2,016 | urllib3__urllib3-2016 | [
"2015"
] | b1f05ae1bf5003961c076e73c02c46b9575bff36 | diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -314,9 +314,11 @@ def create_urllib3_context(
context.check_hostname = False
# Enable logging of TLS session keys via defacto standard environment variable
- # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+).
+ # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
if hasattr(context, "keylog_filename"):
- context.keylog_filename = os.environ.get("SSLKEYLOGFILE")
+ sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
+ if sslkeylogfile:
+ context.keylog_filename = sslkeylogfile
return context
| diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -719,6 +719,18 @@ def test_sslkeylogfile(self, tmpdir, monkeypatch):
keylog_file
)
+ @pytest.mark.parametrize("sslkeylogfile", [None, ""])
+ def test_sslkeylogfile_empty(self, monkeypatch, sslkeylogfile):
+ # Assert that an HTTPS connection doesn't error out when given
+ # no SSLKEYLOGFILE or an empty value (ie 'SSLKEYLOGFILE=')
+ if sslkeylogfile is not None:
+ monkeypatch.setenv("SSLKEYLOGFILE", sslkeylogfile)
+ else:
+ monkeypatch.delenv("SSLKEYLOGFILE", raising=False)
+ with HTTPSConnectionPool(self.host, self.port, ca_certs=DEFAULT_CA) as pool:
+ r = pool.request("GET", "/")
+ assert r.status == 200, r.data
+
def test_alpn_default(self):
"""Default ALPN protocols are sent by default."""
if not has_alpn() or not has_alpn(ssl.SSLContext):
| keylog_filename triggering error when SSLKEYLOGFILE is not set
### Subject
Requests are failing with the error:
```
urllib3.exceptions.ProtocolError: ('Connection aborted.', FileNotFoundError(2, 'No such file or directory'))
```
The line that raises this exception is the following (at the bottom of the stacktrace):
```
File "/Users/basta/.local/share/virtualenvs/venv-OmZ6DiXY/lib/python3.8/site-packages/urllib3/util/ssl_.py", line 302, in create_urllib3_context
context.keylog_filename = os.environ.get("SSLKEYLOGFILE")
```
Editing this file to print `os.environ.get("SSLKEYLOGFILE")` shows that this is empty.
This issue started happening after upgrading from Python 3.7 to 3.8.
### Environment
This issue is happening for me on macOS Catalina 10.15.2.
```python
import platform
import urllib3
print("OS", platform.platform())
print("Python", platform.python_version())
print("urllib3", urllib3.__version__)
```
returns
```
>>> import platform
>>> import urllib3
>>>
>>> print("OS", platform.platform())
OS macOS-10.15.2-x86_64-i386-64bit
>>> print("Python", platform.python_version())
Python 3.8.5
>>> print("urllib3", urllib3.__version__)
urllib3 1.25.10
```
### Steps to Reproduce
This issue is always reproducible when making any requests using the `stripe-python` library. I don't have access to other machines to try to reproduce it elsewhere. However, it does not occur in my production environment (Linux) on the same Python version (3.8).
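A more direct reproduction that doesn't require `stripe-python`, assuming Python 3.8+ and that the trigger is an *empty* (not unset) `SSLKEYLOGFILE` in the environment:
```python
import os
import urllib3

os.environ["SSLKEYLOGFILE"] = ""  # e.g. left empty by a shell profile
# Raises ProtocolError wrapping FileNotFoundError on affected versions,
# because assigning "" to context.keylog_filename tries to open "".
urllib3.PoolManager().request("GET", "https://example.com")
```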
### Expected Behavior
I would expect this line to be a no-op rather than raise. Replacing `if hasattr(context, "keylog_filename"):` with `if hasattr(context, "keylog_filename") and os.environ.get("SSLKEYLOGFILE"):` in `ssl_.py` eliminates the issue.
### Actual Behavior
The noted exception is raised.
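A hedged sketch of the guarded read the fix above applies; the key point is that an empty string (e.g. from `SSLKEYLOGFILE=` in a shell profile) is falsy and must be skipped just like `None`:
```python
import os
import ssl

context = ssl.create_default_context()
if hasattr(context, "keylog_filename"):  # feature exists on Python 3.8+ only
    sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
    if sslkeylogfile:  # skips both None and ""
        context.keylog_filename = sslkeylogfile
```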
| Thanks for opening this; it looks like a bug. We'll get a fix in and make a 1.25.11 release.
Would you be willing to provide a patch and test case? If not, no problem.
@sethmlarson I'm happy to put up my little patch as a PR. I'm not so sure how to test it, though, since the problem only seems to occur on my one machine running macOS (and I can't possibly be the only person using Stripe's SDK on python 3.8!).
If the fix alone is enough, I can have that up soon. You're welcome to use it as a guide or take it over or whatever. | 2020-10-04T21:20:13Z | [] | [] |
urllib3/urllib3 | 2,018 | urllib3__urllib3-2018 | [
"2021"
] | 3308d655a563e0b72e3856c4a4cb06ce2f4f0e8d | diff --git a/src/urllib3/_collections.py b/src/urllib3/_collections.py
--- a/src/urllib3/_collections.py
+++ b/src/urllib3/_collections.py
@@ -155,7 +155,7 @@ def __setitem__(self, key, val):
def __getitem__(self, key):
val = self._container[key.lower()]
- return ", ".join([six.ensure_str(v, "ascii") for v in val[1:]])
+ return ", ".join(val[1:])
def __delitem__(self, key):
del self._container[key.lower()]
diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -43,7 +43,6 @@ class BrokenPipeError(Exception):
pass
-from ._collections import HTTPHeaderDict
from ._version import __version__
from .exceptions import (
ConnectTimeoutError,
@@ -52,7 +51,7 @@ class BrokenPipeError(Exception):
SystemTimeWarning,
)
from .packages.ssl_match_hostname import CertificateError, match_hostname
-from .util import SUPPRESS_USER_AGENT, connection
+from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
from .util.ssl_ import (
assert_fingerprint,
create_urllib3_context,
@@ -213,12 +212,24 @@ def putrequest(self, method, url, *args, **kwargs):
return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
+ def putheader(self, header, *values):
+ """"""
+ if SKIP_HEADER not in values:
+ _HTTPConnection.putheader(self, header, *values)
+ elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
+ raise ValueError(
+ "urllib3.util.SKIP_HEADER only supports '%s'"
+ % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
+ )
+
def request(self, method, url, body=None, headers=None):
- headers = HTTPHeaderDict(headers if headers is not None else {})
- if "user-agent" not in headers:
+ if headers is None:
+ headers = {}
+ else:
+ # Avoid modifying the headers passed into .request()
+ headers = headers.copy()
+ if "user-agent" not in (k.lower() for k in headers):
headers["User-Agent"] = _get_default_user_agent()
- elif headers["user-agent"] == SUPPRESS_USER_AGENT:
- del headers["user-agent"]
super(HTTPConnection, self).request(method, url, body=body, headers=headers)
def request_chunked(self, method, url, body=None, headers=None):
@@ -226,16 +237,15 @@ def request_chunked(self, method, url, body=None, headers=None):
Alternative to the common request method, which sends the
body with chunked encoding and not as one block
"""
- headers = HTTPHeaderDict(headers if headers is not None else {})
- skip_accept_encoding = "accept-encoding" in headers
- skip_host = "host" in headers
+ headers = headers or {}
+ header_keys = set([six.ensure_str(k.lower()) for k in headers])
+ skip_accept_encoding = "accept-encoding" in header_keys
+ skip_host = "host" in header_keys
self.putrequest(
method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
)
- if "user-agent" not in headers:
- headers["User-Agent"] = _get_default_user_agent()
- elif headers["user-agent"] == SUPPRESS_USER_AGENT:
- del headers["user-agent"]
+ if "user-agent" not in header_keys:
+ self.putheader("User-Agent", _get_default_user_agent())
for header, value in headers.items():
self.putheader(header, value)
if "transfer-encoding" not in headers:
diff --git a/src/urllib3/util/__init__.py b/src/urllib3/util/__init__.py
--- a/src/urllib3/util/__init__.py
+++ b/src/urllib3/util/__init__.py
@@ -2,7 +2,7 @@
# For backwards compatibility, provide imports that used to be here.
from .connection import is_connection_dropped
-from .request import SUPPRESS_USER_AGENT, make_headers
+from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
from .response import is_fp_closed
from .retry import Retry
from .ssl_ import (
@@ -44,5 +44,6 @@
"ssl_wrap_socket",
"wait_for_read",
"wait_for_write",
- "SUPPRESS_USER_AGENT",
+ "SKIP_HEADER",
+ "SKIPPABLE_HEADERS",
)
diff --git a/src/urllib3/util/request.py b/src/urllib3/util/request.py
--- a/src/urllib3/util/request.py
+++ b/src/urllib3/util/request.py
@@ -5,10 +5,13 @@
from ..exceptions import UnrewindableBodyError
from ..packages.six import b, integer_types
-# Use an invalid User-Agent to represent suppressing of default user agent.
-# See https://tools.ietf.org/html/rfc7231#section-5.5.3 and
-# https://tools.ietf.org/html/rfc7230#section-3.2.6
-SUPPRESS_USER_AGENT = "@@@INVALID_USER_AGENT@@@"
+# Pass as a value within ``headers`` to skip
+# emitting some HTTP headers that are added automatically.
+# The only headers that are supported are ``Accept-Encoding``,
+# ``Host``, and ``User-Agent``.
+SKIP_HEADER = "@@@SKIP_HEADER@@@"
+SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
+
ACCEPT_ENCODING = "gzip,deflate"
try:
import brotli as _unused_module_brotli # noqa: F401
| diff --git a/test/with_dummyserver/test_chunked_transfer.py b/test/with_dummyserver/test_chunked_transfer.py
--- a/test/with_dummyserver/test_chunked_transfer.py
+++ b/test/with_dummyserver/test_chunked_transfer.py
@@ -8,7 +8,7 @@
consume_socket,
)
from urllib3 import HTTPConnectionPool
-from urllib3.util import SUPPRESS_USER_AGENT
+from urllib3.util import SKIP_HEADER
from urllib3.util.retry import Retry
# Retry failed tests
@@ -123,7 +123,7 @@ def test_remove_user_agent_header(self):
"GET",
"/",
chunks,
- headers={"User-Agent": SUPPRESS_USER_AGENT},
+ headers={"User-Agent": SKIP_HEADER},
chunked=True,
)
diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
import io
import json
import logging
@@ -5,15 +7,17 @@
import sys
import time
import warnings
-from test import LONG_TIMEOUT, SHORT_TIMEOUT
+from test import LONG_TIMEOUT, SHORT_TIMEOUT, onlyPy2
from threading import Event
import mock
import pytest
+import six
from dummyserver.server import HAS_IPV6_AND_DNS, NoIPv6Warning
from dummyserver.testcase import HTTPDummyServerTestCase, SocketDummyServerTestCase
from urllib3 import HTTPConnectionPool, encode_multipart_formdata
+from urllib3._collections import HTTPHeaderDict
from urllib3.connection import _get_default_user_agent
from urllib3.exceptions import (
ConnectTimeoutError,
@@ -26,7 +30,7 @@
)
from urllib3.packages.six import b, u
from urllib3.packages.six.moves.urllib.parse import urlencode
-from urllib3.util import SUPPRESS_USER_AGENT
+from urllib3.util import SKIP_HEADER, SKIPPABLE_HEADERS
from urllib3.util.retry import RequestHistory, Retry
from urllib3.util.timeout import Timeout
@@ -830,18 +834,18 @@ def test_no_user_agent_header(self):
custom_ua = "I'm not a web scraper, what are you talking about?"
with HTTPConnectionPool(self.host, self.port) as pool:
# Suppress user agent in the request headers.
- no_ua_headers = {"User-Agent": SUPPRESS_USER_AGENT}
+ no_ua_headers = {"User-Agent": SKIP_HEADER}
r = pool.request("GET", "/headers", headers=no_ua_headers)
request_headers = json.loads(r.data.decode("utf8"))
assert "User-Agent" not in request_headers
- assert no_ua_headers["User-Agent"] == SUPPRESS_USER_AGENT
+ assert no_ua_headers["User-Agent"] == SKIP_HEADER
# Suppress user agent in the pool headers.
pool.headers = no_ua_headers
r = pool.request("GET", "/headers")
request_headers = json.loads(r.data.decode("utf8"))
assert "User-Agent" not in request_headers
- assert no_ua_headers["User-Agent"] == SUPPRESS_USER_AGENT
+ assert no_ua_headers["User-Agent"] == SKIP_HEADER
# Request headers override pool headers.
pool_headers = {"User-Agent": custom_ua}
@@ -849,9 +853,106 @@ def test_no_user_agent_header(self):
r = pool.request("GET", "/headers", headers=no_ua_headers)
request_headers = json.loads(r.data.decode("utf8"))
assert "User-Agent" not in request_headers
- assert no_ua_headers["User-Agent"] == SUPPRESS_USER_AGENT
+ assert no_ua_headers["User-Agent"] == SKIP_HEADER
assert pool_headers.get("User-Agent") == custom_ua
+ @pytest.mark.parametrize(
+ "accept_encoding",
+ [
+ "Accept-Encoding",
+ "accept-encoding",
+ b"Accept-Encoding",
+ b"accept-encoding",
+ None,
+ ],
+ )
+ @pytest.mark.parametrize("host", ["Host", "host", b"Host", b"host", None])
+ @pytest.mark.parametrize(
+ "user_agent", ["User-Agent", "user-agent", b"User-Agent", b"user-agent", None]
+ )
+ @pytest.mark.parametrize("chunked", [True, False])
+ def test_skip_header(self, accept_encoding, host, user_agent, chunked):
+ headers = {}
+
+ if accept_encoding is not None:
+ headers[accept_encoding] = SKIP_HEADER
+ if host is not None:
+ headers[host] = SKIP_HEADER
+ if user_agent is not None:
+ headers[user_agent] = SKIP_HEADER
+
+ with HTTPConnectionPool(self.host, self.port) as pool:
+ r = pool.request("GET", "/headers", headers=headers, chunked=chunked)
+ request_headers = json.loads(r.data.decode("utf8"))
+
+ if accept_encoding is None:
+ assert "Accept-Encoding" in request_headers
+ else:
+ assert accept_encoding not in request_headers
+ if host is None:
+ assert "Host" in request_headers
+ else:
+ assert host not in request_headers
+ if user_agent is None:
+ assert "User-Agent" in request_headers
+ else:
+ assert user_agent not in request_headers
+
+ @pytest.mark.parametrize("header", ["Content-Length", "content-length"])
+ @pytest.mark.parametrize("chunked", [True, False])
+ def test_skip_header_non_supported(self, header, chunked):
+ with HTTPConnectionPool(self.host, self.port) as pool:
+ with pytest.raises(ValueError) as e:
+ pool.request(
+ "GET", "/headers", headers={header: SKIP_HEADER}, chunked=chunked
+ )
+ assert (
+ str(e.value)
+ == "urllib3.util.SKIP_HEADER only supports 'Accept-Encoding', 'Host', 'User-Agent'"
+ )
+
+ # Ensure that the error message stays up to date with 'SKIP_HEADER_SUPPORTED_HEADERS'
+ assert all(
+ ("'" + header.title() + "'") in str(e.value)
+ for header in SKIPPABLE_HEADERS
+ )
+
+ @pytest.mark.parametrize("chunked", [True, False])
+ @pytest.mark.parametrize("pool_request", [True, False])
+ @pytest.mark.parametrize("header_type", [dict, HTTPHeaderDict])
+ def test_headers_not_modified_by_request(self, chunked, pool_request, header_type):
+ # Test that the .request*() methods of ConnectionPool and HTTPConnection
+ # don't modify the given 'headers' structure, instead they should
+ # make their own internal copies at request time.
+ headers = header_type()
+ headers["key"] = "val"
+
+ with HTTPConnectionPool(self.host, self.port) as pool:
+ pool.headers = headers
+ if pool_request:
+ pool.request("GET", "/headers", chunked=chunked)
+ else:
+ conn = pool._get_conn()
+ if chunked:
+ conn.request_chunked("GET", "/headers")
+ else:
+ conn.request("GET", "/headers")
+
+ assert pool.headers == {"key": "val"}
+ assert isinstance(pool.headers, header_type)
+
+ with HTTPConnectionPool(self.host, self.port) as pool:
+ if pool_request:
+ pool.request("GET", "/headers", headers=headers, chunked=chunked)
+ else:
+ conn = pool._get_conn()
+ if chunked:
+ conn.request_chunked("GET", "/headers", headers=headers)
+ else:
+ conn.request("GET", "/headers", headers=headers)
+
+ assert headers == {"key": "val"}
+
def test_bytes_header(self):
with HTTPConnectionPool(self.host, self.port) as pool:
headers = {"User-Agent": b"test header"}
@@ -860,6 +961,39 @@ def test_bytes_header(self):
assert "User-Agent" in request_headers
assert request_headers["User-Agent"] == "test header"
+ @pytest.mark.parametrize(
+ "user_agent", [u"Schönefeld/1.18.0", u"Schönefeld/1.18.0".encode("iso-8859-1")]
+ )
+ def test_user_agent_non_ascii_user_agent(self, user_agent):
+ if six.PY2 and not isinstance(user_agent, str):
+ pytest.skip(
+ "Python 2 raises UnicodeEncodeError when passed a unicode header"
+ )
+
+ with HTTPConnectionPool(self.host, self.port, retries=False) as pool:
+ r = pool.urlopen(
+ "GET",
+ "/headers",
+ headers={"User-Agent": user_agent},
+ )
+ request_headers = json.loads(r.data.decode("utf8"))
+ assert "User-Agent" in request_headers
+ assert request_headers["User-Agent"] == u"Schönefeld/1.18.0"
+
+ @onlyPy2
+ def test_user_agent_non_ascii_fails_on_python_2(self):
+ with HTTPConnectionPool(self.host, self.port, retries=False) as pool:
+ with pytest.raises(UnicodeEncodeError) as e:
+ pool.urlopen(
+ "GET",
+ "/headers",
+ headers={"User-Agent": u"Schönefeld/1.18.0"},
+ )
+ assert str(e.value) == (
+ "'ascii' codec can't encode character u'\\xf6' in "
+ "position 3: ordinal not in range(128)"
+ )
+
class TestRetry(HTTPDummyServerTestCase):
def test_max_retry(self):
| Can't modify type of headers object within HTTPConnection.request()
Captured from these two comments:
- https://github.com/urllib3/urllib3/pull/2018#discussion_r499988292
- https://github.com/urllib3/urllib3/pull/2018#discussion_r500699677
Follow this logic:
- Get the list of headers
- If they're all bytes return `b", ".join(val[1:])`
- Otherwise return `", ".join(val[1:])`
This way, if a user mixes types they still get the same explosion (a minimal sketch of this logic follows below). I'd also like a little poking around into what happens when httplib is passed both bytes and str headers on Python 2.x, since it had strange behavior when doing so with the HTTP method and URL.
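A minimal sketch of that join logic (the helper name `_combine_header_values` and the `(name, value, ...)` tuple shape are illustrative assumptions, not urllib3's actual internals):

```python
def _combine_header_values(val):
    # 'val' mirrors the tuple shape implied above: the header name at
    # index 0 followed by one or more values, hence 'val[1:]'.
    values = val[1:]
    if all(isinstance(v, bytes) for v in values):
        return b", ".join(values)
    # Mixing bytes and str raises TypeError here -- "the same explosion"
    # a plain str join would produce on mixed input.
    return ", ".join(values)

assert _combine_header_values(("Accept", "gzip", "br")) == "gzip, br"
assert _combine_header_values((b"Accept", b"gzip", b"br")) == b"gzip, br"
```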
cc @jalopezsilva
| 2020-10-05T17:06:52Z | [] | [] |
|
urllib3/urllib3 | 2,082 | urllib3__urllib3-2082 | [
"2090"
] | 3f21165969b838fda29898cbd7218ac9578e319b | diff --git a/src/urllib3/contrib/_securetransport/bindings.py b/src/urllib3/contrib/_securetransport/bindings.py
--- a/src/urllib3/contrib/_securetransport/bindings.py
+++ b/src/urllib3/contrib/_securetransport/bindings.py
@@ -482,36 +482,3 @@ class SecurityConst:
errSecNoTrustSettings = -25263
errSecItemNotFound = -25300
errSecInvalidTrustSettings = -25262
-
- # Cipher suites. We only pick the ones our default cipher string allows.
- # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
- TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
- TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
- TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
- TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
- TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
- TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
- TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
- TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
- TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
- TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
- TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
- TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
- TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
- TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
- TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
- TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
- TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
- TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
- TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
- TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
- TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
- TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
- TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
- TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
- TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
- TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
- TLS_AES_128_GCM_SHA256 = 0x1301
- TLS_AES_256_GCM_SHA384 = 0x1302
- TLS_AES_128_CCM_8_SHA256 = 0x1305
- TLS_AES_128_CCM_SHA256 = 0x1304
diff --git a/src/urllib3/contrib/pyopenssl.py b/src/urllib3/contrib/pyopenssl.py
--- a/src/urllib3/contrib/pyopenssl.py
+++ b/src/urllib3/contrib/pyopenssl.py
@@ -73,6 +73,11 @@ class UnsupportedExtension(Exception):
# SNI always works.
HAS_SNI = True
+# Use system TLS ciphers on OpenSSL 1.1.1+
+USE_DEFAULT_SSLCONTEXT_CIPHERS = util.ssl_._is_ge_openssl_v1_1_1(
+ openssl_backend.openssl_version_text(), openssl_backend.openssl_version_number()
+)
+
# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
@@ -102,6 +107,7 @@ class UnsupportedExtension(Exception):
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext
+orig_util_USE_SYSTEM_SSL_CIPHERS = util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS
log = logging.getLogger(__name__)
@@ -118,6 +124,7 @@ def inject_into_urllib3():
util.ssl_.HAS_SNI = HAS_SNI
util.IS_PYOPENSSL = True
util.ssl_.IS_PYOPENSSL = True
+ util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS = USE_DEFAULT_SSLCONTEXT_CIPHERS
def extract_from_urllib3():
@@ -129,6 +136,7 @@ def extract_from_urllib3():
util.ssl_.HAS_SNI = orig_util_HAS_SNI
util.IS_PYOPENSSL = False
util.ssl_.IS_PYOPENSSL = False
+ util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS = orig_util_USE_SYSTEM_SSL_CIPHERS
def _validate_dependencies_met():
diff --git a/src/urllib3/contrib/securetransport.py b/src/urllib3/contrib/securetransport.py
--- a/src/urllib3/contrib/securetransport.py
+++ b/src/urllib3/contrib/securetransport.py
@@ -82,6 +82,7 @@
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext
+orig_util_USE_SYSTEM_SSL_CIPHERS = util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS
# This dictionary is used by the read callback to obtain a handle to the
# calling wrapped socket. This is a pretty silly approach, but for now it'll
@@ -106,42 +107,6 @@
# for no better reason than we need *a* limit, and this one is right there.
SSL_WRITE_BLOCKSIZE = 16384
-# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
-# individual cipher suites. We need to do this because this is how
-# SecureTransport wants them.
-CIPHER_SUITES = [
- SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
- SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
- SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
- SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
- SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
- SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
- SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
- SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
- SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
- SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
- SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
- SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
- SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
- SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
- SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
- SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
- SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
- SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
- SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
- SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
- SecurityConst.TLS_AES_256_GCM_SHA384,
- SecurityConst.TLS_AES_128_GCM_SHA256,
- SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
- SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
- SecurityConst.TLS_AES_128_CCM_8_SHA256,
- SecurityConst.TLS_AES_128_CCM_SHA256,
- SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
- SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
- SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
- SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
-]
-
# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
# TLSv1 to 1.2 are supported on macOS 10.8+
@@ -186,6 +151,7 @@ def inject_into_urllib3():
util.ssl_.HAS_SNI = HAS_SNI
util.IS_SECURETRANSPORT = True
util.ssl_.IS_SECURETRANSPORT = True
+ util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS = True
def extract_from_urllib3():
@@ -198,6 +164,7 @@ def extract_from_urllib3():
util.ssl_.HAS_SNI = orig_util_HAS_SNI
util.IS_SECURETRANSPORT = False
util.ssl_.IS_SECURETRANSPORT = False
+ util.ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS = orig_util_USE_SYSTEM_SSL_CIPHERS
def _read_callback(connection_id, data_buffer, data_length_pointer):
@@ -357,19 +324,6 @@ def _raise_on_error(self):
self.close()
raise exception
- def _set_ciphers(self):
- """
- Sets up the allowed ciphers. By default this matches the set in
- util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done
- custom and doesn't allow changing at this time, mostly because parsing
- OpenSSL cipher strings is going to be a freaking nightmare.
- """
- ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
- result = Security.SSLSetEnabledCiphers(
- self.context, ciphers, len(CIPHER_SUITES)
- )
- _assert_no_error(result)
-
def _set_alpn_protocols(self, protocols):
"""
Sets up the ALPN protocols on the context.
@@ -506,9 +460,6 @@ def handshake(
)
_assert_no_error(result)
- # Setup the ciphers.
- self._set_ciphers()
-
# Setup the ALPN protocols.
self._set_alpn_protocols(alpn_protocols)
@@ -824,9 +775,7 @@ def load_default_certs(self):
return self.set_default_verify_paths()
def set_ciphers(self, ciphers):
- # For now, we just require the default cipher string.
- if ciphers != util.ssl_.DEFAULT_CIPHERS:
- raise ValueError("SecureTransport doesn't support custom cipher strings")
+ raise ValueError("SecureTransport doesn't support custom cipher strings")
def load_verify_locations(self, cafile=None, capath=None, cadata=None):
# OK, we only really support cadata and cafile.
diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -14,11 +14,25 @@
IS_PYOPENSSL = False
IS_SECURETRANSPORT = False
ALPN_PROTOCOLS = ["http/1.1"]
+USE_DEFAULT_SSLCONTEXT_CIPHERS = False
# Maps the length of a digest to a possible hash function producing this digest
HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
+def _is_ge_openssl_v1_1_1(
+ openssl_version_text: str, openssl_version_number: int
+) -> bool:
+ """Returns True for OpenSSL 1.1.1+ (>=0x10101000)
+ LibreSSL reports a version number of 0x20000000 for
+ OpenSSL version number so we need to filter out LibreSSL.
+ """
+ return (
+ not openssl_version_text.startswith("LibreSSL")
+ and openssl_version_number >= 0x10101000
+ )
+
+
try: # Do we have ssl at all?
import ssl
from ssl import (
@@ -26,12 +40,17 @@
HAS_SNI,
OP_NO_COMPRESSION,
OP_NO_TICKET,
+ OPENSSL_VERSION,
+ OPENSSL_VERSION_NUMBER,
PROTOCOL_TLS,
OP_NO_SSLv2,
OP_NO_SSLv3,
SSLContext,
)
+ USE_DEFAULT_SSLCONTEXT_CIPHERS = _is_ge_openssl_v1_1_1(
+ OPENSSL_VERSION, OPENSSL_VERSION_NUMBER
+ )
PROTOCOL_SSLv23 = PROTOCOL_TLS
from .ssltransport import SSLTransport
except ImportError:
@@ -75,6 +94,7 @@
"!eNULL",
"!MD5",
"!DSS",
+ "!AESCCM",
]
)
@@ -176,14 +196,18 @@ def create_urllib3_context(
Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
:param ciphers:
- Which cipher suites to allow the server to select.
+ Which cipher suites to allow the server to select. Defaults to either system configured
+ ciphers if OpenSSL 1.1.1+, otherwise uses a secure default set of ciphers.
:returns:
Constructed SSLContext object with specified options
:rtype: SSLContext
"""
context = SSLContext(ssl_version or PROTOCOL_TLS)
- context.set_ciphers(ciphers or DEFAULT_CIPHERS)
+ # Unless we're given ciphers defer to either system ciphers in
+ # the case of OpenSSL 1.1.1+ or use our own secure default ciphers.
+ if ciphers is not None or not USE_DEFAULT_SSLCONTEXT_CIPHERS:
+ context.set_ciphers(ciphers or DEFAULT_CIPHERS)
# Setting the default here, as we may have no ssl module on import
cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
| diff --git a/test/contrib/test_pyopenssl.py b/test/contrib/test_pyopenssl.py
--- a/test/contrib/test_pyopenssl.py
+++ b/test/contrib/test_pyopenssl.py
@@ -30,6 +30,7 @@ def teardown_module():
pass
+from ..test_ssl import TestSSL # noqa: E402, F401
from ..test_util import TestUtilSSL # noqa: E402, F401
from ..with_dummyserver.test_https import ( # noqa: E402, F401
TestHTTPS,
@@ -46,7 +47,9 @@ def teardown_module():
TestClientCerts,
TestSNI,
TestSocketClosing,
- TestSSL,
+)
+from ..with_dummyserver.test_socketlevel import ( # noqa: E402, F401
+ TestSSL as TestSocketSSL,
)
diff --git a/test/test_ssl.py b/test/test_ssl.py
--- a/test/test_ssl.py
+++ b/test/test_ssl.py
@@ -6,163 +6,186 @@
from urllib3.util import ssl_
[email protected](
- "addr",
- [
- # IPv6
- "::1",
- "::",
- "FE80::8939:7684:D84b:a5A4%251",
- # IPv4
- "127.0.0.1",
- "8.8.8.8",
- b"127.0.0.1",
- # IPv6 w/ Zone IDs
- "FE80::8939:7684:D84b:a5A4%251",
- b"FE80::8939:7684:D84b:a5A4%251",
- "FE80::8939:7684:D84b:a5A4%19",
- b"FE80::8939:7684:D84b:a5A4%19",
- ],
-)
-def test_is_ipaddress_true(addr):
- assert ssl_.is_ipaddress(addr)
-
-
[email protected](
- "addr",
- [
- "www.python.org",
- b"www.python.org",
- "v2.sg.media-imdb.com",
- b"v2.sg.media-imdb.com",
- ],
-)
-def test_is_ipaddress_false(addr):
- assert not ssl_.is_ipaddress(addr)
-
-
[email protected](
- ["has_sni", "server_hostname", "uses_sni"],
- [
- (True, "127.0.0.1", False),
- (False, "www.python.org", False),
- (False, "0.0.0.0", False),
- (True, "www.google.com", True),
- (True, None, False),
- (False, None, False),
- ],
-)
-def test_context_sni_with_ip_address(monkeypatch, has_sni, server_hostname, uses_sni):
- monkeypatch.setattr(ssl_, "HAS_SNI", has_sni)
-
- sock = mock.Mock()
- context = mock.create_autospec(ssl_.SSLContext)
-
- ssl_.ssl_wrap_socket(sock, server_hostname=server_hostname, ssl_context=context)
-
- if uses_sni:
- context.wrap_socket.assert_called_with(sock, server_hostname=server_hostname)
- else:
- context.wrap_socket.assert_called_with(sock)
-
-
[email protected](
- ["has_sni", "server_hostname", "should_warn"],
- [
- (True, "www.google.com", False),
- (True, "127.0.0.1", False),
- (False, "127.0.0.1", False),
- (False, "www.google.com", True),
- (True, None, False),
- (False, None, False),
- ],
-)
-def test_sni_missing_warning_with_ip_addresses(
- monkeypatch, has_sni, server_hostname, should_warn
-):
- monkeypatch.setattr(ssl_, "HAS_SNI", has_sni)
-
- sock = mock.Mock()
- context = mock.create_autospec(ssl_.SSLContext)
-
- with mock.patch("warnings.warn") as warn:
- ssl_.ssl_wrap_socket(sock, server_hostname=server_hostname, ssl_context=context)
-
- if should_warn:
- assert warn.call_count >= 1
- warnings = [call[0][1] for call in warn.call_args_list]
- assert SNIMissingWarning in warnings
- else:
- assert warn.call_count == 0
-
-
[email protected](
- ["ciphers", "expected_ciphers"],
- [
- (None, ssl_.DEFAULT_CIPHERS),
- ("ECDH+AESGCM:ECDH+CHACHA20", "ECDH+AESGCM:ECDH+CHACHA20"),
- ],
-)
-def test_create_urllib3_context_set_ciphers(monkeypatch, ciphers, expected_ciphers):
-
- context = mock.create_autospec(ssl_.SSLContext)
- context.set_ciphers = mock.Mock()
- context.options = 0
- monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context)
-
- assert ssl_.create_urllib3_context(ciphers=ciphers) is context
-
- assert context.set_ciphers.call_count == 1
- assert context.set_ciphers.call_args == mock.call(expected_ciphers)
-
-
-def test_wrap_socket_given_context_no_load_default_certs():
- context = mock.create_autospec(ssl_.SSLContext)
- context.load_default_certs = mock.Mock()
-
- sock = mock.Mock()
- ssl_.ssl_wrap_socket(sock, ssl_context=context)
-
- context.load_default_certs.assert_not_called()
-
+class TestSSL:
+ @pytest.mark.parametrize(
+ "addr",
+ [
+ # IPv6
+ "::1",
+ "::",
+ "FE80::8939:7684:D84b:a5A4%251",
+ # IPv4
+ "127.0.0.1",
+ "8.8.8.8",
+ b"127.0.0.1",
+ # IPv6 w/ Zone IDs
+ "FE80::8939:7684:D84b:a5A4%251",
+ b"FE80::8939:7684:D84b:a5A4%251",
+ "FE80::8939:7684:D84b:a5A4%19",
+ b"FE80::8939:7684:D84b:a5A4%19",
+ ],
+ )
+ def test_is_ipaddress_true(self, addr):
+ assert ssl_.is_ipaddress(addr)
+
+ @pytest.mark.parametrize(
+ "addr",
+ [
+ "www.python.org",
+ b"www.python.org",
+ "v2.sg.media-imdb.com",
+ b"v2.sg.media-imdb.com",
+ ],
+ )
+ def test_is_ipaddress_false(self, addr):
+ assert not ssl_.is_ipaddress(addr)
+
+ @pytest.mark.parametrize(
+ ["has_sni", "server_hostname", "uses_sni"],
+ [
+ (True, "127.0.0.1", False),
+ (False, "www.python.org", False),
+ (False, "0.0.0.0", False),
+ (True, "www.google.com", True),
+ (True, None, False),
+ (False, None, False),
+ ],
+ )
+ def test_context_sni_with_ip_address(
+ self, monkeypatch, has_sni, server_hostname, uses_sni
+ ):
+ monkeypatch.setattr(ssl_, "HAS_SNI", has_sni)
+
+ sock = mock.Mock()
+ context = mock.create_autospec(ssl_.SSLContext)
-def test_wrap_socket_given_ca_certs_no_load_default_certs(monkeypatch):
- context = mock.create_autospec(ssl_.SSLContext)
- context.load_default_certs = mock.Mock()
- context.options = 0
-
- monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context)
-
- sock = mock.Mock()
- ssl_.ssl_wrap_socket(sock, ca_certs="/tmp/fake-file")
-
- context.load_default_certs.assert_not_called()
- context.load_verify_locations.assert_called_with("/tmp/fake-file", None, None)
-
-
-def test_wrap_socket_default_loads_default_certs(monkeypatch):
- context = mock.create_autospec(ssl_.SSLContext)
- context.load_default_certs = mock.Mock()
- context.options = 0
-
- monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context)
-
- sock = mock.Mock()
- ssl_.ssl_wrap_socket(sock)
-
- context.load_default_certs.assert_called_with()
-
-
[email protected](
- ["pha", "expected_pha"], [(None, None), (False, True), (True, True)]
-)
-def test_create_urllib3_context_pha(monkeypatch, pha, expected_pha):
- context = mock.create_autospec(ssl_.SSLContext)
- context.set_ciphers = mock.Mock()
- context.options = 0
- context.post_handshake_auth = pha
- monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context)
-
- assert ssl_.create_urllib3_context() is context
+ ssl_.ssl_wrap_socket(sock, server_hostname=server_hostname, ssl_context=context)
- assert context.post_handshake_auth == expected_pha
+ if uses_sni:
+ context.wrap_socket.assert_called_with(
+ sock, server_hostname=server_hostname
+ )
+ else:
+ context.wrap_socket.assert_called_with(sock)
+
+ @pytest.mark.parametrize(
+ ["has_sni", "server_hostname", "should_warn"],
+ [
+ (True, "www.google.com", False),
+ (True, "127.0.0.1", False),
+ (False, "127.0.0.1", False),
+ (False, "www.google.com", True),
+ (True, None, False),
+ (False, None, False),
+ ],
+ )
+ def test_sni_missing_warning_with_ip_addresses(
+ self, monkeypatch, has_sni, server_hostname, should_warn
+ ):
+ monkeypatch.setattr(ssl_, "HAS_SNI", has_sni)
+
+ sock = mock.Mock()
+ context = mock.create_autospec(ssl_.SSLContext)
+
+ with mock.patch("warnings.warn") as warn:
+ ssl_.ssl_wrap_socket(
+ sock, server_hostname=server_hostname, ssl_context=context
+ )
+
+ if should_warn:
+ assert warn.call_count >= 1
+ warnings = [call[0][1] for call in warn.call_args_list]
+ assert SNIMissingWarning in warnings
+ else:
+ assert warn.call_count == 0
+
+ @pytest.mark.parametrize(
+ ["ciphers", "expected_ciphers"],
+ [
+ (None, ssl_.DEFAULT_CIPHERS),
+ ("ECDH+AESGCM:ECDH+CHACHA20", "ECDH+AESGCM:ECDH+CHACHA20"),
+ ],
+ )
+ def test_create_urllib3_context_set_ciphers(
+ self, monkeypatch, ciphers, expected_ciphers
+ ):
+
+ context = mock.create_autospec(ssl_.SSLContext)
+ context.set_ciphers = mock.Mock()
+ context.options = 0
+ monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context)
+
+ assert ssl_.create_urllib3_context(ciphers=ciphers) is context
+
+ if ciphers is None and ssl_.USE_DEFAULT_SSLCONTEXT_CIPHERS:
+ assert context.set_ciphers.call_count == 0
+ else:
+ assert context.set_ciphers.call_count == 1
+ assert context.set_ciphers.call_args == mock.call(expected_ciphers)
+
+ def test_wrap_socket_given_context_no_load_default_certs(self):
+ context = mock.create_autospec(ssl_.SSLContext)
+ context.load_default_certs = mock.Mock()
+
+ sock = mock.Mock()
+ ssl_.ssl_wrap_socket(sock, ssl_context=context)
+
+ context.load_default_certs.assert_not_called()
+
+ def test_wrap_socket_given_ca_certs_no_load_default_certs(self, monkeypatch):
+ context = mock.create_autospec(ssl_.SSLContext)
+ context.load_default_certs = mock.Mock()
+ context.options = 0
+
+ monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context)
+
+ sock = mock.Mock()
+ ssl_.ssl_wrap_socket(sock, ca_certs="/tmp/fake-file")
+
+ context.load_default_certs.assert_not_called()
+ context.load_verify_locations.assert_called_with("/tmp/fake-file", None, None)
+
+ def test_wrap_socket_default_loads_default_certs(self, monkeypatch):
+ context = mock.create_autospec(ssl_.SSLContext)
+ context.load_default_certs = mock.Mock()
+ context.options = 0
+
+ monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context)
+
+ sock = mock.Mock()
+ ssl_.ssl_wrap_socket(sock)
+
+ context.load_default_certs.assert_called_with()
+
+ @pytest.mark.parametrize(
+ ["pha", "expected_pha"], [(None, None), (False, True), (True, True)]
+ )
+ def test_create_urllib3_context_pha(self, monkeypatch, pha, expected_pha):
+ context = mock.create_autospec(ssl_.SSLContext)
+ context.set_ciphers = mock.Mock()
+ context.options = 0
+ context.post_handshake_auth = pha
+ monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context)
+
+ assert ssl_.create_urllib3_context() is context
+
+ assert context.post_handshake_auth == expected_pha
+
+ @pytest.mark.parametrize("use_default_sslcontext_ciphers", [True, False])
+ def test_create_urllib3_context_default_ciphers(
+ self, monkeypatch, use_default_sslcontext_ciphers
+ ):
+ context = mock.create_autospec(ssl_.SSLContext)
+ context.set_ciphers = mock.Mock()
+ context.options = 0
+ monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context)
+ monkeypatch.setattr(
+ ssl_, "USE_DEFAULT_SSLCONTEXT_CIPHERS", use_default_sslcontext_ciphers
+ )
+
+ ssl_.create_urllib3_context()
+
+ if use_default_sslcontext_ciphers:
+ context.set_ciphers.assert_not_called()
+ else:
+ context.set_ciphers.assert_called_with(ssl_.DEFAULT_CIPHERS)
| Investigate default TLS ciphers for pyOpenSSL
https://github.com/urllib3/urllib3/pull/2082 makes system ciphers the default with the stdlib `SSLContext` implementation. Can we take a similar approach with pyOpenSSL, or should we still rely on urllib3's default list?
If we're relying on urllib3's default list, we only need to make `pyopenssl.inject_into_urllib3()` set `USE_SYSTEM_SSL_CIPHERS` to `False` unconditionally.
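A rough sketch of that second option, under the assumption that the flag is a module-level boolean on `urllib3.util.ssl_` (the name `USE_SYSTEM_SSL_CIPHERS` is taken from this issue and may differ in the final patch):

```python
from urllib3.util import ssl_ as util_ssl_

# Remember the stdlib-derived default so extraction can restore it.
_orig_use_system_ssl_ciphers = util_ssl_.USE_SYSTEM_SSL_CIPHERS


def inject_into_urllib3():
    # pyOpenSSL contexts don't inherit the stdlib SSLContext defaults,
    # so fall back to urllib3's own DEFAULT_CIPHERS unconditionally.
    util_ssl_.USE_SYSTEM_SSL_CIPHERS = False


def extract_from_urllib3():
    # Restore whatever the stdlib-based default was.
    util_ssl_.USE_SYSTEM_SSL_CIPHERS = _orig_use_system_ssl_ciphers
```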
cc @tiran @alex
| 2020-11-22T22:41:28Z | [] | [] |
|
urllib3/urllib3 | 2,083 | urllib3__urllib3-2083 | [
"1543"
] | 5a1562cc5e7048ba9d7efd3be9b89f42bb6986bc | diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -35,7 +35,7 @@
from .packages import six
from .packages.ssl_match_hostname import CertificateError
from .request import RequestMethods
-from .response import HTTPResponse
+from .response import BaseHTTPResponse, HTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.queue import LifoQueue
@@ -484,7 +484,7 @@ def urlopen(
chunked=False,
body_pos=None,
**response_kw,
- ):
+ ) -> BaseHTTPResponse:
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
diff --git a/src/urllib3/request.py b/src/urllib3/request.py
--- a/src/urllib3/request.py
+++ b/src/urllib3/request.py
@@ -1,6 +1,7 @@
from urllib.parse import urlencode
from .filepost import encode_multipart_formdata
+from .response import BaseHTTPResponse
__all__ = ["RequestMethods"]
@@ -48,13 +49,15 @@ def urlopen(
encode_multipart=True,
multipart_boundary=None,
**kw
- ): # Abstract
+ ) -> BaseHTTPResponse: # Abstract
raise NotImplementedError(
"Classes extending RequestMethods must implement "
"their own ``urlopen`` method."
)
- def request(self, method, url, fields=None, headers=None, **urlopen_kw):
+ def request(
+ self, method, url, fields=None, headers=None, **urlopen_kw
+ ) -> BaseHTTPResponse:
"""
Make a request using :meth:`urlopen` with the appropriate encoding of
``fields`` based on the ``method`` used.
@@ -78,7 +81,9 @@ def request(self, method, url, fields=None, headers=None, **urlopen_kw):
method, url, fields=fields, headers=headers, **urlopen_kw
)
- def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
+ def request_encode_url(
+ self, method, url, fields=None, headers=None, **urlopen_kw
+ ) -> BaseHTTPResponse:
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the url. This is useful for request methods like GET, HEAD, DELETE, etc.
@@ -103,7 +108,7 @@ def request_encode_body(
encode_multipart=True,
multipart_boundary=None,
**urlopen_kw
- ):
+ ) -> BaseHTTPResponse:
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the body. This is useful for request methods like POST, PUT, PATCH, etc.
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -1,5 +1,6 @@
import io
import logging
+import typing
import zlib
from contextlib import contextmanager
from socket import error as SocketError
@@ -29,15 +30,20 @@
log = logging.getLogger(__name__)
-class DeflateDecoder:
+class ContentDecoder:
+ def decompress(self, data: bytes) -> bytes:
+ raise NotImplementedError()
+
+ def flush(self) -> bytes:
+ raise NotImplementedError()
+
+
+class DeflateDecoder(ContentDecoder):
def __init__(self):
self._first_try = True
self._data = b""
self._obj = zlib.decompressobj()
- def __getattr__(self, name):
- return getattr(self._obj, name)
-
def decompress(self, data):
if not data:
return data
@@ -60,6 +66,9 @@ def decompress(self, data):
finally:
self._data = None
+ def flush(self) -> bytes:
+ return self._obj.flush()
+
class GzipDecoderState:
@@ -68,14 +77,11 @@ class GzipDecoderState:
SWALLOW_DATA = 2
-class GzipDecoder:
+class GzipDecoder(ContentDecoder):
def __init__(self):
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
self._state = GzipDecoderState.FIRST_MEMBER
- def __getattr__(self, name):
- return getattr(self._obj, name)
-
def decompress(self, data):
ret = bytearray()
if self._state == GzipDecoderState.SWALLOW_DATA or not data:
@@ -97,10 +103,13 @@ def decompress(self, data):
self._state = GzipDecoderState.OTHER_MEMBERS
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+ def flush(self) -> bytes:
+ return self._obj.flush()
+
if brotli is not None:
- class BrotliDecoder:
+ class BrotliDecoder(ContentDecoder):
# Supports both 'brotlipy' and 'Brotli' packages
# since they share an import name. The top branches
# are for 'brotlipy' and bottom branches for 'Brotli'
@@ -117,7 +126,7 @@ def flush(self):
return b""
-class MultiDecoder:
+class MultiDecoder(ContentDecoder):
"""
From RFC7231:
If one or more encodings have been applied to a representation, the
@@ -138,7 +147,7 @@ def decompress(self, data):
return data
-def _get_decoder(mode):
+def _get_decoder(mode: str) -> ContentDecoder:
if "," in mode:
return MultiDecoder(mode)
@@ -151,7 +160,180 @@ def _get_decoder(mode):
return DeflateDecoder()
-class HTTPResponse(io.IOBase):
+class BaseHTTPResponse(io.IOBase):
+ CONTENT_DECODERS = ["gzip", "deflate"]
+ if brotli is not None:
+ CONTENT_DECODERS += ["br"]
+ REDIRECT_STATUSES = [301, 302, 303, 307, 308]
+
+ DECODER_ERROR_CLASSES = (IOError, zlib.error)
+ if brotli is not None:
+ DECODER_ERROR_CLASSES += (brotli.error,)
+
+ def __init__(
+ self,
+ *,
+ headers: typing.Optional[typing.Mapping[typing.AnyStr, typing.AnyStr]] = None,
+ status: int,
+ version: int,
+ reason: str,
+ decode_content: bool,
+ ) -> None:
+ if isinstance(headers, HTTPHeaderDict):
+ self.headers = headers
+ else:
+ self.headers = HTTPHeaderDict(headers)
+ self.status = status
+ self.version = version
+ self.reason = reason
+ self.decode_content = decode_content
+
+ self.chunked = False
+ tr_enc = self.headers.get("transfer-encoding", "").lower()
+ # Don't incur the penalty of creating a list and then discarding it
+ encodings = (enc.strip() for enc in tr_enc.split(","))
+ if "chunked" in encodings:
+ self.chunked = True
+
+ self._decoder: typing.Optional[ContentDecoder] = None
+
+ def get_redirect_location(self) -> typing.Optional[typing.Union[bool, str]]:
+ """
+ Should we redirect and where to?
+
+ :returns: Truthy redirect location string if we got a redirect status
+ code and valid location. ``None`` if redirect status and no
+ location. ``False`` if not a redirect status code.
+ """
+ if self.status in self.REDIRECT_STATUSES:
+ return self.headers.get("location")
+ return False
+
+ @property
+ def data(self) -> bytes:
+ raise NotImplementedError()
+
+ @property
+ def url(self) -> str:
+ raise NotImplementedError()
+
+ @property
+ def closed(self) -> bool:
+ raise NotImplementedError()
+
+ @property
+ def connection(self):
+ raise NotImplementedError()
+
+ def stream(
+ self, amt: int = 2 ** 16, decode_content: typing.Optional[bool] = None
+ ) -> typing.Generator[bytes, None, None]:
+ raise NotImplementedError()
+
+ def read(
+ self,
+ amt: typing.Optional[int] = None,
+ decode_content: typing.Optional[bool] = None,
+ cache_content: bool = False,
+ ) -> bytes:
+ raise NotImplementedError()
+
+ def read_chunked(
+ self,
+ amt: typing.Optional[int] = None,
+ decode_content: typing.Optional[bool] = None,
+ ) -> bytes:
+ raise NotImplementedError()
+
+ def release_conn(self) -> None:
+ raise NotImplementedError()
+
+ def drain_conn(self) -> None:
+ raise NotImplementedError()
+
+ def close(self) -> None:
+ raise NotImplementedError()
+
+ def _init_decoder(self) -> None:
+ """
+ Set-up the _decoder attribute if necessary.
+ """
+ # Note: content-encoding value should be case-insensitive, per RFC 7230
+ # Section 3.2
+ content_encoding = self.headers.get("content-encoding", "").lower()
+ if self._decoder is None:
+ if content_encoding in self.CONTENT_DECODERS:
+ self._decoder = _get_decoder(content_encoding)
+ elif "," in content_encoding:
+ encodings = [
+ e.strip()
+ for e in content_encoding.split(",")
+ if e.strip() in self.CONTENT_DECODERS
+ ]
+ if len(encodings):
+ self._decoder = _get_decoder(content_encoding)
+
+ def _decode(self, data: bytes, decode_content: bool, flush_decoder: bool) -> bytes:
+ """
+ Decode the data passed in and potentially flush the decoder.
+ """
+ if not decode_content:
+ return data
+
+ try:
+ if self._decoder:
+ data = self._decoder.decompress(data)
+ except self.DECODER_ERROR_CLASSES as e:
+ content_encoding = self.headers.get("content-encoding", "").lower()
+ raise DecodeError(
+ "Received response with content-encoding: %s, but "
+ "failed to decode it." % content_encoding,
+ e,
+ )
+ if flush_decoder:
+ data += self._flush_decoder()
+
+ return data
+
+ def _flush_decoder(self) -> bytes:
+ """
+ Flushes the decoder. Should only be called if the decoder is actually
+ being used.
+ """
+ if self._decoder:
+ return self._decoder.decompress(b"") + self._decoder.flush()
+ return b""
+
+ # Compatibility methods for `io` module
+ def readable(self) -> bool:
+ return True
+
+ def readinto(self, b: bytearray) -> int:
+ temp = self.read(len(b))
+ if len(temp) == 0:
+ return 0
+ else:
+ b[: len(temp)] = temp
+ return len(temp)
+
+ # Compatibility methods for http.client.HTTPResponse
+ def getheaders(self) -> typing.List[typing.Tuple[str, str]]:
+ return list(self.headers.items())
+
+ def getheader(
+ self, name: str, default: typing.Optional[str] = None
+ ) -> typing.Optional[str]:
+ return self.headers.get(name, default)
+
+ # Compatibility method for http.cookiejar
+ def info(self):
+ return self.headers
+
+ def geturl(self):
+ return self.url
+
+
+class HTTPResponse(BaseHTTPResponse):
"""
HTTP Response container.
@@ -184,11 +366,6 @@ class is also compatible with the Python standard library's :mod:`io`
value of Content-Length header, if present. Otherwise, raise error.
"""
- CONTENT_DECODERS = ["gzip", "deflate"]
- if brotli is not None:
- CONTENT_DECODERS += ["br"]
- REDIRECT_STATUSES = [301, 302, 303, 307, 308]
-
def __init__(
self,
body="",
@@ -208,20 +385,18 @@ def __init__(
request_url=None,
auto_close=True,
):
+ super().__init__(
+ headers=headers,
+ status=status,
+ version=version,
+ reason=reason,
+ decode_content=decode_content,
+ )
- if isinstance(headers, HTTPHeaderDict):
- self.headers = headers
- else:
- self.headers = HTTPHeaderDict(headers)
- self.status = status
- self.version = version
- self.reason = reason
- self.decode_content = decode_content
self.retries = retries
self.enforce_content_length = enforce_content_length
self.auto_close = auto_close
- self._decoder = None
self._body = None
self._fp = None
self._original_response = original_response
@@ -239,13 +414,7 @@ def __init__(
self._fp = body
# Are we using the chunked-style of transfer encoding?
- self.chunked = False
self.chunk_left = None
- tr_enc = self.headers.get("transfer-encoding", "").lower()
- # Don't incur the penalty of creating a list and then discarding it
- encodings = (enc.strip() for enc in tr_enc.split(","))
- if "chunked" in encodings:
- self.chunked = True
# Determine length of response
self.length_remaining = self._init_length(request_method)
@@ -254,19 +423,6 @@ def __init__(
if preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
- def get_redirect_location(self):
- """
- Should we redirect and where to?
-
- :returns: Truthy redirect location string if we got a redirect status
- code and valid location. ``None`` if redirect status and no
- location. ``False`` if not a redirect status code.
- """
- if self.status in self.REDIRECT_STATUSES:
- return self.headers.get("location")
-
- return False
-
def release_conn(self):
if not self._pool or not self._connection:
return
@@ -361,62 +517,6 @@ def _init_length(self, request_method):
return length
- def _init_decoder(self):
- """
- Set-up the _decoder attribute if necessary.
- """
- # Note: content-encoding value should be case-insensitive, per RFC 7230
- # Section 3.2
- content_encoding = self.headers.get("content-encoding", "").lower()
- if self._decoder is None:
- if content_encoding in self.CONTENT_DECODERS:
- self._decoder = _get_decoder(content_encoding)
- elif "," in content_encoding:
- encodings = [
- e.strip()
- for e in content_encoding.split(",")
- if e.strip() in self.CONTENT_DECODERS
- ]
- if len(encodings):
- self._decoder = _get_decoder(content_encoding)
-
- DECODER_ERROR_CLASSES = (IOError, zlib.error)
- if brotli is not None:
- DECODER_ERROR_CLASSES += (brotli.error,)
-
- def _decode(self, data, decode_content, flush_decoder):
- """
- Decode the data passed in and potentially flush the decoder.
- """
- if not decode_content:
- return data
-
- try:
- if self._decoder:
- data = self._decoder.decompress(data)
- except self.DECODER_ERROR_CLASSES as e:
- content_encoding = self.headers.get("content-encoding", "").lower()
- raise DecodeError(
- "Received response with content-encoding: %s, but "
- "failed to decode it." % content_encoding,
- e,
- )
- if flush_decoder:
- data += self._flush_decoder()
-
- return data
-
- def _flush_decoder(self):
- """
- Flushes the decoder. Should only be called if the decoder is actually
- being used.
- """
- if self._decoder:
- buf = self._decoder.decompress(b"")
- return buf + self._decoder.flush()
-
- return b""
-
@contextmanager
def _error_catcher(self):
"""
@@ -597,17 +697,6 @@ def from_httplib(ResponseCls, r, **response_kw):
)
return resp
- # Backwards-compatibility methods for http.client.HTTPResponse
- def getheaders(self):
- return self.headers
-
- def getheader(self, name, default=None):
- return self.headers.get(name, default)
-
- # Backwards compatibility for http.cookiejar
- def info(self):
- return self.headers
-
# Overrides from io.IOBase
def close(self):
if not self.closed:
@@ -651,19 +740,6 @@ def flush(self):
):
return self._fp.flush()
- def readable(self):
- # This method is required for `io` module compatibility.
- return True
-
- def readinto(self, b):
- # This method is required for `io` module compatibility.
- temp = self.read(len(b))
- if len(temp) == 0:
- return 0
- else:
- b[: len(temp)] = temp
- return len(temp)
-
def supports_chunked_reads(self):
"""
Checks if the underlying file-like object looks like a
@@ -779,7 +855,8 @@ def read_chunked(self, amt=None, decode_content=None):
if self._original_response:
self._original_response.close()
- def geturl(self):
+ @property
+ def url(self) -> str:
"""
Returns the URL that was the source of this response.
If the request that generated this response redirected, this method
| diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -55,7 +55,7 @@ class TestLegacyResponse:
def test_getheaders(self):
headers = {"host": "example.com"}
r = HTTPResponse(headers=headers)
- assert r.getheaders() == headers
+ assert r.getheaders() == [("host", "example.com")]
def test_getheader(self):
headers = {"host": "example.com"}
| HTTPResponse.getheaders() is not backwards compatible
The `getheaders()` method of the `HTTPResponse` class returns the dict-like `HTTPHeaderDict`, while the httplib version of that method returns a list of (header, value) tuples. This means that code iterating over the response as in `for header, value in response.getheaders()` will not work with urllib3 response objects unless `.items()` is called on the returned `HTTPHeaderDict` object. A comment near the `getheaders()` method indicates it's supposed to be backwards compatible.
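A short illustration of the mismatch (a minimal sketch exercising `HTTPHeaderDict` directly; the import path is urllib3's internal `_collections` module):

```python
from urllib3._collections import HTTPHeaderDict

headers = HTTPHeaderDict({"Host": "example.com", "Accept": "text/html"})

# httplib's getheaders() returns [(name, value), ...], so callers expect
# tuple unpacking in a for-loop to work:
for name, value in headers.items():
    print(name, value)

# Iterating the dict-like HTTPHeaderDict itself only yields key strings,
# which is why `for name, value in response.getheaders()` breaks when
# getheaders() returns the mapping instead of a list of pairs:
for key in headers:
    print(key)  # "Host", "Accept" -- values require an explicit .items()
```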
| You're right, this comment and interface aren't correct. If we were to change this behavior, I'm not sure how many people would be impacted, but at a minimum that comment either needs to be removed or the interface fixed.
I think the safest bet is to just remove the comment about compatibility, but it still feels kinda bad to pretend to be the same as an `HTTPResponse` object. Thoughts from other @urllib3/maintainers? | 2020-11-23T04:19:41Z | [] | [] |
urllib3/urllib3 | 2,086 | urllib3__urllib3-2086 | [
"2058"
] | 494213c96915c6e420d6cce9c59fa99a6b621f33 | diff --git a/src/urllib3/util/retry.py b/src/urllib3/util/retry.py
--- a/src/urllib3/util/retry.py
+++ b/src/urllib3/util/retry.py
@@ -2,7 +2,6 @@
import logging
import re
import time
-import warnings
from collections import namedtuple
from itertools import takewhile
@@ -26,49 +25,7 @@
)
-# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
-_Default = object()
-
-
-class _RetryMeta(type):
- @property
- def DEFAULT_METHOD_WHITELIST(cls):
- warnings.warn(
- "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
- "will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead",
- DeprecationWarning,
- )
- return cls.DEFAULT_ALLOWED_METHODS
-
- @DEFAULT_METHOD_WHITELIST.setter
- def DEFAULT_METHOD_WHITELIST(cls, value):
- warnings.warn(
- "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
- "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
- DeprecationWarning,
- )
- cls.DEFAULT_ALLOWED_METHODS = value
-
- @property
- def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
- warnings.warn(
- "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
- "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
- DeprecationWarning,
- )
- return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
-
- @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
- def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
- warnings.warn(
- "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
- "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
- DeprecationWarning,
- )
- cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
-
-
-class Retry(metaclass=_RetryMeta):
+class Retry:
"""Retry configuration.
Each retry attempt will create a new Retry object with updated values, so
@@ -157,11 +114,6 @@ class Retry(metaclass=_RetryMeta):
Set to a ``False`` value to retry on any verb.
- .. warning::
-
- Previously this parameter was named ``method_whitelist``, that
- usage is deprecated in v1.26.0 and will be removed in v2.0.
-
:param iterable status_forcelist:
A set of integer HTTP status codes that we should force a retry on.
A retry is initiated if the request method is in ``allowed_methods``
@@ -227,36 +179,15 @@ def __init__(
redirect=None,
status=None,
other=None,
- allowed_methods=_Default,
+ allowed_methods=DEFAULT_ALLOWED_METHODS,
status_forcelist=None,
backoff_factor=0,
raise_on_redirect=True,
raise_on_status=True,
history=None,
respect_retry_after_header=True,
- remove_headers_on_redirect=_Default,
- # TODO: Deprecated, remove in v2.0
- method_whitelist=_Default,
+ remove_headers_on_redirect=DEFAULT_REMOVE_HEADERS_ON_REDIRECT,
):
-
- if method_whitelist is not _Default:
- if allowed_methods is not _Default:
- raise ValueError(
- "Using both 'allowed_methods' and "
- "'method_whitelist' together is not allowed. "
- "Instead only use 'allowed_methods'"
- )
- warnings.warn(
- "Using 'method_whitelist' with Retry is deprecated and "
- "will be removed in v2.0. Use 'allowed_methods' instead",
- DeprecationWarning,
- )
- allowed_methods = method_whitelist
- if allowed_methods is _Default:
- allowed_methods = self.DEFAULT_ALLOWED_METHODS
- if remove_headers_on_redirect is _Default:
- remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
-
self.total = total
self.connect = connect
self.read = read
@@ -287,6 +218,7 @@ def new(self, **kw):
redirect=self.redirect,
status=self.status,
other=self.other,
+ allowed_methods=self.allowed_methods,
status_forcelist=self.status_forcelist,
backoff_factor=self.backoff_factor,
raise_on_redirect=self.raise_on_redirect,
@@ -296,22 +228,6 @@ def new(self, **kw):
respect_retry_after_header=self.respect_retry_after_header,
)
- # TODO: If already given in **kw we use what's given to us
- # If not given we need to figure out what to pass. We decide
- # based on whether our class has the 'method_whitelist' property
- # and if so we pass the deprecated 'method_whitelist' otherwise
- # we use 'allowed_methods'. Remove in v2.0
- if "method_whitelist" not in kw and "allowed_methods" not in kw:
- if "method_whitelist" in self.__dict__:
- warnings.warn(
- "Using 'method_whitelist' with Retry is deprecated and "
- "will be removed in v2.0. Use 'allowed_methods' instead",
- DeprecationWarning,
- )
- params["method_whitelist"] = self.allowed_methods
- else:
- params["allowed_methods"] = self.allowed_methods
-
params.update(kw)
return type(self)(**params)
@@ -421,19 +337,7 @@ def _is_method_retryable(self, method):
"""Checks if a given HTTP method should be retried upon, depending if
it is included in the allowed_methods
"""
- # TODO: For now favor if the Retry implementation sets its own method_whitelist
- # property outside of our constructor to avoid breaking custom implementations.
- if "method_whitelist" in self.__dict__:
- warnings.warn(
- "Using 'method_whitelist' with Retry is deprecated and "
- "will be removed in v2.0. Use 'allowed_methods' instead",
- DeprecationWarning,
- )
- allowed_methods = self.method_whitelist
- else:
- allowed_methods = self.allowed_methods
-
- if allowed_methods and method.upper() not in allowed_methods:
+ if self.allowed_methods and method.upper() not in self.allowed_methods:
return False
return True
@@ -573,20 +477,6 @@ def __repr__(self):
f"read={self.read}, redirect={self.redirect}, status={self.status})"
)
- def __getattr__(self, item):
- if item == "method_whitelist":
- # TODO: Remove this deprecated alias in v2.0
- warnings.warn(
- "Using 'method_whitelist' with Retry is deprecated and "
- "will be removed in v2.0. Use 'allowed_methods' instead",
- DeprecationWarning,
- )
- return self.allowed_methods
- try:
- return getattr(super(), item)
- except AttributeError:
- return getattr(Retry, item)
-
# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
| diff --git a/test/test_retry.py b/test/test_retry.py
--- a/test/test_retry.py
+++ b/test/test_retry.py
@@ -1,4 +1,3 @@
-import warnings
from unittest import mock
import pytest
@@ -15,13 +14,6 @@
from urllib3.util.retry import RequestHistory, Retry
[email protected](scope="function", autouse=True)
-def no_retry_deprecations():
- with warnings.catch_warnings(record=True) as w:
- yield
- assert len([str(x.message) for x in w if "Retry" in str(x.message)]) == 0
-
-
class TestRetry:
def test_string(self):
""" Retry string representation looks the way we expect """
@@ -282,7 +274,7 @@ def test_history(self):
)
assert retry.history == history
- def test_retry_method_not_in_whitelist(self):
+ def test_retry_method_not_allowed(self):
error = ReadTimeoutError(None, "/", "read timed out")
retry = Retry()
with pytest.raises(ReadTimeoutError):
diff --git a/test/test_retry_deprecated.py b/test/test_retry_deprecated.py
deleted file mode 100644
--- a/test/test_retry_deprecated.py
+++ /dev/null
@@ -1,469 +0,0 @@
-# This is a copy-paste of test_retry.py with extra asserts about deprecated options. It will be removed for v2.
-import warnings
-from unittest import mock
-
-import pytest
-
-from urllib3.exceptions import (
- ConnectTimeoutError,
- InvalidHeader,
- MaxRetryError,
- ReadTimeoutError,
- ResponseError,
- SSLError,
-)
-from urllib3.response import HTTPResponse
-from urllib3.util.retry import RequestHistory, Retry
-
-
-# TODO: Remove this entire file once deprecated Retry options are removed in v2.
[email protected](scope="function")
-def expect_retry_deprecation():
- with warnings.catch_warnings(record=True) as w:
- yield
- assert len([str(x.message) for x in w if "Retry" in str(x.message)]) > 0
-
-
-class TestRetry:
- def test_string(self):
- """ Retry string representation looks the way we expect """
- retry = Retry()
- assert (
- str(retry)
- == "Retry(total=10, connect=None, read=None, redirect=None, status=None)"
- )
- for _ in range(3):
- retry = retry.increment(method="GET")
- assert (
- str(retry)
- == "Retry(total=7, connect=None, read=None, redirect=None, status=None)"
- )
-
- def test_retry_both_specified(self):
- """Total can win if it's lower than the connect value"""
- error = ConnectTimeoutError()
- retry = Retry(connect=3, total=2)
- retry = retry.increment(error=error)
- retry = retry.increment(error=error)
- with pytest.raises(MaxRetryError) as e:
- retry.increment(error=error)
- assert e.value.reason == error
-
- def test_retry_higher_total_loses(self):
- """ A lower connect timeout than the total is honored """
- error = ConnectTimeoutError()
- retry = Retry(connect=2, total=3)
- retry = retry.increment(error=error)
- retry = retry.increment(error=error)
- with pytest.raises(MaxRetryError):
- retry.increment(error=error)
-
- def test_retry_higher_total_loses_vs_read(self):
- """ A lower read timeout than the total is honored """
- error = ReadTimeoutError(None, "/", "read timed out")
- retry = Retry(read=2, total=3)
- retry = retry.increment(method="GET", error=error)
- retry = retry.increment(method="GET", error=error)
- with pytest.raises(MaxRetryError):
- retry.increment(method="GET", error=error)
-
- def test_retry_total_none(self):
- """ if Total is none, connect error should take precedence """
- error = ConnectTimeoutError()
- retry = Retry(connect=2, total=None)
- retry = retry.increment(error=error)
- retry = retry.increment(error=error)
- with pytest.raises(MaxRetryError) as e:
- retry.increment(error=error)
- assert e.value.reason == error
-
- error = ReadTimeoutError(None, "/", "read timed out")
- retry = Retry(connect=2, total=None)
- retry = retry.increment(method="GET", error=error)
- retry = retry.increment(method="GET", error=error)
- retry = retry.increment(method="GET", error=error)
- assert not retry.is_exhausted()
-
- def test_retry_default(self):
- """ If no value is specified, should retry connects 3 times """
- retry = Retry()
- assert retry.total == 10
- assert retry.connect is None
- assert retry.read is None
- assert retry.redirect is None
- assert retry.other is None
-
- error = ConnectTimeoutError()
- retry = Retry(connect=1)
- retry = retry.increment(error=error)
- with pytest.raises(MaxRetryError):
- retry.increment(error=error)
-
- retry = Retry(connect=1)
- retry = retry.increment(error=error)
- assert not retry.is_exhausted()
-
- assert Retry(0).raise_on_redirect
- assert not Retry(False).raise_on_redirect
-
- def test_retry_other(self):
- """ If an unexpected error is raised, should retry other times """
- other_error = SSLError()
- retry = Retry(connect=1)
- retry = retry.increment(error=other_error)
- retry = retry.increment(error=other_error)
- assert not retry.is_exhausted()
-
- retry = Retry(other=1)
- retry = retry.increment(error=other_error)
- with pytest.raises(MaxRetryError) as e:
- retry.increment(error=other_error)
- assert e.value.reason == other_error
-
- def test_retry_read_zero(self):
- """ No second chances on read timeouts, by default """
- error = ReadTimeoutError(None, "/", "read timed out")
- retry = Retry(read=0)
- with pytest.raises(MaxRetryError) as e:
- retry.increment(method="GET", error=error)
- assert e.value.reason == error
-
- def test_status_counter(self):
- resp = HTTPResponse(status=400)
- retry = Retry(status=2)
- retry = retry.increment(response=resp)
- retry = retry.increment(response=resp)
- with pytest.raises(MaxRetryError) as e:
- retry.increment(response=resp)
- assert str(e.value.reason) == ResponseError.SPECIFIC_ERROR.format(
- status_code=400
- )
-
- def test_backoff(self):
- """ Backoff is computed correctly """
- max_backoff = Retry.BACKOFF_MAX
-
- retry = Retry(total=100, backoff_factor=0.2)
- assert retry.get_backoff_time() == 0 # First request
-
- retry = retry.increment(method="GET")
- assert retry.get_backoff_time() == 0 # First retry
-
- retry = retry.increment(method="GET")
- assert retry.backoff_factor == 0.2
- assert retry.total == 98
- assert retry.get_backoff_time() == 0.4 # Start backoff
-
- retry = retry.increment(method="GET")
- assert retry.get_backoff_time() == 0.8
-
- retry = retry.increment(method="GET")
- assert retry.get_backoff_time() == 1.6
-
- for _ in range(10):
- retry = retry.increment(method="GET")
-
- assert retry.get_backoff_time() == max_backoff
-
- def test_zero_backoff(self):
- retry = Retry()
- assert retry.get_backoff_time() == 0
- retry = retry.increment(method="GET")
- retry = retry.increment(method="GET")
- assert retry.get_backoff_time() == 0
-
- def test_backoff_reset_after_redirect(self):
- retry = Retry(total=100, redirect=5, backoff_factor=0.2)
- retry = retry.increment(method="GET")
- retry = retry.increment(method="GET")
- assert retry.get_backoff_time() == 0.4
- redirect_response = HTTPResponse(status=302, headers={"location": "test"})
- retry = retry.increment(method="GET", response=redirect_response)
- assert retry.get_backoff_time() == 0
- retry = retry.increment(method="GET")
- retry = retry.increment(method="GET")
- assert retry.get_backoff_time() == 0.4
-
- def test_sleep(self):
- # sleep a very small amount of time so our code coverage is happy
- retry = Retry(backoff_factor=0.0001)
- retry = retry.increment(method="GET")
- retry = retry.increment(method="GET")
- retry.sleep()
-
- def test_status_forcelist(self):
- retry = Retry(status_forcelist=range(500, 600))
- assert not retry.is_retry("GET", status_code=200)
- assert not retry.is_retry("GET", status_code=400)
- assert retry.is_retry("GET", status_code=500)
-
- retry = Retry(total=1, status_forcelist=[418])
- assert not retry.is_retry("GET", status_code=400)
- assert retry.is_retry("GET", status_code=418)
-
- # String status codes are not matched.
- retry = Retry(total=1, status_forcelist=["418"])
- assert not retry.is_retry("GET", status_code=418)
-
- def test_method_whitelist_with_status_forcelist(self, expect_retry_deprecation):
- # Falsey method_whitelist means to retry on any method.
- retry = Retry(status_forcelist=[500], method_whitelist=None)
- assert retry.is_retry("GET", status_code=500)
- assert retry.is_retry("POST", status_code=500)
-
- # Criteria of method_whitelist and status_forcelist are ANDed.
- retry = Retry(status_forcelist=[500], method_whitelist=["POST"])
- assert not retry.is_retry("GET", status_code=500)
- assert retry.is_retry("POST", status_code=500)
-
- def test_exhausted(self):
- assert not Retry(0).is_exhausted()
- assert Retry(-1).is_exhausted()
- assert Retry(1).increment(method="GET").total == 0
-
- @pytest.mark.parametrize("total", [-1, 0])
- def test_disabled(self, total):
- with pytest.raises(MaxRetryError):
- Retry(total).increment(method="GET")
-
- def test_error_message(self):
- retry = Retry(total=0)
- with pytest.raises(MaxRetryError) as e:
- retry = retry.increment(
- method="GET", error=ReadTimeoutError(None, "/", "read timed out")
- )
- assert "Caused by redirect" not in str(e.value)
- assert str(e.value.reason) == "None: read timed out"
-
- retry = Retry(total=1)
- with pytest.raises(MaxRetryError) as e:
- retry = retry.increment("POST", "/")
- retry = retry.increment("POST", "/")
- assert "Caused by redirect" not in str(e.value)
- assert isinstance(e.value.reason, ResponseError)
- assert str(e.value.reason) == ResponseError.GENERIC_ERROR
-
- retry = Retry(total=1)
- response = HTTPResponse(status=500)
- with pytest.raises(MaxRetryError) as e:
- retry = retry.increment("POST", "/", response=response)
- retry = retry.increment("POST", "/", response=response)
- assert "Caused by redirect" not in str(e.value)
- msg = ResponseError.SPECIFIC_ERROR.format(status_code=500)
- assert str(e.value.reason) == msg
-
- retry = Retry(connect=1)
- with pytest.raises(MaxRetryError) as e:
- retry = retry.increment(error=ConnectTimeoutError("conntimeout"))
- retry = retry.increment(error=ConnectTimeoutError("conntimeout"))
- assert "Caused by redirect" not in str(e.value)
- assert str(e.value.reason) == "conntimeout"
-
- def test_history(self, expect_retry_deprecation):
- retry = Retry(total=10, method_whitelist=frozenset(["GET", "POST"]))
- assert retry.history == tuple()
- connection_error = ConnectTimeoutError("conntimeout")
- retry = retry.increment("GET", "/test1", None, connection_error)
- history = (RequestHistory("GET", "/test1", connection_error, None, None),)
- assert retry.history == history
-
- read_error = ReadTimeoutError(None, "/test2", "read timed out")
- retry = retry.increment("POST", "/test2", None, read_error)
- history = (
- RequestHistory("GET", "/test1", connection_error, None, None),
- RequestHistory("POST", "/test2", read_error, None, None),
- )
- assert retry.history == history
-
- response = HTTPResponse(status=500)
- retry = retry.increment("GET", "/test3", response, None)
- history = (
- RequestHistory("GET", "/test1", connection_error, None, None),
- RequestHistory("POST", "/test2", read_error, None, None),
- RequestHistory("GET", "/test3", None, 500, None),
- )
- assert retry.history == history
-
- def test_retry_method_not_in_whitelist(self):
- error = ReadTimeoutError(None, "/", "read timed out")
- retry = Retry()
- with pytest.raises(ReadTimeoutError):
- retry.increment(method="POST", error=error)
-
- def test_retry_default_remove_headers_on_redirect(self):
- retry = Retry()
-
- assert list(retry.remove_headers_on_redirect) == ["authorization"]
-
- def test_retry_set_remove_headers_on_redirect(self):
- retry = Retry(remove_headers_on_redirect=["X-API-Secret"])
-
- assert list(retry.remove_headers_on_redirect) == ["x-api-secret"]
-
- @pytest.mark.parametrize("value", ["-1", "+1", "1.0", "\xb2"]) # \xb2 = ^2
- def test_parse_retry_after_invalid(self, value):
- retry = Retry()
- with pytest.raises(InvalidHeader):
- retry.parse_retry_after(value)
-
- @pytest.mark.parametrize(
- "value, expected", [("0", 0), ("1000", 1000), ("\t42 ", 42)]
- )
- def test_parse_retry_after(self, value, expected):
- retry = Retry()
- assert retry.parse_retry_after(value) == expected
-
- @pytest.mark.parametrize("respect_retry_after_header", [True, False])
- def test_respect_retry_after_header_propagated(self, respect_retry_after_header):
-
- retry = Retry(respect_retry_after_header=respect_retry_after_header)
- new_retry = retry.new()
- assert new_retry.respect_retry_after_header == respect_retry_after_header
-
- @pytest.mark.freeze_time("2019-06-03 11:00:00", tz_offset=0)
- @pytest.mark.parametrize(
- "retry_after_header,respect_retry_after_header,sleep_duration",
- [
- ("3600", True, 3600),
- ("3600", False, None),
- # Will sleep due to header is 1 hour in future
- ("Mon, 3 Jun 2019 12:00:00 UTC", True, 3600),
- # Won't sleep due to not respecting header
- ("Mon, 3 Jun 2019 12:00:00 UTC", False, None),
- # Won't sleep due to current time reached
- ("Mon, 3 Jun 2019 11:00:00 UTC", True, None),
- # Won't sleep due to current time reached + not respecting header
- ("Mon, 3 Jun 2019 11:00:00 UTC", False, None),
- # Handle all the formats in RFC 7231 Section 7.1.1.1
- ("Mon, 03 Jun 2019 11:30:12 GMT", True, 1812),
- ("Monday, 03-Jun-19 11:30:12 GMT", True, 1812),
- # Assume that datetimes without a timezone are in UTC per RFC 7231
- ("Mon Jun 3 11:30:12 2019", True, 1812),
- ],
- )
- @pytest.mark.parametrize(
- "stub_timezone",
- [
- "UTC",
- "Asia/Jerusalem",
- None,
- ],
- indirect=True,
- )
- @pytest.mark.usefixtures("stub_timezone")
- def test_respect_retry_after_header_sleep(
- self, retry_after_header, respect_retry_after_header, sleep_duration
- ):
- retry = Retry(respect_retry_after_header=respect_retry_after_header)
-
- with mock.patch("time.sleep") as sleep_mock:
- # for the default behavior, it must be in RETRY_AFTER_STATUS_CODES
- response = HTTPResponse(
- status=503, headers={"Retry-After": retry_after_header}
- )
-
- retry.sleep(response)
-
- # The expected behavior is that we'll only sleep if respecting
- # this header (since we won't have any backoff sleep attempts)
- if respect_retry_after_header and sleep_duration is not None:
- sleep_mock.assert_called_with(sleep_duration)
- else:
- sleep_mock.assert_not_called()
-
-
-class TestRetryDeprecations:
- def test_cls_get_default_method_whitelist(self, expect_retry_deprecation):
- assert Retry.DEFAULT_ALLOWED_METHODS == Retry.DEFAULT_METHOD_WHITELIST
-
- def test_cls_get_default_redirect_headers_blacklist(self, expect_retry_deprecation):
- assert (
- Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
- == Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST
- )
-
- def test_cls_set_default_method_whitelist(self, expect_retry_deprecation):
- old_setting = Retry.DEFAULT_METHOD_WHITELIST
- try:
- Retry.DEFAULT_METHOD_WHITELIST = {"GET"}
- retry = Retry()
- assert retry.DEFAULT_ALLOWED_METHODS == {"GET"}
- assert retry.DEFAULT_METHOD_WHITELIST == {"GET"}
- assert retry.allowed_methods == {"GET"}
- assert retry.method_whitelist == {"GET"}
-
- # Test that the default can be overridden both ways
- retry = Retry(allowed_methods={"GET", "POST"})
- assert retry.DEFAULT_ALLOWED_METHODS == {"GET"}
- assert retry.DEFAULT_METHOD_WHITELIST == {"GET"}
- assert retry.allowed_methods == {"GET", "POST"}
- assert retry.method_whitelist == {"GET", "POST"}
-
- retry = Retry(method_whitelist={"POST"})
- assert retry.DEFAULT_ALLOWED_METHODS == {"GET"}
- assert retry.DEFAULT_METHOD_WHITELIST == {"GET"}
- assert retry.allowed_methods == {"POST"}
- assert retry.method_whitelist == {"POST"}
- finally:
- Retry.DEFAULT_METHOD_WHITELIST = old_setting
- assert Retry.DEFAULT_ALLOWED_METHODS == old_setting
-
- def test_cls_set_default_redirect_headers_blacklist(self, expect_retry_deprecation):
- old_setting = Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST
- try:
- Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST = {"test"}
- retry = Retry()
- assert retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT == {"test"}
- assert retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST == {"test"}
- assert retry.remove_headers_on_redirect == {"test"}
- assert retry.remove_headers_on_redirect == {"test"}
-
- retry = Retry(remove_headers_on_redirect={"test2"})
- assert retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT == {"test"}
- assert retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST == {"test"}
- assert retry.remove_headers_on_redirect == {"test2"}
- assert retry.remove_headers_on_redirect == {"test2"}
- finally:
- Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST = old_setting
- assert Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST == old_setting
-
- @pytest.mark.parametrize(
- "options", [(None, None), ({"GET"}, None), (None, {"GET"}), ({"GET"}, {"GET"})]
- )
- def test_retry_allowed_methods_and_method_whitelist_error(self, options):
- with pytest.raises(ValueError) as e:
- Retry(allowed_methods=options[0], method_whitelist=options[1])
- assert str(e.value) == (
- "Using both 'allowed_methods' and 'method_whitelist' together "
- "is not allowed. Instead only use 'allowed_methods'"
- )
-
- def test_retry_subclass_that_sets_method_whitelist(self, expect_retry_deprecation):
- class SubclassRetry(Retry):
- def __init__(self, **kwargs):
- if "allowed_methods" in kwargs:
- raise AssertionError(
- "This subclass likely doesn't use 'allowed_methods'"
- )
-
- super().__init__(**kwargs)
-
- # Since we're setting 'method_whiteist' we get fallbacks
- # within Retry.new() and Retry._is_method_retryable()
- # to use 'method_whitelist' instead of 'allowed_methods'
- self.method_whitelist = self.method_whitelist | {"POST"}
-
- retry = SubclassRetry()
- assert retry.method_whitelist == Retry.DEFAULT_ALLOWED_METHODS | {"POST"}
- assert retry.new(read=0).method_whitelist == retry.method_whitelist
- assert retry._is_method_retryable("POST")
- assert not retry._is_method_retryable("CONNECT")
-
- assert retry.new(method_whitelist={"GET"}).method_whitelist == {"GET", "POST"}
-
- # urllib3 doesn't do this during normal operation
- # so we don't want users passing in 'allowed_methods'
- # when their subclass doesn't support the option yet.
- with pytest.raises(AssertionError) as e:
- retry.new(allowed_methods={"GET"})
- assert str(e.value) == "This subclass likely doesn't use 'allowed_methods'"
diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -1022,7 +1022,7 @@ def test_disabled_retry(self):
pool.request("GET", "/test", retries=False)
def test_read_retries(self):
- """ Should retry for status codes in the whitelist """
+ """ Should retry for status codes in the forcelist"""
with HTTPConnectionPool(self.host, self.port) as pool:
retry = Retry(read=1, status_forcelist=[418])
resp = pool.request(
@@ -1034,7 +1034,7 @@ def test_read_retries(self):
assert resp.status == 200
def test_read_total_retries(self):
- """ HTTP response w/ status code in the whitelist should be retried """
+ """ HTTP response w/ status code in the forcelist should be retried """
with HTTPConnectionPool(self.host, self.port) as pool:
headers = {"test-name": "test_read_total_retries"}
retry = Retry(total=1, status_forcelist=[418])
@@ -1043,35 +1043,35 @@ def test_read_total_retries(self):
)
assert resp.status == 200
- def test_retries_wrong_whitelist(self):
- """HTTP response w/ status code not in whitelist shouldn't be retried"""
+ def test_retries_wrong_forcelist(self):
+ """HTTP response w/ status code not in forcelist shouldn't be retried"""
with HTTPConnectionPool(self.host, self.port) as pool:
retry = Retry(total=1, status_forcelist=[202])
resp = pool.request(
"GET",
"/successful_retry",
- headers={"test-name": "test_wrong_whitelist"},
+ headers={"test-name": "test_wrong_forcelist"},
retries=retry,
)
assert resp.status == 418
- def test_default_method_whitelist_retried(self):
- """ urllib3 should retry methods in the default method whitelist """
+ def test_default_method_forcelist_retried(self):
+ """ urllib3 should retry methods in the default method forcelist"""
with HTTPConnectionPool(self.host, self.port) as pool:
retry = Retry(total=1, status_forcelist=[418])
resp = pool.request(
"OPTIONS",
"/successful_retry",
- headers={"test-name": "test_default_whitelist"},
+ headers={"test-name": "test_default_forcelist"},
retries=retry,
)
assert resp.status == 200
def test_retries_wrong_method_list(self):
- """Method not in our whitelist should not be retried, even if code matches"""
+ """Method not in our allowed list should not be retried, even if code matches"""
with HTTPConnectionPool(self.host, self.port) as pool:
- headers = {"test-name": "test_wrong_method_whitelist"}
- retry = Retry(total=1, status_forcelist=[418], method_whitelist=["POST"])
+ headers = {"test-name": "test_wrong_allowed_method"}
+ retry = Retry(total=1, status_forcelist=[418], allowed_methods=["POST"])
resp = pool.request(
"GET", "/successful_retry", headers=headers, retries=retry
)
| [v2] Remove deprecated Retry options
Remove the options deprecated in https://github.com/urllib3/urllib3/pull/2000
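A minimal before/after sketch of the rename, using the option names exercised in the test diff above; `Retry` here is urllib3's public retry class:
```python
from urllib3.util.retry import Retry

# Deprecated spelling removed by this change:
#   Retry(total=1, method_whitelist=["POST"], status_forcelist=[418])
# Replacement spelling:
retry = Retry(total=1, allowed_methods=["POST"], status_forcelist=[418])
```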
| 2020-11-24T07:30:19Z | [] | [] |
|
urllib3/urllib3 | 2,095 | urllib3__urllib3-2095 | [
"2093"
] | 3a815144ba9822b2981d3300833ae553d6d22a78 | diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -19,21 +19,6 @@
HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
-def _const_compare_digest_backport(a, b):
- """
- Compare two digests of equal length in constant time.
-
- The digests must be of type str/bytes.
- Returns True if the digests match, and False otherwise.
- """
- result = abs(len(a) - len(b))
- for left, right in zip(bytearray(a), bytearray(b)):
- result |= left ^ right
- return result == 0
-
-
-_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport)
-
try: # Do we have ssl at all?
import ssl
from ssl import (
@@ -115,7 +100,7 @@ def assert_fingerprint(cert, fingerprint):
cert_digest = hashfunc(cert).digest()
- if not _const_compare_digest(cert_digest, fingerprint_bytes):
+ if not hmac.compare_digest(cert_digest, fingerprint_bytes):
raise SSLError(
f'Fingerprints did not match. Expected "{fingerprint}", got "{hexlify(cert_digest)}".'
)
| diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -1,4 +1,3 @@
-import hashlib
import io
import logging
import socket
@@ -24,12 +23,7 @@
from urllib3.util.proxy import connection_requires_http_tunnel, create_proxy_ssl_context
from urllib3.util.request import _FAILEDTELL, make_headers, rewind_body
from urllib3.util.response import assert_header_parsing
-from urllib3.util.ssl_ import (
- _const_compare_digest_backport,
- resolve_cert_reqs,
- resolve_ssl_version,
- ssl_wrap_socket,
-)
+from urllib3.util.ssl_ import resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket
from urllib3.util.timeout import Timeout
from urllib3.util.url import Url, get_host, parse_url, split_first
from urllib3.util.util import to_bytes, to_str
@@ -669,19 +663,6 @@ class NotReallyAFile:
with pytest.raises(ValueError):
is_fp_closed(NotReallyAFile())
- def test_const_compare_digest_fallback(self):
- target = hashlib.sha256(b"abcdef").digest()
- assert _const_compare_digest_backport(target, target)
-
- prefix = target[:-1]
- assert not _const_compare_digest_backport(target, prefix)
-
- suffix = target + b"0"
- assert not _const_compare_digest_backport(target, suffix)
-
- incorrect = hashlib.sha256(b"xyz").digest()
- assert not _const_compare_digest_backport(target, incorrect)
-
def test_has_ipv6_disabled_on_compile(self):
with patch("socket.has_ipv6", False):
assert not _has_ipv6("::1")
| [v2] Remove _const_compare_digest() in favor of hmac.compare_digest()
Within `urllib3.util.ssl_` we previously needed to backport `hmac.compare_digest()`; this is no longer necessary on Python 3.6+
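A minimal sketch of the stdlib call that replaces the backport; the certificate bytes are a hypothetical placeholder:
```python
import hashlib
import hmac

cert = b"<DER-encoded peer certificate>"  # hypothetical placeholder bytes
fingerprint_bytes = hashlib.sha256(cert).digest()  # the pinned fingerprint
cert_digest = hashlib.sha256(cert).digest()  # digest of the presented cert

# hmac.compare_digest() compares in constant time, which is what the removed
# _const_compare_digest_backport() emulated on Pythons that predate it.
assert hmac.compare_digest(cert_digest, fingerprint_bytes)
```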
| 2020-11-26T12:33:08Z | [] | [] |
|
urllib3/urllib3 | 2,099 | urllib3__urllib3-2099 | [
"1620"
] | 34dc7703be11d33f51077298d5cf7cadd4f6d66e | diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -83,15 +83,6 @@ def unsupported_python2(session):
assert "Unsupported Python version" in process.stderr
[email protected](python=["3"])
-def google_brotli(session):
- # https://pypi.org/project/Brotli/ is the Google version of brotli, so
- # install it separately and don't install our brotli extra (which installs
- # brotlipy).
- session.install("brotli")
- tests_impl(session, extras="socks,secure")
-
-
@nox.session()
def format(session):
"""Run code formatters."""
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -109,7 +109,10 @@
requires=[],
python_requires=">=3.6, <4",
extras_require={
- "brotli": ["brotlipy>=0.6.0"],
+ "brotli": [
+ "brotli>=1.0.9; platform_python_implementation == 'CPython'",
+ "brotlicffi>=0.8.0; platform_python_implementation != 'CPython'",
+ ],
"secure": [
"pyOpenSSL>=0.14",
"cryptography>=1.3.4",
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -7,7 +7,10 @@
from socket import timeout as SocketTimeout
try:
- import brotli
+ try:
+ import brotlicffi as brotli
+ except ImportError:
+ import brotli
except ImportError:
brotli = None
diff --git a/src/urllib3/util/request.py b/src/urllib3/util/request.py
--- a/src/urllib3/util/request.py
+++ b/src/urllib3/util/request.py
@@ -11,7 +11,10 @@
ACCEPT_ENCODING = "gzip,deflate"
try:
- import brotli as _unused_module_brotli # noqa: F401
+ try:
+ import brotlicffi as _unused_module_brotli # noqa: F401
+ except ImportError:
+ import brotli as _unused_module_brotli # noqa: F401
except ImportError:
pass
else:
| diff --git a/test/__init__.py b/test/__init__.py
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -9,7 +9,10 @@
import pytest
try:
- import brotli
+ try:
+ import brotlicffi as brotli
+ except ImportError:
+ import brotli
except ImportError:
brotli = None
| Use Brotli instead of brotlipy by default
brotlipy is stuck at brotli 0.6 and upstream is inactive. Let's switch to the official binding by default, which is up to date.
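A minimal sketch of the fallback import the diff above introduces: prefer `brotlicffi` where it's installed, fall back to the official `brotli` binding, and keep Brotli optional otherwise:
```python
try:
    try:
        import brotlicffi as brotli  # CFFI binding, preferred off CPython
    except ImportError:
        import brotli  # the official Google binding
except ImportError:
    brotli = None  # Brotli support stays optional
```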
Since brotlipy was actually downloaded more at the time we made this decision, I'd actually prefer we upgrade brotlipy with the desired support. :) Would you be up to making that PR on python-hyper/brotlipy?
The current PRs in brotlipy weren't reviewed and the repo has been silent for at least half a year. The most recent release is also two years old. I made a PR before to add shared brotli support, which was merged but never made it into a release.
So... I can work on some updates, but I'm not sure it's going to help or be worth the effort :(
I believe I have merge rights on that repository. I can talk to Cory to see if I can get publish rights as well. Would you mind making the changes to that project so I can review them there?
Going to close this as we'll be sticking with brotlipy for now. | 2020-12-02T02:09:58Z | [] | [] |
urllib3/urllib3 | 2,178 | urllib3__urllib3-2178 | [
"517"
] | 0caac2c0f34a153ba276cbc4ab388a808b422caa | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -30,7 +30,7 @@ class BaseSSLError(BaseException):
SystemTimeWarning,
)
from .packages.ssl_match_hostname import CertificateError, match_hostname
-from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
+from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_
from .util.ssl_ import (
assert_fingerprint,
create_urllib3_context,
@@ -362,6 +362,18 @@ def connect(self):
ssl_version=resolve_ssl_version(self.ssl_version),
cert_reqs=resolve_cert_reqs(self.cert_reqs),
)
+ # In some cases, we want to verify hostnames ourselves
+ if (
+ # `ssl` can't verify fingerprints or alternate hostnames
+ self.assert_fingerprint
+ or self.assert_hostname
+ # We still support OpenSSL 1.0.2, which prevents us from verifying
+ # hostnames easily: https://github.com/pyca/pyopenssl/pull/933
+ or ssl_.IS_PYOPENSSL
+ # Python 3.6 can't disable commonName checks
+ or not hasattr(self.ssl_context, "hostname_checks_common_name")
+ ):
+ self.ssl_context.check_hostname = False
context = self.ssl_context
context.verify_mode = resolve_cert_reqs(self.cert_reqs)
@@ -416,9 +428,6 @@ def connect(self):
and not context.check_hostname
and self.assert_hostname is not False
):
- # While urllib3 attempts to always turn off hostname matching from
- # the TLS library, this cannot always be done. So we check whether
- # the TLS Library still thinks it's matching hostnames.
cert = self.sock.getpeercert()
_match_hostname(cert, self.assert_hostname or server_hostname)
diff --git a/src/urllib3/contrib/pyopenssl.py b/src/urllib3/contrib/pyopenssl.py
--- a/src/urllib3/contrib/pyopenssl.py
+++ b/src/urllib3/contrib/pyopenssl.py
@@ -66,6 +66,7 @@ class UnsupportedExtension(Exception):
from socket import timeout
from .. import util
+from ..util.ssl_ import is_ipaddress
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
@@ -456,10 +457,10 @@ def wrap_socket(
):
cnx = OpenSSL.SSL.Connection(self._ctx, sock)
- if isinstance(server_hostname, str):
- server_hostname = server_hostname.encode("utf-8")
-
- if server_hostname is not None:
+ # If server_hostname is an IP, don't use it for SNI, per RFC6066 Section 3
+ if server_hostname and not is_ipaddress(server_hostname):
+ if isinstance(server_hostname, str):
+ server_hostname = server_hostname.encode("utf-8")
cnx.set_tlsext_host_name(server_hostname)
cnx.set_connect_state()
diff --git a/src/urllib3/contrib/securetransport.py b/src/urllib3/contrib/securetransport.py
--- a/src/urllib3/contrib/securetransport.py
+++ b/src/urllib3/contrib/securetransport.py
@@ -451,6 +451,8 @@ def handshake(
_assert_no_error(result)
# If we have a server hostname, we should set that too.
+ # RFC6066 Section 3 tells us not to use SNI when the host is an IP, but we have
+ # to do it anyway to match server_hostname against the server certificate
if server_hostname:
if not isinstance(server_hostname, bytes):
server_hostname = server_hostname.encode("utf-8")
diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -241,9 +241,10 @@ def create_urllib3_context(
context.post_handshake_auth = True
context.verify_mode = cert_reqs
- # We do our own verification, including fingerprints and alternative
- # hostnames. So disable it here
- context.check_hostname = False
+ # We ask for verification here but it may be disabled in HTTPSConnection.connect
+ context.check_hostname = cert_reqs == ssl.CERT_REQUIRED
+ if hasattr(context, "hostname_checks_common_name"):
+ context.hostname_checks_common_name = False
# Enable logging of TLS session keys via defacto standard environment variable
# 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
@@ -295,9 +296,8 @@ def ssl_wrap_socket(
"""
context = ssl_context
if context is None:
- # Note: This branch of code and all the variables in it are no longer
- # used by urllib3 itself. We should consider deprecating and removing
- # this code.
+ # Note: This branch of code and all the variables in it are only used in tests.
+ # We should consider deprecating and removing this code.
context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
if ca_certs or ca_cert_dir or ca_cert_data:
@@ -328,15 +328,7 @@ def ssl_wrap_socket(
except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols
pass
- # If we detect server_hostname is an IP address then the SNI
- # extension should not be used according to RFC3546 Section 3.1
- use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
- # SecureTransport uses server_hostname in certificate verification.
- send_sni = (use_sni_hostname and HAS_SNI) or (
- IS_SECURETRANSPORT and server_hostname
- )
- # Do not warn the user if server_hostname is an invalid SNI hostname.
- if not HAS_SNI and use_sni_hostname:
+ if not HAS_SNI and server_hostname and not is_ipaddress(server_hostname):
warnings.warn(
"An HTTPS request has been made, but the SNI (Server Name "
"Indication) extension to TLS is not available on this platform. "
@@ -348,7 +340,6 @@ def ssl_wrap_socket(
SNIMissingWarning,
)
- server_hostname = server_hostname if send_sni else None
ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls, server_hostname)
return ssl_sock
| diff --git a/test/test_ssl.py b/test/test_ssl.py
--- a/test/test_ssl.py
+++ b/test/test_ssl.py
@@ -40,30 +40,6 @@ def test_is_ipaddress_true(self, addr):
def test_is_ipaddress_false(self, addr):
assert not ssl_.is_ipaddress(addr)
- @pytest.mark.parametrize(
- ["has_sni", "server_hostname", "uses_sni"],
- [
- (True, "127.0.0.1", False),
- (False, "www.python.org", False),
- (False, "0.0.0.0", False),
- (True, "www.google.com", True),
- (True, None, False),
- (False, None, False),
- ],
- )
- def test_context_sni_with_ip_address(
- self, monkeypatch, has_sni, server_hostname, uses_sni
- ):
- monkeypatch.setattr(ssl_, "HAS_SNI", has_sni)
-
- sock = mock.Mock()
- context = mock.create_autospec(ssl_.SSLContext)
-
- ssl_.ssl_wrap_socket(sock, server_hostname=server_hostname, ssl_context=context)
-
- expected_hostname = server_hostname if uses_sni else None
- context.wrap_socket.assert_called_with(sock, server_hostname=expected_hostname)
-
@pytest.mark.parametrize(
["has_sni", "server_hostname", "should_warn"],
[
diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -880,10 +880,7 @@ def test_ssl_wrap_socket_sni_ip_address_no_warn(self):
"""Test that a warning is not made if server_hostname is an IP address."""
sock = object()
context, warn = self._wrap_socket_and_mock_warn(sock, "8.8.8.8")
- expected_hostname = "8.8.8.8" if util.IS_SECURETRANSPORT else None
- context.wrap_socket.assert_called_once_with(
- sock, server_hostname=expected_hostname
- )
+ context.wrap_socket.assert_called_once_with(sock, server_hostname="8.8.8.8")
warn.assert_not_called()
def test_ssl_wrap_socket_sni_none_no_warn(self):
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -268,7 +268,7 @@ def test_invalid_common_name(self):
"127.0.0.1", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
) as https_pool:
with pytest.raises(MaxRetryError) as e:
- https_pool.request("GET", "/")
+ https_pool.request("GET", "/", retries=0)
assert isinstance(e.value.reason, SSLError)
assert "doesn't match" in str(
e.value.reason
diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -197,7 +197,9 @@ def test_proxy_verified(self):
)
https_fail_pool = http._new_pool("https", "127.0.0.1", self.https_port)
- with pytest.raises(MaxRetryError, match="doesn't match") as e:
+ with pytest.raises(
+ MaxRetryError, match="doesn't match|IP address mismatch"
+ ) as e:
https_fail_pool.request("GET", "/", retries=0)
assert isinstance(e.value.reason, SSLError)
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -1351,7 +1351,10 @@ def socket_handler(listener):
with mock.patch("urllib3.util.ssl_.SSLContext", lambda *_, **__: context):
self._start_server(socket_handler)
with HTTPSConnectionPool(self.host, self.port) as pool:
- with pytest.raises(MaxRetryError):
+ # Without a proper `SSLContext`, this request will fail in some
+ # arbitrary way, but we only want to know if load_default_certs() was
+ # called, which is why we accept any `Exception` here.
+ with pytest.raises(Exception):
pool.request("GET", "/", timeout=SHORT_TIMEOUT)
context.load_default_certs.assert_called_with()
@@ -1395,7 +1398,7 @@ def socket_handler(listener):
self._start_server(socket_handler)
with HTTPSConnectionPool(self.host, self.port, **kwargs) as pool:
- with pytest.raises(MaxRetryError):
+ with pytest.raises(Exception):
pool.request("GET", "/", timeout=SHORT_TIMEOUT)
context.load_default_certs.assert_not_called()
| Lean on SSLContext to do HTTPS hostname verification
In Python 3.4 and 2.7.9 there is a `check_hostname` attribute on `SSLContext` which has the SSLContext instance handle checking the hostname inside of `do_handshake()`. I think it would be good for urllib3 to rely on this where possible instead of doing the check itself. I think this would go well with the other efforts to use `SSLContext` as the "bag of configuration" for TLS stuff.
This can be detected by determining if the `SSLContext` object has a `check_hostname` attribute or not.
There is one downside: this relies on passing the hostname as part of `SSLContext().wrap_socket(server_hostname=)`. Originally this only worked if OpenSSL had SNI enabled. However, Python 3.4.3 and 2.7.9 will accept `server_hostname` even if SNI isn't enabled.
This would mean that the code gets a little ugly until you drop older versions of Python 3; it would look something like this (paraphrased and super simplified to just be the ugly parts):
``` python
import ssl
import sys

# In reality these would use the SSLContext shim that urllib3 has in
# order to work on Python 2.6 too and 2.7 releases earlier than 2.7.9.
if ssl.HAS_SNI or sys.version_info[0] == 2 or sys.version_info[:3] >= (3, 4, 3):
    ctx = SSLContext(ssl.PROTOCOL_SSLv23)
    ctx.check_hostname = True
    sock = ctx.wrap_socket(sock, server_hostname="example.com")
else:
    ctx = SSLContext(ssl.PROTOCOL_SSLv23)
    sock = ctx.wrap_socket(sock)
    # match_hostname() raises on a mismatch rather than returning False.
    ssl.match_hostname(sock.getpeercert(), "example.com")
```
Maybe this is a bad idea? I think it would be great to lay the groundwork for eventually moving the responsibility for checking hostnames into the stdlib; however, you wouldn't actually be able to do that completely without the ugly if statement until 3.4.3 was your minimum supported Python 3 version.
| +1
Currently, it appears that because urllib3 **doesn't** use this, an exception is raised.
I haven't had a chance to isolate this yet, but a `pip install` traceback from [this Stack Overflow question](http://stackoverflow.com/questions/28296476/python-pip-install-requires-server-hostname) using Python 2.7.9 implicates `urllib3`'s use of `wrap_socket()`:
``` python
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/basecommand.py", line 232, in main
status = self.run(options, args)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/commands/install.py", line 339, in run
requirement_set.prepare_files(finder)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/req/req_set.py", line 333, in prepare_files
upgrade=self.upgrade,
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/index.py", line 305, in find_requirement
page = self._get_page(main_index_url, req)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/index.py", line 783, in _get_page
return HTMLPage.get_page(link, req, session=self.session)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/index.py", line 872, in get_page
"Cache-Control": "max-age=600",
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/_vendor/requests/sessions.py", line 473, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/download.py", line 365, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/_vendor/requests/sessions.py", line 461, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/_vendor/requests/sessions.py", line 573, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/_vendor/cachecontrol/adapter.py", line 43, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/_vendor/requests/adapters.py", line 370, in send
timeout=timeout
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/_vendor/requests/packages/urllib3/connectionpool.py", line 518, in urlopen
body=body, headers=headers)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/_vendor/requests/packages/urllib3/connectionpool.py", line 322, in _make_request
self._validate_conn(conn)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/_vendor/requests/packages/urllib3/connectionpool.py", line 727, in _validate_conn
conn.connect()
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/_vendor/requests/packages/urllib3/connection.py", line 238, in connect
ssl_version=resolved_ssl_version)
File "/usr/local/lib/python2.7/site-packages/pip-6.0.7-py2.7.egg/pip/_vendor/requests/packages/urllib3/util/ssl_.py", line 254, in ssl_wrap_socket
return context.wrap_socket(sock)
File "/usr/local/lib/python2.7/ssl.py", line 350, in wrap_socket
_context=self)
File "/usr/local/lib/python2.7/ssl.py", line 537, in __init__
raise ValueError("check_hostname requires server_hostname")
ValueError: check_hostname requires server_hostname
```
Followup: This is pip 6.0.7 (obvious from the traceback), which uses requests 2.5.1 (see pypa/pip@ec51b69cd73125e8bb37029d936c57f29b1a00fa) which in turn uses a urllib3 checkout with SHA @a27758625e4169330fcf965652b1093faf5aaaa2 (see kennethreitz/requests@d2d576b6b1101e2871c82f63adf2c2b534c2dabc).
Should a new ticket be opened? Or should this be tracked under a different issue?
It's a new ticket, and it's what we should have opened when I closed kennethreitz/requests#2435.
Looks like the problem is:
1. `SSLContext` allows `check_hostname` to be set to `False`. As a result, `match_hostname` from the `ssl` module will not be called during the handshake stage.
2. But urllib3 has built in repetitive logic which relies on its own parameters to control hostname checking.
3. The behaviour is useful when we are not relying on hostname verification, but only ensuring the certificate matches the public key used by the other side for encryption.
4. As a workaround, the developers have added a fingerprint parameter to PoolManager. This way
https://github.com/urllib3/urllib3/blob/master/src/urllib3/connection.py#L346
```python
if self.assert_fingerprint:
    assert_fingerprint(self.sock.getpeercert(binary_form=True),
                       self.assert_fingerprint)
elif context.verify_mode != ssl.CERT_NONE \
        and not getattr(context, 'check_hostname', False) \
        and self.assert_hostname is not False:
    # While urllib3 attempts to always turn off hostname matching from
    # the TLS library, this cannot always be done. So we check whether
    # the TLS Library still thinks it's matching hostnames.
    cert = self.sock.getpeercert()
    if not cert.get('subjectAltName', ()):
        warnings.warn((
            'Certificate for {0} has no `subjectAltName`, falling back to check for a '
            '`commonName` for now. This feature is being removed by major browsers and '
            'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
            'for details.)'.format(hostname)),
            SubjectAltNameWarning
        )
    _match_hostname(cert, self.assert_hostname or server_hostname)

self.is_verified = (
    context.verify_mode == ssl.CERT_REQUIRED or
    self.assert_fingerprint is not None
)
```
the duplicated hostname-matching logic is not going to be called.
But fingerprint verification and matching the certificate against a public key are not equal things, since a fingerprint is only a short digest of the full certificate.
5. So urllib3 provides an abstraction layer which introduces lots of unnecessary clutter, yet doesn't stand for clear, open cryptography standards that ensure people know and can control what's going on inside.
6. So we are left with the following clumsy workaround to be built:
```python
import requests


class CustomAdapter(requests.adapters.HTTPAdapter):
    def __init__(self, poolmanager_server_hostname, *args, **kwargs):
        self.poolmanager_server_hostname = poolmanager_server_hostname
        requests.adapters.HTTPAdapter.__init__(self, *args, **kwargs)

    def init_poolmanager(self, *args, **kwargs):
        self.poolmanager = requests.adapters.PoolManager(
            *args,
            server_hostname=self.poolmanager_server_hostname,
            **kwargs)


s = requests.Session()
prefix = 'https://<ip_addr>'
server_certificate_hostname = '<some_obfuscated_hostname_inside_self_signed_certificate>'
s.mount(prefix, CustomAdapter(poolmanager_server_hostname=server_certificate_hostname))
with s.request(<parameters>) as resp:
    <etc>
```
Maybe I'm not understanding your comment but I wanted to make clear that we do hostname verification of certs, not just fingerprinting. When an `HTTPSConnection` has `assert_hostname` set to a non-`False` value and the `SSLContext` object that we've constructed from `create_urllib3_context()` has `check_hostname` set to `False` (meaning we've successfully told the `SSLContext` object that we're handling hostname checking), then our own hostname checking fires within `_match_hostname`. You can find the implementation within `urllib3.packages.ssl_match_hostname._implementation`. It'll look familiar if you look at how CPython handles hostname checking.
I can't speak for requests but if you want to accomplish what I believe you want to do with just urllib3 you can do this:
```python
import urllib3
pool = urllib3.PoolManager(
    cert_reqs='REQUIRED',
    assert_hostname=server_certificate_hostname,
    # If you want SNI to emit this hostname as well you need this too:
    # server_hostname=server_certificate_hostname
)
r = pool.request('GET', 'https://<ip address>')
```
If you have more questions about how we do hostname verification, SNI, or TLS I can answer them.
@sethmlarson, yeah, it seems like assert_hostname is what's needed!
1. Is it uncommon practice to ignore hostname verification for self-signed certificates?
I thought it was unnecessary, since relying on DNS resolution, or doing IP pinning, won't give more security.
2. It still looks like:
```python
import requests
class CustomAdapter(requests.adapters.HTTPAdapter):
    def __init__(self, *args, assert_hostname=None, **kwargs):
        self.assert_hostname = assert_hostname
        requests.adapters.HTTPAdapter.__init__(self, *args, **kwargs)

    def init_poolmanager(self, *args, **kwargs):
        self.poolmanager = requests.adapters.PoolManager(
            *args,
            assert_hostname=self.assert_hostname,
            **kwargs)


s = requests.Session()
prefix = 'https://<ip_addr>'
s.mount(prefix, CustomAdapter(assert_hostname=False))
with s.request(<parameters>) as resp:
    <etc>
```
3. Probably it's better to switch to urllib3 only. I'm looking at the problem from the requests library's point of view. They mention almost nothing in their documentation regarding PoolManager options, just a brief example.
- proxy configuration parameters in requests are quite simple
- although requests doesn't integrate with async, and I need to run a thread pool executor
- ssl configuration parameters are unnecessarily simplified in requests; urllib3 keeps them much closer to the ones in the openssl and curl implementations.
1. I'm not sure! I'd say that keeping things in development environments as close to production environments as possible is preferable. So if you do hostname verification in production you should strive to do it in development too.
2+3. This is up to you, you've got your adapter written already and now you can just use and extend it if you want. urllib3 is intended to be a low-level interface and requests is a higher-level interface.
Any progress on leveraging stdlib's logic?
I don't think so, this issue is up for grabs still :)
Couple of things that came up during my initial work-through of this change:
- (w.r.t. removing common name support, https://github.com/urllib3/urllib3/issues/497) `hostname_checks_common_name` isn't available in Python 3.6 so we can't disable it via `SSLContext`. Do we keep our custom `match_hostname()` verification code and simply change it to not check `commonName` for Python 3.6 until Python 3.7 is the minimum supported version?
- `assert_hostname` versus `server_hostname`: currently we allow sending an SNI of X and then verifying that the certificate matches hostname Y via server_hostname=X, assert_hostname=Y. I wonder how frequently this arrangement is used? Continuing to support this may require keeping our custom `match_hostname`.
I've been kind-of bugged by the whole `assert_hostname`/`server_hostname` option duality in the past.
* I'm not clear on the details, but is it possible to use `hostname_checks_common_name` in Python 3.7+ and only use our custom `match_hostname()` verification code for Python 3.6? Then the code will be easy to remove when we drop 3.6 support in 12 months.
* This is definitely something I struggle with too. I think we should continue supporting this because there's no good reason to remove it, but we should probably improve the documentation. https://urllib3.readthedocs.io/en/latest/advanced-usage.html#custom-sni-hostname only gives an example where we use the same value twice.
(The way I'm seeing this is that removing `match_hostname()` should not be the goal. The goal should be to rely on the standard library whenever possible.)
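A minimal sketch of the `hostname_checks_common_name` knob being discussed, with the `hasattr()` guard for Python 3.6 that the merged patch above also uses:
```python
import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
# Python 3.7+: refuse certificates that would only match via commonName.
# On Python 3.6 the attribute doesn't exist, hence the hasattr() guard.
if hasattr(ctx, "hostname_checks_common_name"):
    ctx.hostname_checks_common_name = False
```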
An additional complication is that `ssl.match_hostname()` has been deprecated in Python 3.7+.
So there are three things that urllib3 does that are not possible by setting `context.check_hostname` to `True`:
* alternative hostnames with `assert_hostname` (with or without SNI)
* fingerprint identification
* stop checking common name in Python 3.6
If we decide to keep the first two, as I think we should, then this issue is not a breaking change and can always be handled later in v2, alongside #2111. (And it will be easier when we drop Python 3.6 support.)
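For the second bullet, a hedged sketch of fingerprint pinning through urllib3's documented `assert_fingerprint` option; the digest string is a placeholder:
```python
import urllib3

pool = urllib3.PoolManager(
    cert_reqs="CERT_NONE",  # skip CA and hostname verification...
    # ...and pin the server certificate's digest instead (placeholder value):
    assert_fingerprint="<colon-separated SHA-256 fingerprint>",
)
```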
Talked with Seth and he's worried that doing this later breaks weird use cases that call our internal but documented functions. So I'll work on this as part of 2.0, not 2.x.
Another complication: pyOpenSSL only [learned recently how to check hostnames in a simple way](https://github.com/pyca/pyopenssl/pull/933). But the [release that includes that change also drops support for OpenSSL 1.0.2](https://www.pyopenssl.org/en/stable/changelog.html#id2). So we're also going to check hostnames manually with pyOpenSSL. And when we'll drop support for OpenSSL 1.0.2, we'll also require pyOpenSSL >= 20.0.0.
> An additional complication is that `ssl.match_hostname()` has been deprecated in Python 3.7+.
Actually, this is fine: we need to maintain our own implementation anyway to remove commonName support. | 2021-03-12T12:16:27Z | [] | [] |
urllib3/urllib3 | 2,209 | urllib3__urllib3-2209 | [
"2200"
] | 3346a2a5a7d94e702af49afe77d89d23bf1d0d8a | diff --git a/src/urllib3/contrib/pyopenssl.py b/src/urllib3/contrib/pyopenssl.py
--- a/src/urllib3/contrib/pyopenssl.py
+++ b/src/urllib3/contrib/pyopenssl.py
@@ -66,7 +66,6 @@ class UnsupportedExtension(Exception):
from socket import timeout
from .. import util
-from ..util.ssl_ import is_ipaddress
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
@@ -80,7 +79,8 @@ class UnsupportedExtension(Exception):
# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
- util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
+ util.ssl_.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
+ util.ssl_.PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}
@@ -458,7 +458,7 @@ def wrap_socket(
cnx = OpenSSL.SSL.Connection(self._ctx, sock)
# If server_hostname is an IP, don't use it for SNI, per RFC6066 Section 3
- if server_hostname and not is_ipaddress(server_hostname):
+ if server_hostname and not util.ssl_.is_ipaddress(server_hostname):
if isinstance(server_hostname, str):
server_hostname = server_hostname.encode("utf-8")
cnx.set_tlsext_host_name(server_hostname)
diff --git a/src/urllib3/contrib/securetransport.py b/src/urllib3/contrib/securetransport.py
--- a/src/urllib3/contrib/securetransport.py
+++ b/src/urllib3/contrib/securetransport.py
@@ -111,7 +111,11 @@
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
# TLSv1 to 1.2 are supported on macOS 10.8+
_protocol_to_min_max = {
- util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12)
+ util.ssl_.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
+ util.ssl_.PROTOCOL_TLS_CLIENT: (
+ SecurityConst.kTLSProtocol1,
+ SecurityConst.kTLSProtocol12,
+ ),
}
if hasattr(ssl, "PROTOCOL_SSLv2"):
diff --git a/src/urllib3/util/__init__.py b/src/urllib3/util/__init__.py
--- a/src/urllib3/util/__init__.py
+++ b/src/urllib3/util/__init__.py
@@ -8,7 +8,6 @@
HAS_SNI,
IS_PYOPENSSL,
IS_SECURETRANSPORT,
- PROTOCOL_TLS,
SSLContext,
assert_fingerprint,
resolve_cert_reqs,
@@ -24,7 +23,6 @@
"IS_PYOPENSSL",
"IS_SECURETRANSPORT",
"SSLContext",
- "PROTOCOL_TLS",
"ALPN_PROTOCOLS",
"Retry",
"Timeout",
diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -50,6 +50,7 @@ def _is_ge_openssl_v1_1_1(
OPENSSL_VERSION,
OPENSSL_VERSION_NUMBER,
PROTOCOL_TLS,
+ PROTOCOL_TLS_CLIENT,
OP_NO_SSLv2,
OP_NO_SSLv3,
SSLContext,
@@ -66,6 +67,7 @@ def _is_ge_openssl_v1_1_1(
OP_NO_SSLv2 = 0x1000000 # type: ignore
OP_NO_SSLv3 = 0x2000000 # type: ignore
PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
+ PROTOCOL_TLS_CLIENT = 16
_PCTRTT = Tuple[Tuple[str, str], ...]
@@ -223,7 +225,13 @@ def create_urllib3_context(
"""
if SSLContext is None:
raise TypeError("Can't create an SSLContext object without an ssl module")
- context = SSLContext(ssl_version or PROTOCOL_TLS)
+
+ # PROTOCOL_TLS is deprecated in Python 3.10 so we pass PROTOCOL_TLS_CLIENT instead.
+ if ssl_version in (None, PROTOCOL_TLS):
+ ssl_version = PROTOCOL_TLS_CLIENT
+
+ context = SSLContext(ssl_version)
+
# Unless we're given ciphers defer to either system ciphers in
# the case of OpenSSL 1.1.1+ or use our own secure default ciphers.
if ciphers is not None or not USE_DEFAULT_SSLCONTEXT_CIPHERS:
@@ -260,9 +268,16 @@ def create_urllib3_context(
) is not None:
context.post_handshake_auth = True
- context.verify_mode = cert_reqs
- # We ask for verification here but it may be disabled in HTTPSConnection.connect
- context.check_hostname = cert_reqs == ssl.CERT_REQUIRED
+ # The order of the below lines setting verify_mode and check_hostname
+ # matter due to safe-guards SSLContext has to prevent an SSLContext with
+ # check_hostname=True, verify_mode=NONE/OPTIONAL.
+ if cert_reqs == ssl.CERT_REQUIRED:
+ context.verify_mode = cert_reqs
+ context.check_hostname = True
+ else:
+ context.check_hostname = False
+ context.verify_mode = cert_reqs
+
if hasattr(context, "hostname_checks_common_name"):
context.hostname_checks_common_name = False
| diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -678,12 +678,13 @@ def test_default_tls_version_deprecations(self):
else:
assert w == []
- def test_no_tls_version_deprecation_with_ssl_version(self):
+ @pytest.mark.parametrize("ssl_version", [ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS_CLIENT])
+ def test_no_tls_version_deprecation_with_ssl_version(self, ssl_version):
if self.tls_protocol_name is None:
pytest.skip("Skipping base test class")
with HTTPSConnectionPool(
- self.host, self.port, ca_certs=DEFAULT_CA, ssl_version=util.PROTOCOL_TLS
+ self.host, self.port, ca_certs=DEFAULT_CA, ssl_version=ssl_version
) as https_pool:
conn = https_pool._get_conn()
try:
| Python 3.10 ssl module deprecations
### Context
Heads up! I'm in the process of cleaning up the ssl module. Python 3.10's ssl module will raise deprecation warnings for a lot of old cruft that has been deprecated since 3.6 and 3.7. For example, SSL 3.0 through TLS 1.1, ``OP_NO_SSL/TLS*``, and more old features are going away. You'll also need OpenSSL 1.1.1+ for testing.
* https://bugs.python.org/issue43669
* https://bugs.python.org/issue43880
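A minimal sketch of the non-deprecated spelling on Python 3.10, assuming TLS 1.2 as the illustrative floor:
```python
import ssl

# Deprecated on 3.10: ssl.SSLContext(ssl.PROTOCOL_TLS) plus OP_NO_TLSv1* flags.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.minimum_version = ssl.TLSVersion.TLSv1_2  # replaces the OP_NO_* options
```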
### Alternatives
None
### Duplicate
No
### Contribution
Would you be willing to submit a PR?
Sorry, no time :)
| Thanks for the heads up :) | 2021-05-06T19:24:27Z | [] | [] |
urllib3/urllib3 | 2,220 | urllib3__urllib3-2220 | [
"2110"
] | c9ac6c69c12c291008261a0597361f5525998a81 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -347,6 +347,8 @@ class HTTPSConnection(HTTPConnection):
ca_cert_dir: Optional[str] = None
ca_cert_data: Union[None, str, bytes] = None
ssl_version: Optional[Union[int, str]] = None
+ ssl_minimum_version: Optional[int] = None
+ ssl_maximum_version: Optional[int] = None
assert_fingerprint: Optional[str] = None
tls_in_tls_required: bool = False
@@ -385,6 +387,9 @@ def __init__(
self.key_password = key_password
self.ssl_context = ssl_context
self.server_hostname = server_hostname
+ self.ssl_version = None
+ self.ssl_minimum_version = None
+ self.ssl_maximum_version = None
def set_cert(
self,
@@ -464,6 +469,8 @@ def connect(self) -> None:
default_ssl_context = True
self.ssl_context = create_urllib3_context(
ssl_version=resolve_ssl_version(self.ssl_version),
+ ssl_minimum_version=self.ssl_minimum_version,
+ ssl_maximum_version=self.ssl_maximum_version,
cert_reqs=resolve_cert_reqs(self.cert_reqs),
)
# In some cases, we want to verify hostnames ourselves
@@ -508,23 +515,6 @@ def connect(self) -> None:
tls_in_tls=tls_in_tls,
)
- # If we're using all defaults and the connection
- # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
- # for the host.
- if (
- default_ssl_context
- and self.ssl_version is None
- and hasattr(self.sock, "version")
- and self.sock.version() in {"TLSv1", "TLSv1.1"}
- ):
- warnings.warn(
- "Negotiating TLSv1/TLSv1.1 by default is deprecated "
- "and will be disabled in urllib3 v2.0.0. Connecting to "
- f"'{self.host}' with '{self.sock.version()}' can be "
- "enabled by explicitly opting-in with 'ssl_version'",
- DeprecationWarning,
- )
-
if self.assert_fingerprint:
assert_fingerprint(
self.sock.getpeercert(binary_form=True), self.assert_fingerprint
diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -53,6 +53,8 @@
from .util.util import to_str
if TYPE_CHECKING:
+ import ssl
+
from typing_extensions import Literal
log = logging.getLogger(__name__)
@@ -902,6 +904,8 @@ def __init__(
key_password: Optional[str] = None,
ca_certs: Optional[str] = None,
ssl_version: Optional[Union[int, str]] = None,
+ ssl_minimum_version: Optional["ssl.TLSVersion"] = None,
+ ssl_maximum_version: Optional["ssl.TLSVersion"] = None,
assert_hostname: Optional[Union[str, "Literal[False]"]] = None,
assert_fingerprint: Optional[str] = None,
ca_cert_dir: Optional[str] = None,
@@ -928,6 +932,8 @@ def __init__(
self.ca_certs = ca_certs
self.ca_cert_dir = ca_cert_dir
self.ssl_version = ssl_version
+ self.ssl_minimum_version = ssl_minimum_version
+ self.ssl_maximum_version = ssl_maximum_version
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
@@ -949,6 +955,9 @@ def _prepare_conn(self, conn: HTTPSConnection) -> HTTPConnection:
assert_fingerprint=self.assert_fingerprint,
)
conn.ssl_version = self.ssl_version
+ conn.ssl_minimum_version = self.ssl_minimum_version
+ conn.ssl_maximum_version = self.ssl_maximum_version
+
return conn
def _prepare_proxy(self, conn: HTTPSConnection) -> None: # type: ignore[override]
diff --git a/src/urllib3/contrib/pyopenssl.py b/src/urllib3/contrib/pyopenssl.py
--- a/src/urllib3/contrib/pyopenssl.py
+++ b/src/urllib3/contrib/pyopenssl.py
@@ -107,6 +107,44 @@ class UnsupportedExtension(Exception): # type: ignore[no-redef]
}
_openssl_to_stdlib_verify = {v: k for k, v in _stdlib_to_openssl_verify.items()}
+# The SSLvX values are the most likely to be missing in the future
+# but we check them all just to be sure.
+_OP_NO_SSLv2: int = getattr(OpenSSL.SSL, "OP_NO_SSLv2", 0)
+_OP_NO_SSLv3: int = getattr(OpenSSL.SSL, "OP_NO_SSLv3", 0)
+_OP_NO_TLSv1: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1", 0)
+_OP_NO_TLSv1_1: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_1", 0)
+_OP_NO_TLSv1_2: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_2", 0)
+_OP_NO_TLSv1_3: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_3", 0)
+
+_openssl_to_ssl_minimum_version: Dict[int, int] = {
+ ssl.TLSVersion.MINIMUM_SUPPORTED: _OP_NO_SSLv2,
+ ssl.TLSVersion.SSLv3: _OP_NO_SSLv2,
+ ssl.TLSVersion.TLSv1: _OP_NO_SSLv2 | _OP_NO_SSLv3,
+ ssl.TLSVersion.TLSv1_1: _OP_NO_SSLv2 | _OP_NO_SSLv3 | _OP_NO_TLSv1,
+ ssl.TLSVersion.TLSv1_2: _OP_NO_SSLv2 | _OP_NO_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1,
+ ssl.TLSVersion.TLSv1_3: (
+ _OP_NO_SSLv2 | _OP_NO_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2
+ ),
+ ssl.TLSVersion.MAXIMUM_SUPPORTED: (
+ _OP_NO_SSLv2 | _OP_NO_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2
+ ),
+}
+_openssl_to_ssl_maximum_version: Dict[int, int] = {
+ ssl.TLSVersion.MINIMUM_SUPPORTED: (
+ _OP_NO_SSLv2 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3
+ ),
+ ssl.TLSVersion.SSLv3: (
+ _OP_NO_SSLv2 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3
+ ),
+ ssl.TLSVersion.TLSv1: (
+ _OP_NO_SSLv2 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3
+ ),
+ ssl.TLSVersion.TLSv1_1: _OP_NO_SSLv2 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3,
+ ssl.TLSVersion.TLSv1_2: _OP_NO_SSLv2 | _OP_NO_TLSv1_3,
+ ssl.TLSVersion.TLSv1_3: _OP_NO_SSLv2,
+ ssl.TLSVersion.MAXIMUM_SUPPORTED: _OP_NO_SSLv2,
+}
+
# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384
@@ -407,6 +445,8 @@ def __init__(self, protocol: int) -> None:
self._ctx = OpenSSL.SSL.Context(self.protocol)
self._options = 0
self.check_hostname = False
+ self._minimum_version: int = ssl.TLSVersion.MINIMUM_SUPPORTED
+ self._maximum_version: int = ssl.TLSVersion.MAXIMUM_SUPPORTED
@property
def options(self) -> int:
@@ -415,7 +455,7 @@ def options(self) -> int:
@options.setter
def options(self, value: int) -> None:
self._options = value
- self._ctx.set_options(value)
+ self._set_ctx_options()
@property
def verify_mode(self) -> int:
@@ -498,6 +538,31 @@ def wrap_socket(
return WrappedSocket(cnx, sock)
+ def _set_ctx_options(self) -> None:
+ self._ctx.set_options(
+ self._options
+ | _openssl_to_ssl_minimum_version[self._minimum_version]
+ | _openssl_to_ssl_maximum_version[self._maximum_version]
+ )
+
+ @property
+ def minimum_version(self) -> int:
+ return self._minimum_version
+
+ @minimum_version.setter
+ def minimum_version(self, minimum_version: int) -> None:
+ self._minimum_version = minimum_version
+ self._set_ctx_options()
+
+ @property
+ def maximum_version(self) -> int:
+ return self._maximum_version
+
+ @maximum_version.setter
+ def maximum_version(self, maximum_version: int) -> None:
+ self._maximum_version = maximum_version
+ self._set_ctx_options()
+
def _verify_callback(
cnx: OpenSSL.SSL.Connection,
diff --git a/src/urllib3/contrib/securetransport.py b/src/urllib3/contrib/securetransport.py
--- a/src/urllib3/contrib/securetransport.py
+++ b/src/urllib3/contrib/securetransport.py
@@ -67,6 +67,7 @@
TYPE_CHECKING,
Any,
BinaryIO,
+ Dict,
Generator,
List,
Optional,
@@ -165,6 +166,15 @@
)
+_tls_version_to_st: Dict[int, int] = {
+ ssl.TLSVersion.MINIMUM_SUPPORTED: SecurityConst.kTLSProtocol1,
+ ssl.TLSVersion.TLSv1: SecurityConst.kTLSProtocol1,
+ ssl.TLSVersion.TLSv1_1: SecurityConst.kTLSProtocol11,
+ ssl.TLSVersion.TLSv1_2: SecurityConst.kTLSProtocol12,
+ ssl.TLSVersion.MAXIMUM_SUPPORTED: SecurityConst.kTLSProtocol12,
+}
+
+
def inject_into_urllib3() -> None:
"""
Monkey-patch urllib3 with SecureTransport-backed SSL-support.
@@ -751,7 +761,11 @@ class SecureTransportContext:
"""
def __init__(self, protocol: int) -> None:
- self._min_version, self._max_version = _protocol_to_min_max[protocol]
+ self._minimum_version: int = ssl.TLSVersion.MINIMUM_SUPPORTED
+ self._maximum_version: int = ssl.TLSVersion.MAXIMUM_SUPPORTED
+ if protocol not in (None, ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS_CLIENT):
+ self._min_version, self._max_version = _protocol_to_min_max[protocol]
+
self._options = 0
self._verify = False
self._trust_bundle: Optional[bytes] = None
@@ -880,11 +894,27 @@ def wrap_socket(
server_hostname,
self._verify,
self._trust_bundle,
- self._min_version,
- self._max_version,
+ _tls_version_to_st[self._minimum_version],
+ _tls_version_to_st[self._maximum_version],
self._client_cert,
self._client_key,
self._client_key_passphrase,
self._alpn_protocols,
)
return wrapped_socket
+
+ @property
+ def minimum_version(self) -> int:
+ return self._minimum_version
+
+ @minimum_version.setter
+ def minimum_version(self, minimum_version: int) -> None:
+ self._minimum_version = minimum_version
+
+ @property
+ def maximum_version(self) -> int:
+ return self._maximum_version
+
+ @maximum_version.setter
+ def maximum_version(self, maximum_version: int) -> None:
+ self._maximum_version = maximum_version
diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py
--- a/src/urllib3/poolmanager.py
+++ b/src/urllib3/poolmanager.py
@@ -50,6 +50,8 @@
"cert_reqs",
"ca_certs",
"ssl_version",
+ "ssl_minimum_version",
+ "ssl_maximum_version",
"ca_cert_dir",
"ssl_context",
"key_password",
@@ -79,6 +81,8 @@ class PoolKey(NamedTuple):
key_cert_reqs: Optional[str]
key_ca_certs: Optional[str]
key_ssl_version: Optional[Union[int, str]]
+ key_ssl_minimum_version: Optional["ssl.TLSVersion"]
+ key_ssl_maximum_version: Optional["ssl.TLSVersion"]
key_ca_cert_dir: Optional[str]
key_ssl_context: Optional["ssl.SSLContext"]
key_maxsize: Optional[int]
diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -41,6 +41,9 @@ def _is_ge_openssl_v1_1_1(
from .ssltransport import SSLTransport as SSLTransportType
+# Mapping from 'ssl.PROTOCOL_TLSX' to 'TLSVersion.X'
+_SSL_VERSION_TO_TLS_VERSION: Dict[int, int] = {}
+
try: # Do we have ssl at all?
import ssl
from ssl import ( # type: ignore[misc]
@@ -56,12 +59,24 @@ def _is_ge_openssl_v1_1_1(
OP_NO_SSLv2,
OP_NO_SSLv3,
SSLContext,
+ TLSVersion,
)
USE_DEFAULT_SSLCONTEXT_CIPHERS = _is_ge_openssl_v1_1_1(
OPENSSL_VERSION, OPENSSL_VERSION_NUMBER
)
PROTOCOL_SSLv23 = PROTOCOL_TLS
+
+ # Need to be careful here in case old TLS versions get
+ # removed in future 'ssl' module implementations.
+ for attr in ("TLSv1", "TLSv1_1", "TLSv1_2"):
+ try:
+ _SSL_VERSION_TO_TLS_VERSION[getattr(ssl, f"PROTOCOL_{attr}")] = getattr(
+ TLSVersion, attr
+ )
+ except AttributeError: # Defensive:
+ continue
+
from .ssltransport import SSLTransport # type: ignore[misc]
except ImportError:
OP_NO_COMPRESSION = 0x20000 # type: ignore[assignment]
@@ -77,7 +92,6 @@ def _is_ge_openssl_v1_1_1(
_TYPE_PEER_CERT_RET_DICT = Dict[str, Union[str, _PCTRTTT, _PCTRTT]]
_TYPE_PEER_CERT_RET = Union[_TYPE_PEER_CERT_RET_DICT, bytes, None]
-
# A secure default.
# Sources for more information on TLS ciphers:
#
@@ -190,6 +204,8 @@ def create_urllib3_context(
cert_reqs: Optional[int] = None,
options: Optional[int] = None,
ciphers: Optional[str] = None,
+ ssl_minimum_version: Optional[int] = None,
+ ssl_maximum_version: Optional[int] = None,
) -> "ssl.SSLContext":
"""All arguments have the same meaning as ``ssl_wrap_socket``.
@@ -212,6 +228,14 @@ def create_urllib3_context(
The desired protocol version to use. This will default to
PROTOCOL_SSLv23 which will negotiate the highest protocol that both
the server and your installation of OpenSSL support.
+
+ This parameter is deprecated instead use 'ssl_minimum_version'.
+ :param ssl_minimum_version:
+ The minimum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value.
+ :param ssl_maximum_version:
+ The maximum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value.
+ Not recommended to set to anything other than 'ssl.TLSVersion.MAXIMUM_SUPPORTED' which is the
+ default value.
:param cert_reqs:
Whether to require the certificate verification. This defaults to
``ssl.CERT_REQUIRED``.
@@ -228,11 +252,47 @@ def create_urllib3_context(
if SSLContext is None:
raise TypeError("Can't create an SSLContext object without an ssl module")
- # PROTOCOL_TLS is deprecated in Python 3.10 so we pass PROTOCOL_TLS_CLIENT instead.
- if ssl_version in (None, PROTOCOL_TLS):
- ssl_version = PROTOCOL_TLS_CLIENT
+ # This means 'ssl_version' was specified as an exact value.
+ if ssl_version not in (None, PROTOCOL_TLS, PROTOCOL_TLS_CLIENT):
+ # Disallow setting 'ssl_version' and 'ssl_minimum|maximum_version'
+ # to avoid conflicts.
+ if ssl_minimum_version is not None or ssl_maximum_version is not None:
+ raise ValueError(
+ "Can't specify both 'ssl_version' and either "
+ "'ssl_minimum_version' or 'ssl_maximum_version'"
+ )
+
+ # 'ssl_version' is deprecated and will be removed in the future.
+ else:
+ # Use 'ssl_minimum_version' and 'ssl_maximum_version' instead.
+ ssl_minimum_version = _SSL_VERSION_TO_TLS_VERSION.get(
+ ssl_version, TLSVersion.MINIMUM_SUPPORTED
+ )
+ ssl_maximum_version = _SSL_VERSION_TO_TLS_VERSION.get(
+ ssl_version, TLSVersion.MAXIMUM_SUPPORTED
+ )
+
+ # This warning message is pushing users to use 'ssl_minimum_version'
+ # instead of both min/max. Best practice is to only set the minimum version and
+ # keep the maximum version to be it's default value: 'TLSVersion.MAXIMUM_SUPPORTED'
+ warnings.warn(
+ "'ssl_version' option is deprecated and will be "
+ "removed in a future release of urllib3 2.x. Instead "
+ "use 'ssl_minimum_version'",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
+
+ # PROTOCOL_TLS is deprecated in Python 3.10 so we always use PROTOCOL_TLS_CLIENT
+ context = SSLContext(PROTOCOL_TLS_CLIENT)
+
+ if ssl_minimum_version is not None:
+ context.minimum_version = ssl_minimum_version
+ else: # Python <3.10 defaults to 'MINIMUM_SUPPORTED' so explicitly set TLSv1.2 here
+ context.minimum_version = TLSVersion.TLSv1_2
- context = SSLContext(ssl_version)
+ if ssl_maximum_version is not None:
+ context.maximum_version = ssl_maximum_version
# Unless we're given ciphers defer to either system ciphers in
# the case of OpenSSL 1.1.1+ or use our own secure default ciphers.
| diff --git a/test/test_ssl.py b/test/test_ssl.py
--- a/test/test_ssl.py
+++ b/test/test_ssl.py
@@ -1,3 +1,4 @@
+import ssl
from unittest import mock
import pytest
@@ -173,6 +174,63 @@ def test_create_urllib3_context_default_ciphers(
else:
context.set_ciphers.assert_called_with(ssl_.DEFAULT_CIPHERS)
+ @pytest.mark.parametrize(
+ "kwargs",
+ [
+ {
+ "ssl_version": ssl.PROTOCOL_TLSv1,
+ "ssl_minimum_version": ssl.TLSVersion.MINIMUM_SUPPORTED,
+ },
+ {
+ "ssl_version": ssl.PROTOCOL_TLSv1,
+ "ssl_maximum_version": ssl.TLSVersion.TLSv1,
+ },
+ {
+ "ssl_version": ssl.PROTOCOL_TLSv1,
+ "ssl_minimum_version": ssl.TLSVersion.MINIMUM_SUPPORTED,
+ "ssl_maximum_version": ssl.TLSVersion.MAXIMUM_SUPPORTED,
+ },
+ ],
+ )
+ def test_create_urllib3_context_ssl_version_and_ssl_min_max_version_errors(
+ self, kwargs
+ ):
+ with pytest.raises(ValueError) as e:
+ ssl_.create_urllib3_context(**kwargs)
+
+ assert str(e.value) == (
+ "Can't specify both 'ssl_version' and either 'ssl_minimum_version' or 'ssl_maximum_version'"
+ )
+
+ @pytest.mark.parametrize(
+ "kwargs",
+ [
+ {
+ "ssl_version": ssl.PROTOCOL_TLS,
+ "ssl_minimum_version": ssl.TLSVersion.MINIMUM_SUPPORTED,
+ },
+ {
+ "ssl_version": ssl.PROTOCOL_TLS_CLIENT,
+ "ssl_minimum_version": ssl.TLSVersion.MINIMUM_SUPPORTED,
+ },
+ {
+ "ssl_version": None,
+ "ssl_minimum_version": ssl.TLSVersion.MINIMUM_SUPPORTED,
+ },
+ {"ssl_version": ssl.PROTOCOL_TLSv1, "ssl_minimum_version": None},
+ {"ssl_version": ssl.PROTOCOL_TLSv1, "ssl_maximum_version": None},
+ {
+ "ssl_version": ssl.PROTOCOL_TLSv1,
+ "ssl_minimum_version": None,
+ "ssl_maximum_version": None,
+ },
+ ],
+ )
+ def test_create_urllib3_context_ssl_version_and_ssl_min_max_version_no_error(
+ self, kwargs
+ ):
+ ssl_.create_urllib3_context(**kwargs)
+
def test_assert_fingerprint_raises_exception_on_none_cert(self):
with pytest.raises(SSLError):
ssl_.assert_fingerprint(
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -22,6 +22,7 @@
import trustme
import urllib3.util as util
+import urllib3.util.ssl_
from dummyserver.server import (
DEFAULT_CA,
DEFAULT_CA_KEY,
@@ -75,9 +76,27 @@
class TestHTTPS(HTTPSDummyServerTestCase):
tls_protocol_name = None
- def tls_protocol_deprecated(self):
+ def tls_protocol_not_default(self):
return self.tls_protocol_name in {"TLSv1", "TLSv1.1"}
+ def tls_version(self):
+ if self.tls_protocol_name is None:
+ return pytest.skip("Skipping base test class")
+ try:
+ from ssl import TLSVersion
+ except ImportError:
+ return pytest.skip("ssl.TLSVersion isn't available")
+ return getattr(TLSVersion, self.tls_protocol_name.replace(".", "_"))
+
+ def ssl_version(self):
+ if self.tls_protocol_name is None:
+ return pytest.skip("Skipping base test class")
+ attribute = f"PROTOCOL_{self.tls_protocol_name.replace('.', '_')}"
+ ssl_version = getattr(ssl, attribute, None)
+ if ssl_version is None:
+ return pytest.skip(f"ssl.{attribute} isn't available")
+ return ssl_version
+
@classmethod
def setup_class(cls):
super().setup_class()
@@ -120,7 +139,10 @@ def teardown_class(cls):
def test_simple(self):
with HTTPSConnectionPool(
- self.host, self.port, ca_certs=DEFAULT_CA
+ self.host,
+ self.port,
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
r = https_pool.request("GET", "/")
assert r.status == 200, r.data
@@ -128,7 +150,10 @@ def test_simple(self):
@resolvesLocalhostFQDN()
def test_dotted_fqdn(self):
with HTTPSConnectionPool(
- self.host + ".", self.port, ca_certs=DEFAULT_CA
+ self.host + ".",
+ self.port,
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as pool:
r = pool.request("GET", "/")
assert r.status == 200, r.data
@@ -147,6 +172,7 @@ def test_client_intermediate(self):
key_file=os.path.join(self.certs_dir, CLIENT_INTERMEDIATE_KEY),
cert_file=os.path.join(self.certs_dir, CLIENT_INTERMEDIATE_PEM),
ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
r = https_pool.request("GET", "/certificate")
subject = json.loads(r.data.decode("utf-8"))
@@ -164,6 +190,7 @@ def test_client_no_intermediate(self):
cert_file=os.path.join(self.certs_dir, CLIENT_NO_INTERMEDIATE_PEM),
key_file=os.path.join(self.certs_dir, CLIENT_INTERMEDIATE_KEY),
ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
with pytest.raises((SSLError, ProtocolError)):
https_pool.request("GET", "/certificate", retries=False)
@@ -177,6 +204,7 @@ def test_client_key_password(self):
key_file=os.path.join(self.certs_dir, PASSWORD_CLIENT_KEYFILE),
cert_file=os.path.join(self.certs_dir, CLIENT_CERT),
key_password="letmein",
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
r = https_pool.request("GET", "/certificate")
subject = json.loads(r.data.decode("utf-8"))
@@ -190,6 +218,7 @@ def test_client_encrypted_key_requires_password(self):
key_file=os.path.join(self.certs_dir, PASSWORD_CLIENT_KEYFILE),
cert_file=os.path.join(self.certs_dir, CLIENT_CERT),
key_password=None,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
with pytest.raises(MaxRetryError, match="password is required") as e:
https_pool.request("GET", "/certificate")
@@ -198,7 +227,11 @@ def test_client_encrypted_key_requires_password(self):
def test_verified(self):
with HTTPSConnectionPool(
- self.host, self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
+ self.host,
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
conn = https_pool._new_conn()
assert conn.__class__ == VerifiedHTTPSConnection
@@ -207,14 +240,12 @@ def test_verified(self):
r = https_pool.request("GET", "/")
assert r.status == 200
- # If we're using a deprecated TLS version we can remove 'DeprecationWarning'
- if self.tls_protocol_deprecated():
- w = [x for x in w if x.category != DeprecationWarning]
-
assert w == []
def test_verified_with_context(self):
- ctx = util.ssl_.create_urllib3_context(cert_reqs=ssl.CERT_REQUIRED)
+ ctx = util.ssl_.create_urllib3_context(
+ cert_reqs=ssl.CERT_REQUIRED, ssl_minimum_version=self.tls_version()
+ )
ctx.load_verify_locations(cafile=DEFAULT_CA)
with HTTPSConnectionPool(self.host, self.port, ssl_context=ctx) as https_pool:
conn = https_pool._new_conn()
@@ -226,7 +257,9 @@ def test_verified_with_context(self):
assert not warn.called, warn.call_args_list
def test_context_combines_with_ca_certs(self):
- ctx = util.ssl_.create_urllib3_context(cert_reqs=ssl.CERT_REQUIRED)
+ ctx = util.ssl_.create_urllib3_context(
+ cert_reqs=ssl.CERT_REQUIRED, ssl_minimum_version=self.tls_version()
+ )
with HTTPSConnectionPool(
self.host, self.port, ca_certs=DEFAULT_CA, ssl_context=ctx
) as https_pool:
@@ -246,7 +279,11 @@ def test_ca_dir_verified(self, tmpdir):
shutil.copyfile(DEFAULT_CA, str(tmpdir / "81deb5f7.0"))
with HTTPSConnectionPool(
- self.host, self.port, cert_reqs="CERT_REQUIRED", ca_cert_dir=str(tmpdir)
+ self.host,
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_cert_dir=str(tmpdir),
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
conn = https_pool._new_conn()
assert conn.__class__ == VerifiedHTTPSConnection
@@ -255,15 +292,15 @@ def test_ca_dir_verified(self, tmpdir):
r = https_pool.request("GET", "/")
assert r.status == 200
- # If we're using a deprecated TLS version we can remove 'DeprecationWarning'
- if self.tls_protocol_deprecated():
- w = [x for x in w if x.category != DeprecationWarning]
-
assert w == []
def test_invalid_common_name(self):
with HTTPSConnectionPool(
- "127.0.0.1", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
+ "127.0.0.1",
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
with pytest.raises(MaxRetryError) as e:
https_pool.request("GET", "/", retries=0)
@@ -274,7 +311,11 @@ def test_invalid_common_name(self):
def test_verified_with_bad_ca_certs(self):
with HTTPSConnectionPool(
- self.host, self.port, cert_reqs="CERT_REQUIRED", ca_certs=self.bad_ca_path
+ self.host,
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=self.bad_ca_path,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
with pytest.raises(MaxRetryError) as e:
https_pool.request("GET", "/")
@@ -288,7 +329,10 @@ def test_verified_with_bad_ca_certs(self):
def test_verified_without_ca_certs(self):
# default is cert_reqs=None which is ssl.CERT_NONE
with HTTPSConnectionPool(
- self.host, self.port, cert_reqs="CERT_REQUIRED"
+ self.host,
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
with pytest.raises(MaxRetryError) as e:
https_pool.request("GET", "/")
@@ -320,7 +364,12 @@ def test_no_ssl(self):
def test_unverified_ssl(self):
""" Test that bare HTTPSConnection can connect, make requests """
- with HTTPSConnectionPool(self.host, self.port, cert_reqs=ssl.CERT_NONE) as pool:
+ with HTTPSConnectionPool(
+ self.host,
+ self.port,
+ cert_reqs=ssl.CERT_NONE,
+ ssl_minimum_version=self.tls_version(),
+ ) as pool:
with mock.patch("warnings.warn") as warn:
r = pool.request("GET", "/")
assert r.status == 200
@@ -334,7 +383,11 @@ def test_unverified_ssl(self):
def test_ssl_unverified_with_ca_certs(self):
with HTTPSConnectionPool(
- self.host, self.port, cert_reqs="CERT_NONE", ca_certs=self.bad_ca_path
+ self.host,
+ self.port,
+ cert_reqs="CERT_NONE",
+ ca_certs=self.bad_ca_path,
+ ssl_minimum_version=self.tls_version(),
) as pool:
with mock.patch("warnings.warn") as warn:
r = pool.request("GET", "/")
@@ -346,23 +399,27 @@ def test_ssl_unverified_with_ca_certs(self):
# warnings, which we want to ignore here.
calls = warn.call_args_list
- # If we're using a deprecated TLS version we can remove 'DeprecationWarning'
- if self.tls_protocol_deprecated():
- calls = [call for call in calls if call[0][1] != DeprecationWarning]
-
category = calls[0][0][1]
assert category == InsecureRequestWarning
def test_assert_hostname_false(self):
with HTTPSConnectionPool(
- "localhost", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
+ "localhost",
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.assert_hostname = False
https_pool.request("GET", "/")
def test_assert_specific_hostname(self):
with HTTPSConnectionPool(
- "localhost", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
+ "localhost",
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.assert_hostname = "localhost"
https_pool.request("GET", "/")
@@ -374,6 +431,7 @@ def test_server_hostname(self):
cert_reqs="CERT_REQUIRED",
ca_certs=DEFAULT_CA,
server_hostname="localhost",
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
conn = https_pool._new_conn()
conn.request("GET", "/")
@@ -387,7 +445,11 @@ def test_server_hostname(self):
def test_assert_fingerprint_md5(self):
with HTTPSConnectionPool(
- "localhost", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
+ "localhost",
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.assert_fingerprint = (
"55:39:BF:70:05:12:43:FA:1F:D1:BF:4E:E8:1B:07:1D"
@@ -397,7 +459,11 @@ def test_assert_fingerprint_md5(self):
def test_assert_fingerprint_sha1(self):
with HTTPSConnectionPool(
- "localhost", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
+ "localhost",
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.assert_fingerprint = (
"72:8B:55:4C:9A:FC:1E:88:A1:1C:AD:1B:B2:E7:CC:3E:DB:C8:F9:8A"
@@ -406,7 +472,11 @@ def test_assert_fingerprint_sha1(self):
def test_assert_fingerprint_sha256(self):
with HTTPSConnectionPool(
- "localhost", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
+ "localhost",
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.assert_fingerprint = (
"E3:59:8E:69:FF:C5:9F:C7:88:87:44:58:22:7F:90:8D:D9:BC:12:C4:90:79:D5:"
@@ -422,7 +492,11 @@ def _test_request(pool):
return cm.value.reason
with HTTPSConnectionPool(
- self.host, self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
+ self.host,
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.assert_fingerprint = (
@@ -459,7 +533,11 @@ def test_verify_none_and_bad_fingerprint(self):
def test_verify_none_and_good_fingerprint(self):
with HTTPSConnectionPool(
- "127.0.0.1", self.port, cert_reqs="CERT_NONE", ca_certs=self.bad_ca_path
+ "127.0.0.1",
+ self.port,
+ cert_reqs="CERT_NONE",
+ ca_certs=self.bad_ca_path,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.assert_fingerprint = (
"72:8B:55:4C:9A:FC:1E:88:A1:1C:AD:1B:B2:E7:CC:3E:DB:C8:F9:8A"
@@ -473,7 +551,11 @@ def test_good_fingerprint_and_hostname_mismatch(self):
# test doesn't do (deliberately). We should revisit this if we make
# new decisions.
with HTTPSConnectionPool(
- "127.0.0.1", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA
+ "127.0.0.1",
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.assert_fingerprint = (
"72:8B:55:4C:9A:FC:1E:88:A1:1C:AD:1B:B2:E7:CC:3E:DB:C8:F9:8A"
@@ -490,6 +572,7 @@ def test_https_timeout(self):
timeout=timeout,
retries=False,
cert_reqs="CERT_REQUIRED",
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
with pytest.raises(ConnectTimeoutError):
https_pool.request("GET", "/")
@@ -501,6 +584,7 @@ def test_https_timeout(self):
timeout=timeout,
retries=False,
cert_reqs="CERT_REQUIRED",
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.ca_certs = DEFAULT_CA
https_pool.assert_fingerprint = (
@@ -509,7 +593,11 @@ def test_https_timeout(self):
timeout = Timeout(total=None)
with HTTPSConnectionPool(
- self.host, self.port, timeout=timeout, cert_reqs="CERT_NONE"
+ self.host,
+ self.port,
+ timeout=timeout,
+ cert_reqs="CERT_NONE",
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.request("GET", "/")
@@ -517,7 +605,11 @@ def test_tunnel(self):
""" test the _tunnel behavior """
timeout = Timeout(total=None)
with HTTPSConnectionPool(
- self.host, self.port, timeout=timeout, cert_reqs="CERT_NONE"
+ self.host,
+ self.port,
+ timeout=timeout,
+ cert_reqs="CERT_NONE",
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
conn = https_pool._new_conn()
try:
@@ -581,13 +673,17 @@ def test_enhanced_ssl_connection(self):
cert_reqs="CERT_REQUIRED",
ca_certs=DEFAULT_CA,
assert_fingerprint=fingerprint,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
r = https_pool.request("GET", "/")
assert r.status == 200
def test_ssl_correct_system_time(self):
with HTTPSConnectionPool(
- self.host, self.port, ca_certs=DEFAULT_CA
+ self.host,
+ self.port,
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.cert_reqs = "CERT_REQUIRED"
https_pool.ca_certs = DEFAULT_CA
@@ -597,7 +693,10 @@ def test_ssl_correct_system_time(self):
def test_ssl_wrong_system_time(self):
with HTTPSConnectionPool(
- self.host, self.port, ca_certs=DEFAULT_CA
+ self.host,
+ self.port,
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.cert_reqs = "CERT_REQUIRED"
https_pool.ca_certs = DEFAULT_CA
@@ -616,16 +715,15 @@ def _request_without_resource_warnings(self, method, url):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
with HTTPSConnectionPool(
- self.host, self.port, ca_certs=DEFAULT_CA
+ self.host,
+ self.port,
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
https_pool.request(method, url)
w = [x for x in w if not isinstance(x.message, ResourceWarning)]
- # If we're using a deprecated TLS version we can remove 'DeprecationWarning'
- if self.tls_protocol_deprecated():
- w = [x for x in w if x.category != DeprecationWarning]
-
return w
def test_set_ssl_version_to_tls_version(self):
@@ -649,7 +747,10 @@ def test_tls_protocol_name_of_socket(self):
pytest.skip("Skipping base test class")
with HTTPSConnectionPool(
- self.host, self.port, ca_certs=DEFAULT_CA
+ self.host,
+ self.port,
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
conn = https_pool._get_conn()
try:
@@ -660,37 +761,41 @@ def test_tls_protocol_name_of_socket(self):
finally:
conn.close()
- def test_default_tls_version_deprecations(self):
+ def test_ssl_version_is_deprecated(self):
if self.tls_protocol_name is None:
pytest.skip("Skipping base test class")
with HTTPSConnectionPool(
- self.host, self.port, ca_certs=DEFAULT_CA
+ self.host, self.port, ca_certs=DEFAULT_CA, ssl_version=self.ssl_version()
) as https_pool:
conn = https_pool._get_conn()
try:
with warnings.catch_warnings(record=True) as w:
conn.connect()
- if not hasattr(conn.sock, "version"):
- pytest.skip("SSLSocket.version() not available")
finally:
conn.close()
- if self.tls_protocol_deprecated():
- assert len(w) == 1
- assert str(w[0].message) == (
- "Negotiating TLSv1/TLSv1.1 by default is deprecated "
- "and will be disabled in urllib3 v2.0.0. Connecting to "
- "'%s' with '%s' can be enabled by explicitly opting-in "
- "with 'ssl_version'" % (self.host, self.tls_protocol_name)
+ assert len(w) >= 1
+ assert any(x.category == DeprecationWarning for x in w)
+ assert any(
+ str(x.message)
+ == (
+ "'ssl_version' option is deprecated and will be removed in "
+ "a future release of urllib3 2.x. Instead use 'ssl_minimum_version'"
)
- else:
- assert w == []
+ for x in w
+ )
- @pytest.mark.parametrize("ssl_version", [ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS_CLIENT])
- def test_no_tls_version_deprecation_with_ssl_version(self, ssl_version):
+ @pytest.mark.parametrize(
+ "ssl_version", [None, ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS_CLIENT]
+ )
+ def test_ssl_version_with_protocol_tls_or_client_not_deprecated(self, ssl_version):
if self.tls_protocol_name is None:
pytest.skip("Skipping base test class")
+ if self.tls_protocol_not_default():
+ pytest.skip(
+ f"Skipping because '{self.tls_protocol_name}' isn't set by default"
+ )
with HTTPSConnectionPool(
self.host, self.port, ca_certs=DEFAULT_CA, ssl_version=ssl_version
@@ -708,11 +813,13 @@ def test_no_tls_version_deprecation_with_ssl_context(self):
if self.tls_protocol_name is None:
pytest.skip("Skipping base test class")
+ ctx = util.ssl_.create_urllib3_context(ssl_minimum_version=self.tls_version())
+
with HTTPSConnectionPool(
self.host,
self.port,
ca_certs=DEFAULT_CA,
- ssl_context=util.ssl_.create_urllib3_context(),
+ ssl_context=ctx,
) as https_pool:
conn = https_pool._get_conn()
try:
@@ -723,14 +830,46 @@ def test_no_tls_version_deprecation_with_ssl_context(self):
assert w == []
+ def test_tls_version_maximum_and_minimum(self):
+ if self.tls_protocol_name is None:
+ pytest.skip("Skipping base test class")
+
+ from ssl import TLSVersion
+
+ min_max_versions = [
+ (self.tls_version(), self.tls_version()),
+ (TLSVersion.MINIMUM_SUPPORTED, self.tls_version()),
+ (TLSVersion.MINIMUM_SUPPORTED, TLSVersion.MAXIMUM_SUPPORTED),
+ ]
+
+ for minimum_version, maximum_version in min_max_versions:
+ with HTTPSConnectionPool(
+ self.host,
+ self.port,
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=minimum_version,
+ ssl_maximum_version=maximum_version,
+ ) as https_pool:
+ conn = https_pool._get_conn()
+ try:
+ conn.connect()
+ assert conn.sock.version() == self.tls_protocol_name
+ finally:
+ conn.close()
+
@pytest.mark.skipif(sys.version_info < (3, 8), reason="requires python 3.8+")
def test_sslkeylogfile(self, tmpdir, monkeypatch):
if not hasattr(util.SSLContext, "keylog_filename"):
pytest.skip("requires OpenSSL 1.1.1+")
+
keylog_file = tmpdir.join("keylogfile.txt")
monkeypatch.setenv("SSLKEYLOGFILE", str(keylog_file))
+
with HTTPSConnectionPool(
- self.host, self.port, ca_certs=DEFAULT_CA
+ self.host,
+ self.port,
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
) as https_pool:
r = https_pool.request("GET", "/")
assert r.status == 200, r.data
@@ -751,7 +890,12 @@ def test_sslkeylogfile_empty(self, monkeypatch, sslkeylogfile):
monkeypatch.setenv("SSLKEYLOGFILE", sslkeylogfile)
else:
monkeypatch.delenv("SSLKEYLOGFILE", raising=False)
- with HTTPSConnectionPool(self.host, self.port, ca_certs=DEFAULT_CA) as pool:
+ with HTTPSConnectionPool(
+ self.host,
+ self.port,
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
+ ) as pool:
r = pool.request("GET", "/")
assert r.status == 200, r.data
@@ -759,11 +903,26 @@ def test_alpn_default(self):
"""Default ALPN protocols are sent by default."""
if not has_alpn() or not has_alpn(ssl.SSLContext):
pytest.skip("ALPN-support not available")
- with HTTPSConnectionPool(self.host, self.port, ca_certs=DEFAULT_CA) as pool:
+ with HTTPSConnectionPool(
+ self.host,
+ self.port,
+ ca_certs=DEFAULT_CA,
+ ssl_minimum_version=self.tls_version(),
+ ) as pool:
r = pool.request("GET", "/alpn_protocol", retries=0)
assert r.status == 200
assert r.data.decode("utf-8") == util.ALPN_PROTOCOLS[0]
+ def test_default_ssl_context_ssl_min_max_versions(self):
+ ctx = urllib3.util.ssl_.create_urllib3_context()
+ assert ctx.minimum_version == ssl.TLSVersion.TLSv1_2
+ assert ctx.maximum_version == ssl.TLSVersion.MAXIMUM_SUPPORTED
+
+ def test_ssl_context_ssl_version_uses_ssl_min_max_versions(self):
+ ctx = urllib3.util.ssl_.create_urllib3_context(ssl_version=self.ssl_version())
+ assert ctx.minimum_version == self.tls_version()
+ assert ctx.maximum_version == self.tls_version()
+
@pytest.mark.usefixtures("requires_tlsv1")
class TestHTTPS_TLSv1(TestHTTPS):
| Switch to setting SSLContext.minimum_version
The `ssl.OP_NO_TLSvX` options were deprecated in Python 3.7. The replacement `TLSVersion` values are available on OpenSSL 1.1.0g+; if we detect an `SSLContext` that supports them, we should set `SSLContext.minimum_version = TLSVersion.TLSv1_2`
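A minimal sketch of the proposal, assuming only Python's `ssl` module (the helper name is hypothetical, not urllib3 API):

```python
import ssl

def make_min_tls12_context() -> ssl.SSLContext:
    # Hypothetical helper: prefer the modern TLSVersion API over the
    # deprecated OP_NO_TLSvX option flags when the runtime supports it.
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    if hasattr(ssl, "TLSVersion") and hasattr(ctx, "minimum_version"):
        # Python 3.7+ built against OpenSSL 1.1.0g+.
        ctx.minimum_version = ssl.TLSVersion.TLSv1_2
    else:
        # Fallback: the deprecated option flags this issue wants to replace.
        ctx.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3
        ctx.options |= ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1
    return ctx
```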
| Related to #2200 | 2021-05-15T22:16:17Z | [] | [] |
urllib3/urllib3 | 2,250 | urllib3__urllib3-2250 | [
"2243"
] | da53a649f77dd597be22f4e1bd4c7a5e1963ee5b | diff --git a/dummyserver/handlers.py b/dummyserver/handlers.py
--- a/dummyserver/handlers.py
+++ b/dummyserver/handlers.py
@@ -8,7 +8,7 @@
from datetime import datetime, timedelta
from http.client import responses
from io import BytesIO
-from typing import Dict, Optional, Sequence, Tuple, Union
+from typing import Any, Dict, Optional, Sequence, Tuple, Union
from urllib.parse import urlsplit
from tornado import httputil
@@ -25,10 +25,15 @@ def __init__(
body: Union[str, bytes, Sequence[Union[str, bytes]]] = "",
status: str = "200 OK",
headers: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
+ json: Optional[Any] = None,
) -> None:
self.body = body
self.status = status
- self.headers = headers or [("Content-type", "text/plain")]
+ if json is not None:
+ self.headers = headers or [("Content-type", "application/json")]
+ self.body = json
+ else:
+ self.headers = headers or [("Content-type", "text/plain")]
def __call__(self, request_handler: RequestHandler) -> None:
status, reason = self.status.split(" ", 1)
@@ -238,6 +243,10 @@ def echo(self, request: httputil.HTTPServerRequest) -> Response:
return Response(request.body)
+ def echo_json(self, request: httputil.HTTPServerRequest) -> Response:
+ "Echo back the JSON"
+ return Response(json=request.body, headers=list(request.headers.items()))
+
def echo_uri(self, request: httputil.HTTPServerRequest) -> Response:
"Echo back the requested URI"
assert request.uri is not None
diff --git a/src/urllib3/__init__.py b/src/urllib3/__init__.py
--- a/src/urllib3/__init__.py
+++ b/src/urllib3/__init__.py
@@ -6,7 +6,7 @@
import logging
import warnings
from logging import NullHandler
-from typing import Mapping, Optional, Type, Union
+from typing import Any, Mapping, Optional, Type, Union
from . import exceptions
from ._collections import HTTPHeaderDict
@@ -100,6 +100,7 @@ def request(
redirect: Optional[bool] = True,
retries: Optional[Union[Retry, bool, int]] = None,
timeout: Optional[Union[Timeout, float, int]] = 3,
+ json: Optional[Any] = None,
) -> BaseHTTPResponse:
"""
A convenience, top-level request method. It uses a module-global ``PoolManager`` instance.
@@ -119,4 +120,5 @@ def request(
redirect=redirect,
retries=retries,
timeout=timeout,
+ json=json,
)
diff --git a/src/urllib3/_request_methods.py b/src/urllib3/_request_methods.py
--- a/src/urllib3/_request_methods.py
+++ b/src/urllib3/_request_methods.py
@@ -1,3 +1,4 @@
+import json as _json
from typing import Any, Dict, Mapping, Optional, Sequence, Tuple, Union
from urllib.parse import urlencode
@@ -68,6 +69,7 @@ def request(
body: Optional[_TYPE_BODY] = None,
fields: Optional[_TYPE_FIELDS] = None,
headers: Optional[Mapping[str, str]] = None,
+ json: Optional[Any] = None,
**urlopen_kw: Any,
) -> BaseHTTPResponse:
"""
@@ -84,6 +86,21 @@ def request(
urlopen_kw["request_url"] = url
+ if json is not None and body is not None:
+ raise TypeError(
+ "request got values for both 'body' and 'json' parameters which are mutually exclusive"
+ )
+
+ if json is not None:
+ if headers is None:
+ headers = self.headers.copy() # type: ignore
+ if not ("content-type" in map(str.lower, headers.keys())):
+ headers["Content-Type"] = "application/json" # type: ignore
+
+ body = _json.dumps(json, separators=(",", ":"), ensure_ascii=False).encode(
+ "utf-8"
+ )
+
if body is not None:
urlopen_kw["body"] = body
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -1,4 +1,5 @@
import io
+import json as _json
import logging
import zlib
from contextlib import contextmanager
@@ -236,6 +237,19 @@ def get_redirect_location(self) -> Union[Optional[str], "Literal[False]"]:
def data(self) -> bytes:
raise NotImplementedError()
+ def json(self) -> Any:
+ """
+ Parses the body of the HTTP response as JSON.
+
+ To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder.
+
+ This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`.
+
+ Read more :ref:`here <json>`.
+ """
+ data = self.data.decode("utf-8")
+ return _json.loads(data)
+
@property
def url(self) -> Optional[str]:
raise NotImplementedError()
| diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -1,5 +1,4 @@
import io
-import json
import logging
import socket
import sys
@@ -818,26 +817,26 @@ def test_default_user_agent_header(self):
with HTTPConnectionPool(self.host, self.port) as pool:
# Use default user agent if no user agent was specified.
r = pool.request("GET", "/headers")
- request_headers = json.loads(r.data.decode("utf8"))
+ request_headers = r.json()
assert request_headers.get("User-Agent") == _get_default_user_agent()
# Prefer the request user agent over the default.
headers = {"UsEr-AGENt": custom_ua}
r = pool.request("GET", "/headers", headers=headers)
- request_headers = json.loads(r.data.decode("utf8"))
+ request_headers = r.json()
assert request_headers.get("User-Agent") == custom_ua
# Do not modify pool headers when using the default user agent.
pool_headers = {"foo": "bar"}
pool.headers = pool_headers
r = pool.request("GET", "/headers")
- request_headers = json.loads(r.data.decode("utf8"))
+ request_headers = r.json()
assert request_headers.get("User-Agent") == default_ua
assert "User-Agent" not in pool_headers
pool.headers.update({"User-Agent": custom_ua2})
r = pool.request("GET", "/headers")
- request_headers = json.loads(r.data.decode("utf8"))
+ request_headers = r.json()
assert request_headers.get("User-Agent") == custom_ua2
@pytest.mark.parametrize(
@@ -855,7 +854,7 @@ def test_default_user_agent_header(self):
def test_user_agent_header_not_sent_twice(self, headers, chunked):
with HTTPConnectionPool(self.host, self.port) as pool:
r = pool.request("GET", "/headers", headers=headers, chunked=chunked)
- request_headers = json.loads(r.data.decode("utf8"))
+ request_headers = r.json()
if not headers:
assert request_headers["User-Agent"].startswith("python-urllib3/")
@@ -870,14 +869,14 @@ def test_no_user_agent_header(self):
# Suppress user agent in the request headers.
no_ua_headers = {"User-Agent": SKIP_HEADER}
r = pool.request("GET", "/headers", headers=no_ua_headers)
- request_headers = json.loads(r.data.decode("utf8"))
+ request_headers = r.json()
assert "User-Agent" not in request_headers
assert no_ua_headers["User-Agent"] == SKIP_HEADER
# Suppress user agent in the pool headers.
pool.headers = no_ua_headers
r = pool.request("GET", "/headers")
- request_headers = json.loads(r.data.decode("utf8"))
+ request_headers = r.json()
assert "User-Agent" not in request_headers
assert no_ua_headers["User-Agent"] == SKIP_HEADER
@@ -885,7 +884,7 @@ def test_no_user_agent_header(self):
pool_headers = {"User-Agent": custom_ua}
pool.headers = pool_headers
r = pool.request("GET", "/headers", headers=no_ua_headers)
- request_headers = json.loads(r.data.decode("utf8"))
+ request_headers = r.json()
assert "User-Agent" not in request_headers
assert no_ua_headers["User-Agent"] == SKIP_HEADER
assert pool_headers.get("User-Agent") == custom_ua
@@ -917,7 +916,7 @@ def test_skip_header(self, accept_encoding, host, user_agent, chunked):
with HTTPConnectionPool(self.host, self.port) as pool:
r = pool.request("GET", "/headers", headers=headers, chunked=chunked)
- request_headers = json.loads(r.data.decode("utf8"))
+ request_headers = r.json()
if accept_encoding is None:
assert "Accept-Encoding" in request_headers
@@ -989,7 +988,7 @@ def test_bytes_header(self):
with HTTPConnectionPool(self.host, self.port) as pool:
headers = {"User-Agent": b"test header"}
r = pool.request("GET", "/headers", headers=headers)
- request_headers = json.loads(r.data.decode("utf8"))
+ request_headers = r.json()
assert "User-Agent" in request_headers
assert request_headers["User-Agent"] == "test header"
@@ -1003,7 +1002,7 @@ def test_user_agent_non_ascii_user_agent(self, user_agent):
"/headers",
headers={"User-Agent": user_agent},
)
- request_headers = json.loads(r.data.decode("utf8"))
+ request_headers = r.json()
assert "User-Agent" in request_headers
assert request_headers["User-Agent"] == "Schönefeld/1.18.0"
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -1,5 +1,4 @@
import datetime
-import json
import logging
import os.path
import shutil
@@ -178,7 +177,7 @@ def test_client_intermediate(self) -> None:
ssl_minimum_version=self.tls_version(),
) as https_pool:
r = https_pool.request("GET", "/certificate")
- subject = json.loads(r.data.decode("utf-8"))
+ subject = r.json()
assert subject["organizationalUnitName"].startswith("Testing cert")
def test_client_no_intermediate(self) -> None:
@@ -210,7 +209,7 @@ def test_client_key_password(self) -> None:
ssl_minimum_version=self.tls_version(),
) as https_pool:
r = https_pool.request("GET", "/certificate")
- subject = json.loads(r.data.decode("utf-8"))
+ subject = r.json()
assert subject["organizationalUnitName"].startswith("Testing cert")
@requires_ssl_context_keyfile_password()
diff --git a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py
--- a/test/with_dummyserver/test_poolmanager.py
+++ b/test/with_dummyserver/test_poolmanager.py
@@ -1,5 +1,4 @@
import gzip
-import json
from test import LONG_TIMEOUT
from unittest import mock
@@ -145,7 +144,7 @@ def test_redirect_cross_host_remove_headers(self):
assert r.status == 200
- data = json.loads(r.data.decode("utf-8"))
+ data = r.json()
assert "Authorization" not in data
@@ -158,7 +157,7 @@ def test_redirect_cross_host_remove_headers(self):
assert r.status == 200
- data = json.loads(r.data.decode("utf-8"))
+ data = r.json()
assert "authorization" not in data
assert "Authorization" not in data
@@ -175,7 +174,7 @@ def test_redirect_cross_host_no_remove_headers(self):
assert r.status == 200
- data = json.loads(r.data.decode("utf-8"))
+ data = r.json()
assert data["Authorization"] == "foo"
@@ -191,7 +190,7 @@ def test_redirect_cross_host_set_removed_headers(self):
assert r.status == 200
- data = json.loads(r.data.decode("utf-8"))
+ data = r.json()
assert "X-API-Secret" not in data
assert data["Authorization"] == "bar"
@@ -207,7 +206,7 @@ def test_redirect_cross_host_set_removed_headers(self):
assert r.status == 200
- data = json.loads(r.data.decode("utf-8"))
+ data = r.json()
assert "x-api-secret" not in data
assert "X-API-Secret" not in data
@@ -311,32 +310,32 @@ def test_missing_port(self):
def test_headers(self):
with PoolManager(headers={"Foo": "bar"}) as http:
r = http.request("GET", f"{self.base_url}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
r = http.request("POST", f"{self.base_url}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
r = http.request_encode_url("GET", f"{self.base_url}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
r = http.request_encode_body("POST", f"{self.base_url}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
r = http.request_encode_url(
"GET", f"{self.base_url}/headers", headers={"Baz": "quux"}
)
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") is None
assert returned_headers.get("Baz") == "quux"
r = http.request_encode_body(
"GET", f"{self.base_url}/headers", headers={"Baz": "quux"}
)
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") is None
assert returned_headers.get("Baz") == "quux"
@@ -349,7 +348,7 @@ def test_headers_http_header_dict(self):
with PoolManager(headers=headers) as http:
r = http.request("GET", f"{self.base_url}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers["Foo"] == "bar"
assert returned_headers["Multi"] == "1, 2"
assert returned_headers["Baz"] == "quux"
@@ -363,7 +362,7 @@ def test_headers_http_header_dict(self):
"Foo": "new",
},
)
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers["Foo"] == "new"
assert returned_headers["Multi"] == "1, 2"
assert returned_headers["Baz"] == "quux"
@@ -490,8 +489,50 @@ def test_top_level_request_with_timeout(self):
redirect=True,
retries=None,
timeout=2.5,
+ json=None,
)
+ @pytest.mark.parametrize(
+ "headers",
+ [
+ None,
+ {"content-Type": "application/json"},
+ {"content-Type": "text/plain"},
+ {"attribute": "value", "CONTENT-TYPE": "application/json"},
+ HTTPHeaderDict(cookie="foo, bar"),
+ ],
+ )
+ def test_request_with_json(self, headers):
+ body = {"attribute": "value"}
+ r = request(
+ method="POST", url=f"{self.base_url}/echo_json", headers=headers, json=body
+ )
+ assert r.status == 200
+ assert r.json() == body
+ if headers is not None and "application/json" not in headers.values():
+ assert "text/plain" in r.headers["Content-Type"].replace(" ", "").split(",")
+ else:
+ assert "application/json" in r.headers["Content-Type"].replace(
+ " ", ""
+ ).split(",")
+
+ def test_top_level_request_with_json_with_httpheaderdict(self):
+ body = {"attribute": "value"}
+ header = HTTPHeaderDict(cookie="foo, bar")
+ with PoolManager(headers=header) as http:
+ r = http.request(method="POST", url=f"{self.base_url}/echo_json", json=body)
+ assert r.status == 200
+ assert r.json() == body
+ assert "application/json" in r.headers["Content-Type"].replace(
+ " ", ""
+ ).split(",")
+
+ def test_top_level_request_with_body_and_json(self):
+ match = "request got values for both 'body' and 'json' parameters which are mutually exclusive"
+ with pytest.raises(TypeError, match=match):
+ body = {"attribute": "value"}
+ request(method="POST", url=f"{self.base_url}/echo", body=body, json=body)
+
@pytest.mark.skipif(not HAS_IPV6, reason="IPv6 is not supported on this system")
class TestIPv6PoolManager(IPv6HTTPDummyServerTestCase):
diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -1,4 +1,3 @@
-import json
import os.path
import shutil
import socket
@@ -272,13 +271,13 @@ def test_headers(self):
) as http:
r = http.request_encode_url("GET", f"{self.http_url}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
assert returned_headers.get("Hickory") == "dickory"
assert returned_headers.get("Host") == f"{self.http_host}:{self.http_port}"
r = http.request_encode_url("GET", f"{self.http_url_alt}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
assert returned_headers.get("Hickory") == "dickory"
assert (
@@ -286,7 +285,7 @@ def test_headers(self):
)
r = http.request_encode_url("GET", f"{self.https_url}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
assert returned_headers.get("Hickory") is None
assert (
@@ -294,7 +293,7 @@ def test_headers(self):
)
r = http.request_encode_body("POST", f"{self.http_url}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
assert returned_headers.get("Hickory") == "dickory"
assert returned_headers.get("Host") == f"{self.http_host}:{self.http_port}"
@@ -302,7 +301,7 @@ def test_headers(self):
r = http.request_encode_url(
"GET", f"{self.http_url}/headers", headers={"Baz": "quux"}
)
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") is None
assert returned_headers.get("Baz") == "quux"
assert returned_headers.get("Hickory") == "dickory"
@@ -311,7 +310,7 @@ def test_headers(self):
r = http.request_encode_url(
"GET", f"{self.https_url}/headers", headers={"Baz": "quux"}
)
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") is None
assert returned_headers.get("Baz") == "quux"
assert returned_headers.get("Hickory") is None
@@ -322,7 +321,7 @@ def test_headers(self):
r = http.request_encode_body(
"GET", f"{self.http_url}/headers", headers={"Baz": "quux"}
)
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") is None
assert returned_headers.get("Baz") == "quux"
assert returned_headers.get("Hickory") == "dickory"
@@ -331,7 +330,7 @@ def test_headers(self):
r = http.request_encode_body(
"GET", f"{self.https_url}/headers", headers={"Baz": "quux"}
)
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") is None
assert returned_headers.get("Baz") == "quux"
assert returned_headers.get("Hickory") is None
@@ -348,13 +347,13 @@ def test_https_headers(self):
) as http:
r = http.request_encode_url("GET", f"{self.http_url}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
assert returned_headers.get("Hickory") == "dickory"
assert returned_headers.get("Host") == f"{self.http_host}:{self.http_port}"
r = http.request_encode_url("GET", f"{self.http_url_alt}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
assert returned_headers.get("Hickory") == "dickory"
assert (
@@ -364,7 +363,7 @@ def test_https_headers(self):
r = http.request_encode_body(
"GET", f"{self.https_url}/headers", headers={"Baz": "quux"}
)
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") is None
assert returned_headers.get("Baz") == "quux"
assert returned_headers.get("Hickory") is None
@@ -382,7 +381,7 @@ def test_https_headers_forwarding_for_https(self):
) as http:
r = http.request_encode_url("GET", f"{self.https_url}/headers")
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
assert returned_headers.get("Hickory") == "dickory"
assert (
@@ -399,7 +398,7 @@ def test_headerdict(self):
) as http:
request_headers = HTTPHeaderDict(baz="quux")
r = http.request("GET", f"{self.http_url}/headers", headers=request_headers)
- returned_headers = json.loads(r.data.decode())
+ returned_headers = r.json()
assert returned_headers.get("Foo") == "bar"
assert returned_headers.get("Baz") == "quux"
| Proposal: Improve support for JSON
Many APIs are simply JSON bodies over HTTP; it'd be nice if we could make that experience more enjoyable for most users.
On the request side:
- Raise a `ValueError` if `body` is also given, or if `fields` is given when called via `request_encode_body`
- Set `Content-Type: application/json` via `headers.setdefault()` to avoid stepping on custom Content-Types.
- Encode the JSON to bytes and set as the `body` parameter, remove spaces after delimiters.
- Add `json` to `urllib3.request()`
On the response side:
- Add a function `json()` to `HTTPResponse`
- `HTTPResponse.json()` should parse the JSON via `json.loads(resp.data)` and return the parsed object.
On the documentation side:
- In the JSON section, recommend using `json=` instead of encoding your own JSON to bytes if you're not doing anything special
- Document how to use a custom JSON library/encoder like orjson by encoding to bytes and sending via `body=`, and pulling `resp.data` for decoding JSON responses (a usage sketch follows below)
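A usage sketch of the proposed API (the endpoint is a placeholder):

```python
import urllib3

# Proposed usage: pass 'json=' on the request and call '.json()' on the
# response, as described above.
resp = urllib3.request(
    "POST",
    "https://example.com/api/things",
    json={"name": "widget", "count": 3},
)
print(resp.json()["name"])
```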
| This looks great! I have two thoughts.
I don't think we should expose an `encoding` parameter: since Python 3.6, `json.loads` supports all standard JSON encodings automatically. If you're handling JSON that is not UTF-8, UTF-16 or UTF-32, then you're on your own.
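A quick demonstration of that auto-detection:

```python
import json

# Since Python 3.6, json.loads() accepts bytes and detects
# UTF-8, UTF-16, or UTF-32 on its own.
payload = '{"näme": "wert"}'.encode("utf-16")
print(json.loads(payload))  # {'näme': 'wert'}
```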
Regarding alternative JSON packages, I think the cool kid is orjson these days, especially for encoding to bytes: https://pythonspeed.com/articles/faster-json-library/
@pquentin Oh nice, today I learned! Begone, `encoding`! And sounds good, we'll use `orjson` as an example.
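A sketch of that custom-encoder path, assuming orjson is installed and using a placeholder endpoint:

```python
import orjson
import urllib3

http = urllib3.PoolManager()
resp = http.request(
    "POST",
    "https://example.com/api/things",  # placeholder endpoint
    body=orjson.dumps({"name": "widget"}),  # orjson returns UTF-8 bytes
    headers={"Content-Type": "application/json"},
)
print(orjson.loads(resp.data))  # decode the response yourself
```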
Hi, I'd like to work on this issue!
@justlund Sounds good, let me know if you have questions :)
Answering questions raised by @V1NAY8 on Discord:
> 1. We should add a new parameter that says json= to all request methods ?
The two places that should have a `json` parameter are:
- `RequestMethods.request`
- `urllib3.request`
The logic should be implemented in `RequestMethods.request`
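Condensed, the logic mirrors what the patch above adds to `RequestMethods.request` (the standalone helper name here is hypothetical, for illustration only):

```python
import json as _json

def _prepare_json_body(body, json, headers, default_headers):
    # Hypothetical helper condensing the patch: 'body' and 'json' are
    # mutually exclusive, and Content-Type is only set if absent.
    if json is not None and body is not None:
        raise TypeError(
            "request got values for both 'body' and 'json' parameters "
            "which are mutually exclusive"
        )
    if json is not None:
        headers = dict(headers if headers is not None else default_headers)
        if "content-type" not in (k.lower() for k in headers):
            headers["Content-Type"] = "application/json"
        body = _json.dumps(
            json, separators=(",", ":"), ensure_ascii=False
        ).encode("utf-8")
    return body, headers
```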
> 2. And that json should consist of all the header, body , fields as keys. (if we get any extra throw exception)
> if we don't get header we do headers.setdefault()
> if they don't give fields its fine for all methods, except for request_encode_body
> if they don't give body its fine
If the `json=` parameter is specified, there should be errors only if `body` is non-None
> 3. (header, body, fields) and json are mutually exclusive?
> if they give any one of (header, body, fields) and json, then throw exception.
Does the above answer cover this or am I missing something extra? `headers` shouldn't trigger an error.
> 4. Also for this point "Encode the JSON to bytes and set as the body parameter, remove spaces after delimiters."
> https://urllib3.readthedocs.io/en/latest/user-guide.html#json
> As seen in this example
> JSON is already encoded and passed to body parameter,
> to make it more user friendly,
> We internally do the encoding using orjson to utf-8?
Yes we're going to be doing the encoding ourselves and passing via `body=` to `urlopen`
@V1NAY8 could you comment in this issue so I can assign you?
Yeah, I will do this :) | 2021-06-02T09:31:25Z | [] | [] |
urllib3/urllib3 | 2,257 | urllib3__urllib3-2257 | [
"1062"
] | 2ec06d1185f1642dfe8ff767a9ad3541bd0ddc39 | diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -135,7 +135,7 @@ def lint(session):
@nox.session()
def mypy(session):
"""Run mypy."""
- session.install("mypy")
+ session.install("mypy==0.812")
session.run("mypy", "--version")
session.log("mypy --strict src/urllib3")
diff --git a/src/urllib3/fields.py b/src/urllib3/fields.py
--- a/src/urllib3/fields.py
+++ b/src/urllib3/fields.py
@@ -1,12 +1,10 @@
import email.utils
import mimetypes
-import re
from typing import (
Callable,
Dict,
Iterable,
Mapping,
- Match,
Optional,
Sequence,
Tuple,
@@ -49,7 +47,21 @@ def format_header_param_rfc2231(name: str, value: Union[str, bytes]) -> str:
The value of the parameter, provided as ``bytes`` or `str``.
:returns:
An RFC-2231-formatted unicode string.
+
+ .. deprecated:: 2.0.0
+ Will be removed in urllib3 v3.0.0. This is not valid for
+ ``multipart/form-data`` header parameters.
"""
+ import warnings
+
+ warnings.warn(
+ "'format_header_param_rfc2231' is deprecated and will be "
+ "removed in urllib3 v3.0.0. This is not valid for "
+ "multipart/form-data header parameters.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
if isinstance(value, bytes):
value = value.decode("utf-8")
@@ -68,60 +80,78 @@ def format_header_param_rfc2231(name: str, value: Union[str, bytes]) -> str:
return value
-_HTML5_REPLACEMENTS = {
- "\u0022": "%22",
- # Replace "\" with "\\".
- "\u005C": "\u005C\u005C",
-}
-
-# All control characters from 0x00 to 0x1F *except* 0x1B.
-_HTML5_REPLACEMENTS.update(
- {chr(cc): f"%{cc:02X}" for cc in range(0x00, 0x1F + 1) if cc not in (0x1B,)}
-)
-
-
-def _replace_multiple(value: str, needles_and_replacements: Mapping[str, str]) -> str:
- def replacer(match: Match[str]) -> str:
- return needles_and_replacements[match.group(0)]
-
- pattern = re.compile(
- r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
- )
-
- result = pattern.sub(replacer, value)
-
- return result
-
-
-def format_header_param_html5(name: str, value: _TYPE_FIELD_VALUE) -> str:
+def format_multipart_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str:
"""
- Helper function to format and quote a single header parameter using the
- HTML5 strategy.
+ Format and quote a single multipart header parameter.
- Particularly useful for header parameters which might contain
- non-ASCII values, like file names. This follows the `HTML5 Working Draft
- Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
+ This follows the `WHATWG HTML Standard`_ as of 2021/06/10, matching
+ the behavior of current browser and curl versions. Values are
+ assumed to be UTF-8. The ``\\n``, ``\\r``, and ``"`` characters are
+ percent encoded.
- .. _HTML5 Working Draft Section 4.10.22.7:
- https://w3c.github.io/html/sec-forms.html#multipart-form-data
+ .. _WHATWG HTML Standard:
+ https://html.spec.whatwg.org/multipage/
+ form-control-infrastructure.html#multipart-form-data
:param name:
- The name of the parameter, a string expected to be ASCII only.
+ The name of the parameter, an ASCII-only ``str``.
:param value:
- The value of the parameter, provided as ``bytes`` or `str``.
+ The value of the parameter, a ``str`` or UTF-8 encoded
+ ``bytes``.
:returns:
- A unicode string, stripped of troublesome characters.
+ A string ``name="value"`` with the escaped value.
+
+ .. versionchanged:: 2.0.0
+ Matches the WHATWG HTML Standard as of 2021/06/10. Control
+ characters are no longer percent encoded.
+
+ .. versionchanged:: 2.0.0
+ Renamed from ``format_header_param_html5`` and
+ ``format_header_param``. The old names will be removed in
+ urllib3 v3.0.0.
"""
if isinstance(value, bytes):
value = value.decode("utf-8")
- value = _replace_multiple(value, _HTML5_REPLACEMENTS)
-
+ # percent encode \n \r "
+ value = value.translate({10: "%0A", 13: "%0D", 34: "%22"})
return f'{name}="{value}"'
-# For backwards-compatibility.
-format_header_param = format_header_param_html5
+def format_header_param_html5(name: str, value: _TYPE_FIELD_VALUE) -> str:
+ """
+ .. deprecated:: 2.0.0
+ Renamed to :func:`format_multipart_header_param`. Will be
+ removed in urllib3 v3.0.0.
+ """
+ import warnings
+
+ warnings.warn(
+ "'format_header_param_html5' has been renamed to "
+ "'format_multipart_header_param'. The old name will be "
+ "removed in urllib3 v3.0.0.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return format_multipart_header_param(name, value)
+
+
+def format_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str:
+ """
+ .. deprecated:: 2.0.0
+ Renamed to :func:`format_multipart_header_param`. Will be
+ removed in urllib3 v3.0.0.
+ """
+ import warnings
+
+ warnings.warn(
+ "'format_header_param' has been renamed to "
+ "'format_multipart_header_param'. The old name will be "
+ "removed in urllib3 v3.0.0.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return format_multipart_header_param(name, value)
class RequestField:
@@ -136,9 +166,11 @@ class RequestField:
An optional filename of the request field. Must be unicode.
:param headers:
An optional dict-like object of headers to initially use for the field.
- :param header_formatter:
- An optional callable that is used to encode and format the headers. By
- default, this is :func:`format_header_param_html5`.
+
+ .. versionchanged:: 2.0.0
+ The ``header_formatter`` parameter is deprecated and will
+ be removed in urllib3 v3.0.0. Override :meth:`_render_part`
+ instead.
"""
def __init__(
@@ -147,9 +179,7 @@ def __init__(
data: _TYPE_FIELD_VALUE,
filename: Optional[str] = None,
headers: Optional[Mapping[str, str]] = None,
- header_formatter: Callable[
- [str, _TYPE_FIELD_VALUE], str
- ] = format_header_param_html5,
+ header_formatter: Optional[Callable[[str, _TYPE_FIELD_VALUE], str]] = None,
):
self._name = name
self._filename = filename
@@ -157,16 +187,27 @@ def __init__(
self.headers: Dict[str, Optional[str]] = {}
if headers:
self.headers = dict(headers)
- self.header_formatter = header_formatter
+
+ if header_formatter is not None:
+ import warnings
+
+ warnings.warn(
+ "The 'header_formatter' parameter is deprecated and "
+ "will be removed in urllib3 v3.0.0. Override the "
+ "'_render_part' method instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.header_formatter = header_formatter
+ else:
+ self.header_formatter = format_multipart_header_param
@classmethod
def from_tuples(
cls,
fieldname: str,
value: _TYPE_FIELD_VALUE_TUPLE,
- header_formatter: Callable[
- [str, _TYPE_FIELD_VALUE], str
- ] = format_header_param_html5,
+ header_formatter: Optional[Callable[[str, _TYPE_FIELD_VALUE], str]] = None,
) -> "RequestField":
"""
A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
@@ -210,15 +251,18 @@ def from_tuples(
def _render_part(self, name: str, value: _TYPE_FIELD_VALUE) -> str:
"""
- Overridable helper function to format a single header parameter. By
- default, this calls ``self.header_formatter``.
+ Override this method to change how each multipart header
+ parameter is formatted. By default, this calls
+ :func:`format_multipart_header_param`.
:param name:
- The name of the parameter, a string expected to be ASCII only.
+ The name of the parameter, an ASCII-only ``str``.
:param value:
- The value of the parameter, provided as a unicode string.
- """
+ The value of the parameter, a ``str`` or UTF-8 encoded
+ ``bytes``.
+ :meta public:
+ """
return self.header_formatter(name, value)
def _render_parts(
| diff --git a/test/test_fields.py b/test/test_fields.py
--- a/test/test_fields.py
+++ b/test/test_fields.py
@@ -1,6 +1,13 @@
import pytest
-from urllib3.fields import RequestField, format_header_param_rfc2231, guess_content_type
+from urllib3.fields import (
+ RequestField,
+ format_header_param,
+ format_header_param_html5,
+ format_header_param_rfc2231,
+ format_multipart_header_param,
+ guess_content_type,
+)
class TestRequestField:
@@ -52,50 +59,52 @@ def test_render_parts(self):
parts = field._render_parts([("name", "value"), ("filename", "value")])
assert parts == 'name="value"; filename="value"'
- def test_render_part_rfc2231_unicode(self):
- field = RequestField(
- "somename", "data", header_formatter=format_header_param_rfc2231
- )
- param = field._render_part("filename", "n\u00e4me")
- assert param == "filename*=utf-8''n%C3%A4me"
+ @pytest.mark.parametrize(
+ ("value", "expect"),
+ [("näme", "filename*=utf-8''n%C3%A4me"), (b"name", 'filename="name"')],
+ )
+ def test_format_header_param_rfc2231_deprecated(self, value, expect):
+ with pytest.deprecated_call(match=r"urllib3 v3\.0\.0"):
+ param = format_header_param_rfc2231("filename", value)
- def test_render_part_rfc2231_ascii(self):
- field = RequestField(
- "somename", "data", header_formatter=format_header_param_rfc2231
- )
- param = field._render_part("filename", b"name")
- assert param == 'filename="name"'
+ assert param == expect
- def test_render_part_html5_unicode(self):
- field = RequestField("somename", "data")
- param = field._render_part("filename", "n\u00e4me")
- assert param == 'filename="n\u00e4me"'
+ def test_format_header_param_html5_deprecated(self):
+ with pytest.deprecated_call(match=r"urllib3 v3\.0\.0"):
+ param2 = format_header_param_html5("filename", "name")
- def test_render_part_html5_ascii(self):
- field = RequestField("somename", "data")
- param = field._render_part("filename", b"name")
- assert param == 'filename="name"'
+ with pytest.deprecated_call(match=r"urllib3 v3\.0\.0"):
+ param1 = format_header_param("filename", "name")
- def test_render_part_html5_unicode_escape(self):
- field = RequestField("somename", "data")
- param = field._render_part("filename", "hello\\world\u0022")
- assert param == 'filename="hello\\\\world%22"'
+ assert param1 == param2
- def test_render_part_html5_unicode_with_control_character(self):
- field = RequestField("somename", "data")
- param = field._render_part("filename", "hello\x1A\x1B\x1C")
- assert param == 'filename="hello%1A\x1B%1C"'
+ @pytest.mark.parametrize(
+ ("value", "expect"),
+ [
+ ("name", "name"),
+ ("näme", "näme"),
+ (b"n\xc3\xa4me", "näme"),
+ ("ski ⛷.txt", "ski ⛷.txt"),
+ ("control \x1A\x1B\x1C", "control \x1A\x1B\x1C"),
+ ("backslash \\", "backslash \\"),
+ ("quotes '\"", "quotes '%22"),
+ ("newline \n\r", "newline %0A%0D"),
+ ],
+ )
+ def test_format_multipart_header_param(self, value, expect):
+ param = format_multipart_header_param("filename", value)
+ assert param == f'filename="{expect}"'
- def test_from_tuples_rfc2231(self):
- field = RequestField.from_tuples(
- "fieldname",
- ("filen\u00e4me", "data"),
- header_formatter=format_header_param_rfc2231,
- )
+ def test_from_tuples(self):
+ field = RequestField.from_tuples("file", ("スキー旅行.txt", "data"))
cd = field.headers["Content-Disposition"]
- assert cd == "form-data; name=\"fieldname\"; filename*=utf-8''filen%C3%A4me"
+ assert cd == 'form-data; name="file"; filename="スキー旅行.txt"'
+
+ def test_from_tuples_rfc2231(self):
+ with pytest.deprecated_call(match=r"urllib3 v3\.0\.0"):
+ field = RequestField.from_tuples(
+ "file", ("näme", "data"), header_formatter=format_header_param_rfc2231
+ )
- def test_from_tuples_html5(self):
- field = RequestField.from_tuples("fieldname", ("filen\u00e4me", "data"))
cd = field.headers["Content-Disposition"]
- assert cd == 'form-data; name="fieldname"; filename="filen\u00e4me"'
+ assert cd == "form-data; name=\"file\"; filename*=utf-8''n%C3%A4me"
| Bug: Content-Disposition Header Lacking "filename" When "filename*" Is Present
#### Description:
When sending files to a server that does not understand "filename*" parameters inside the Content-Disposition header, the server will fail to find the file name because the "filename" parameter is not also included.
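For reference, for a file named "näme", the RFC 2231 style parameter such servers cannot parse, and the plain form they expect, look like this:

```
Content-Disposition: form-data; name="file"; filename*=utf-8''n%C3%A4me
Content-Disposition: form-data; name="file"; filename="näme"
```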
#### Justification:
According to [RFC 6266](https://tools.ietf.org/html/rfc6266#appendix-D), ideal header generators will include both a "filename" and a "filename*" parameter whenever the "filename*" form is required:
> – Include a "filename*" parameter where the desired filename cannot be expressed faithfully using the "filename" form. Note that legacy user agents will not process this, and will fall back to using the "filename" parameter's content.
>
> – When a "filename*" parameter is sent, to also generate a "filename" parameter as a fallback for user agents that do not support the "filename*" form, if possible. This can be done by substituting characters with US-ASCII sequences (e.g., Unicode character point U+00E4 (LATIN SMALL LETTER A WITH DIARESIS) by "ae"). Note that this may not be possible in some locales.
>
> – When a "filename" parameter is included as a fallback (as per above), "filename" should occur first, due to parsing problems in some existing implementations.
#### Why I Care:
This inconsistency caused a many-hour debugging session when trying to discover why file uploads to Google AppEngine blobstore stopped working when the requests library was upgraded (which bundles urllib3 with install). The eventual resolution was to urlencode the file name to a `str` before upload so that the "filename*" parameter was not added.
| So, we could definitely do this: we could aim to do so by encoding using an 'ignore' or 'replace' mode on the encoder. Alternatively, we could punycode.
I don't consider this enormously high priority because servers really *should* tolerate the `*` form. That means that I don't think this is going to bump any of my other work down, so anyone else who wants to is welcome to work on this.
Please note that `cgi.FieldStorage` doesn't support `filename*` form (in Python 2 at least), so all servers using the standard library's `cgi` module are actually broken. It's a bit annoying!
@sprat Pull requests to rectify this would be happily reviewed and accepted. Complaining gets us nowhere. Feel free to send a pull request to fix this.
I'm working on this.
Started doing some research. Created a Werkzeug application to echo the submitted `Content-Disposition` header.
```python
import werkzeug

form_html = """\
<!doctype html>
<meta charset=utf-8>
<form method=post enctype=multipart/form-data>
<input type=file multiple name=file>
<input type=submit>
</form>
"""


@werkzeug.Request.application
def app(request: werkzeug.Request):
    if request.method == "POST":
        for f in request.files.getlist("file"):
            print(repr(f.filename))
        return werkzeug.Response(status=204)
    return werkzeug.Response(form_html, content_type="text/html")


werkzeug.run_simple("localhost", 5000, app)
```
When a file named "basic.txt" is submitted from Firefox or Chromium, it prints:
```
form-data; name="file"; filename="basic.txt"
```
When a file named "ski ⛷.txt" is submitted from Firefox or Chromium, it prints:
```
form-data; name="file"; filename="ski ⛷.txt"
```
Browsers are providing one filename, and do not escape non-ASCII characters.
Using urllib3 to upload the file, the name is not encoded.
```python
import urllib3
http = urllib3.PoolManager()
http.request(
"POST",
"http://localhost:5000/",
fields={"file": ("ski ⛷.txt", b"value")},
)
```
```
form-data; name="file"; filename="ski ⛷.txt"
```
[WHATWG HTML Standard, section 4.10.21.8, Multipart form data:](https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#multipart-form-data)
> Return the byte sequence resulting from encoding the *entry list* using the rules described by RFC 7578, *Returning Values from Forms: `multipart/form-data`*
[RFC 7578, section 4.2, Content-Disposition Header Field for Each Part:](https://datatracker.ietf.org/doc/html/rfc7578#section-4.2)
> In most multipart types, the MIME header fields in each part are restricted to US-ASCII; for compatibility with those systems, file names normally visible to users MAY be encoded using the percent-encoding method in [Section 2](https://datatracker.ietf.org/doc/html/rfc7578#section-2), following how a "file:" URI \[[URI-SCHEME](https://datatracker.ietf.org/doc/html/rfc7578#ref-URI-SCHEME)\] might be encoded.
>
> NOTE: The encoding method described in \[[RFC5987](https://datatracker.ietf.org/doc/html/rfc5987)\], which would add a "filename*" parameter to the Content-Disposition header field, MUST NOT be used.
The WHATWG HTML Standard says to follow RFC 7578 to encode `multipart/form-data`. The RFC states that *only* a `filename` key should be present, a `filename*` key is invalid. Therefore the suggestion in the original issue to send both keys is incorrect, `filename*` is only used when receiving files in a response from the server (Werkzeug's `send_file` supports this, for example).
The RFC suggests using UTF-8 percent encoding. However, browser forms are using HTML character references. I'm not clear if that's a limitation of browsers; maybe since it's HTML, the filename is already HTML-encoded by the time it's submitted, so it's already ASCII and doesn't get percent-encoded.
Turns out urllib3 is already encoding values using `fields.format_header_param_html5`, which uses percent encoding, but only on control characters 0x00 - 0x1F, not arbitrary characters. It references the same multipart-form data standard above, but oddly, it says it "matches the behavior of curl and modern browsers". However, curl seems to have the same non-encoding behavior, and browsers don't use percent encoding, so this statement isn't correct.
```
curl -F "file=@ski ⛷.txt" http://localhost:5000
```
```
form-data; name="file"; filename="ski ⛷.txt"
```
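To make the `format_header_param_html5` behavior described above concrete, here is a simplified sketch of control-character-only percent-encoding. It is an illustration, not the exact urllib3 implementation:

```python
def encode_control_chars(value: str) -> str:
    # Only control characters in the 0x00-0x1F range are percent-encoded;
    # everything else, including non-ASCII, passes through unchanged.
    return "".join(
        f"%{ord(ch):02X}" if ord(ch) < 0x20 else ch for ch in value
    )


print(encode_control_chars("ski ⛷.txt"))  # ski ⛷.txt  (unchanged)
print(encode_control_chars("a\nb"))       # a%0Ab      (control char escaped)
```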
This issue was opened before #1492, the old RFC 2231 encoder *does* incorrectly generate a `filename*` key. This issue was valid at the time it was reported, was invalidated by #1492, but still revealed other changes and deprecations that needed to happen.
Did some more investigation and discussion on Discord.
* After adding `<meta charset=utf-8>` to the test HTML, the browser began passing through the name unchanged like curl did. It seems the legacy behavior when no encoding is set is to use char ref replacement.
* Neither browsers nor curl do anything special with control characters, all bytes are passed through unchanged.
* urllib3 percent encodes control characters, but no other characters
* RFC 7578 says "MAY be encoded", not "MUST" be encoded
Therefore, we've concluded that current behavior of browsers/curl/urllib3 is correct, the utf-8 filename can be passed through unchanged. Even though urllib3 *could* remove the percent encoding of control characters, it will leave it for now.
Did more digging. #1492 https://github.com/urllib3/urllib3/pull/1492/commits/131d0d5b14aede8d061e916e765866dee9eef9e5 added the control character quoting behavior, after @sethmlarson https://github.com/urllib3/urllib3/pull/1492#discussion_r264646123 noted that the WHATWG standard said to. This was a bit confusing, since the standard no longer mentions that. I tracked that down to https://github.com/whatwg/html/pull/6282, which removed mention of escaping anything except `"` and `\` since it didn't match what clients were actually doing.
I think since we're at a major release point, we should stop escaping control characters to match the current standard. I can make that change, or I can change the docs only. Note that users can always provide a more constrained file name, and that servers should always be treating the given name as untrusted and do processing on it regardless of what the client sends. | 2021-06-10T17:41:30Z | [] | [] |
urllib3/urllib3 | 2,267 | urllib3__urllib3-2267 | [
"2263"
] | 00d4acb8f880a1246149e470fff019bb610be4c6 | diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py
--- a/src/urllib3/poolmanager.py
+++ b/src/urllib3/poolmanager.py
@@ -1,5 +1,6 @@
import functools
import logging
+import warnings
from typing import (
TYPE_CHECKING,
Any,
@@ -302,6 +303,14 @@ def connection_from_context(
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
+ if "strict" in request_context:
+ warnings.warn(
+ "The 'strict' parameter is no longer needed on Python 3+. "
+ "This will raise an error in urllib3 v3.0.0.",
+ DeprecationWarning,
+ )
+ request_context.pop("strict")
+
scheme = request_context["scheme"].lower()
pool_key_constructor = self.key_fn_by_scheme.get(scheme)
if not pool_key_constructor:
| diff --git a/test/test_poolmanager.py b/test/test_poolmanager.py
--- a/test/test_poolmanager.py
+++ b/test/test_poolmanager.py
@@ -1,5 +1,6 @@
import socket
from test import resolvesLocalhostFQDN
+from unittest.mock import patch
import pytest
@@ -267,6 +268,32 @@ def test_http_connection_from_context_case_insensitive(self):
assert pool is other_pool
assert all(isinstance(key, PoolKey) for key in p.pools.keys())
+ @patch("urllib3.poolmanager.PoolManager.connection_from_pool_key")
+ def test_connection_from_context_strict_param(self, connection_from_pool_key):
+ p = PoolManager()
+ context = {
+ "scheme": "http",
+ "host": "example.com",
+ "port": 8080,
+ "strict": True,
+ }
+ with pytest.warns(DeprecationWarning) as record:
+ p.connection_from_context(context)
+
+ assert 1 == len(record)
+ msg = (
+ "The 'strict' parameter is no longer needed on Python 3+. "
+ "This will raise an error in urllib3 v3.0.0."
+ )
+ assert record[0].message.args[0] == msg
+
+ _, kwargs = connection_from_pool_key.call_args
+ assert kwargs["request_context"] == {
+ "scheme": "http",
+ "host": "example.com",
+ "port": 8080,
+ }
+
def test_custom_pool_key(self):
"""Assert it is possible to define a custom key function."""
p = PoolManager(10)
diff --git a/test/with_dummyserver/test_integration.py b/test/with_dummyserver/test_integration.py
new file mode 100644
--- /dev/null
+++ b/test/with_dummyserver/test_integration.py
@@ -0,0 +1,35 @@
+import pytest
+import requests
+
+from dummyserver.server import DEFAULT_CA
+from dummyserver.testcase import HTTPDummyServerTestCase, HTTPSDummyServerTestCase
+
+
+class TestHTTPIntegration(HTTPDummyServerTestCase):
+ def test_requests_integration(self):
+ with pytest.warns(DeprecationWarning) as record:
+ response = requests.get(f"{self.scheme}://{self.host}:{self.port}")
+
+ assert 200 == response.status_code
+ assert 1 == len(record)
+ msg = (
+ "The 'strict' parameter is no longer needed on Python 3+. "
+ "This will raise an error in urllib3 v3.0.0."
+ )
+ assert record[0].message.args[0] == msg
+
+
+class TestHTTPSIntegration(HTTPSDummyServerTestCase):
+ def test_requests_integration(self):
+ with pytest.warns(DeprecationWarning) as record:
+ response = requests.get(
+ f"{self.scheme}://{self.host}:{self.port}", verify=DEFAULT_CA
+ )
+
+ assert 200 == response.status_code
+ assert 1 == len(record)
+ msg = (
+ "The 'strict' parameter is no longer needed on Python 3+. "
+ "This will raise an error in urllib3 v3.0.0."
+ )
+ assert record[0].message.args[0] == msg
| Tolerate the strict parameter being passed via connection_from_context()
If we test Requests against urllib3 `main` branch we receive the following:
```
File "/home/sethmlarson/urllib3/src/urllib3/poolmanager.py", line 354, in connection_from_url
return self.connection_from_host(
File "/home/sethmlarson/urllib3/src/urllib3/poolmanager.py", line 294, in connection_from_host
return self.connection_from_context(request_context)
File "/home/sethmlarson/urllib3/src/urllib3/poolmanager.py", line 310, in connection_from_context
pool_key = pool_key_constructor(request_context)
File "/home/sethmlarson/urllib3/src/urllib3/poolmanager.py", line 143, in _default_key_normalizer
return key_class(**context)
TypeError: __new__() got an unexpected keyword argument 'key_strict'
```
This started in https://github.com/urllib3/urllib3/pull/2064. One of the goals of v2 is to be mostly API compatible so downstream projects can immediately start using v2. Should we add the following within `PoolManager.connection_from_context`?
```python
if "strict" in request_context:
import warnings
warnings.warn(
"The 'strict' parameter is no longer needed on Python 3+. "
"This will raise an error in urllib3 v3.0.0.",
DeprecationWarning
)
request_context.pop("strict")
```
to maintain API compatibility with Requests and other libraries that support Python 2?
Another small thought is to add a simple integration test with `requests.get(...)` to our test suite so we can track that we don't lose complete API compatibility.
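A minimal check of the warning behavior, mirroring the unit test in this PR, might look like:

```python
import warnings

import urllib3

pm = urllib3.PoolManager()
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Old-style context with "strict" should warn instead of raising TypeError.
    pm.connection_from_context(
        {"scheme": "http", "host": "example.com", "port": 80, "strict": True}
    )
assert any(issubclass(w.category, DeprecationWarning) for w in caught)
```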
| > Should we add the following within PoolManager.connection_from_context?
Yes, if we are going to be mostly compatible.
> Another small thought is to add a simple integration test with requests.get(...) to our test suite so we can track that we don't lose complete API compatibility.
Good point :+1:
I can work on this.
| 2021-06-14T20:33:56Z | [] | [] |
urllib3/urllib3 | 2,305 | urllib3__urllib3-2305 | [
"1003"
] | 6622092ea119c6867bb9a9f9d6bb86e0f4c59526 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -44,6 +44,7 @@ class BaseSSLError(BaseException): # type: ignore
from .exceptions import (
ConnectTimeoutError,
HTTPSProxyError,
+ NameResolutionError,
NewConnectionError,
SystemTimeWarning,
)
@@ -205,7 +206,8 @@ def _new_conn(self) -> socket.socket:
source_address=self.source_address,
socket_options=self.socket_options,
)
-
+ except socket.gaierror as e:
+ raise NameResolutionError(self.host, e)
except SocketTimeout:
raise ConnectTimeoutError(
self,
diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -30,6 +30,7 @@
InsecureRequestWarning,
LocationValueError,
MaxRetryError,
+ NameResolutionError,
NewConnectionError,
ProtocolError,
ProxyError,
@@ -738,12 +739,15 @@ def urlopen( # type: ignore
SSLError,
CertificateError,
HTTPSProxyError,
+ NameResolutionError,
) as e:
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
clean_exit = False
if isinstance(e, (BaseSSLError, CertificateError)):
e = SSLError(e)
+ elif isinstance(e, NameResolutionError):
+ pass
elif isinstance(e, (OSError, NewConnectionError)) and self.proxy:
e = ProxyError("Cannot connect to proxy.", e)
elif isinstance(e, (OSError, HTTPException)):
diff --git a/src/urllib3/exceptions.py b/src/urllib3/exceptions.py
--- a/src/urllib3/exceptions.py
+++ b/src/urllib3/exceptions.py
@@ -1,3 +1,4 @@
+import socket
import warnings
from http.client import IncompleteRead as httplib_IncompleteRead
from typing import TYPE_CHECKING, Callable, List, Optional, Tuple, Union
@@ -376,3 +377,11 @@ class UnrewindableBodyError(HTTPError):
"""urllib3 encountered an error when trying to rewind a body"""
pass
+
+
+class NameResolutionError(HTTPError, socket.gaierror):
+ """Raised when host name resolution fails."""
+
+ def __init__(self, host: str, reason: socket.gaierror):
+ message = f"Failed to resolve '{host}' ({reason})"
+ HTTPError.__init__(self, message)
| diff --git a/test/__init__.py b/test/__init__.py
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -41,7 +41,7 @@
VALID_SOURCE_ADDRESSES = [(("::1", 0), True), (("127.0.0.1", 0), False)]
# RFC 5737: 192.0.2.0/24 is for testing only.
# RFC 3849: 2001:db8::/32 is for documentation only.
-INVALID_SOURCE_ADDRESSES = [("192.0.2.255", 0), ("2001:db8::1", 0)]
+INVALID_SOURCE_ADDRESSES = [(("192.0.2.255", 0), False), (("2001:db8::1", 0), True)]
# We use timeouts in three different ways in our tests
#
diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -762,6 +762,20 @@ def test_create_connection_error(self, getaddrinfo):
with pytest.raises(OSError, match="getaddrinfo returns an empty list"):
create_connection(("example.com", 80))
+ @patch("socket.getaddrinfo")
+ def test_dnsresolver_forced_error(self, getaddrinfo):
+ getaddrinfo.side_effect = socket.gaierror()
+ with pytest.raises(socket.gaierror):
+ # dns is valid but we force the error just for the sake of the test
+ create_connection(("example.com", 80))
+
+ def test_dnsresolver_expected_error(self):
+ with pytest.raises(socket.gaierror):
+ # windows: [Errno 11001] getaddrinfo failed in windows
+ # linux: [Errno -2] Name or service not known
+ # macos: [Errno 8] nodename nor servname provided, or not known
+ create_connection(("badhost.invalid", 80))
+
@pytest.mark.parametrize(
"input,params,expected",
(
diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -22,6 +22,7 @@
DecodeError,
EmptyPoolError,
MaxRetryError,
+ NameResolutionError,
NewConnectionError,
ReadTimeoutError,
UnrewindableBodyError,
@@ -417,7 +418,7 @@ def test_bad_connect(self):
with HTTPConnectionPool("badhost.invalid", self.port) as pool:
with pytest.raises(MaxRetryError) as e:
pool.request("GET", "/", retries=5)
- assert type(e.value.reason) == NewConnectionError
+ assert type(e.value.reason) == NameResolutionError
def test_keepalive(self):
with HTTPConnectionPool(self.host, self.port, block=True, maxsize=1) as pool:
@@ -709,13 +710,19 @@ def test_source_address(self):
r = pool.request("GET", "/source_address")
assert r.data == addr[0].encode()
- def test_source_address_error(self):
- for addr in INVALID_SOURCE_ADDRESSES:
- with HTTPConnectionPool(
- self.host, self.port, source_address=addr, retries=False
- ) as pool:
+ @pytest.mark.parametrize(
+ "invalid_source_address, is_ipv6", INVALID_SOURCE_ADDRESSES
+ )
+ def test_source_address_error(self, invalid_source_address, is_ipv6):
+ with HTTPConnectionPool(
+ self.host, self.port, source_address=invalid_source_address, retries=False
+ ) as pool:
+ if is_ipv6:
+ with pytest.raises(NameResolutionError):
+ pool.request("GET", f"/source_address?{invalid_source_address}")
+ else:
with pytest.raises(NewConnectionError):
- pool.request("GET", f"/source_address?{addr}")
+ pool.request("GET", f"/source_address?{invalid_source_address}")
def test_stream_keepalive(self):
x = 2
@@ -1024,7 +1031,7 @@ def test_disabled_retry(self):
with HTTPConnectionPool(
"thishostdoesnotexist.invalid", self.port, timeout=0.001
) as pool:
- with pytest.raises(NewConnectionError):
+ with pytest.raises(NameResolutionError):
pool.request("GET", "/test", retries=False)
def test_read_retries(self):
diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -16,7 +16,7 @@
ConnectTimeoutError,
HTTPSProxyError,
MaxRetryError,
- ProxyError,
+ NameResolutionError,
ProxySchemeUnknown,
ProxySchemeUnsupported,
SSLError,
@@ -146,7 +146,7 @@ def test_proxy_conn_fail(self):
with pytest.raises(MaxRetryError) as e:
http.request("GET", f"{self.http_url}/")
- assert type(e.value.reason) == ProxyError
+ assert type(e.value.reason) == NameResolutionError
def test_https_conn_failed(self):
"""
| DNS lookup error exception
I am using `requests` to check domain status for `k8s.io` domains (https://github.com/kubernetes/k8s.io/pull/30) and I can't find a good way to check for DNS lookup error like `ERR_NAME_NOT_RESOLVED` in Chrome. Exceptions output seems platform specific:
```
Traceback (most recent call last):
File "check.py", line 11, in <module>
status = requests.head('http://' + site)
File "C:\Python27\lib\site-packages\requests\api.py", line 93, in head
return request('head', url, **kwargs)
File "C:\Python27\lib\site-packages\requests\api.py", line 53, in request
return session.request(method=method, url=url, **kwargs)
File "C:\Python27\lib\site-packages\requests\sessions.py", line 468, in request
resp = self.send(prep, **send_kwargs)
File "C:\Python27\lib\site-packages\requests\sessions.py", line 576, in send
r = adapter.send(request, **kwargs)
File "C:\Python27\lib\site-packages\requests\adapters.py", line 437, in send
raise ConnectionError(e, request=request)
requests.exceptions.ConnectionError: HTTPConnectionPool(host='jenkins.k8s.io', port=80):
Max retries exceeded with url: / (Caused by
NewConnectionError('<requests.packages.urllib3.connection.HTTPConnection object at
0x029E43B0>: Failed to establish a new connection: [Errno 11001] getaddrinfo failed',))
```
So I was redirected here from https://github.com/kennethreitz/requests/issues/3630#issuecomment-255031285 to see if `urllib3` could expose more fine-grained DNSLookupError exception and maybe remove MaxRetryError wrapper.
| Ok, so.
We're certainly not going to remove the `MaxRetryError`: that's not a winner. But we can probably be more clear about the specific error by hoisting the `getaddrinfo` call on line 75 of `urllib3/util/connection.py` out of the `for` statement and shoving it in a `try...except` that raises an appropriate urllib3 exception.
That would at least give you a specific exception to try to haul out. `MaxRetryError` has a `reason` attribute which you can use to fetch the underlying exception, so you can from that point take the exception.
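For example, a caller could inspect the `reason` attribute like this (a sketch assuming the `NameResolutionError` eventually added by this PR):

```python
import urllib3
from urllib3.exceptions import MaxRetryError, NameResolutionError

http = urllib3.PoolManager()
try:
    http.request("GET", "http://badhost.invalid/")
except MaxRetryError as e:
    # The wrapped exception is preserved on the error's reason attribute.
    if isinstance(e.reason, NameResolutionError):
        print("DNS lookup failed:", e.reason)
```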
Note, however, that this change will take a while to propagate into a released version of Requests. In the interim, you may find it more helpful to check for DNS errors by simply calling `socket.getaddrinfo` yourself before making the request.
Too late. =) I've already done exception handling through string parsing - https://github.com/kennethreitz/requests/issues/3630#issuecomment-255100951
So how about reusing Chrome's `ERR_NAME_NOT_RESOLVED` constant which seems to have cross-platform value -105? https://cs.chromium.org/chromium/src/net/base/net_error_list.h?sq=package:chromium&dr=C&rcl=1477077889&l=130
Or it could be `NXDOMAIN` error code 3 - https://support.opendns.com/hc/en-us/articles/227986827-FAQ-What-are-common-DNS-return-or-response-codes-
I don't know that we gain anything from using a constant: it's just not that Pythonic an approach. Right now we're propagating the error code returned from getaddrinfo, which is the bare minimum of effort. If we're going to put in more effort we should do it Pythonically, which means instead of using an error code we should use a different exception type.
So, is `DNSLookupError` a good name for this extension?
Maybe, or perhaps `NameResolutionError`. Either would be reasonable.
The PR is closed automatically, because I am MIA, but if anyone wants to continue fixing this issue, the code is still here - https://github.com/urllib3/urllib3/pull/1008
I came here because I got another upvote on Stack Overflow. | 2021-06-25T07:15:43Z | [] | [] |
urllib3/urllib3 | 2,319 | urllib3__urllib3-2319 | [
"1003"
] | cb23f213fc616f30d402960807a15264b9d68b35 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -207,7 +207,7 @@ def _new_conn(self) -> socket.socket:
socket_options=self.socket_options,
)
except socket.gaierror as e:
- raise NameResolutionError(self.host, e)
+ raise NameResolutionError(self.host, self, e)
except SocketTimeout:
raise ConnectTimeoutError(
self,
diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -30,7 +30,6 @@
InsecureRequestWarning,
LocationValueError,
MaxRetryError,
- NameResolutionError,
NewConnectionError,
ProtocolError,
ProxyError,
@@ -739,7 +738,6 @@ def urlopen( # type: ignore
SSLError,
CertificateError,
HTTPSProxyError,
- NameResolutionError,
) as e:
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
@@ -747,8 +745,6 @@ def urlopen( # type: ignore
new_e: Exception = e
if isinstance(e, (BaseSSLError, CertificateError)):
new_e = SSLError(e)
- elif isinstance(e, NameResolutionError):
- pass
elif isinstance(e, (OSError, NewConnectionError)) and self.proxy:
new_e = ProxyError("Cannot connect to proxy.", e)
elif isinstance(e, (OSError, HTTPException)):
diff --git a/src/urllib3/exceptions.py b/src/urllib3/exceptions.py
--- a/src/urllib3/exceptions.py
+++ b/src/urllib3/exceptions.py
@@ -184,6 +184,14 @@ def pool(self) -> "HTTPConnection":
return self.conn
+class NameResolutionError(NewConnectionError):
+ """Raised when host name resolution fails."""
+
+ def __init__(self, host: str, conn: "HTTPConnection", reason: socket.gaierror):
+ message = f"Failed to resolve '{host}' ({reason})"
+ super().__init__(conn, message)
+
+
class EmptyPoolError(PoolError):
"""Raised when a pool runs out of connections and no more are allowed."""
@@ -379,11 +387,3 @@ class UnrewindableBodyError(HTTPError):
"""urllib3 encountered an error when trying to rewind a body"""
pass
-
-
-class NameResolutionError(HTTPError, socket.gaierror):
- """Raised when host name resolution fails."""
-
- def __init__(self, host: str, reason: socket.gaierror):
- message = f"Failed to resolve '{host}' ({reason})"
- HTTPError.__init__(self, message)
| diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -16,7 +16,7 @@
ConnectTimeoutError,
HTTPSProxyError,
MaxRetryError,
- NameResolutionError,
+ ProxyError,
ProxySchemeUnknown,
ProxySchemeUnsupported,
SSLError,
@@ -146,7 +146,7 @@ def test_proxy_conn_fail(self):
with pytest.raises(MaxRetryError) as e:
http.request("GET", f"{self.http_url}/")
- assert type(e.value.reason) == NameResolutionError
+ assert type(e.value.reason) == ProxyError
def test_https_conn_failed(self):
"""
| DNS lookup error exception
I am using `requests` to check domain status for `k8s.io` domains (https://github.com/kubernetes/k8s.io/pull/30) and I can't find a good way to check for DNS lookup error like `ERR_NAME_NOT_RESOLVED` in Chrome. Exceptions output seems platform specific:
```
Traceback (most recent call last):
File "check.py", line 11, in <module>
status = requests.head('http://' + site)
File "C:\Python27\lib\site-packages\requests\api.py", line 93, in head
return request('head', url, **kwargs)
File "C:\Python27\lib\site-packages\requests\api.py", line 53, in request
return session.request(method=method, url=url, **kwargs)
File "C:\Python27\lib\site-packages\requests\sessions.py", line 468, in request
resp = self.send(prep, **send_kwargs)
File "C:\Python27\lib\site-packages\requests\sessions.py", line 576, in send
r = adapter.send(request, **kwargs)
File "C:\Python27\lib\site-packages\requests\adapters.py", line 437, in send
raise ConnectionError(e, request=request)
requests.exceptions.ConnectionError: HTTPConnectionPool(host='jenkins.k8s.io', port=80):
Max retries exceeded with url: / (Caused by
NewConnectionError('<requests.packages.urllib3.connection.HTTPConnection object at
0x029E43B0>: Failed to establish a new connection: [Errno 11001] getaddrinfo failed',))
```
So I was redirected here from https://github.com/kennethreitz/requests/issues/3630#issuecomment-255031285 to see if `urllib3` could expose more fine-grained DNSLookupError exception and maybe remove MaxRetryError wrapper.
##
| Ok, so.
We're certainly not going to remove the `MaxRetryError`: that's not a winner. But we can probably be more clear about the specific error by hoisting the `getaddrinfo` call on line 75 of `urllib3/util/connection.py` out of the `for` statement and shoving it in a `try...except` that raises an appropriate urllib3 exception.
That would at least give you a specific exception to try to haul out. `MaxRetryError` has a `reason` attribute which you can use to fetch the underlying exception, so you can from that point take the exception.
Note, however, that this change will take a while to propagate into a released version of Requests. In the interim, you may find it more helpful to check for DNS errors by simply calling `socket.getaddrinfo` yourself before making the request.
Too late. =) I've already done exception handling through string parsing - https://github.com/kennethreitz/requests/issues/3630#issuecomment-255100951
So how about reusing Chrome's `ERR_NAME_NOT_RESOLVED` constant which seems to have cross-platform value -105? https://cs.chromium.org/chromium/src/net/base/net_error_list.h?sq=package:chromium&dr=C&rcl=1477077889&l=130
Or it could be `NXDOMAIN` error code 3 - https://support.opendns.com/hc/en-us/articles/227986827-FAQ-What-are-common-DNS-return-or-response-codes-
I don't know that we gain anything from using a constant: it's just not that Pythonic an approach. Right now we're propagating the error code returned from getaddrinfo, which is the bare minimum of effort. If we're going to put in more effort we should do it Pythonically, which means instead of using an error code we should use a different exception type.
So, is `DNSLookupError` a good name for this extension?
Maybe, or perhaps `NameResolutionError`. Either would be reasonable.
The PR is closed automatically, because I am MIA, but if anyone wants to continue fixing this issue, the code is still here - https://github.com/urllib3/urllib3/pull/1008
I came here because I got another upvote on Stack Overflow.
A few comments on this feature as implemented by @euri10:
---
IIUC, previously if DNS resolution failed, `NewConnectionError` (subclass of `ConnectTimeoutError`, `TimeoutError`, `OSError`, `HTTPError`) was raised, now `NameResolutionError` (subclass of `socket.gaierror`, `OSError`, `HTTPError`) is raised.
My main concern here is that users who previously handled `NewConnectionError` for DNS errors would now need to change/add `NameResolutionError`. I think that's OK and makes sense, but should be documented as a breaking change.
Another concern is how the `NameResolutionError` is classified - for example it is possible the DNS failed due to a timeout but it would no longer be caught by `TimeoutError` (correct me if I'm wrong).
This can get quite complicated because DNS resolution is its own tiny request-response transaction, with connect and read phases and timeouts. It's hard to say how the exception hierarchy should be laid out.
---
In `HTTPConnectionPool.urlopen`
https://github.com/urllib3/urllib3/blob/1831327b881880ed871f96f56a6977d360042e1b/src/urllib3/connectionpool.py#L749-L752
I wonder what happens when using e.g. `socks5h://` and the proxy is down. For most requests DNS resolution is the first thing that happens. Will this go to the `ProxyError` branch of the `NameResolutionError` branch?
---
Regarding `Retry` and PR #2312, how are DNS errors classified - `connect`, `read` or `other`? I think it's `connect` now (and previously), but want to make sure.
It also might be worth thinking about adding a separate limit for `resolve`, seems like a useful thing to have control over apart from connection to the actual resolved host.
I'll humbly pass on responding properly since I'm too new to have a good idea on this, but all seem to be very valid points to me.
@bluetech Your thoughful comments are deeply appreciated, thank you! I answered point by point, but the main takeaway is that we just have two things to do to close this issue:
* Change the subclass from `gaierror` to `NewConnectionError`
* Add retry (started #2312)
> IIUC, previously if DNS resolution failed, `NewConnectionError` (subclass of `ConnectTimeoutError`, `TimeoutError`, `OSError`, `HTTPError`) was raised, now `NameResolutionError` (subclass of `socket.gaierror`, `OSError`, `HTTPError`) is raised.
You're right. I thought `gaierror` was raised, but it's not the case. I think it would be better to make `NameResolutionError` a subclass of `NewConnectionError`.
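In code, the compatibility argument boils down to this (assuming the hierarchy proposed here, which is what the patch above implements):

```python
from urllib3.exceptions import NameResolutionError, NewConnectionError

# Handlers written against the old exception keep catching DNS failures.
assert issubclass(NameResolutionError, NewConnectionError)
```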
> Another concern is is how the `NameResolutionError` is classified - for example it is possible the DNS failed due to a timeout but it would no longer be caught by `TimeoutError` (correct me if I'm wrong).
We were already raising `TimeoutError` separately, and that's still the case. This isn't affected by the change, and not a concern. (But I had to check, so thanks for raising this!)
> I wonder what happens when using e.g. `socks5h://` and the proxy is down. For most requests DNS resolution is the first thing that happens. Will this go to the `ProxyError` branch of the `NameResolutionError` branch?
I assume you meant `or` instead of `of`. If we change the parent class to `NewConnectionError`, this will go to the `ProxyError` branch again, as it should. Nice catch!
> Regarding `Retry` and PR #2312, how are DNS errors classified - `connect`, `read` or `other`? I think it's `connect` now (and previously), but want to make sure.
>
> It also might be worth thinking about adding a separate limit for `resolve`, seems like a useful thing to have control over apart from connection to the actual resolved host.
Right, it's `connect`. I guess `resolve` could make sense, but I'd rather wait until someone has an actual use case.
Actually, if we change the parent class to `NewConnectionError`, that will fix the retry issue! So that's the only needed change.
> Actually, if we change the parent class to `NewConnectionError`, that will fix the retry issue! So that's the only needed change.
do you want it done as a separate PR @pquentin or is part of https://github.com/urllib3/urllib3/pull/2312 fine for you? | 2021-07-08T07:24:06Z | [] | [] |
urllib3/urllib3 | 2,327 | urllib3__urllib3-2327 | [
"2326"
] | baba8c2b4aabda8907a777039baa3f8758e4333a | diff --git a/src/urllib3/_collections.py b/src/urllib3/_collections.py
--- a/src/urllib3/_collections.py
+++ b/src/urllib3/_collections.py
@@ -139,16 +139,17 @@ def __setitem__(self, key: _KT, value: _VT) -> None:
# size of the pool. Because accessing a key should move it to
# the end of the eviction line, we pop it out first.
evicted_item = key, self._container.pop(key)
+ self._container[key] = value
except KeyError:
- if len(self._container) >= self._maxsize:
+ # When the key does not exist, we insert the value first so that
+ # evicting works in all cases, including when self._maxsize is 0
+ self._container[key] = value
+ if len(self._container) > self._maxsize:
# If we didn't evict an existing value, and we've hit our maximum
# size, then we have to evict the least recently used item from
# the beginning of the container.
evicted_item = self._container.popitem(last=False)
- # Finally, insert the new value.
- self._container[key] = value
-
# After releasing the lock on the pool, dispose of any evicted value.
if evicted_item is not None and self.dispose_func:
_, evicted_value = evicted_item
| diff --git a/test/test_collections.py b/test/test_collections.py
--- a/test/test_collections.py
+++ b/test/test_collections.py
@@ -22,6 +22,11 @@ def test_maxsize(self):
assert 0 not in d
assert (i + 1) in d
+ def test_maxsize_0(self):
+ d = Container(0)
+ d[1] = 1
+ assert len(d) == 0
+
def test_expire(self):
d = Container(5)
| Fix `RecentlyUsedContainer` regression
#2115 added typing to the `_collections` module. Unfortunately, it also introduced a regression that breaks `test_urllib3_pool_connection_closed` in the requests test suite, as noticed in https://github.com/urllib3/urllib3/pull/2316/checks?check_run_id=3049608329. `__setitem__` is implemented like this in 1.26.x:
https://github.com/urllib3/urllib3/blob/ba95e9eac73452d3bccfb5413b00d9a4fe3e4c31/src/urllib3/_collections.py#L61-L74
And it's now implemented like this in main:
https://github.com/urllib3/urllib3/blob/baba8c2b4aabda8907a777039baa3f8758e4333a/src/urllib3/_collections.py#L133-L155
When `len(self._container)` == `self._maxsize` == 0, `self._container[key] = value` is now called *after* `self._container.popitem(last=False)`, which is why that `popitem` call raises an exception.
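A minimal reproduction, using `maxsize=0` (the configuration hit by the requests test):

```python
from urllib3._collections import RecentlyUsedContainer

container = RecentlyUsedContainer(maxsize=0)
container[1] = 1  # before the fix: KeyError from popitem() on an empty dict
assert len(container) == 0  # after the fix: the value is evicted immediately
```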
Applying this hacky diff fixes the requests test:
```diff
diff --git a/src/urllib3/_collections.py b/src/urllib3/_collections.py
index 32d5330e..798ba432 100644
--- a/src/urllib3/_collections.py
+++ b/src/urllib3/_collections.py
@@ -139,15 +139,15 @@ class RecentlyUsedContainer(Generic[_KT, _VT], MutableMapping[_KT, _VT]):
# size of the pool. Because accessing a key should move it to
# the end of the eviction line, we pop it out first.
evicted_item = key, self._container.pop(key)
+ self._container[key] = value
except KeyError:
+ self._container[key] = value
if len(self._container) >= self._maxsize:
# If we didn't evict an existing value, and we've hit our maximum
# size, then we have to evict the least recently used item from
# the beginning of the container.
evicted_item = self._container.popitem(last=False)
- # Finally, insert the new value.
- self._container[key] = value
# After releasing the lock on the pool, dispose of any evicted value.
if evicted_item is not None and self.dispose_func:
```
@haikuginger What are your thoughts on this? Is it possible to revert to the original logic?
| @pquentin thanks for opening this! The new logic is generally easier to reason about when it comes to typing, so I'd prefer to stick with it, but fixing the bug should be relatively straightforward—I think this would do for avoiding the empty container case:
```diff
--- a/src/urllib3/_collections.py
+++ b/src/urllib3/_collections.py
@@ -139,15 +139,15 @@ class RecentlyUsedContainer(Generic[_KT, _VT], MutableMapping[_KT, _VT]):
# size of the pool. Because accessing a key should move it to
# the end of the eviction line, we pop it out first.
evicted_item = key, self._container.pop(key)
except KeyError:
- if len(self._container) >= self._maxsize:
+ if self._container and len(self._container) >= self._maxsize:
# If we didn't evict an existing value, and we've hit our maximum
# size, then we have to evict the least recently used item from
# the beginning of the container.
evicted_item = self._container.popitem(last=False)
# Finally, insert the new value.
self._container[key] = value
# After releasing the lock on the pool, dispose of any evicted value.
if evicted_item is not None and self.dispose_func:
```
@haikuginger That won't work as a fix: if `RecentlyUsedContainer` is given `maxsize=0`, then there shouldn't be an object in the container after calling `__setitem__`.
My proposal is this:
```diff
diff --git a/src/urllib3/_collections.py b/src/urllib3/_collections.py
index 32d5330e..57fd868b 100644
--- a/src/urllib3/_collections.py
+++ b/src/urllib3/_collections.py
@@ -139,16 +139,15 @@ class RecentlyUsedContainer(Generic[_KT, _VT], MutableMapping[_KT, _VT]):
# size of the pool. Because accessing a key should move it to
# the end of the eviction line, we pop it out first.
evicted_item = key, self._container.pop(key)
+ self._container[key] = value
except KeyError:
- if len(self._container) >= self._maxsize:
+ self._container[key] = value
+ if len(self._container) > self._maxsize:
# If we didn't evict an existing value, and we've hit our maximum
# size, then we have to evict the least recently used item from
# the beginning of the container.
evicted_item = self._container.popitem(last=False)
- # Finally, insert the new value.
- self._container[key] = value
-
# After releasing the lock on the pool, dispose of any evicted value.
if evicted_item is not None and self.dispose_func:
_, evicted_value = evicted_item
diff --git a/test/test_collections.py b/test/test_collections.py
index 7cc1b868..14ead448 100644
--- a/test/test_collections.py
+++ b/test/test_collections.py
@@ -22,6 +22,11 @@ class TestLRUContainer:
assert 0 not in d
assert (i + 1) in d
+ def test_maxsize_0(self):
+ d = Container(0)
+ d[1] = 1
+ assert len(d) == 0
+
def test_expire(self):
d = Container(5)
```
Passes all of our existing tests and adds a new one for an empty `RecentlyUsedContainer`. This is potentially a place where Hypothesis' state testing would come in handy.
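For illustration, a stateful test along those lines might look like this (a sketch, not part of the fix):

```python
from hypothesis import strategies as st
from hypothesis.stateful import RuleBasedStateMachine, invariant, rule

from urllib3._collections import RecentlyUsedContainer


class ContainerMachine(RuleBasedStateMachine):
    """Randomly set items and check the size bound always holds."""

    def __init__(self):
        super().__init__()
        self.container = RecentlyUsedContainer(maxsize=3)

    @rule(key=st.integers(), value=st.integers())
    def set_item(self, key, value):
        self.container[key] = value

    @invariant()
    def never_exceeds_maxsize(self):
        assert len(self.container) <= 3


TestContainer = ContainerMachine.TestCase
```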
We can also add a comment about why the setitem to `_container` is in two places, to hopefully make this approach easier to follow?
@SethMichaelLarson ah good point; I misunderstood. That approach looks sound to me. | 2021-07-13T06:32:47Z | [] | [] |
urllib3/urllib3 | 2,330 | urllib3__urllib3-2330 | [
"2085"
] | 3d9c4a0451e0fc55d999d858813482b86948eb32 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -9,6 +9,18 @@
root_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, root_path)
+# https://docs.readthedocs.io/en/stable/builds.html#build-environment
+if "READTHEDOCS" in os.environ:
+ import glob
+
+ if glob.glob("../changelog/*.*.rst"):
+ print("-- Found changes; running towncrier --", flush=True)
+ import subprocess
+
+ subprocess.run(
+ ["towncrier", "--yes", "--date", "not released yet"], cwd="..", check=True
+ )
+
import urllib3
# -- General configuration -----------------------------------------------------
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -199,6 +199,7 @@ def __init__(
version: int,
reason: Optional[str],
decode_content: bool,
+ request_url: Optional[str],
) -> None:
if isinstance(headers, HTTPHeaderDict):
self.headers = headers
@@ -208,6 +209,7 @@ def __init__(
self.version = version
self.reason = reason
self.decode_content = decode_content
+ self.request_url: Optional[str]
self.chunked = False
tr_enc = self.headers.get("transfer-encoding", "").lower()
@@ -412,6 +414,7 @@ def __init__(
version=version,
reason=reason,
decode_content=decode_content,
+ request_url=request_url,
)
self.retries = retries
@@ -423,7 +426,10 @@ def __init__(
self._original_response = original_response
self._fp_bytes_read = 0
self.msg = msg
- self._request_url = request_url
+ if self.retries is not None and self.retries.history:
+ self._request_url = self.retries.history[-1].redirect_location
+ else:
+ self._request_url = request_url
if body and isinstance(body, (str, bytes)):
self._body = body
@@ -901,10 +907,11 @@ def url(self) -> Optional[str]:
If the request that generated this response redirected, this method
will return the final redirect location.
"""
- if self.retries is not None and self.retries.history:
- return self.retries.history[-1].redirect_location
- else:
- return self._request_url
+ return self._request_url
+
+ @url.setter
+ def url(self, url: str) -> None:
+ self._request_url = url
def __iter__(self) -> Iterator[bytes]:
buffer: List[bytes] = []
| diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -869,6 +869,14 @@ def test_geturl(self):
resp = HTTPResponse(fp, request_url=request_url)
assert resp.geturl() == request_url
+ def test_url(self):
+ fp = BytesIO(b"")
+ request_url = "https://example.com"
+ resp = HTTPResponse(fp, request_url=request_url)
+ assert resp.url == request_url
+ resp.url = "https://anotherurl.com"
+ assert resp.url == "https://anotherurl.com"
+
def test_geturl_retries(self):
fp = BytesIO(b"")
resp = HTTPResponse(fp, request_url="http://example.com")
| Start using towncrier for tracking changes
This will make sure that we don't miss anything. Asking if anyone on our team has configured towncrier in the past :)
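For reference, a minimal hook that renders pending fragments automatically might look like this sketch (assuming fragments live under `changelog/` as `*.rst` files and the script runs from the repo root):

```python
import glob
import subprocess

# Render pending news fragments into the changelog before building docs.
if glob.glob("changelog/*.*.rst"):
    subprocess.run(
        ["towncrier", "--yes", "--date", "not released yet"], check=True
    )
```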
| I did :)
Are we OK with the five default categories or do we want to customize that? https://towncrier.readthedocs.io/en/actual-freaking-docs/quickstart.html#creating-news-fragments
Can we add a section for breaking changes? | 2021-07-15T06:09:38Z | [] | [] |
urllib3/urllib3 | 2,335 | urllib3__urllib3-2335 | [
"2325"
] | 21945b84b0707a3143b09ade5c59e8dbf6a69a0f | diff --git a/src/urllib3/__init__.py b/src/urllib3/__init__.py
--- a/src/urllib3/__init__.py
+++ b/src/urllib3/__init__.py
@@ -96,6 +96,7 @@ def request(
fields: Optional[_TYPE_FIELDS] = None,
headers: Optional[Mapping[str, str]] = None,
preload_content: Optional[bool] = True,
+ decode_content: Optional[bool] = True,
redirect: Optional[bool] = True,
retries: Optional[Union[Retry, bool, int]] = None,
timeout: Optional[Union[Timeout, float, int]] = 3,
@@ -114,6 +115,7 @@ def request(
fields=fields,
headers=headers,
preload_content=preload_content,
+ decode_content=decode_content,
redirect=redirect,
retries=retries,
timeout=timeout,
| diff --git a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py
--- a/test/with_dummyserver/test_poolmanager.py
+++ b/test/with_dummyserver/test_poolmanager.py
@@ -1,3 +1,4 @@
+import gzip
import json
from test import LONG_TIMEOUT
from unittest import mock
@@ -424,6 +425,25 @@ def test_top_level_request_with_preload_content(self):
r.data
assert r.connection is None
+ def test_top_level_request_with_decode_content(self):
+ r = request(
+ "GET",
+ f"{self.base_url}/encodingrequest",
+ headers={"accept-encoding": "gzip"},
+ decode_content=False,
+ )
+ assert r.status == 200
+ assert gzip.decompress(r.data) == b"hello, world!"
+
+ r = request(
+ "GET",
+ f"{self.base_url}/encodingrequest",
+ headers={"accept-encoding": "gzip"},
+ decode_content=True,
+ )
+ assert r.status == 200
+ assert r.data == b"hello, world!"
+
def test_top_level_request_with_redirect(self):
r = request(
"GET",
@@ -466,6 +486,7 @@ def test_top_level_request_with_timeout(self):
fields=None,
headers=None,
preload_content=True,
+ decode_content=True,
redirect=True,
retries=None,
timeout=2.5,
| Add decode_content parameter to top-level APIs
Like the title says, add `decode_content` to the top-level API `urllib3.request()`.
See https://github.com/urllib3/urllib3/commit/ddf7361ac0467431a2f3df6ba346c9c506c29d56 for an example.
| I will work on this. What is the functionality of this parameter?
The functionality is already implemented, only needs to be forwarded to the lower levels.
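Once forwarded, top-level usage would look like this sketch (it assumes a server that actually honors `accept-encoding: gzip`; `example.com` is a placeholder):

```python
import gzip

import urllib3

r = urllib3.request(
    "GET",
    "https://example.com/",
    headers={"accept-encoding": "gzip"},
    decode_content=False,
)
body = gzip.decompress(r.data)  # r.data is still compressed bytes
```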
@V1NAY8 please let me know if you are working on this, else I would love to work on this.
@imkaka go ahead and implement it. I'll pick something else.
Cool @V1NAY8, will start on this. | 2021-07-21T13:02:36Z | [] | [] |
urllib3/urllib3 | 2,340 | urllib3__urllib3-2340 | [
"2313"
] | c674f48d59affef387d8f2ea817e1ae4d696e4da | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -201,15 +201,17 @@ def _new_conn(self) -> socket.socket:
socket_options=self.socket_options,
)
except socket.gaierror as e:
- raise NameResolutionError(self.host, self, e)
- except SocketTimeout:
+ raise NameResolutionError(self.host, self, e) from e
+ except SocketTimeout as e:
raise ConnectTimeoutError(
self,
f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
- )
+ ) from e
except OSError as e:
- raise NewConnectionError(self, f"Failed to establish a new connection: {e}")
+ raise NewConnectionError(
+ self, f"Failed to establish a new connection: {e}"
+ ) from e
return conn
@@ -583,10 +585,11 @@ def _connect_tls_proxy(
)
except Exception as e:
# Wrap into an HTTPSProxyError for easier diagnosis.
- # Original exception is available on original_error
+ # Original exception is available on original_error and
+ # as __cause__.
raise HTTPSProxyError(
f"Unable to establish a TLS connection to {hostname}", e
- )
+ ) from e
def _match_hostname(cert: _TYPE_PEER_CERT_RET, asserted_hostname: str) -> None:
diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -263,14 +263,14 @@ def _get_conn(self, timeout: Optional[float] = None) -> HTTPConnection:
conn = self.pool.get(block=self.block, timeout=timeout)
except AttributeError: # self.pool is None
- raise ClosedPoolError(self, "Pool is closed.") # Defensive:
+ raise ClosedPoolError(self, "Pool is closed.") from None # Defensive:
except queue.Empty:
if self.block:
raise EmptyPoolError(
self,
"Pool is empty and a new connection can't be opened due to blocking mode.",
- )
+ ) from None
pass # Oh well, we'll create a new connection then
# If this is a persistent connection, check if it got disconnected
@@ -317,7 +317,7 @@ def _put_conn(self, conn: Optional[HTTPConnection]) -> None:
raise FullPoolError(
self,
"Pool reached maximum size and no more connections are allowed.",
- )
+ ) from None
log.warning(
"Connection pool is full, discarding connection: %s", self.host
@@ -360,13 +360,13 @@ def _raise_timeout(
if isinstance(err, SocketTimeout):
raise ReadTimeoutError(
self, url, f"Read timed out. (read timeout={timeout_value})"
- )
+ ) from err
# See the above comment about EAGAIN in Python 3.
if hasattr(err, "errno") and err.errno in _blocking_errnos:
raise ReadTimeoutError(
self, url, f"Read timed out. (read timeout={timeout_value})"
- )
+ ) from err
def _make_request(
self,
diff --git a/src/urllib3/contrib/_securetransport/bindings.py b/src/urllib3/contrib/_securetransport/bindings.py
--- a/src/urllib3/contrib/_securetransport/bindings.py
+++ b/src/urllib3/contrib/_securetransport/bindings.py
@@ -417,7 +417,7 @@ def load_cdll(name: str, macos10_16_path: str) -> CDLL:
CoreFoundation.CFDictionaryRef = CFDictionaryRef
except AttributeError:
- raise ImportError("Error initializing ctypes")
+ raise ImportError("Error initializing ctypes") from None
class CFConst:
diff --git a/src/urllib3/contrib/_securetransport/low_level.py b/src/urllib3/contrib/_securetransport/low_level.py
--- a/src/urllib3/contrib/_securetransport/low_level.py
+++ b/src/urllib3/contrib/_securetransport/low_level.py
@@ -109,7 +109,7 @@ def _create_cfstring_array(lst: List[bytes]) -> CFMutableArray:
except BaseException as e:
if cf_arr:
CoreFoundation.CFRelease(cf_arr)
- raise ssl.SSLError(f"Unable to allocate array: {e}")
+ raise ssl.SSLError(f"Unable to allocate array: {e}") from None
return cf_arr
diff --git a/src/urllib3/contrib/pyopenssl.py b/src/urllib3/contrib/pyopenssl.py
--- a/src/urllib3/contrib/pyopenssl.py
+++ b/src/urllib3/contrib/pyopenssl.py
@@ -297,21 +297,21 @@ def recv(self, *args: Any, **kwargs: Any) -> bytes:
if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
return b""
else:
- raise OSError(e.args[0], str(e))
+ raise OSError(e.args[0], str(e)) from e
except OpenSSL.SSL.ZeroReturnError:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return b""
else:
raise
- except OpenSSL.SSL.WantReadError:
+ except OpenSSL.SSL.WantReadError as e:
if not util.wait_for_read(self.socket, self.socket.gettimeout()):
- raise timeout("The read operation timed out") # type: ignore[arg-type]
+ raise timeout("The read operation timed out") from e # type: ignore[arg-type]
else:
return self.recv(*args, **kwargs)
# TLS 1.3 post-handshake authentication
except OpenSSL.SSL.Error as e:
- raise ssl.SSLError(f"read error: {e!r}")
+ raise ssl.SSLError(f"read error: {e!r}") from e
else:
return data # type: ignore[no-any-return]
@@ -322,21 +322,21 @@ def recv_into(self, *args: Any, **kwargs: Any) -> int:
if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
return 0
else:
- raise OSError(e.args[0], str(e))
+ raise OSError(e.args[0], str(e)) from e
except OpenSSL.SSL.ZeroReturnError:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return 0
else:
raise
- except OpenSSL.SSL.WantReadError:
+ except OpenSSL.SSL.WantReadError as e:
if not util.wait_for_read(self.socket, self.socket.gettimeout()):
- raise timeout("The read operation timed out") # type: ignore[arg-type]
+ raise timeout("The read operation timed out") from e # type: ignore[arg-type]
else:
return self.recv_into(*args, **kwargs)
# TLS 1.3 post-handshake authentication
except OpenSSL.SSL.Error as e:
- raise ssl.SSLError(f"read error: {e!r}")
+ raise ssl.SSLError(f"read error: {e!r}") from e
def settimeout(self, timeout: float) -> None:
return self.socket.settimeout(timeout)
@@ -345,12 +345,12 @@ def _send_until_done(self, data: bytes) -> int:
while True:
try:
return self.connection.send(data) # type: ignore[no-any-return]
- except OpenSSL.SSL.WantWriteError:
+ except OpenSSL.SSL.WantWriteError as e:
if not util.wait_for_write(self.socket, self.socket.gettimeout()):
- raise timeout()
+ raise timeout() from e
continue
except OpenSSL.SSL.SysCallError as e:
- raise OSError(e.args[0], str(e))
+ raise OSError(e.args[0], str(e)) from e
def sendall(self, data: bytes) -> None:
total_sent = 0
@@ -448,7 +448,7 @@ def load_verify_locations(
if cadata is not None:
self._ctx.load_verify_locations(BytesIO(cadata))
except OpenSSL.SSL.Error as e:
- raise ssl.SSLError(f"unable to load trusted certificates: {e!r}")
+ raise ssl.SSLError(f"unable to load trusted certificates: {e!r}") from e
def load_cert_chain(
self,
@@ -488,12 +488,12 @@ def wrap_socket(
while True:
try:
cnx.do_handshake()
- except OpenSSL.SSL.WantReadError:
+ except OpenSSL.SSL.WantReadError as e:
if not util.wait_for_read(sock, sock.gettimeout()):
- raise timeout("select timed out") # type: ignore[arg-type]
+ raise timeout("select timed out") from e # type: ignore[arg-type]
continue
except OpenSSL.SSL.Error as e:
- raise ssl.SSLError(f"bad handshake: {e!r}")
+ raise ssl.SSLError(f"bad handshake: {e!r}") from e
break
return WrappedSocket(cnx, sock)
diff --git a/src/urllib3/contrib/securetransport.py b/src/urllib3/contrib/securetransport.py
--- a/src/urllib3/contrib/securetransport.py
+++ b/src/urllib3/contrib/securetransport.py
@@ -385,9 +385,11 @@ def _custom_validate(self, verify: bool, trust_bundle: Optional[bytes]) -> None:
if trust_result in successes:
return
reason = f"error code: {int(trust_result)}"
+ exc = None
except Exception as e:
# Do not trust on error
reason = f"exception: {e!r}"
+ exc = e
# SecureTransport does not send an alert nor shuts down the connection.
rec = _build_tls_unknown_ca_alert(self.version())
@@ -398,7 +400,7 @@ def _custom_validate(self, verify: bool, trust_bundle: Optional[bytes]) -> None:
opts = struct.pack("ii", 1, 0)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
self.close()
- raise ssl.SSLError(f"certificate verify failed, {reason}")
+ raise ssl.SSLError(f"certificate verify failed, {reason}") from exc
def _evaluate_trust(self, trust_bundle: bytes) -> int:
# We want data in memory, so load it up.
diff --git a/src/urllib3/contrib/socks.py b/src/urllib3/contrib/socks.py
--- a/src/urllib3/contrib/socks.py
+++ b/src/urllib3/contrib/socks.py
@@ -120,11 +120,11 @@ def _new_conn(self) -> "socks.socksocket":
**extra_kw,
)
- except SocketTimeout:
+ except SocketTimeout as e:
raise ConnectTimeoutError(
self,
f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
- )
+ ) from e
except socks.ProxyError as e:
# This is fragile as hell, but it seems to be the only way to raise
@@ -135,18 +135,20 @@ def _new_conn(self) -> "socks.socksocket":
raise ConnectTimeoutError(
self,
f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
- )
+ ) from e
else:
raise NewConnectionError(
self, f"Failed to establish a new connection: {error}"
- )
+ ) from e
else:
raise NewConnectionError(
self, f"Failed to establish a new connection: {e}"
- )
+ ) from e
except OSError as e: # Defensive: PySocks should catch all these.
- raise NewConnectionError(self, f"Failed to establish a new connection: {e}")
+ raise NewConnectionError(
+ self, f"Failed to establish a new connection: {e}"
+ ) from e
return conn
diff --git a/src/urllib3/exceptions.py b/src/urllib3/exceptions.py
--- a/src/urllib3/exceptions.py
+++ b/src/urllib3/exceptions.py
@@ -66,6 +66,7 @@ class SSLError(HTTPError):
class ProxyError(HTTPError):
"""Raised when the connection to a proxy fails."""
+ # The original error is also available as __cause__.
original_error: Exception
def __init__(self, message: str, error: Exception) -> None:
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -314,7 +314,7 @@ def _decode(
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding,
e,
- )
+ ) from e
if flush_decoder:
data += self._flush_decoder()
@@ -565,22 +565,22 @@ def _error_catcher(self) -> Generator[None, None, None]:
try:
yield
- except SocketTimeout:
+ except SocketTimeout as e:
# FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
# there is yet no clean way to get at it from this context.
- raise ReadTimeoutError(self._pool, None, "Read timed out.") # type: ignore[arg-type]
+ raise ReadTimeoutError(self._pool, None, "Read timed out.") from e # type: ignore[arg-type]
except BaseSSLError as e:
# FIXME: Is there a better way to differentiate between SSLErrors?
if "read operation timed out" not in str(e):
# SSL errors related to framing/MAC get wrapped and reraised here
- raise SSLError(e)
+ raise SSLError(e) from e
- raise ReadTimeoutError(self._pool, None, "Read timed out.") # type: ignore[arg-type]
+ raise ReadTimeoutError(self._pool, None, "Read timed out.") from e # type: ignore[arg-type]
except (HTTPException, OSError) as e:
# This includes IncompleteRead.
- raise ProtocolError(f"Connection broken: {e!r}", e)
+ raise ProtocolError(f"Connection broken: {e!r}", e) from e
# If no exception is thrown, we should avoid cleaning up
# unnecessarily.
@@ -804,7 +804,7 @@ def _update_chunk_length(self) -> None:
except ValueError:
# Invalid chunked protocol response, abort.
self.close()
- raise InvalidChunkLength(self, line)
+ raise InvalidChunkLength(self, line) from None
def _handle_chunk(self, amt: Optional[int]) -> bytes:
returned_chunk = None
diff --git a/src/urllib3/util/request.py b/src/urllib3/util/request.py
--- a/src/urllib3/util/request.py
+++ b/src/urllib3/util/request.py
@@ -138,10 +138,10 @@ def rewind_body(body: IO[AnyStr], body_pos: Optional[Union[int, object]]) -> Non
if body_seek is not None and isinstance(body_pos, int):
try:
body_seek(body_pos)
- except OSError:
+ except OSError as e:
raise UnrewindableBodyError(
"An error occurred when rewinding request body for redirect/retry."
- )
+ ) from e
elif body_pos is _FAILEDTELL:
raise UnrewindableBodyError(
"Unable to record file position for rewinding "
diff --git a/src/urllib3/util/retry.py b/src/urllib3/util/retry.py
--- a/src/urllib3/util/retry.py
+++ b/src/urllib3/util/retry.py
@@ -493,7 +493,8 @@ def increment(
)
if new_retry.is_exhausted():
- raise MaxRetryError(_pool, url, error or ResponseError(cause)) # type: ignore[arg-type]
+ reason = error or ResponseError(cause)
+ raise MaxRetryError(_pool, url, reason) from reason # type: ignore[arg-type]
log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -379,7 +379,7 @@ def ssl_wrap_socket(
try:
context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
except OSError as e:
- raise SSLError(e)
+ raise SSLError(e) from e
elif ssl_context is None and hasattr(context, "load_default_certs"):
# try to load OS default certs; works well on Windows.
diff --git a/src/urllib3/util/timeout.py b/src/urllib3/util/timeout.py
--- a/src/urllib3/util/timeout.py
+++ b/src/urllib3/util/timeout.py
@@ -150,7 +150,7 @@ def _validate_timeout(
raise ValueError(
"Timeout value %s was %s, but it must be an "
"int, float or None." % (name, value)
- )
+ ) from None
try:
if value <= 0: # type: ignore[operator]
@@ -164,7 +164,7 @@ def _validate_timeout(
raise ValueError(
"Timeout value %s was %s, but it must be an "
"int, float or None." % (name, value)
- )
+ ) from None
return value
| diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -482,7 +482,7 @@ def socket_handler(listener):
# second ReadTimeoutError due to errno
with HTTPSConnectionPool(host=self.host):
- err = mock.Mock()
+ err = OSError()
err.errno = errno.EAGAIN
with pytest.raises(ReadTimeoutError):
pool._raise_timeout(err, "", 0)
| [v2] Chaining exceptions
### Context
Currently urllib3 doesn't chain exceptions explicitly when wrapping exceptions (this feature wasn't available in Python 2 AFAIK). For example:
https://github.com/urllib3/urllib3/blob/1831327b881880ed871f96f56a6977d360042e1b/src/urllib3/response.py#L539-L557
If the `SocketTimeout` happens, then it renders as
```
... SocketTimeout stacktrace ...
During handling of the above exception, another exception occurred:
... ReadTimeoutError stacktrace ...
```
Explicit chaining would be
```py
except SocketTimeout as e:
raise ReadTimeoutError(self._pool, None, "Read timed out.") from e
```
then the error is rendered as the nicer
```
... SocketTimeout stacktrace ...
The above exception was the direct cause of the following exception:
... ReadTimeoutError stacktrace ...
```
Alternatively, if we don't want to expose the cause exception, we can do
```py
except SocketTimeout:
raise ReadTimeoutError(self._pool, None, "Read timed out.") from None
```
then only the `ReadTimeoutError` is shown.
IMO, the first case looks like a bug to the user, and, once this issue is resolved, seeing it really would indicate an unexpected exception in urllib3.
So I suggest going over all cases of exception wrapping in urllib3 and changing them to either explicitly chain or suppress the wrapped exception.
There is a question of whether an exception's `__cause__` could be considered part of the API promise. IMO, there is no such expectation and we shouldn't guarantee this. But if we do, then we'd want to be a lot more picky with chaining vs. suppressing.
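As a minimal illustration of what such reliance could look like (hypothetical caller code; nothing in urllib3 promises this today):
```py
def fetch():
    try:
        raise TimeoutError("socket timed out")       # stand-in for SocketTimeout
    except TimeoutError as e:
        raise RuntimeError("read timed out") from e  # explicit chaining

try:
    fetch()
except RuntimeError as e:
    # A caller inspecting the wrapped exception; with `from None` this is None.
    print(type(e.__cause__))  # <class 'TimeoutError'>
```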
### Contribution
> Would you be willing to submit a PR?
Yes.
| Thanks for opening this! Let's move towards the explicit chaining if only for the better message. This work will also expose places where maybe we do want the `from None`. | 2021-07-25T09:26:13Z | [] | [] |
urllib3/urllib3 | 2,342 | urllib3__urllib3-2342 | [
"2338"
] | 21945b84b0707a3143b09ade5c59e8dbf6a69a0f | diff --git a/src/urllib3/contrib/_securetransport/low_level.py b/src/urllib3/contrib/_securetransport/low_level.py
--- a/src/urllib3/contrib/_securetransport/low_level.py
+++ b/src/urllib3/contrib/_securetransport/low_level.py
@@ -202,6 +202,7 @@ def _cert_array_from_pem(pem_bundle: bytes) -> CFArray:
# We only want to do that if an error occurs: otherwise, the caller
# should free.
CoreFoundation.CFRelease(cert_array)
+ raise
return cert_array
| diff --git a/test/contrib/test_securetransport.py b/test/contrib/test_securetransport.py
--- a/test/contrib/test_securetransport.py
+++ b/test/contrib/test_securetransport.py
@@ -1,3 +1,4 @@
+import base64
import contextlib
import socket
import ssl
@@ -51,3 +52,15 @@ def test_no_crash_with_empty_trust_bundle():
ws = WrappedSocket(s)
with pytest.raises(ssl.SSLError):
ws._custom_validate(True, b"")
+
+
+def test_no_crash_with_invalid_trust_bundle():
+ invalid_cert = base64.b64encode(b"invalid-cert")
+ cert_bundle = (
+ b"-----BEGIN CERTIFICATE-----\n" + invalid_cert + b"\n-----END CERTIFICATE-----"
+ )
+
+ with contextlib.closing(socket.socket()) as s:
+ ws = WrappedSocket(s)
+ with pytest.raises(ssl.SSLError):
+ ws._custom_validate(True, cert_bundle)
| Possible bug in securetransport _cert_array_from_pem() error handling
While working on an unrelated change, I noticed this line, which *seems* to miss a `raise` after the free in the `except` block. I may be wrong; I didn't try at all to understand what the code does. It just looks wrong to me, so I thought I'd report it.
https://github.com/urllib3/urllib3/blob/21945b84b0707a3143b09ade5c59e8dbf6a69a0f/src/urllib3/contrib/_securetransport/low_level.py#L186-L206
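If it helps, this is the general shape of the pattern I'd expect there (a minimal sketch with placeholder names only, not the real CoreFoundation calls):
```python
def acquire():            # placeholder for CFArrayCreateMutable(...)
    return []

def populate(resource):   # placeholder for the PEM-parsing loop; may raise
    resource.append("cert")

def release(resource):    # placeholder for CoreFoundation.CFRelease(...)
    resource.clear()

def build():
    resource = acquire()
    try:
        populate(resource)
    except Exception:
        release(resource)  # free on the error path only...
        raise              # ...then re-raise so the caller sees the failure
    return resource        # on success, the caller owns (and frees) it
```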
 | Ahh, I think we're missing `raise` in there to re-raise the exception raised in the `try` block. Good catch!
OK, I'll send a PR. | 2021-07-25T16:14:39Z | [] | [] |
urllib3/urllib3 | 2,348 | urllib3__urllib3-2348 | [
"2067"
] | 3f195385726564bda2120aa1b538a858712ef931 | diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py
--- a/src/urllib3/poolmanager.py
+++ b/src/urllib3/poolmanager.py
@@ -56,6 +56,9 @@
"ssl_context",
"key_password",
)
+# Default value for `blocksize` - a new parameter introduced to
+# http.client.HTTPConnection & http.client.HTTPSConnection in Python 3.7
+_DEFAULT_BLOCKSIZE = 16384
_SelfT = TypeVar("_SelfT")
@@ -95,6 +98,7 @@ class PoolKey(NamedTuple):
key_assert_hostname: Optional[Union[bool, str]]
key_assert_fingerprint: Optional[str]
key_server_hostname: Optional[str]
+ key_blocksize: Optional[int]
def _default_key_normalizer(
@@ -145,6 +149,10 @@ def _default_key_normalizer(
if field not in context:
context[field] = None
+ # Default key_blocksize to _DEFAULT_BLOCKSIZE if missing from the context
+ if context.get("key_blocksize") is None:
+ context["key_blocksize"] = _DEFAULT_BLOCKSIZE
+
return key_class(**context)
@@ -250,6 +258,11 @@ def _new_pool(
if request_context is None:
request_context = self.connection_pool_kw.copy()
+ # Default blocksize to _DEFAULT_BLOCKSIZE if missing or explicitly
+ # set to 'None' in the request_context.
+ if request_context.get("blocksize") is None:
+ request_context["blocksize"] = _DEFAULT_BLOCKSIZE
+
# Although the context has everything necessary to create the pool,
# this function has historically only used the scheme, host, and port
# in the positional args. When an API change is acceptable these can
| diff --git a/test/test_poolmanager.py b/test/test_poolmanager.py
--- a/test/test_poolmanager.py
+++ b/test/test_poolmanager.py
@@ -9,7 +9,12 @@
from urllib3 import connection_from_url
from urllib3.connectionpool import HTTPSConnectionPool
from urllib3.exceptions import ClosedPoolError, LocationValueError
-from urllib3.poolmanager import PoolKey, PoolManager, key_fn_by_scheme
+from urllib3.poolmanager import (
+ _DEFAULT_BLOCKSIZE,
+ PoolKey,
+ PoolManager,
+ key_fn_by_scheme,
+)
from urllib3.util import retry, timeout
from urllib3.util.url import Url
@@ -113,6 +118,7 @@ def test_http_pool_key_fields(self) -> None:
"retries": retry.Retry(total=6, connect=2),
"block": True,
"source_address": "127.0.0.1",
+ "blocksize": _DEFAULT_BLOCKSIZE + 1,
}
p = PoolManager()
conn_pools = [
@@ -145,6 +151,7 @@ def test_https_pool_key_fields(self) -> None:
"cert_reqs": "CERT_REQUIRED",
"ca_certs": "/root/path_to_pem",
"ssl_version": "SSLv23_METHOD",
+ "blocksize": _DEFAULT_BLOCKSIZE + 1,
}
p = PoolManager()
conn_pools = [
@@ -401,3 +408,23 @@ def test_pool_manager_no_url_absolute_form(self) -> None:
p = PoolManager()
assert p._proxy_requires_url_absolute_form(Url("http://example.com")) is False
assert p._proxy_requires_url_absolute_form(Url("https://example.com")) is False
+
+ @pytest.mark.parametrize(
+ "input_blocksize,expected_blocksize",
+ [
+ (_DEFAULT_BLOCKSIZE, _DEFAULT_BLOCKSIZE),
+ (None, _DEFAULT_BLOCKSIZE),
+ (8192, 8192),
+ ],
+ )
+ def test_poolmanager_blocksize(
+ self, input_blocksize: int, expected_blocksize: int
+ ) -> None:
+ """Assert PoolManager sets blocksize properly"""
+ p = PoolManager()
+
+ pool_blocksize = p.connection_from_url(
+ "http://example.com", {"blocksize": input_blocksize}
+ )
+ assert pool_blocksize.conn_kw["blocksize"] == expected_blocksize
+ assert pool_blocksize._get_conn().blocksize == expected_blocksize # type: ignore[attr-defined]
| PoolManager does not support argument blocksize
### Subject
urllib3's PoolManager does not support `blocksize`. This seems to be a new argument accepted by Python's standard library's http clients since 3.7 (https://docs.python.org/3.6/library/http.client.html vs https://docs.python.org/3.7/library/http.client.html)
Adding `key_blocksize` to poolmanager.py's `_key_fields` fixes the issue but it probably should only be appended when it is actually supported.
### Environment
OS Linux-5.4.0-47-generic-x86_64-with-glibc2.29
Python 3.8.5
urllib3 1.26.2
### Steps to Reproduce
```
from urllib3 import PoolManager
with PoolManager(blocksize = 8192) as pool:
response = pool.urlopen("GET", "https://www.google.com")
```
### Expected Behavior
Should perform a GET request successfully.
### Actual Behavior
The following exception is thrown
```
Traceback (most recent call last):
File "report.py", line 13, in <module>
response = pool.urlopen("GET", "https://www.google.com")
File "/home/geancarlo/redacted/poc/venv/lib/python3.8/site-packages/urllib3/poolmanager.py", line 364, in urlopen
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
File "/home/geancarlo/redacted/poc/venv/lib/python3.8/site-packages/urllib3/poolmanager.py", line 245, in connection_from_host
return self.connection_from_context(request_context)
File "/home/geancarlo/redacted/poc/venv/lib/python3.8/site-packages/urllib3/poolmanager.py", line 258, in connection_from_context
pool_key = pool_key_constructor(request_context)
File "/home/geancarlo/redacted/poc/venv/lib/python3.8/site-packages/urllib3/poolmanager.py", line 124, in _default_key_normalizer
return key_class(**context)
TypeError: __new__() got an unexpected keyword argument 'key_blocksize'
```
 | I've been thinking about `blocksize` recently, actually, so thanks for opening this.
We should be defaulting `blocksize` to `16384`, I believe, since this is what we're using as a read/write size for pyOpenSSL and SecureTransport, but we should also support setting it.
This is probably doable by (rough sketch after this list):
* appending `key_blocksize` to `_key_fields` if `sys.version_info >= (3, 7)`
* copying `test_http_pool_key_fields` but calling it `test_http_pool_key_blocksize`, decorating it with `@pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python 3.7+")`, and testing `key_blocksize` specifically
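Something like this, as a rough sketch (here `_key_fields` stands in for the tuple that `PoolKey` is built from in `poolmanager.py`, with the existing fields elided; note the merged patch above ends up skipping the version gate and instead adds `key_blocksize` to `PoolKey` directly, defaulting it via `_DEFAULT_BLOCKSIZE`):
```python
import sys

_key_fields = ("key_scheme", "key_host", "key_port")  # existing fields elided
if sys.version_info >= (3, 7):  # http.client grew `blocksize` in 3.7
    _key_fields += ("key_blocksize",)
```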
@geancarlo Would you like to try adding this feature?
Hi. Yes, I can do it. This weekend I'll probably have some time.
@geancarlo are you working on this? I would be happy to take over if you are not.
I will work on this and will refer to @geancarlo's PR. | 2021-07-28T15:46:20Z | [] | [] |
urllib3/urllib3 | 2,452 | urllib3__urllib3-2452 | [
"2191"
] | f3874ad04822576ded1f22c4fe900fff3bf86777 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -483,10 +483,7 @@ def connect(self) -> None:
# We still support OpenSSL 1.0.2, which prevents us from verifying
# hostnames easily: https://github.com/pyca/pyopenssl/pull/933
or ssl_.IS_PYOPENSSL
- # context.hostname_checks_common_name seems ignored, and it's more
- # important to reject certs without SANs than to rely on the standard
- # library. See https://bugs.python.org/issue43522 for details.
- or True
+ or not ssl_.HAS_NEVER_CHECK_COMMON_NAME
):
self.ssl_context.check_hostname = False
diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -19,6 +19,8 @@
ALPN_PROTOCOLS = ["http/1.1"]
USE_DEFAULT_SSLCONTEXT_CIPHERS = False
+_TYPE_VERSION_INFO = Tuple[int, int, int, str, int]
+
# Maps the length of a digest to a possible hash function producing this digest
HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
@@ -36,6 +38,55 @@ def _is_ge_openssl_v1_1_1(
)
+def _is_openssl_issue_14579_fixed(
+ openssl_version_text: str, openssl_version_number: int
+) -> bool:
+ """
+ Returns True for OpenSSL 1.1.1l+ (>=0x101010cf) where this issue was fixed.
+ Before the fix, the SSL_new() API was not copying hostflags like
+ X509_CHECK_FLAG_NEVER_CHECK_SUBJECT, which tripped up CPython.
+ https://github.com/openssl/openssl/issues/14579
+
+ LibreSSL reports a version number of 0x20000000 for
+ OpenSSL version number so we need to filter out LibreSSL.
+ """
+ return (
+ not openssl_version_text.startswith("LibreSSL")
+ and openssl_version_number >= 0x101010CF
+ )
+
+
+def _is_bpo_43522_fixed(
+ implementation_name: str, version_info: _TYPE_VERSION_INFO
+) -> bool:
+ """Return True if PyPy or CPython 3.8.9+, 3.9.3+ or 3.10+ where setting
+ SSLContext.hostname_checks_common_name to False works.
+ https://github.com/urllib3/urllib3/issues/2192#issuecomment-821832963
+ https://foss.heptapod.net/pypy/pypy/-/issues/3539#
+ """
+ if implementation_name != "cpython":
+ return True
+
+ major_minor = version_info[:2]
+ micro = version_info[2]
+ return (
+ (major_minor == (3, 8) and micro >= 9)
+ or (major_minor == (3, 9) and micro >= 3)
+ or major_minor >= (3, 10)
+ )
+
+
+def _is_has_never_check_common_name_reliable(
+ openssl_version: str,
+ openssl_version_number: int,
+ implementation_name: str,
+ version_info: _TYPE_VERSION_INFO,
+) -> bool:
+ return _is_openssl_issue_14579_fixed(
+ openssl_version, openssl_version_number
+ ) or _is_bpo_43522_fixed(implementation_name, version_info)
+
+
if TYPE_CHECKING:
from typing_extensions import Literal
@@ -67,6 +118,16 @@ def _is_ge_openssl_v1_1_1(
)
PROTOCOL_SSLv23 = PROTOCOL_TLS
+ # Setting SSLContext.hostname_checks_common_name = False didn't work before CPython
+ # 3.8.9, 3.9.3, and 3.10 (but OK on PyPy) or OpenSSL 1.1.1l+
+ if HAS_NEVER_CHECK_COMMON_NAME and not _is_has_never_check_common_name_reliable(
+ OPENSSL_VERSION,
+ OPENSSL_VERSION_NUMBER,
+ sys.implementation.name,
+ sys.version_info,
+ ):
+ HAS_NEVER_CHECK_COMMON_NAME = False
+
# Need to be careful here in case old TLS versions get
# removed in future 'ssl' module implementations.
for attr in ("TLSv1", "TLSv1_1", "TLSv1_2"):
| diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -24,7 +24,13 @@
from urllib3.util.proxy import connection_requires_http_tunnel, create_proxy_ssl_context
from urllib3.util.request import _FAILEDTELL, make_headers, rewind_body
from urllib3.util.response import assert_header_parsing
-from urllib3.util.ssl_ import resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket
+from urllib3.util.ssl_ import (
+ _TYPE_VERSION_INFO,
+ _is_has_never_check_common_name_reliable,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+)
from urllib3.util.timeout import Timeout
from urllib3.util.url import Url, _encode_invalid_chars, parse_url
from urllib3.util.util import to_bytes, to_str
@@ -923,6 +929,40 @@ def test_ssl_wrap_socket_sni_none_no_warn(self):
context.wrap_socket.assert_called_once_with(sock, server_hostname=None)
warn.assert_not_called()
+ @pytest.mark.parametrize(
+ "openssl_version, openssl_version_number, implementation_name, version_info, reliable",
+ [
+ # OpenSSL and Python OK -> reliable
+ ("OpenSSL 1.1.1l", 0x101010CF, "cpython", (3, 9, 3), True),
+ # Python OK -> reliable
+ ("OpenSSL 1.1.1", 0x10101000, "cpython", (3, 9, 3), True),
+ ("OpenSSL 1.1.1", 0x10101000, "pypy", (3, 6, 9), True),
+ ("LibreSSL 3.3.5", 0x101010CF, "pypy", (3, 6, 9), True),
+ # OpenSSL OK -> reliable
+ ("OpenSSL", 0x101010CF, "cpython", (3, 9, 2), True),
+ # unreliable
+ ("OpenSSL", 0x10101000, "cpython", (3, 9, 2), False),
+ ("LibreSSL", 0x101010CF, "cpython", (3, 9, 2), False),
+ ],
+ )
+ def test_is_has_never_check_common_name_reliable(
+ self,
+ openssl_version: str,
+ openssl_version_number: int,
+ implementation_name: str,
+ version_info: _TYPE_VERSION_INFO,
+ reliable: bool,
+ ) -> None:
+ assert (
+ _is_has_never_check_common_name_reliable(
+ openssl_version,
+ openssl_version_number,
+ implementation_name,
+ version_info,
+ )
+ == reliable
+ )
+
idna_blocker = ImportBlocker("idna")
module_stash = ModuleStash("urllib3")
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -978,8 +978,13 @@ def test_common_name_without_san_fails(self, no_san_server: ServerConfig) -> Non
cert_reqs="CERT_REQUIRED",
ca_certs=no_san_server.ca_certs,
) as https_pool:
- with pytest.raises(MaxRetryError, match="no appropriate subjectAltName"):
+ with pytest.raises(
+ MaxRetryError,
+ ) as e:
https_pool.request("GET", "/")
+ assert "mismatch, certificate is not valid" in str(
+ e.value
+ ) or "no appropriate subjectAltName" in str(e.value)
def test_strip_square_brackets_before_validating(
self, ipv6_san_server: ServerConfig
| Enable hostname_checks_common_name on Python versions that support it
We'll have to wait on the resolution of https://bugs.python.org/issue43522, and then based on that decide how we detect the feature:
* use an hypothetical new flag?
* 3.10?
* 3.7.x, 3.8.x, 3.9.x and 3.10 since this fix will be backported?
We'll have to wait for https://github.com/python/cpython/pull/24899 to land and to be in the 3.10 we use in GitHub Actions, probably when 3.10.0a7 gets released.
 | We'll use version detection; https://docs.python.org/3.10/library/ssl.html#ssl.SSLContext.hostname_checks_common_name documents the versions where this works.
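Roughly this shape, as a sketch (versions taken from the linked docs; the merged patch above encodes the full check, including the implementation name, as `_is_bpo_43522_fixed`):
```python
import sys

def _bpo_43522_fixed(version=sys.version_info[:3]):
    # 3.8.9+, 3.9.3+ and 3.10+ carry the fix on CPython.
    return (3, 8, 9) <= version < (3, 9, 0) or version >= (3, 9, 3)
```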
Not sure for PyPy yet. It includes the standard library but I don't know if they backport minor versions. I don't even know if the fix is in C or Python.
PyPy does not support this at all: https://foss.heptapod.net/pypy/pypy/-/issues/3539. So we'll continue to use our own matching there.
The problem with reporting bugs to PyPy is that they get fixed quickly :D. Thanks @mattip for this! The good news is that on PyPy, checking for `ssl.HAS_NEVER_CHECK_COMMON_NAME` is going to be enough; we don't have to worry about https://bugs.python.org/issue43522.
(PyPy already had much of the fix for the issue by re-using CPython's stdlib v3.8.10; PyPy only needed to additionally expose the `_ssl.HAS_NEVER_CHECK_COMMON_NAME` attribute, which I missed when updating the code for Python 3.8) | 2021-10-08T13:04:38Z | [] | [] |
urllib3/urllib3 | 2,473 | urllib3__urllib3-2473 | [
"2252"
] | 379d710137a5c9ce530908f4cd18c0a4adc0f773 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -26,6 +26,7 @@
from typing_extensions import Literal, NoReturn
from .util.proxy import create_proxy_ssl_context
+from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout
from .util.util import to_bytes, to_str
try: # Compiled with SSL?
@@ -127,7 +128,7 @@ def __init__(
self,
host: str,
port: Optional[int] = None,
- timeout: Optional[float] = connection.SOCKET_GLOBAL_DEFAULT_TIMEOUT,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
source_address: Optional[Tuple[str, int]] = None,
blocksize: int = 8192,
socket_options: Optional[
@@ -148,7 +149,7 @@ def __init__(
super().__init__(
host=host,
port=port,
- timeout=timeout,
+ timeout=Timeout.resolve_default_timeout(timeout),
source_address=source_address,
blocksize=blocksize,
)
@@ -361,7 +362,7 @@ def __init__(
key_file: Optional[str] = None,
cert_file: Optional[str] = None,
key_password: Optional[str] = None,
- timeout: Optional[float] = connection.SOCKET_GLOBAL_DEFAULT_TIMEOUT,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
ssl_context: Optional["ssl.SSLContext"] = None,
server_hostname: Optional[str] = None,
source_address: Optional[Tuple[str, int]] = None,
diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -1,7 +1,6 @@
import errno
import logging
import queue
-import socket
import sys
import warnings
from http.client import HTTPResponse as _HttplibHTTPResponse
@@ -58,7 +57,7 @@
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
-from .util.timeout import Timeout
+from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import parse_url
@@ -71,10 +70,7 @@
log = logging.getLogger(__name__)
-_Default = object()
-
-
-_TYPE_TIMEOUT = Union[Timeout, int, float, object]
+_TYPE_TIMEOUT = Union[Timeout, float, _TYPE_DEFAULT]
_SelfT = TypeVar("_SelfT")
@@ -189,7 +185,7 @@ def __init__(
self,
host: str,
port: Optional[int] = None,
- timeout: Optional[Union[Timeout, float, int, object]] = Timeout.DEFAULT_TIMEOUT,
+ timeout: Optional[_TYPE_TIMEOUT] = _DEFAULT_TIMEOUT,
maxsize: int = 1,
block: bool = False,
headers: Optional[Mapping[str, str]] = None,
@@ -251,7 +247,7 @@ def _new_conn(self) -> HTTPConnection:
conn = self.ConnectionCls(
host=self.host,
port=self.port,
- timeout=self.timeout.connect_timeout, # type: ignore[arg-type]
+ timeout=self.timeout.connect_timeout,
**self.conn_kw,
)
return conn
@@ -353,7 +349,7 @@ def _prepare_proxy(self, conn: HTTPConnection) -> None:
def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout:
"""Helper that always returns a :class:`urllib3.util.Timeout`"""
- if timeout is _Default:
+ if timeout is _DEFAULT_TIMEOUT:
return self.timeout.clone()
if isinstance(timeout, Timeout):
@@ -367,7 +363,7 @@ def _raise_timeout(
self,
err: Union[BaseSSLError, OSError, SocketTimeout],
url: str,
- timeout_value: _TYPE_TIMEOUT,
+ timeout_value: Optional[_TYPE_TIMEOUT],
) -> None:
"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""
@@ -387,7 +383,7 @@ def _make_request(
conn: HTTPConnection,
method: str,
url: str,
- timeout: _TYPE_TIMEOUT = _Default,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
chunked: bool = False,
**httplib_request_kw: Any,
) -> _HttplibHTTPResponse:
@@ -451,10 +447,7 @@ def _make_request(
raise ReadTimeoutError(
self, url, f"Read timed out. (read timeout={read_timeout})"
)
- if read_timeout is Timeout.DEFAULT_TIMEOUT:
- conn.sock.settimeout(socket.getdefaulttimeout())
- else: # None or a value
- conn.sock.settimeout(read_timeout)
+ conn.sock.settimeout(read_timeout)
# Receive the response from the server
try:
@@ -540,7 +533,7 @@ def urlopen( # type: ignore[override]
retries: Optional[Union[Retry, bool, int]] = None,
redirect: bool = True,
assert_same_host: bool = True,
- timeout: _TYPE_TIMEOUT = _Default,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
pool_timeout: Optional[int] = None,
release_conn: Optional[bool] = None,
chunked: bool = False,
@@ -903,7 +896,7 @@ def __init__(
self,
host: str,
port: Optional[int] = None,
- timeout: _TYPE_TIMEOUT = Timeout.DEFAULT_TIMEOUT,
+ timeout: Optional[_TYPE_TIMEOUT] = _DEFAULT_TIMEOUT,
maxsize: int = 1,
block: bool = False,
headers: Optional[Mapping[str, str]] = None,
@@ -1013,7 +1006,7 @@ def _new_conn(self) -> HTTPConnection:
conn = self.ConnectionCls(
host=actual_host,
port=actual_port,
- timeout=self.timeout.connect_timeout, # type: ignore[arg-type]
+ timeout=self.timeout.connect_timeout,
cert_file=self.cert_file,
key_file=self.key_file,
key_password=self.key_password,
diff --git a/src/urllib3/util/connection.py b/src/urllib3/util/connection.py
--- a/src/urllib3/util/connection.py
+++ b/src/urllib3/util/connection.py
@@ -2,9 +2,9 @@
from typing import Optional, Sequence, Tuple, Union
from ..exceptions import LocationParseError
+from .timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
from .wait import wait_for_read
-SOCKET_GLOBAL_DEFAULT_TIMEOUT = socket._GLOBAL_DEFAULT_TIMEOUT # type: ignore[attr-defined]
_TYPE_SOCKET_OPTIONS = Sequence[Tuple[int, int, Union[int, bytes]]]
@@ -28,7 +28,7 @@ def is_connection_dropped(conn: socket.socket) -> bool: # Platform-specific
# discovered in DNS if the system doesn't have IPv6 functionality.
def create_connection(
address: Tuple[str, int],
- timeout: Optional[float] = SOCKET_GLOBAL_DEFAULT_TIMEOUT,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
source_address: Optional[Tuple[str, int]] = None,
socket_options: Optional[_TYPE_SOCKET_OPTIONS] = None,
) -> socket.socket:
@@ -68,7 +68,7 @@ def create_connection(
# If provided, set socket level options before connecting.
_set_socket_options(sock, socket_options)
- if timeout is not SOCKET_GLOBAL_DEFAULT_TIMEOUT:
+ if timeout is not _DEFAULT_TIMEOUT:
sock.settimeout(timeout)
if source_address:
sock.bind(source_address)
diff --git a/src/urllib3/util/timeout.py b/src/urllib3/util/timeout.py
--- a/src/urllib3/util/timeout.py
+++ b/src/urllib3/util/timeout.py
@@ -1,18 +1,21 @@
import time
-
-# The default socket timeout, used by httplib to indicate that no timeout was
-# specified by the user
-from socket import _GLOBAL_DEFAULT_TIMEOUT # type: ignore[attr-defined]
-from typing import Optional, Union
+from enum import Enum
+from socket import getdefaulttimeout
+from typing import TYPE_CHECKING, Optional, Union
from ..exceptions import TimeoutStateError
-# A sentinel value to indicate that no timeout was specified by the user in
-# urllib3
-_Default = object()
+if TYPE_CHECKING:
+ from typing_extensions import Final
+
+
+class _TYPE_DEFAULT(Enum):
+ token = 0
+
+_DEFAULT_TIMEOUT: "Final[_TYPE_DEFAULT]" = _TYPE_DEFAULT.token
-_TYPE_TIMEOUT = Optional[Union[float, int, object]]
+_TYPE_TIMEOUT = Optional[Union[float, _TYPE_DEFAULT]]
class Timeout:
@@ -101,13 +104,13 @@ class Timeout:
"""
#: A sentinel object representing the default timeout value
- DEFAULT_TIMEOUT: object = _GLOBAL_DEFAULT_TIMEOUT
+ DEFAULT_TIMEOUT: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT
def __init__(
self,
total: _TYPE_TIMEOUT = None,
- connect: _TYPE_TIMEOUT = _Default,
- read: _TYPE_TIMEOUT = _Default,
+ connect: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
+ read: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
) -> None:
self._connect = self._validate_timeout(connect, "connect")
self._read = self._validate_timeout(read, "read")
@@ -120,10 +123,12 @@ def __repr__(self) -> str:
# __str__ provided for backwards compatibility
__str__ = __repr__
+ @staticmethod
+ def resolve_default_timeout(timeout: _TYPE_TIMEOUT) -> Optional[float]:
+ return getdefaulttimeout() if timeout is _DEFAULT_TIMEOUT else timeout
+
@classmethod
- def _validate_timeout(
- cls, value: _TYPE_TIMEOUT, name: str
- ) -> Optional[Union[float, object]]:
+ def _validate_timeout(cls, value: _TYPE_TIMEOUT, name: str) -> _TYPE_TIMEOUT:
"""Check that a timeout attribute is valid.
:param value: The timeout value to validate
@@ -133,10 +138,7 @@ def _validate_timeout(
:raises ValueError: If it is a numeric value less than or equal to
zero, or the type is not an integer, float, or None.
"""
- if value is _Default:
- return cls.DEFAULT_TIMEOUT
-
- if value is None or value is cls.DEFAULT_TIMEOUT:
+ if value is None or value is _DEFAULT_TIMEOUT:
return value
if isinstance(value, bool):
@@ -145,7 +147,7 @@ def _validate_timeout(
"be an int, float or None."
)
try:
- float(value) # type: ignore[arg-type]
+ float(value)
except (TypeError, ValueError):
raise ValueError(
"Timeout value %s was %s, but it must be an "
@@ -153,7 +155,7 @@ def _validate_timeout(
) from None
try:
- if value <= 0: # type: ignore[operator]
+ if value <= 0:
raise ValueError(
"Attempted to set %s timeout to %s, but the "
"timeout cannot be set to a value less "
@@ -169,7 +171,7 @@ def _validate_timeout(
return value
@classmethod
- def from_float(cls, timeout: Optional[Union[int, float, object]]) -> "Timeout":
+ def from_float(cls, timeout: _TYPE_TIMEOUT) -> "Timeout":
"""Create a new Timeout from a legacy timeout value.
The timeout value used by httplib.py sets the same timeout on the
@@ -236,13 +238,13 @@ def connect_timeout(self) -> _TYPE_TIMEOUT:
if self.total is None:
return self._connect
- if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
+ if self._connect is None or self._connect is _DEFAULT_TIMEOUT:
return self.total
- return min(self._connect, self.total) # type: ignore[no-any-return, call-overload]
+ return min(self._connect, self.total) # type: ignore[type-var]
@property
- def read_timeout(self) -> _TYPE_TIMEOUT:
+ def read_timeout(self) -> Optional[float]:
"""Get the value for the read timeout.
This assumes some time has elapsed in the connection timeout and
@@ -254,21 +256,21 @@ def read_timeout(self) -> _TYPE_TIMEOUT:
raised.
:return: Value to use for the read timeout.
- :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ :rtype: int, float or None
:raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
has not yet been called on this object.
"""
if (
self.total is not None
- and self.total is not self.DEFAULT_TIMEOUT
+ and self.total is not _DEFAULT_TIMEOUT
and self._read is not None
- and self._read is not self.DEFAULT_TIMEOUT
+ and self._read is not _DEFAULT_TIMEOUT
):
# In case the connect timeout has not yet been established.
if self._start_connect is None:
return self._read
- return max(0, min(self.total - self.get_connect_duration(), self._read)) # type: ignore[no-any-return, call-overload, operator]
- elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
- return max(0, self.total - self.get_connect_duration()) # type: ignore[operator]
+ return max(0, min(self.total - self.get_connect_duration(), self._read))
+ elif self.total is not None and self.total is not _DEFAULT_TIMEOUT:
+ return max(0, self.total - self.get_connect_duration())
else:
- return self._read
+ return self.resolve_default_timeout(self._read)
| diff --git a/test/test_connectionpool.py b/test/test_connectionpool.py
--- a/test/test_connectionpool.py
+++ b/test/test_connectionpool.py
@@ -31,7 +31,7 @@
)
from urllib3.response import HTTPResponse
from urllib3.util.ssl_match_hostname import CertificateError
-from urllib3.util.timeout import Timeout
+from urllib3.util.timeout import _DEFAULT_TIMEOUT, Timeout
from .test_response import MockChunkedEncodingResponse, MockSock
@@ -410,8 +410,8 @@ def test_pool_timeouts(self) -> None:
conn = pool._new_conn()
assert conn.__class__ == HTTPConnection
assert pool.timeout.__class__ == Timeout
- assert pool.timeout._read == Timeout.DEFAULT_TIMEOUT
- assert pool.timeout._connect == Timeout.DEFAULT_TIMEOUT
+ assert pool.timeout._read == _DEFAULT_TIMEOUT
+ assert pool.timeout._connect == _DEFAULT_TIMEOUT
assert pool.timeout.total is None
pool = HTTPConnectionPool(host="localhost", timeout=SHORT_TIMEOUT)
diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -33,7 +33,7 @@
resolve_ssl_version,
ssl_wrap_socket,
)
-from urllib3.util.timeout import Timeout
+from urllib3.util.timeout import _DEFAULT_TIMEOUT, Timeout
from urllib3.util.url import Url, _encode_invalid_chars, parse_url
from urllib3.util.util import to_bytes, to_str
@@ -602,7 +602,7 @@ def test_timeout(self, time_monotonic: MagicMock) -> None:
assert timeout.connect_timeout == 2
timeout = Timeout()
- assert timeout.connect_timeout == Timeout.DEFAULT_TIMEOUT
+ assert timeout.connect_timeout == _DEFAULT_TIMEOUT
# Connect takes 5 seconds, leaving 5 seconds for read
timeout = Timeout(total=10, read=7)
@@ -629,6 +629,15 @@ def test_timeout(self, time_monotonic: MagicMock) -> None:
timeout = Timeout(5)
assert timeout.total == 5
+ def test_timeout_default_resolve(self) -> None:
+ """The timeout default is resolved when read_timeout is accessed."""
+ timeout = Timeout()
+ with patch("urllib3.util.timeout.getdefaulttimeout", return_value=2):
+ assert timeout.read_timeout == 2
+
+ with patch("urllib3.util.timeout.getdefaulttimeout", return_value=3):
+ assert timeout.read_timeout == 3
+
def test_timeout_str(self) -> None:
timeout = Timeout(connect=1, read=2, total=3)
assert str(timeout) == "Timeout(connect=1, read=2, total=3)"
| Investigate removing `object` from the `Union[...]` of many `timeout` types
`object` is only there because of the `socket` module's "default" sentinel value, which is kind of an unfortunate hack if you ask me.
It's causing a lot of problems type-wise when going from urllib3 timeouts to http.client timeouts. Let's take a peek at whether we can instead remove `object` and add `type: ignore` only where the socket default timeout sentinel is used.
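For reference, the usual mypy-friendly replacement for a bare `object()` sentinel is a single-member `Enum`, which is the shape the patch above lands on with `_TYPE_DEFAULT` / `_DEFAULT_TIMEOUT`. A standalone sketch:
```python
from enum import Enum
from typing import Final, Optional, Union

class _Default(Enum):
    token = 0

_DEFAULT: Final = _Default.token
TimeoutValue = Union[float, _Default, None]

def resolve(value: TimeoutValue) -> Optional[float]:
    # After the identity check, mypy narrows `value` to Optional[float],
    # so no `type: ignore` is needed here.
    return 30.0 if value is _DEFAULT else value
```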
| Related discussion: https://github.com/encode/httpx/issues/1384
I've tested it on my own setup. By removing `object`, we would have to add 9 new `type: ignore` comments, while 9 existing ones would be removed.
I can create a PR if you agree. | 2021-10-26T15:33:52Z | [] | [] |
urllib3/urllib3 | 2,518 | urllib3__urllib3-2518 | [
"2486"
] | 33afb5c96c435ab7118b5f41e26f96c290fef55b | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -25,6 +25,8 @@
if TYPE_CHECKING:
from typing_extensions import Literal
+ from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT
+
from .util.proxy import create_proxy_ssl_context
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout
from .util.util import to_bytes, to_str
@@ -50,7 +52,6 @@ class BaseSSLError(BaseException): # type: ignore[no-redef]
)
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_
from .util.ssl_ import (
- _TYPE_PEER_CERT_RET,
assert_fingerprint,
create_urllib3_context,
resolve_cert_reqs,
@@ -537,8 +538,23 @@ def connect(self) -> None:
and not context.check_hostname
and self.assert_hostname is not False
):
- cert = self.sock.getpeercert()
- _match_hostname(cert, self.assert_hostname or server_hostname)
+ cert: "_TYPE_PEER_CERT_RET_DICT" = self.sock.getpeercert() # type: ignore[assignment]
+
+ # Need to signal to our match_hostname whether to use 'commonName' or not.
+ # If we're using our own constructed SSLContext we explicitly set 'False'
+ # because PyPy hard-codes 'True' from SSLContext.hostname_checks_common_name.
+ if default_ssl_context:
+ hostname_checks_common_name = False
+ else:
+ hostname_checks_common_name = (
+ getattr(context, "hostname_checks_common_name", False) or False
+ )
+
+ _match_hostname(
+ cert,
+ self.assert_hostname or server_hostname,
+ hostname_checks_common_name,
+ )
self.is_verified = context.verify_mode == ssl.CERT_REQUIRED or bool(
self.assert_fingerprint
@@ -583,9 +599,13 @@ def _connect_tls_proxy(self, hostname: str, conn: socket.socket) -> "ssl.SSLSock
)
-def _match_hostname(cert: _TYPE_PEER_CERT_RET, asserted_hostname: str) -> None:
+def _match_hostname(
+ cert: Optional["_TYPE_PEER_CERT_RET_DICT"],
+ asserted_hostname: str,
+ hostname_checks_common_name: bool = False,
+) -> None:
try:
- match_hostname(cert, asserted_hostname)
+ match_hostname(cert, asserted_hostname, hostname_checks_common_name)
except CertificateError as e:
log.warning(
"Certificate did not match expected hostname: %s. Certificate: %s",
diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -5,7 +5,7 @@
import warnings
from binascii import unhexlify
from hashlib import md5, sha1, sha256
-from typing import TYPE_CHECKING, Dict, Mapping, Optional, Tuple, Union, cast, overload
+from typing import TYPE_CHECKING, Dict, Optional, Tuple, Union, cast, overload
from ..exceptions import ProxySchemeUnsupported, SNIMissingWarning, SSLError
from .url import _BRACELESS_IPV6_ADDRZ_RE, _IPV4_RE
@@ -59,13 +59,22 @@ def _is_openssl_issue_14579_fixed(
def _is_bpo_43522_fixed(
implementation_name: str, version_info: _TYPE_VERSION_INFO
) -> bool:
- """Return True if PyPy or CPython 3.8.9+, 3.9.3+ or 3.10+ where setting
+ """Return True for CPython 3.8.9+, 3.9.3+ or 3.10+ where setting
SSLContext.hostname_checks_common_name to False works.
+
+ PyPy 7.3.7 doesn't work as it doesn't ship with OpenSSL 1.1.1l+
+ so we're waiting for a version of PyPy that works before
+ allowing this function to return 'True'.
+
+ Outside of CPython and PyPy we don't know which implementations work
+ or not so we conservatively use our hostname matching as we know that works
+ on all implementations.
+
https://github.com/urllib3/urllib3/issues/2192#issuecomment-821832963
https://foss.heptapod.net/pypy/pypy/-/issues/3539#
"""
if implementation_name != "cpython":
- return True
+ return False
major_minor = version_info[:2]
micro = version_info[2]
@@ -90,10 +99,16 @@ def _is_has_never_check_common_name_reliable(
if TYPE_CHECKING:
from ssl import VerifyMode
- from typing_extensions import Literal
+ from typing_extensions import Literal, TypedDict
from .ssltransport import SSLTransport as SSLTransportType
+ class _TYPE_PEER_CERT_RET_DICT(TypedDict, total=False):
+ subjectAltName: Tuple[Tuple[str, str], ...]
+ subject: Tuple[Tuple[Tuple[str, str], ...], ...]
+ serialNumber: str
+
+
# Mapping from 'ssl.PROTOCOL_TLSX' to 'TLSVersion.X'
_SSL_VERSION_TO_TLS_VERSION: Dict[int, int] = {}
@@ -150,10 +165,7 @@ def _is_has_never_check_common_name_reliable(
PROTOCOL_TLS_CLIENT = 16 # type: ignore[assignment]
-_PCTRTT = Tuple[Tuple[str, str], ...]
-_PCTRTTT = Tuple[_PCTRTT, ...]
-_TYPE_PEER_CERT_RET_DICT = Mapping[str, Union[str, _PCTRTTT, _PCTRTT]]
-_TYPE_PEER_CERT_RET = Union[_TYPE_PEER_CERT_RET_DICT, bytes, None]
+_TYPE_PEER_CERT_RET = Union["_TYPE_PEER_CERT_RET_DICT", bytes, None]
# A secure default.
# Sources for more information on TLS ciphers:
@@ -396,15 +408,19 @@ def create_urllib3_context(
# The order of the below lines setting verify_mode and check_hostname
# matter due to safe-guards SSLContext has to prevent an SSLContext with
# check_hostname=True, verify_mode=NONE/OPTIONAL.
- if cert_reqs == ssl.CERT_REQUIRED:
+ # We always set 'check_hostname=False' for pyOpenSSL so we rely on our own
+ # 'ssl.match_hostname()' implementation.
+ if cert_reqs == ssl.CERT_REQUIRED and not IS_PYOPENSSL:
context.verify_mode = cert_reqs
context.check_hostname = True
else:
context.check_hostname = False
context.verify_mode = cert_reqs
- if HAS_NEVER_CHECK_COMMON_NAME:
+ try:
context.hostname_checks_common_name = False
+ except AttributeError:
+ pass
# Enable logging of TLS session keys via defacto standard environment variable
# 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
diff --git a/src/urllib3/util/ssl_match_hostname.py b/src/urllib3/util/ssl_match_hostname.py
--- a/src/urllib3/util/ssl_match_hostname.py
+++ b/src/urllib3/util/ssl_match_hostname.py
@@ -6,9 +6,10 @@
import ipaddress
import re
-from typing import Any, Match, Optional, Union
+from typing import TYPE_CHECKING, Any, Match, Optional, Tuple, Union
-from .ssl_ import _TYPE_PEER_CERT_RET
+if TYPE_CHECKING:
+ from .ssl_ import _TYPE_PEER_CERT_RET_DICT
__version__ = "3.5.0.1"
@@ -85,7 +86,11 @@ def _ipaddress_match(ipname: Any, host_ip: str) -> bool:
return bool(ip == host_ip)
-def match_hostname(cert: _TYPE_PEER_CERT_RET, hostname: str) -> None:
+def match_hostname(
+ cert: Optional["_TYPE_PEER_CERT_RET_DICT"],
+ hostname: str,
+ hostname_checks_common_name: bool = False,
+) -> None:
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
@@ -106,8 +111,10 @@ def match_hostname(cert: _TYPE_PEER_CERT_RET, hostname: str) -> None:
# Not an IP address (common case)
host_ip = None
dnsnames = []
- san = cert.get("subjectAltName", ()) # type: ignore[union-attr]
- for key, value in san: # type: ignore[misc]
+ san: Tuple[Tuple[str, str], ...] = cert.get("subjectAltName", ())
+ key: str
+ value: str
+ for key, value in san:
if key == "DNS":
if host_ip is None and _dnsname_match(value, hostname):
return
@@ -116,6 +123,17 @@ def match_hostname(cert: _TYPE_PEER_CERT_RET, hostname: str) -> None:
if host_ip is not None and _ipaddress_match(value, host_ip):
return
dnsnames.append(value)
+
+ # We only check 'commonName' if it's enabled and we're not verifying
+ # an IP address. IP addresses aren't valid within 'commonName'.
+ if hostname_checks_common_name and host_ip is None and not dnsnames:
+ for sub in cert.get("subject", ()):
+ for key, value in sub:
+ if key == "commonName":
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+
if len(dnsnames) > 1:
raise CertificateError(
"hostname %r "
diff --git a/src/urllib3/util/ssltransport.py b/src/urllib3/util/ssltransport.py
--- a/src/urllib3/util/ssltransport.py
+++ b/src/urllib3/util/ssltransport.py
@@ -194,14 +194,8 @@ def getpeercert(
def getpeercert(self, binary_form: "Literal[True]") -> Optional[bytes]:
...
- @overload
- def getpeercert(self, binary_form: bool) -> "_TYPE_PEER_CERT_RET":
- ...
-
- def getpeercert(
- self, binary_form: bool = False
- ) -> Union[None, bytes, "_TYPE_PEER_CERT_RET_DICT", "_TYPE_PEER_CERT_RET"]:
- return self.sslobj.getpeercert(binary_form)
+ def getpeercert(self, binary_form: bool = False) -> "_TYPE_PEER_CERT_RET":
+ return self.sslobj.getpeercert(binary_form) # type: ignore[return-value]
def version(self) -> Optional[str]:
return self.sslobj.version()
| diff --git a/test/conftest.py b/test/conftest.py
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -100,6 +100,18 @@ def no_san_server(
yield cfg
[email protected]()
+def no_san_server_with_different_commmon_name(
+ tmp_path_factory: pytest.TempPathFactory,
+) -> Generator[ServerConfig, None, None]:
+ tmpdir = tmp_path_factory.mktemp("certs")
+ ca = trustme.CA()
+ server_cert = ca.issue_cert(common_name="example.com")
+
+ with run_server_in_thread("https", "localhost", tmpdir, ca, server_cert) as cfg:
+ yield cfg
+
+
@pytest.fixture
def ip_san_server(
tmp_path_factory: pytest.TempPathFactory,
diff --git a/test/test_connection.py b/test/test_connection.py
--- a/test/test_connection.py
+++ b/test/test_connection.py
@@ -1,5 +1,6 @@
import datetime
import socket
+import typing
from unittest import mock
import pytest
@@ -10,12 +11,14 @@
HTTPSConnection,
_match_hostname,
)
-from urllib3.util.ssl_ import _TYPE_PEER_CERT_RET
from urllib3.util.ssl_match_hostname import (
CertificateError as ImplementationCertificateError,
)
from urllib3.util.ssl_match_hostname import _dnsname_match, match_hostname
+if typing.TYPE_CHECKING:
+ from urllib3.util.ssl_ import _TYPE_PEER_CERT_RET_DICT
+
class TestConnection:
"""
@@ -29,18 +32,18 @@ def test_match_hostname_no_cert(self) -> None:
_match_hostname(cert, asserted_hostname)
def test_match_hostname_empty_cert(self) -> None:
- cert: _TYPE_PEER_CERT_RET = {}
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {}
asserted_hostname = "foo"
with pytest.raises(ValueError):
_match_hostname(cert, asserted_hostname)
def test_match_hostname_match(self) -> None:
- cert: _TYPE_PEER_CERT_RET = {"subjectAltName": (("DNS", "foo"),)}
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {"subjectAltName": (("DNS", "foo"),)}
asserted_hostname = "foo"
_match_hostname(cert, asserted_hostname)
def test_match_hostname_mismatch(self) -> None:
- cert: _TYPE_PEER_CERT_RET = {"subjectAltName": (("DNS", "foo"),)}
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {"subjectAltName": (("DNS", "foo"),)}
asserted_hostname = "bar"
try:
with mock.patch("urllib3.connection.log.warning") as mock_log:
@@ -55,7 +58,7 @@ def test_match_hostname_mismatch(self) -> None:
assert e._peer_cert == cert
def test_match_hostname_no_dns(self) -> None:
- cert: _TYPE_PEER_CERT_RET = {"subjectAltName": (("DNS", ""),)}
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {"subjectAltName": (("DNS", ""),)}
asserted_hostname = "bar"
try:
with mock.patch("urllib3.connection.log.warning") as mock_log:
@@ -70,24 +73,24 @@ def test_match_hostname_no_dns(self) -> None:
assert e._peer_cert == cert
def test_match_hostname_startwith_wildcard(self) -> None:
- cert: _TYPE_PEER_CERT_RET = {"subjectAltName": (("DNS", "*"),)}
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {"subjectAltName": (("DNS", "*"),)}
asserted_hostname = "foo"
_match_hostname(cert, asserted_hostname)
def test_match_hostname_dnsname(self) -> None:
- cert: _TYPE_PEER_CERT_RET = {
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {
"subjectAltName": (("DNS", "xn--p1b6ci4b4b3a*.xn--11b5bs8d"),)
}
asserted_hostname = "xn--p1b6ci4b4b3a*.xn--11b5bs8d"
_match_hostname(cert, asserted_hostname)
def test_match_hostname_include_wildcard(self) -> None:
- cert: _TYPE_PEER_CERT_RET = {"subjectAltName": (("DNS", "foo*"),)}
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {"subjectAltName": (("DNS", "foo*"),)}
asserted_hostname = "foobar"
_match_hostname(cert, asserted_hostname)
def test_match_hostname_more_than_one_dnsname_error(self) -> None:
- cert: _TYPE_PEER_CERT_RET = {
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {
"subjectAltName": (("DNS", "foo*"), ("DNS", "fo*"))
}
asserted_hostname = "bar"
@@ -99,7 +102,7 @@ def test_dnsname_match_include_more_than_one_wildcard_error(self) -> None:
_dnsname_match("foo**", "foobar")
def test_match_hostname_ignore_common_name(self) -> None:
- cert: _TYPE_PEER_CERT_RET = {"subject": (("commonName", "foo"),)}
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {"subject": ((("commonName", "foo"),),)}
asserted_hostname = "foo"
with pytest.raises(
ImplementationCertificateError,
@@ -107,8 +110,15 @@ def test_match_hostname_ignore_common_name(self) -> None:
):
match_hostname(cert, asserted_hostname)
+ def test_match_hostname_check_common_name(self) -> None:
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {"subject": ((("commonName", "foo"),),)}
+ asserted_hostname = "foo"
+ match_hostname(cert, asserted_hostname, True)
+
def test_match_hostname_ip_address(self) -> None:
- cert: _TYPE_PEER_CERT_RET = {"subjectAltName": (("IP Address", "1.1.1.1"),)}
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {
+ "subjectAltName": (("IP Address", "1.1.1.1"),)
+ }
asserted_hostname = "1.1.1.2"
try:
with mock.patch("urllib3.connection.log.warning") as mock_log:
@@ -123,7 +133,9 @@ def test_match_hostname_ip_address(self) -> None:
assert e._peer_cert == cert
def test_match_hostname_ip_address_ipv6(self) -> None:
- cert = {"subjectAltName": (("IP Address", "1:2::2:1"),)}
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {
+ "subjectAltName": (("IP Address", "1:2::2:1"),)
+ }
asserted_hostname = "1:2::2:2"
try:
with mock.patch("urllib3.connection.log.warning") as mock_log:
@@ -138,7 +150,9 @@ def test_match_hostname_ip_address_ipv6(self) -> None:
assert e._peer_cert == cert
def test_match_hostname_ip_address_ipv6_brackets(self) -> None:
- cert = {"subjectAltName": (("IP Address", "1:2::2:1"),)}
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {
+ "subjectAltName": (("IP Address", "1:2::2:1"),)
+ }
asserted_hostname = "[1:2::2:1]"
# Assert no error is raised
_match_hostname(cert, asserted_hostname)
diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -1017,8 +1017,8 @@ def test_ssl_wrap_socket_sni_none_no_warn(self) -> None:
("OpenSSL 1.1.1l", 0x101010CF, "cpython", (3, 9, 3), True),
# Python OK -> reliable
("OpenSSL 1.1.1", 0x10101000, "cpython", (3, 9, 3), True),
- ("OpenSSL 1.1.1", 0x10101000, "pypy", (3, 6, 9), True),
- ("LibreSSL 3.3.5", 0x101010CF, "pypy", (3, 6, 9), True),
+ ("OpenSSL 1.1.1", 0x10101000, "pypy", (3, 6, 9), False),
+ ("LibreSSL 3.3.5", 0x101010CF, "pypy", (3, 6, 9), False),
# OpenSSL OK -> reliable
("OpenSSL", 0x101010CF, "cpython", (3, 9, 2), True),
# unreliable
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -42,6 +42,7 @@
SSLError,
SystemTimeWarning,
)
+from urllib3.util.ssl_match_hostname import CertificateError
from urllib3.util.timeout import Timeout
from .. import has_alpn
@@ -988,6 +989,68 @@ def test_common_name_without_san_fails(self, no_san_server: ServerConfig) -> Non
e.value
) or "no appropriate subjectAltName" in str(e.value)
+ def test_common_name_without_san_with_different_common_name(
+ self, no_san_server_with_different_commmon_name: ServerConfig
+ ) -> None:
+ ctx = urllib3.util.ssl_.create_urllib3_context()
+ try:
+ ctx.hostname_checks_common_name = True
+ except AttributeError:
+ pytest.skip("Couldn't set 'SSLContext.hostname_checks_common_name'")
+
+ with HTTPSConnectionPool(
+ no_san_server_with_different_commmon_name.host,
+ no_san_server_with_different_commmon_name.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=no_san_server_with_different_commmon_name.ca_certs,
+ ssl_context=ctx,
+ ) as https_pool:
+ with pytest.raises(MaxRetryError) as e:
+ https_pool.request("GET", "/")
+ assert "mismatch, certificate is not valid for 'localhost'" in str(
+ e.value
+ ) or "hostname 'localhost' doesn't match 'example.com'" in str(e.value)
+
+ @pytest.mark.parametrize("use_assert_hostname", [True, False])
+ def test_hostname_checks_common_name_respected(
+ self, no_san_server: ServerConfig, use_assert_hostname: bool
+ ) -> None:
+ ctx = urllib3.util.ssl_.create_urllib3_context()
+ if not hasattr(ctx, "hostname_checks_common_name"):
+ pytest.skip("Test requires 'SSLContext.hostname_checks_common_name'")
+ ctx.load_verify_locations(no_san_server.ca_certs)
+ try:
+ ctx.hostname_checks_common_name = True
+ except AttributeError:
+ pytest.skip("Couldn't set 'SSLContext.hostname_checks_common_name'")
+
+ err: Optional[MaxRetryError]
+ try:
+ with HTTPSConnectionPool(
+ no_san_server.host,
+ no_san_server.port,
+ cert_reqs="CERT_REQUIRED",
+ ssl_context=ctx,
+ assert_hostname=no_san_server.host if use_assert_hostname else None,
+ ) as https_pool:
+ https_pool.request("GET", "/")
+ except MaxRetryError as e:
+ err = e
+ else:
+ err = None
+
+ # commonName is only valid for DNS names, not IP addresses.
+ if no_san_server.host == "localhost":
+ assert err is None
+
+ # IP addresses should fail for commonName.
+ else:
+ assert err is not None
+ assert type(err.reason) == SSLError
+ assert isinstance(
+ err.reason.args[0], (ssl.SSLCertVerificationError, CertificateError)
+ )
+
def test_strip_square_brackets_before_validating(
self, ipv6_san_server: ServerConfig
) -> None:
| Try our test suite against PyPy 7.3.6 (Python 3.8 beta)
We currently test against pypy-3.7, but last month PyPy 7.3.6 added beta support for Python 3.8. @pquentin [submitted an issue](https://foss.heptapod.net/pypy/pypy/-/issues/3539) that was fixed and released in this beta version; we should test the beta version before a stable release to see if the fix was successful.
We don't necessarily need to run this test suite on every PR yet (although that would be welcome; I'm not sure how feasible it is).
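One cheap smoke test before wiring the beta into CI (assuming, per the linked issue, that the fix amounts to exposing `ssl.HAS_NEVER_CHECK_COMMON_NAME`):
```python
import ssl
import sys

# Under PyPy 7.3.6 this should print "pypy" and the flag rather than "missing".
print(sys.implementation.name, getattr(ssl, "HAS_NEVER_CHECK_COMMON_NAME", "missing"))
```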
| 2022-01-04T12:18:49Z | [] | [] |
|
urllib3/urllib3 | 2,519 | urllib3__urllib3-2519 | [
"2512"
] | fd2759aa16b12b33298900c77d29b3813c6582de | diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -41,7 +41,7 @@
from .response import BaseHTTPResponse, HTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
-from .util.request import set_file_position
+from .util.request import _TYPE_BODY_POSITION, set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
@@ -547,7 +547,7 @@ def urlopen( # type: ignore[override]
pool_timeout: Optional[int] = None,
release_conn: Optional[bool] = None,
chunked: bool = False,
- body_pos: Optional[Union[int, object]] = None,
+ body_pos: Optional[_TYPE_BODY_POSITION] = None,
**response_kw: Any,
) -> BaseHTTPResponse:
"""
diff --git a/src/urllib3/util/request.py b/src/urllib3/util/request.py
--- a/src/urllib3/util/request.py
+++ b/src/urllib3/util/request.py
@@ -1,8 +1,12 @@
from base64 import b64encode
-from typing import IO, Any, AnyStr, Dict, List, Optional, Union
+from enum import Enum
+from typing import IO, TYPE_CHECKING, Any, AnyStr, Dict, List, Optional, Union
from ..exceptions import UnrewindableBodyError
+if TYPE_CHECKING:
+ from typing_extensions import Final
+
# Pass as a value within ``headers`` to skip
# emitting some HTTP headers that are added automatically.
# The only headers that are supported are ``Accept-Encoding``,
@@ -21,7 +25,14 @@
else:
ACCEPT_ENCODING += ",br"
-_FAILEDTELL = object()
+
+class _TYPE_FAILEDTELL(Enum):
+ token = 0
+
+
+_FAILEDTELL: "Final[_TYPE_FAILEDTELL]" = _TYPE_FAILEDTELL.token
+
+_TYPE_BODY_POSITION = Union[int, _TYPE_FAILEDTELL]
def make_headers(
@@ -104,8 +115,8 @@ def make_headers(
def set_file_position(
- body: Any, pos: Optional[Union[int, object]]
-) -> Optional[Union[int, object]]:
+ body: Any, pos: Optional[_TYPE_BODY_POSITION]
+) -> Optional[_TYPE_BODY_POSITION]:
"""
If a position is provided, move file to that point.
Otherwise, we'll attempt to record a position for future use.
@@ -123,7 +134,7 @@ def set_file_position(
return pos
-def rewind_body(body: IO[AnyStr], body_pos: Optional[Union[int, object]]) -> None:
+def rewind_body(body: IO[AnyStr], body_pos: _TYPE_BODY_POSITION) -> None:
"""
Attempt to rewind body to a certain position.
Primarily used for request redirects and retries.
| diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -530,9 +530,9 @@ def test_rewind_body_bad_position(self) -> None:
# Pass non-integer position
with pytest.raises(ValueError):
- rewind_body(body, body_pos=None)
+ rewind_body(body, body_pos=None) # type: ignore[arg-type]
with pytest.raises(ValueError):
- rewind_body(body, body_pos=object())
+ rewind_body(body, body_pos=object()) # type: ignore[arg-type]
def test_rewind_body_failed_seek(self) -> None:
class BadSeek(io.StringIO):
| Change set_file_position() type to use proper sentinel with _FAILEDTELL
Currently `_FAILEDTELL` in `urllib3.util.request` uses `object` as a sentinel type. We should give this object the same treatment as `_DEFAULT_TIMEOUT` and switch to a `Final[Enum]`. This type should then be used wherever `body_pos` is used.
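For orientation, here's roughly how the sentinel flows through the two helpers touched in the diff above (a self-contained sketch):
```python
import io

from urllib3.util.request import rewind_body, set_file_position

body = io.BytesIO(b"payload")
body.read(3)                         # pretend part of the body was sent
pos = set_file_position(body, None)  # records tell() == 3 (or _FAILEDTELL)
body.read()                          # the rest is consumed before a retry
rewind_body(body, pos)               # retry path: seek back to position 3
assert body.read() == b"load"
```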
cc @hramezani
| I will work on this.
@imkaka Sounds good, I await a PR! 🙇♂️ | 2022-01-04T17:21:33Z | [] | [] |
urllib3/urllib3 | 2,571 | urllib3__urllib3-2571 | [
"2570"
] | b19e55edf97c2a259b90e59014c21d7f51de62f6 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -447,17 +447,16 @@ def connect(self) -> None:
self._connecting_to_proxy = bool(self.proxy)
sock: Union[socket.socket, "ssl.SSLSocket"]
- sock = self._new_conn()
+ self.sock = sock = self._new_conn()
hostname: str = self.host
tls_in_tls = False
if self._is_using_tunnel():
if self.tls_in_tls_required:
- sock = self._connect_tls_proxy(hostname, sock)
+ self.sock = sock = self._connect_tls_proxy(hostname, sock)
tls_in_tls = True
self._connecting_to_proxy = False
- self.sock = sock
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
| diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -329,6 +329,23 @@ def test_verified_with_bad_ca_certs(self) -> None:
or "self signed certificate in certificate chain" in str(e.value.reason)
), f"Expected 'certificate verify failed', instead got: {e.value.reason!r}"
+ def test_wrap_socket_failure_resource_leak(self) -> None:
+ with HTTPSConnectionPool(
+ self.host,
+ self.port,
+ cert_reqs="CERT_REQUIRED",
+ ca_certs=self.bad_ca_path,
+ ssl_minimum_version=self.tls_version(),
+ ) as https_pool:
+ conn = https_pool._get_conn()
+ try:
+ with pytest.raises(ssl.SSLError):
+ conn.connect()
+
+ assert conn.sock
+ finally:
+ conn.close()
+
def test_verified_without_ca_certs(self) -> None:
# default is cert_reqs=None which is ssl.CERT_NONE
with HTTPSConnectionPool(
| socket leaked when ssl_wrap_socket fails
### Subject
socket leaked when ssl_wrap_socket fails
### Environment
```
>>> import platform
>>> import urllib3
>>>
>>> print("OS", platform.platform())
OS Linux-5.4.0-100-generic-x86_64-with-glibc2.31
>>> print("Python", platform.python_version())
Python 3.9.10
>>> print("urllib3", urllib3.__version__)
urllib3 1.26.8
```
### Steps to Reproduce
```python
import pytest
import urllib3
def test_requests_leak():
with pytest.raises(Exception):
with urllib3.PoolManager(ca_certs=__file__) as http:
http.request("GET", "https://google.com")
```
### Expected Behavior
no ResourceWarning
### Actual Behavior
```
:
def unraisable_exception_runtest_hook() -> Generator[None, None, None]:
with catch_unraisable_exception() as cm:
yield
if cm.unraisable:
if cm.unraisable.err_msg is not None:
err_msg = cm.unraisable.err_msg
else:
err_msg = "Exception ignored in"
msg = f"{err_msg}: {cm.unraisable.object!r}\n\n"
msg += "".join(
traceback.format_exception(
cm.unraisable.exc_type,
cm.unraisable.exc_value,
cm.unraisable.exc_traceback,
)
)
> warnings.warn(pytest.PytestUnraisableExceptionWarning(msg))
E pytest.PytestUnraisableExceptionWarning: Exception ignored in: <socket.socket fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
E
E Traceback (most recent call last):
E File "/home/graingert/projects/requests-leak/test_requests_leak.py", line 8, in test_requests_leak
E http.request("GET", "https://google.com")
E ResourceWarning: unclosed <socket.socket fd=14, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('192.168.99.58', 43618), raddr=('172.217.16.238', 443)>
```
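
For context on the eventual fix (the patch above): the leak happens because the freshly created TCP socket is never stored on the connection when the TLS wrap fails, so `close()` has nothing to release. A minimal sketch of the idea using a simplified, hypothetical connection class (only the `self.sock` attribute mirrors the real one):

```python
# Hedged sketch, not the actual urllib3 implementation.
import socket
import ssl
from typing import Optional, Union


class SketchHTTPSConnection:
    def __init__(self, host: str, port: int) -> None:
        self.host, self.port = host, port
        self.sock: Optional[Union[socket.socket, ssl.SSLSocket]] = None

    def connect(self) -> None:
        # Store the plain TCP socket *before* the TLS handshake, so that
        # close() can still release it if wrap_socket() raises (e.g. on a
        # certificate verification failure).
        self.sock = sock = socket.create_connection((self.host, self.port))
        ctx = ssl.create_default_context()
        self.sock = ctx.wrap_socket(sock, server_hostname=self.host)

    def close(self) -> None:
        if self.sock is not None:
            self.sock.close()
            self.sock = None
```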
| 2022-02-25T16:47:11Z | [] | [] |
|
urllib3/urllib3 | 2,624 | urllib3__urllib3-2624 | [
"1992"
] | 972e9f02cd219892701fa0a1129dda7f81dac535 | diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -16,7 +16,7 @@
def tests_impl(
session: nox.Session,
- extras: str = "socks,secure,brotli",
+ extras: str = "socks,secure,brotli,zstd",
byte_string_comparisons: bool = True,
) -> None:
# Install deps and the package itself.
@@ -168,7 +168,7 @@ def mypy(session: nox.Session) -> None:
@nox.session
def docs(session: nox.Session) -> None:
session.install("-r", "docs/requirements.txt")
- session.install(".[socks,secure,brotli]")
+ session.install(".[socks,secure,brotli,zstd]")
session.chdir("docs")
if os.path.exists("_build"):
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -1,6 +1,7 @@
import io
import json as _json
import logging
+import re
import zlib
from contextlib import contextmanager
from http.client import HTTPMessage as _HttplibHTTPMessage
@@ -27,6 +28,22 @@
except ImportError:
brotli = None
+try:
+ import zstandard as zstd # type: ignore[import]
+
+ # The package 'zstandard' added the 'eof' property starting
+ # in v0.18.0 which we require to ensure a complete and
+ # valid zstd stream was fed into the ZstdDecoder.
+ # See: https://github.com/urllib3/urllib3/pull/2624
+    _zstd_version = tuple(
+ map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr]
+ )
+ if _zstd_version < (0, 18): # Defensive:
+ zstd = None
+
+except (AttributeError, ImportError, ValueError): # Defensive:
+ zstd = None
+
from ._collections import HTTPHeaderDict
from .connection import _TYPE_BODY, BaseSSLError, HTTPConnection, HTTPException
from .exceptions import (
@@ -148,6 +165,24 @@ def flush(self) -> bytes:
return b""
+if zstd is not None:
+
+ class ZstdDecoder(ContentDecoder):
+ def __init__(self) -> None:
+ self._obj = zstd.ZstdDecompressor().decompressobj()
+
+ def decompress(self, data: bytes) -> bytes:
+ if not data:
+ return b""
+ return self._obj.decompress(data) # type: ignore[no-any-return]
+
+ def flush(self) -> bytes:
+ ret = self._obj.flush()
+ if not self._obj.eof:
+ raise DecodeError("Zstandard data is incomplete")
+ return ret # type: ignore[no-any-return]
+
+
class MultiDecoder(ContentDecoder):
"""
From RFC7231:
@@ -179,6 +214,9 @@ def _get_decoder(mode: str) -> ContentDecoder:
if brotli is not None and mode == "br":
return BrotliDecoder()
+ if zstd is not None and mode == "zstd":
+ return ZstdDecoder()
+
return DeflateDecoder()
@@ -186,12 +224,17 @@ class BaseHTTPResponse(io.IOBase):
CONTENT_DECODERS = ["gzip", "deflate"]
if brotli is not None:
CONTENT_DECODERS += ["br"]
+ if zstd is not None:
+ CONTENT_DECODERS += ["zstd"]
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
DECODER_ERROR_CLASSES: Tuple[Type[Exception], ...] = (IOError, zlib.error)
if brotli is not None:
DECODER_ERROR_CLASSES += (brotli.error,)
+ if zstd is not None:
+ DECODER_ERROR_CLASSES += (zstd.ZstdError,)
+
def __init__(
self,
*,
diff --git a/src/urllib3/util/request.py b/src/urllib3/util/request.py
--- a/src/urllib3/util/request.py
+++ b/src/urllib3/util/request.py
@@ -24,6 +24,12 @@
pass
else:
ACCEPT_ENCODING += ",br"
+try:
+ import zstandard as _unused_module_zstd # type: ignore[import] # noqa: F401
+except ImportError:
+ pass
+else:
+ ACCEPT_ENCODING += ",zstd"
class _TYPE_FAILEDTELL(Enum):
| diff --git a/test/__init__.py b/test/__init__.py
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -31,6 +31,11 @@
except ImportError:
brotli = None
+try:
+ import zstandard as zstd # type: ignore[import]
+except ImportError:
+ zstd = None
+
import functools
from urllib3 import util
@@ -146,6 +151,19 @@ def notBrotli() -> Callable[[_TestFuncT], _TestFuncT]:
)
+def onlyZstd() -> Callable[[_TestFuncT], _TestFuncT]:
+ return pytest.mark.skipif(
+ zstd is None, reason="only run if a python-zstandard library is installed"
+ )
+
+
+def notZstd() -> Callable[[_TestFuncT], _TestFuncT]:
+ return pytest.mark.skipif(
+ zstd is not None,
+ reason="only run if a python-zstandard library is not installed",
+ )
+
+
# Hack to make pytest evaluate a condition at test runtime instead of collection time.
def lazy_condition(condition: Callable[[], bool]) -> bool:
class LazyCondition:
diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -6,7 +6,7 @@
from base64 import b64decode
from http.client import IncompleteRead as httplib_IncompleteRead
from io import BufferedReader, BytesIO, TextIOWrapper
-from test import onlyBrotli
+from test import onlyBrotli, onlyZstd
from typing import Any, Generator, List, Optional
from unittest import mock
@@ -22,7 +22,7 @@
ResponseNotChunked,
SSLError,
)
-from urllib3.response import HTTPResponse, brotli # type: ignore[attr-defined]
+from urllib3.response import HTTPResponse, brotli, zstd # type: ignore[attr-defined]
from urllib3.util.response import is_fp_closed
from urllib3.util.retry import RequestHistory, Retry
@@ -254,6 +254,39 @@ def test_decode_brotli_error(self) -> None:
with pytest.raises(DecodeError):
HTTPResponse(fp, headers={"content-encoding": "br"})
+ @onlyZstd()
+ def test_decode_zstd(self) -> None:
+ data = zstd.compress(b"foo")
+
+ fp = BytesIO(data)
+ r = HTTPResponse(fp, headers={"content-encoding": "zstd"})
+ assert r.data == b"foo"
+
+ @onlyZstd()
+ def test_chunked_decoding_zstd(self) -> None:
+ data = zstd.compress(b"foobarbaz")
+
+ fp = BytesIO(data)
+ r = HTTPResponse(
+ fp, headers={"content-encoding": "zstd"}, preload_content=False
+ )
+
+ ret = b""
+
+ for _ in range(100):
+ ret += r.read(1)
+ if r.closed:
+ break
+ assert ret == b"foobarbaz"
+
+ @onlyZstd()
+ @pytest.mark.parametrize("data", [b"foo", b"x" * 100])
+ def test_decode_zstd_error(self, data: bytes) -> None:
+ fp = BytesIO(data)
+
+ with pytest.raises(DecodeError):
+ HTTPResponse(fp, headers={"content-encoding": "zstd"})
+
def test_multi_decoding_deflate_deflate(self) -> None:
data = zlib.compress(zlib.compress(b"foo"))
diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -5,7 +5,7 @@
import sys
import warnings
from itertools import chain
-from test import ImportBlocker, ModuleStash, notBrotli, onlyBrotli
+from test import ImportBlocker, ModuleStash, notBrotli, notZstd, onlyBrotli, onlyZstd
from typing import TYPE_CHECKING, Dict, List, NoReturn, Optional, Tuple, Union
from unittest import mock
from unittest.mock import MagicMock, Mock, patch
@@ -515,27 +515,47 @@ def test_parse_url_bytes_type_error(self) -> None:
@pytest.mark.parametrize(
"kwargs, expected",
[
+ pytest.param(
+ {"accept_encoding": True},
+ {"accept-encoding": "gzip,deflate,br,zstd"},
+ marks=[onlyBrotli(), onlyZstd()], # type: ignore[list-item]
+ ),
pytest.param(
{"accept_encoding": True},
{"accept-encoding": "gzip,deflate,br"},
- marks=onlyBrotli(), # type: ignore[arg-type]
+ marks=[onlyBrotli(), notZstd()], # type: ignore[list-item]
+ ),
+ pytest.param(
+ {"accept_encoding": True},
+ {"accept-encoding": "gzip,deflate,zstd"},
+ marks=[notBrotli(), onlyZstd()], # type: ignore[list-item]
),
pytest.param(
{"accept_encoding": True},
{"accept-encoding": "gzip,deflate"},
- marks=notBrotli(), # type: ignore[arg-type]
+ marks=[notBrotli(), notZstd()], # type: ignore[list-item]
),
({"accept_encoding": "foo,bar"}, {"accept-encoding": "foo,bar"}),
({"accept_encoding": ["foo", "bar"]}, {"accept-encoding": "foo,bar"}),
+ pytest.param(
+ {"accept_encoding": True, "user_agent": "banana"},
+ {"accept-encoding": "gzip,deflate,br,zstd", "user-agent": "banana"},
+ marks=[onlyBrotli(), onlyZstd()], # type: ignore[list-item]
+ ),
pytest.param(
{"accept_encoding": True, "user_agent": "banana"},
{"accept-encoding": "gzip,deflate,br", "user-agent": "banana"},
- marks=onlyBrotli(), # type: ignore[arg-type]
+ marks=[onlyBrotli(), notZstd()], # type: ignore[list-item]
+ ),
+ pytest.param(
+ {"accept_encoding": True, "user_agent": "banana"},
+ {"accept-encoding": "gzip,deflate,zstd", "user-agent": "banana"},
+ marks=[notBrotli(), onlyZstd()], # type: ignore[list-item]
),
pytest.param(
{"accept_encoding": True, "user_agent": "banana"},
{"accept-encoding": "gzip,deflate", "user-agent": "banana"},
- marks=notBrotli(), # type: ignore[arg-type]
+ marks=[notBrotli(), notZstd()], # type: ignore[list-item]
),
({"user_agent": "banana"}, {"user-agent": "banana"}),
({"keep_alive": True}, {"connection": "keep-alive"}),
| [v2] Add support for Content-Encoding: zstd
See: https://tools.ietf.org/html/rfc8478
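
A rough sketch of what such a decoder could look like, modeled on the existing `BrotliDecoder` and assuming the `zstandard` package; the class name and placement are illustrative, not a final design:

```python
# Sketch only: incremental decoding with the `zstandard` package.
import zstandard as zstd


class ZstdDecoder:
    def __init__(self) -> None:
        # decompressobj() gives stream-style, incremental decoding, which
        # fits urllib3's chunked read() path.
        self._obj = zstd.ZstdDecompressor().decompressobj()

    def decompress(self, data: bytes) -> bytes:
        if not data:
            return b""
        return self._obj.decompress(data)

    def flush(self) -> bytes:
        return self._obj.flush()


# Round-trip demonstration:
decoder = ZstdDecoder()
assert decoder.decompress(zstd.compress(b"foo")) + decoder.flush() == b"foo"
```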
### Minimum requirements
:moneybag: **You can get paid to complete this issue! [Please read the docs for more information](https://urllib3.readthedocs.io/en/latest/contributing.html#getting-paid-for-your-contributions).**
- [ ] Evaluate the zstandard packages that are available for Python: which of them make sense for us to support? One, or multiple?
- [ ] Add support for a `ZstandardDecoder`, but don't enable `zstd` in `Accept-Encoding` by default.
- [ ] Add documentation for the feature
- [ ] Add test cases for the feature, see `BrotliDecoder` and `Content-Encoding: brotli` tests for inspiration.
| 2022-06-08T12:17:15Z | [] | [] |
|
urllib3/urllib3 | 2,638 | urllib3__urllib3-2638 | [
"2637"
] | fa3bed83eb2680de9e50d4173da1bfa93774f976 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -607,13 +607,15 @@ def _ssl_wrap_socket_and_match_hostname(
):
context.load_default_certs()
- # Our upstream implementation of ssl.match_hostname()
- # only applies this normalization to IP addresses so it doesn't
- # match DNS SANs so we do the same thing!
+ # Ensure that IPv6 addresses are in the proper format and don't have a
+ # scope ID. Python's SSL module fails to recognize scoped IPv6 addresses
+ # and interprets them as DNS hostnames.
if server_hostname is not None:
- stripped_hostname = server_hostname.strip("[]")
- if is_ipaddress(stripped_hostname):
- server_hostname = stripped_hostname
+ normalized = server_hostname.strip("[]")
+ if "%" in normalized:
+ normalized = normalized[: normalized.rfind("%")]
+ if is_ipaddress(normalized):
+ server_hostname = normalized
ssl_sock = ssl_wrap_socket(
sock=sock,
diff --git a/src/urllib3/util/ssl_match_hostname.py b/src/urllib3/util/ssl_match_hostname.py
--- a/src/urllib3/util/ssl_match_hostname.py
+++ b/src/urllib3/util/ssl_match_hostname.py
@@ -78,13 +78,16 @@ def _dnsname_match(
def _ipaddress_match(ipname: str, host_ip: Union[IPv4Address, IPv6Address]) -> bool:
"""Exact matching of IP addresses.
- RFC 6125 explicitly doesn't define an algorithm for this
- (section 1.7.2 - "Out of Scope").
+ RFC 9110 section 4.3.5: "A reference identity of IP-ID contains the decoded
+ bytes of the IP address. An IP version 4 address is 4 octets, and an IP
+ version 6 address is 16 octets. [...] A reference identity of type IP-ID
+ matches if the address is identical to an iPAddress value of the
+ subjectAltName extension of the certificate."
"""
# OpenSSL may add a trailing newline to a subjectAltName's IP address
# Divergence from upstream: ipaddress can't handle byte str
ip = ipaddress.ip_address(ipname.rstrip())
- return bool(ip == host_ip)
+ return bool(ip.packed == host_ip.packed)
def match_hostname(
@@ -107,7 +110,15 @@ def match_hostname(
)
try:
# Divergence from upstream: ipaddress can't handle byte str
- host_ip = ipaddress.ip_address(hostname)
+ #
+ # The ipaddress module shipped with Python < 3.9 does not support
+ # scoped IPv6 addresses so we unconditionally strip the Zone IDs for
+ # now. Once we drop support for Python 3.9 we can remove this branch.
+ if "%" in hostname:
+ host_ip = ipaddress.ip_address(hostname[: hostname.rfind("%")])
+ else:
+ host_ip = ipaddress.ip_address(hostname)
+
except ValueError:
# Not an IP address (common case)
host_ip = None
| diff --git a/test/test_connection.py b/test/test_connection.py
--- a/test/test_connection.py
+++ b/test/test_connection.py
@@ -134,7 +134,24 @@ def test_match_hostname_ip_address(self) -> None:
)
assert e._peer_cert == cert
- def test_match_hostname_ip_address_ipv6(self) -> None:
+ @pytest.mark.parametrize(
+ ["asserted_hostname", "san_ip"],
+ [
+ ("1:2::3:4", "1:2:0:0:0:0:3:4"),
+ ("1:2:0:0::3:4", "1:2:0:0:0:0:3:4"),
+ ("::0.1.0.2", "0:0:0:0:0:0:1:2"),
+ ("::1%42", "0:0:0:0:0:0:0:1"),
+ ("::2%iface", "0:0:0:0:0:0:0:2"),
+ ],
+ )
+ def test_match_hostname_ip_address_ipv6(
+ self, asserted_hostname: str, san_ip: str
+ ) -> None:
+ """Check that hostname matches follow RFC 9110 rules for IPv6."""
+ cert: "_TYPE_PEER_CERT_RET_DICT" = {"subjectAltName": (("IP Address", san_ip),)}
+ match_hostname(cert, asserted_hostname)
+
+ def test_match_hostname_ip_address_ipv6_doesnt_match(self) -> None:
cert: "_TYPE_PEER_CERT_RET_DICT" = {
"subjectAltName": (("IP Address", "1:2::2:1"),)
}
diff --git a/test/test_poolmanager.py b/test/test_poolmanager.py
--- a/test/test_poolmanager.py
+++ b/test/test_poolmanager.py
@@ -454,3 +454,17 @@ def test_e2e_connect_to_ipv6_scoped(
conn.connect()
assert create_connection.call_args[0][0] == ("a::b%zone", 80)
+
+ @patch("urllib3.connection.ssl_wrap_socket")
+ @patch("urllib3.util.connection.create_connection")
+ def test_e2e_connect_to_ipv6_scoped_tls(
+ self, create_connection: MagicMock, ssl_wrap_socket: MagicMock
+ ) -> None:
+ p = PoolManager()
+ conn_pool = p.connection_from_url(
+ "https://[a::b%zone]", pool_kwargs={"assert_hostname": False}
+ )
+ conn = conn_pool._get_conn()
+ conn.connect()
+
+ assert ssl_wrap_socket.call_args[1]["server_hostname"] == "a::b"
| IPv6 zone ID shouldn't be used during certificate hostname matching
### Subject
Certificate hostname matching includes the IPv6 zone ID when matching the certificate's subjectAltName fields. The zone ID should be omitted from this check: zone IDs are machine-specific, so certificates cannot be expected to include them.
Curl implements the correct behavior:
```
$ curl -vvv -o /dev/null "https://[2606:4700:4700::1111%32]"
...
* Server certificate:
* subject: C=US; ST=California; L=San Francisco; O=Cloudflare, Inc.; CN=cloudflare-dns.com
* start date: Oct 25 00:00:00 2021 GMT
* expire date: Oct 25 23:59:59 2022 GMT
* subjectAltName: host "2606:4700:4700::1111" matched cert's IP address! <--- here is the confirmation
* issuer: C=US; O=DigiCert Inc; CN=DigiCert TLS Hybrid ECC SHA384 2020 CA1
* SSL certificate verify ok.
```
### Environment
```python
import platform
import urllib3
print("OS", platform.platform())
print("Python", platform.python_version())
print("urllib3", urllib3.__version__)
```
```
OS Linux-5.4.0-110-generic-x86_64-with-glibc2.27
Python 3.8.6
urllib3 1.26.9
```
### Steps to Reproduce
```python
>>> import urllib3
>>> http = urllib3.PoolManager()
>>> http.request("GET", "https://[2606:4700:4700::1111%32]")
```
### Expected Behavior
Same behavior as connecting to the IPv6 address without a zone ID specified.
```python
>>> http.request("GET", "https://[2606:4700:4700::1111%32]")
<urllib3.response.HTTPResponse object at 0x7fd92f0f2d30>
```
### Actual Behavior
Zone ID is used in the comparison of certificate hostnames.
```python
urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='2606:4700:4700::1111%32', port=443):
Max retries exceeded with url: / (Caused by SSLError(CertificateError("hostname
'2606:4700:4700::1111%32' doesn't match either of 'cloudflare-dns.com', '*.cloudflare-dns.com',
'one.one.one.one', '1.1.1.1', '1.0.0.1', '162.159.36.1', '162.159.46.1',
'2606:4700:4700:0:0:0:0:1111', '2606:4700:4700:0:0:0:0:1001', '2606:4700:4700:0:0:0:0:64',
'2606:4700:4700:0:0:0:0:6400'")))
```
cc @delroth, as you've recently worked near this code, in case you're interested in another related issue.
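
For illustration, the normalization this implies, assuming we simply strip the brackets and any `%zone` suffix before the address is compared against the certificate (the helper below is made up for this example):

```python
import ipaddress


def strip_zone_id(host: str) -> str:
    # Illustrative helper: drop the brackets and any "%zone" suffix so
    # the bare address can be matched against iPAddress SAN entries.
    host = host.strip("[]")
    if "%" in host:
        host = host[: host.rfind("%")]
    return host


for raw in ("[2606:4700:4700::1111%32]", "2606:4700:4700::1111"):
    # Both forms normalize to the same matchable address.
    print(ipaddress.ip_address(strip_zone_id(raw)))
```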
| I actually think the bug is in OpenSSL or CPython, though it could be worked around in urllib3. My reasoning below.
RFC 6125 defines how to compare SANs for TLS, but it explicitly lists IPs as something that's out of scope. I don't know whether another normative document exists which covers how implementations should behave here.
> 1.7.2. Out of Scope
>
> [...]
>
> o Identifiers other than fully qualified DNS domain names.
>
> Some certification authorities issue server certificates based on
> IP addresses, but preliminary evidence indicates that such
> certificates are a very small percentage (less than 1%) of issued
> certificates. Furthermore, IP addresses are not necessarily
> reliable identifiers for application services because of the
> existence of private internets [PRIVATE], host mobility, multiple
> interfaces on a given host, Network Address Translators (NATs)
> resulting in different addresses for a host from different
> locations on the network, the practice of grouping many hosts
> together behind a single IP address, etc. Most fundamentally,
> most users find DNS domain names much easier to work with than IP
> addresses, which is why the domain name system was designed in the
> first place. We prefer to define best practices for the much more
> common use case and not to complicate the rules in this
> specification.
However, there seems to be wide consensus across current implementations for only looking at the serialized IP address when comparing names in the context of TLS. As in, 4 bytes for IPv4, 16 bytes for IPv6. This by definition ignores zone IDs, which are a hint that doesn't get encoded in the serialized form. RFC 6125 actually notes this precedent as part of the LDAP-AUTH standardization in RFC 4513:
> 3.1.3.2. Comparison of IP Addresses
> When the reference identity is an IP address, the identity MUST be
> converted to the "network byte order" octet string representation
> [IP] [IPv6]. For IP Version 4, as specified in RFC 791, the octet
> string will contain exactly four octets. For IP Version 6, as
> specified in RFC 2460, the octet string will contain exactly sixteen
> octets. This octet string is then compared against subjectAltName
> values of type iPAddress. A match occurs if the reference identity
> octet string and value octet strings are identical.
As well as Syslog TLS in RFC 5425:
> Implementations MAY support matching a locally configured IP
> address against an iPAddress stored in the subjectAltName
> extension. In this case, the locally configured IP address is
> converted to an octet string as specified in [PKIX], Section
> 4.2.1.6. A match occurs if this octet string is equal to the
> value of iPAddress in the subjectAltName extension.
OpenSSL actually supports this, by having different methods for providing a DNS server hostname vs. an IP server hostname (one is provided as an ASCII C string, the other as 4/16 bytes). However, Python's SSL module doesn't currently recognize IPv6 with zone IDs as valid IPv6 addresses, and thus doesn't use the IP address specific code path. This is because instead of using `inet_pton` (like curl, and others that handle scoped IPv6 addresses fine), Python uses instead OpenSSL's IP address parsing functions, which cannot handle zone IDs (`a2i_IPADDRESS`).
My opinion here is that OpenSSL should fix their `a2i_IPADDRESS` to actually be able to parse all valid IPv6 addresses (and ignore the zone ID, in this case, similar to `inet_pton`). Alternatively, CPython could use a better IPv6 address parser than OpenSSL's. This would fix the root cause of the problem.
Now, that doesn't immediately help urllib3. We could introduce a workaround and simply strip the zone ID prior to passing it to OpenSSL -- I think it would be harmless and I'm in favor of this. It also turns out that urllib3's `match_hostname` implementation (used for `assert_hostname`) is wrong and compares `ipaddress.IPAddress` objects directly, not their `bytes` encoded form, which would also cause issues with zone IDs.
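To make that last point concrete, a small demonstration (requires Python 3.9+, which can parse scoped addresses): object equality in `ipaddress` takes the zone ID into account, while the serialized octets do not, so certificate matching should compare the packed form.
```python
import ipaddress

scoped = ipaddress.ip_address("fe80::1%eth0")
bare = ipaddress.ip_address("fe80::1")

print(scoped == bare)                # False: zone ID participates in ==
print(scoped.packed == bare.packed)  # True: identical 16 octets
```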
I'll file a bug on the OpenSSL side for now and look at how to properly work around this in urllib3.
x-link: openssl/openssl#18530
@delroth Thanks for digging through all the standards for this, I wasn't able to find a reference myself that mentions the IP address comparison so this is great. Your suggestion on how we can work-around this issue makes sense to me.
Actually RFC 9110 (literally released 4 days ago!) seems to properly define this for HTTP over TLS too:
> 4.3.4. https Certificate Verification
> [...]
> In general, a client MUST verify the service identity using the
> verification process defined in Section 6 of [RFC6125]. The client
> MUST construct a reference identity from the service's host: if the
> host is a literal IP address (Section 4.3.5), the reference identity
> is an IP-ID, otherwise the host is a name and the reference identity
> is a DNS-ID.
> 4.3.5. IP-ID Reference Identity
>
> A server that is identified using an IP address literal in the "host"
> field of an "https" URI has a reference identity of type IP-ID. An
> IP version 4 address uses the "IPv4address" ABNF rule, and an IP
> version 6 address uses the "IP-literal" production with the
> "IPv6address" option; see Section 3.2.2 of [URI]. A reference
> identity of IP-ID contains the decoded bytes of the IP address.
>
> An IP version 4 address is 4 octets, and an IP version 6 address is
> 16 octets. Use of IP-ID is not defined for any other IP version.
> The iPAddress choice in the certificate subjectAltName extension does
> not explicitly include the IP version and so relies on the length of
> the address to distinguish versions; see Section 4.2.1.6 of
> [RFC5280].
>
> A reference identity of type IP-ID matches if the address is
> identical to an iPAddress value of the subjectAltName extension of
> the certificate.
Technically RFC 9110 refers to the URI syntax in RFC 3986, which doesn't include scoped addresses. However, RFC 6874 specifically mentions it updates RFC 3986 in that regard, so I think it's all properly defined. Since the address scope doesn't impact the 16-octet "decoded" form, `https://[1::2%3]` (or rather, `https://[1::2%253]`) should match a certificate for `1::2`. | 2022-06-10T21:28:32Z | [] | [] |
urllib3/urllib3 | 2,649 | urllib3__urllib3-2649 | [
"2648"
] | 0212d8f987406029def5c44d03b6f70211e51b73 | diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -116,7 +116,12 @@ def downstream_requests(session: nox.Session) -> None:
session.cd(tmp_dir)
git_clone(session, "https://github.com/psf/requests")
session.chdir("requests")
- session.run("git", "apply", f"{root}/ci/requests.patch", external=True)
+ session.run(
+ "git", "apply", f"{root}/ci/0003-requests-removed-warnings.patch", external=True
+ )
+ session.run(
+ "git", "apply", f"{root}/ci/0004-requests-chunked-requests.patch", external=True
+ )
session.run("git", "rev-parse", "HEAD", external=True)
session.install(".[socks]", silent=False)
session.install("-r", "requirements-dev.txt", silent=False)
diff --git a/src/urllib3/_request_methods.py b/src/urllib3/_request_methods.py
--- a/src/urllib3/_request_methods.py
+++ b/src/urllib3/_request_methods.py
@@ -85,8 +85,6 @@ def request(
"""
method = method.upper()
- urlopen_kw["request_url"] = url
-
if json is not None and body is not None:
raise TypeError(
"request got values for both 'body' and 'json' parameters which are mutually exclusive"
diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -6,6 +6,7 @@
import warnings
from http.client import HTTPConnection as _HTTPConnection
from http.client import HTTPException as HTTPException # noqa: F401
+from http.client import ResponseNotReady
from socket import timeout as SocketTimeout
from typing import (
IO,
@@ -24,9 +25,12 @@
if TYPE_CHECKING:
from typing_extensions import Literal
+ from .response import HTTPResponse
from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT
from .util.ssltransport import SSLTransport
+from ._collections import HTTPHeaderDict
+from .util.response import assert_header_parsing
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout
from .util.util import to_str
@@ -44,6 +48,7 @@ class BaseSSLError(BaseException): # type: ignore[no-redef]
from ._version import __version__
from .exceptions import (
ConnectTimeoutError,
+ HeaderParsingError,
NameResolutionError,
NewConnectionError,
ProxyError,
@@ -77,7 +82,6 @@ class BaseSSLError(BaseException): # type: ignore[no-redef]
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
-
_TYPE_BODY = Union[bytes, IO[Any], Iterable[bytes], str]
@@ -88,6 +92,16 @@ class ProxyConfig(NamedTuple):
assert_fingerprint: Optional[str]
+class _ResponseOptions(NamedTuple):
+ # TODO: Remove this in favor of a better
+ # HTTP request/response lifecycle tracking.
+ request_method: str
+ request_url: str
+ preload_content: bool
+ decode_content: bool
+ enforce_content_length: bool
+
+
class HTTPConnection(_HTTPConnection):
"""
Based on :class:`http.client.HTTPConnection` but provides an extra constructor
@@ -134,6 +148,7 @@ class HTTPConnection(_HTTPConnection):
_tunnel_host: Optional[str]
_tunnel: Callable[["HTTPConnection"], None]
_connecting_to_proxy: bool
+ _response_options: Optional[_ResponseOptions]
def __init__(
self,
@@ -166,6 +181,7 @@ def __init__(
)
self._connecting_to_proxy = False
+ self._response_options = None
# https://github.com/python/mypy/issues/4125
# Mypy treats this as LSP violation, which is considered a bug.
@@ -292,8 +308,27 @@ def request( # type: ignore[override]
body: Optional[_TYPE_BODY] = None,
headers: Optional[Mapping[str, str]] = None,
chunked: bool = False,
+ preload_content: bool = True,
+ decode_content: bool = True,
+ enforce_content_length: bool = True,
) -> None:
+ # Store these values to be fed into the HTTPResponse
+ # object later. TODO: Remove this in favor of a real
+ # HTTP lifecycle mechanism.
+
+ # We have to store these before we call .request()
+ # because sometimes we can still salvage a response
+ # off the wire even if we aren't able to completely
+ # send the request body.
+ self._response_options = _ResponseOptions(
+ request_method=method,
+ request_url=url,
+ preload_content=preload_content,
+ decode_content=decode_content,
+ enforce_content_length=enforce_content_length,
+ )
+
if headers is None:
headers = {}
header_keys = frozenset(to_str(k.lower()) for k in headers)
@@ -371,6 +406,57 @@ def request_chunked(
"""
self.request(method, url, body=body, headers=headers, chunked=True)
+ def getresponse( # type: ignore[override]
+ self,
+ ) -> "HTTPResponse":
+ """
+ Get the response from the server.
+
+ If the HTTPConnection is in the correct state, returns an instance of HTTPResponse or of whatever object is returned by the response_class variable.
+
+        If a request has not been sent or if a previous response has not been handled, ResponseNotReady is raised. If the HTTP response indicates that the connection should be closed, then it will be closed before the response is returned. When the connection is closed, the underlying socket is closed.
+ """
+ # Raise the same error as http.client.HTTPConnection
+ if self._response_options is None:
+ raise ResponseNotReady()
+
+ # Reset this attribute for being used again.
+ resp_options = self._response_options
+ self._response_options = None
+
+ # This is needed here to avoid circular import errors
+ from .response import HTTPResponse
+
+ # Get the response from http.client.HTTPConnection
+ httplib_response = super().getresponse()
+
+ try:
+ assert_header_parsing(httplib_response.msg)
+ except (HeaderParsingError, TypeError) as hpe:
+ log.warning(
+ "Failed to parse headers (url=%s): %s",
+ _url_from_connection(self, resp_options.request_url),
+ hpe,
+ exc_info=True,
+ )
+
+ headers = HTTPHeaderDict(httplib_response.msg.items())
+
+ response = HTTPResponse(
+ body=httplib_response,
+ headers=headers,
+ status=httplib_response.status,
+ version=httplib_response.version,
+ reason=httplib_response.reason,
+ preload_content=resp_options.preload_content,
+ decode_content=resp_options.decode_content,
+ original_response=httplib_response,
+ enforce_content_length=resp_options.enforce_content_length,
+ request_method=resp_options.request_method,
+ request_url=resp_options.request_url,
+ )
+ return response
+
class HTTPSConnection(HTTPConnection):
"""
@@ -748,3 +834,13 @@ class DummyConnection:
VerifiedHTTPSConnection = HTTPSConnection
+
+
+def _url_from_connection(
+ conn: Union[HTTPConnection, HTTPSConnection], path: Optional[str] = None
+) -> str:
+ """Returns the URL from a given connection. This is mainly used for testing and logging."""
+
+ scheme = "https" if isinstance(conn, HTTPSConnection) else "http"
+
+ return Url(scheme=scheme, host=conn.host, port=conn.port, path=path).url
diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -4,7 +4,6 @@
import sys
import warnings
import weakref
-from http.client import HTTPResponse as _HttplibHTTPResponse
from socket import timeout as SocketTimeout
from types import TracebackType
from typing import TYPE_CHECKING, Any, Mapping, Optional, Type, TypeVar, Union, overload
@@ -27,7 +26,6 @@
ClosedPoolError,
EmptyPoolError,
FullPoolError,
- HeaderParsingError,
HostChangedError,
InsecureRequestWarning,
LocationValueError,
@@ -39,11 +37,10 @@
SSLError,
TimeoutError,
)
-from .response import BaseHTTPResponse, HTTPResponse
+from .response import HTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.request import _TYPE_BODY_POSITION, set_file_position
-from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout
@@ -384,10 +381,16 @@ def _make_request(
conn: HTTPConnection,
method: str,
url: str,
+ body: Optional[_TYPE_BODY] = None,
+ headers: Optional[Mapping[str, str]] = None,
+ retries: Optional[Retry] = None,
timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
chunked: bool = False,
- **httplib_request_kw: Any,
- ) -> _HttplibHTTPResponse:
+ response_conn: Optional[HTTPConnection] = None,
+ preload_content: bool = True,
+ decode_content: bool = True,
+ enforce_content_length: bool = True,
+ ) -> HTTPResponse:
"""
Perform a request on a given urllib connection object taken from our
pool.
@@ -395,12 +398,61 @@ def _make_request(
:param conn:
a connection from one of our connection pools
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+ :param url:
+ The URL to perform the request on.
+
+ :param body:
+ Data to send in the request body, either :class:`str`, :class:`bytes`,
+ an iterable of :class:`str`/:class:`bytes`, or a file-like object.
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc. If None, pool headers are used. If provided,
+ these headers completely replace any pool-specific headers.
+
+ :param retries:
+ Configure the number of retries to allow before raising a
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
+
+ Pass ``None`` to retry until you receive a response. Pass a
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
+ over different types of retries.
+ Pass an integer number to retry connection errors that many times,
+ but no other types of errors. Pass zero to never retry.
+
+ If ``False``, then retries are disabled and any exception is raised
+ immediately. Also, instead of raising a MaxRetryError on redirects,
+ the redirect response will be returned.
+
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
+
:param timeout:
- Socket timeout in seconds for the request. This can be a
- float or integer, which will set the same timeout value for
- the socket connect and the socket read, or an instance of
- :class:`urllib3.util.Timeout`, which gives you more fine-grained
- control over your timeouts.
+ If specified, overrides the default timeout for this one
+ request. It may be a float (in seconds) or an instance of
+ :class:`urllib3.util.Timeout`.
+
+ :param chunked:
+ If True, urllib3 will send the body using chunked transfer
+ encoding. Otherwise, urllib3 will send the body using the standard
+ content-length form. Defaults to False.
+
+ :param response_conn:
+ Set this to ``None`` if you will handle releasing the connection or
+ set the connection to have the response release it.
+
+ :param preload_content:
+ If True, the response's body will be preloaded during construction.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param enforce_content_length:
+ Enforce content length checking. Body returned by server must match
+ value of Content-Length header, if present. Otherwise, raise error.
"""
self.num_requests += 1
@@ -438,10 +490,16 @@ def _make_request(
# conn.request() calls http.client.*.request, not the method in
# urllib3.request. It also calls makefile (recv) on the socket.
try:
- if chunked:
- conn.request_chunked(method, url, **httplib_request_kw)
- else:
- conn.request(method, url, **httplib_request_kw)
+ conn.request(
+ method,
+ url,
+ body=body,
+ headers=headers,
+ chunked=chunked,
+ preload_content=preload_content,
+ decode_content=decode_content,
+ enforce_content_length=enforce_content_length,
+ )
# We are swallowing BrokenPipeError (errno.EPIPE) since the server is
# legitimately able to close the connection after sending a valid response.
@@ -472,11 +530,16 @@ def _make_request(
# Receive the response from the server
try:
- httplib_response = conn.getresponse()
+ response = conn.getresponse()
except (BaseSSLError, OSError) as e:
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
raise
+ # Set properties that are used by the pooling layer.
+ response.retries = retries
+ response._connection = response_conn
+ response._pool = self
+
log.debug(
'%s://%s:%s "%s %s %s" %s %s',
self.scheme,
@@ -486,24 +549,11 @@ def _make_request(
url,
# HTTP version
conn._http_vsn_str, # type: ignore[attr-defined]
- httplib_response.status,
- httplib_response.length,
+ response.status,
+ response.length_remaining,
)
- try:
- assert_header_parsing(httplib_response.msg)
- except (HeaderParsingError, TypeError) as hpe:
- log.warning(
- "Failed to parse headers (url=%s): %s",
- self._absolute_url(url),
- hpe,
- exc_info=True,
- )
-
- return httplib_response
-
- def _absolute_url(self, path: str) -> str:
- return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
+ return response
def close(self) -> None:
"""
@@ -554,7 +604,7 @@ def urlopen( # type: ignore[override]
chunked: bool = False,
body_pos: Optional[_TYPE_BODY_POSITION] = None,
**response_kw: Any,
- ) -> BaseHTTPResponse:
+ ) -> HTTPResponse:
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
@@ -645,7 +695,7 @@ def urlopen( # type: ignore[override]
:param \\**response_kw:
Additional parameters are passed to
- :meth:`urllib3.response.HTTPResponse.from_httplib`
+ :meth:`urllib3.connection.HTTPConnection.getresponse`
"""
parsed_url = parse_url(url)
@@ -728,8 +778,14 @@ def urlopen( # type: ignore[override]
)
raise
- # Make the request on the httplib connection object.
- httplib_response = self._make_request(
+ # If we're going to release the connection in ``finally:``, then
+ # the response doesn't need to know about the connection. Otherwise
+ # it will also try to release it and we'll have a double-release
+ # mess.
+ response_conn = conn if not release_conn else None
+
+ # Make the request on the HTTPConnection object
+ response = self._make_request(
conn,
method,
url,
@@ -737,23 +793,8 @@ def urlopen( # type: ignore[override]
body=body,
headers=headers,
chunked=chunked,
- )
-
- # If we're going to release the connection in ``finally:``, then
- # the response doesn't need to know about the connection. Otherwise
- # it will also try to release it and we'll have a double-release
- # mess.
- response_conn = conn if not release_conn else None
-
- # Pass method to Response for length checking
- response_kw["request_method"] = method
-
- # Import httplib's response into our own wrapper object
- response = self.ResponseCls.from_httplib(
- httplib_response,
- pool=self,
- connection=response_conn,
retries=retries,
+ response_conn=response_conn,
**response_kw,
)
@@ -1126,6 +1167,13 @@ def _normalize_host(host: Optional[str], scheme: Optional[str]) -> Optional[str]
return host
+def _url_from_pool(
+ pool: Union[HTTPConnectionPool, HTTPSConnectionPool], path: Optional[str] = None
+) -> str:
+ """Returns the URL from a given connection pool. This is mainly used for testing and logging."""
+ return Url(scheme=pool.scheme, host=pool.host, port=pool.port, path=path).url
+
+
def _close_pool_connections(pool: "queue.LifoQueue[Any]") -> None:
"""Drains a queue of connections and closes each one."""
try:
diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py
--- a/src/urllib3/poolmanager.py
+++ b/src/urllib3/poolmanager.py
@@ -468,7 +468,7 @@ def urlopen( # type: ignore[override]
kw["headers"] = new_headers
try:
- retries = retries.increment(method, url, response=response, _pool=conn) # type: ignore[arg-type]
+ retries = retries.increment(method, url, response=response, _pool=conn)
except MaxRetryError:
if retries.raise_on_redirect:
response.drain_conn()
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -256,7 +256,7 @@ def __init__(
self.version = version
self.reason = reason
self.decode_content = decode_content
- self.request_url: Optional[str]
+ self._request_url: Optional[str] = request_url
self.retries = retries
self.chunked = False
@@ -301,6 +301,10 @@ def json(self) -> Any:
def url(self) -> Optional[str]:
raise NotImplementedError()
+ @url.setter
+ def url(self, url: Optional[str]) -> None:
+ raise NotImplementedError()
+
@property
def closed(self) -> bool:
raise NotImplementedError()
@@ -309,6 +313,17 @@ def closed(self) -> bool:
def connection(self) -> Optional[HTTPConnection]:
raise NotImplementedError()
+ @property
+ def retries(self) -> Optional[Retry]:
+ return self._retries
+
+ @retries.setter
+ def retries(self, retries: Optional[Retry]) -> None:
+ # Override the request_url if retries has a redirect location.
+ if retries is not None and retries.history:
+ self.url = retries.history[-1].redirect_location
+ self._retries = retries
+
def stream(
self, amt: Optional[int] = 2**16, decode_content: Optional[bool] = None
) -> Iterator[bytes]:
@@ -487,10 +502,6 @@ def __init__(
self._original_response = original_response
self._fp_bytes_read = 0
self.msg = msg
- if self.retries is not None and self.retries.history:
- self._request_url = self.retries.history[-1].redirect_location
- else:
- self._request_url = request_url
if body and isinstance(body, (str, bytes)):
self._body = body
@@ -820,33 +831,6 @@ def stream(
if data:
yield data
- @classmethod
- def from_httplib(
- ResponseCls: Type["HTTPResponse"], r: _HttplibHTTPResponse, **response_kw: Any
- ) -> "HTTPResponse":
- """
- Given an :class:`http.client.HTTPResponse` instance ``r``, return a
- corresponding :class:`urllib3.response.HTTPResponse` object.
-
- Remaining parameters are passed to the HTTPResponse constructor, along
- with ``original_response=r``.
- """
- headers = r.msg
-
- if not isinstance(headers, HTTPHeaderDict):
- headers = HTTPHeaderDict(headers.items()) # type: ignore[assignment]
-
- resp = ResponseCls(
- body=r,
- headers=headers, # type: ignore[arg-type]
- status=r.status,
- version=r.version,
- reason=r.reason,
- original_response=r,
- **response_kw,
- )
- return resp
-
# Overrides from io.IOBase
def close(self) -> None:
if not self.closed and self._fp:
| diff --git a/test/test_connection.py b/test/test_connection.py
--- a/test/test_connection.py
+++ b/test/test_connection.py
@@ -1,6 +1,7 @@
import datetime
import socket
import typing
+from http.client import ResponseNotReady
from unittest import mock
import pytest
@@ -8,8 +9,10 @@
from urllib3.connection import ( # type: ignore[attr-defined]
RECENT_DATE,
CertificateError,
+ HTTPConnection,
HTTPSConnection,
_match_hostname,
+ _url_from_connection,
_wrap_proxy_error,
)
from urllib3.exceptions import HTTPError, ProxyError
@@ -217,3 +220,16 @@ def test_wrap_proxy_error(self, proxy_scheme: str, err_part: str) -> None:
new_err = _wrap_proxy_error(HTTPError("unknown protocol"), proxy_scheme)
assert isinstance(new_err, ProxyError) is True
assert err_part in new_err.args[0]
+
+ def test_url_from_pool(self) -> None:
+ conn = HTTPConnection("google.com", port=80)
+
+ path = "path?query=foo"
+ assert f"http://google.com:80/{path}" == _url_from_connection(conn, path)
+
+    def test_getresponse_requires_responseoptions(self) -> None:
+ conn = HTTPConnection("google.com", port=80)
+
+ # Should error if a request has not been sent
+ with pytest.raises(ResponseNotReady):
+ conn.getresponse()
diff --git a/test/test_connectionpool.py b/test/test_connectionpool.py
--- a/test/test_connectionpool.py
+++ b/test/test_connectionpool.py
@@ -11,10 +11,12 @@
import pytest
from dummyserver.server import DEFAULT_CA
+from urllib3 import Retry
from urllib3.connection import HTTPConnection
from urllib3.connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
+ _url_from_pool,
connection_from_url,
)
from urllib3.exceptions import (
@@ -449,11 +451,10 @@ def test_contextmanager(self) -> None:
assert old_pool_queue is not None
old_pool_queue.get(block=False)
- def test_absolute_url(self) -> None:
- with connection_from_url("http://google.com:80") as c:
- assert "http://google.com:80/path?query=foo" == c._absolute_url(
- "path?query=foo"
- )
+ def test_url_from_pool(self) -> None:
+ with connection_from_url("http://google.com:80") as pool:
+ path = "path?query=foo"
+ assert f"http://google.com:80/{path}" == _url_from_pool(pool, path)
def test_ca_certs_default_cert_required(self) -> None:
with connection_from_url("https://google.com:80", ca_certs=DEFAULT_CA) as pool:
@@ -506,17 +507,45 @@ class _raise_once_make_request_function:
successful response on subsequent calls.
"""
- def __init__(self, ex: Type[BaseException]) -> None:
+ def __init__(
+ self, ex: Type[BaseException], pool: HTTPConnectionPool
+ ) -> None:
super().__init__()
self._ex: Optional[Type[BaseException]] = ex
-
- def __call__(self, *args: Any, **kwargs: Any) -> httplib.HTTPResponse:
+ self._pool = pool
+
+ def __call__(
+ self,
+ conn: HTTPConnection,
+ method: str,
+ url: str,
+ *args: Any,
+ retries: Retry,
+ **kwargs: Any,
+ ) -> HTTPResponse:
if self._ex:
ex, self._ex = self._ex, None
raise ex()
- response = httplib.HTTPResponse(MockSock) # type: ignore[arg-type]
- response.fp = MockChunkedEncodingResponse([b"f", b"o", b"o"]) # type: ignore[assignment]
- response.headers = response.msg = httplib.HTTPMessage()
+ httplib_response = httplib.HTTPResponse(MockSock) # type: ignore[arg-type]
+ httplib_response.fp = MockChunkedEncodingResponse([b"f", b"o", b"o"]) # type: ignore[assignment]
+ httplib_response.headers = httplib_response.msg = httplib.HTTPMessage()
+
+ response_conn: Optional[HTTPConnection] = kwargs.get("response_conn")
+
+ response = HTTPResponse(
+ body=httplib_response,
+ headers=httplib_response.headers, # type: ignore[arg-type]
+ status=httplib_response.status,
+ version=httplib_response.version,
+ reason=httplib_response.reason,
+ original_response=httplib_response,
+ retries=retries,
+ request_method=method,
+ request_url=url,
+ preload_content=False,
+ connection=response_conn,
+ pool=self._pool,
+ )
return response
def _test(exception: Type[BaseException]) -> None:
@@ -525,7 +554,9 @@ def _test(exception: Type[BaseException]) -> None:
# connection is left on the response object, instead of being
# released back into the pool.
with patch.object(
- pool, "_make_request", _raise_once_make_request_function(exception)
+ pool,
+ "_make_request",
+ _raise_once_make_request_function(exception, pool),
):
response = pool.urlopen(
"GET",
@@ -556,23 +587,4 @@ def test_read_timeout_0_does_not_raise_bad_status_line_error(self) -> None:
with patch.object(Timeout, "read_timeout", 0):
timeout = Timeout(1, 1, 1)
with pytest.raises(ReadTimeoutError):
- pool._make_request(conn, "", "", timeout)
-
- def test_custom_http_response_class(self) -> None:
- class CustomHTTPResponse(HTTPResponse):
- pass
-
- class CustomConnectionPool(HTTPConnectionPool):
- ResponseCls = CustomHTTPResponse
-
- def _make_request(self, *args: Any, **kwargs: Any) -> httplib.HTTPResponse:
- httplib_response = httplib.HTTPResponse(MockSock) # type: ignore[arg-type]
- httplib_response.fp = MockChunkedEncodingResponse([b"f", b"o", b"o"]) # type: ignore[assignment]
- httplib_response.headers = httplib_response.msg = httplib.HTTPMessage()
- return httplib_response
-
- with CustomConnectionPool(host="localhost", maxsize=1, block=True) as pool:
- response = pool.request(
- "GET", "/", retries=False, chunked=True, preload_content=False
- )
- assert isinstance(response, CustomHTTPResponse)
+ pool._make_request(conn, "", "", timeout=timeout)
diff --git a/test/with_dummyserver/test_connection.py b/test/with_dummyserver/test_connection.py
new file mode 100644
--- /dev/null
+++ b/test/with_dummyserver/test_connection.py
@@ -0,0 +1,79 @@
+from http.client import ResponseNotReady
+from typing import Generator
+
+import pytest
+
+from dummyserver.testcase import HTTPDummyServerTestCase as server
+from urllib3 import HTTPConnectionPool
+from urllib3.response import HTTPResponse
+
+
[email protected]()
+def pool() -> Generator[HTTPConnectionPool, None, None]:
+ server.setup_class()
+
+ with HTTPConnectionPool(server.host, server.port) as pool:
+ yield pool
+
+ server.teardown_class()
+
+
+def test_returns_urllib3_HTTPResponse(pool: HTTPConnectionPool) -> None:
+ conn = pool._get_conn()
+
+ method = "GET"
+ path = "/"
+
+ conn.request(method, path)
+
+ response: HTTPResponse = conn.getresponse()
+
+ assert isinstance(response, HTTPResponse)
+
+
+def test_does_not_release_conn(pool: HTTPConnectionPool) -> None:
+ conn = pool._get_conn()
+
+ method = "GET"
+ path = "/"
+
+ conn.request(method, path)
+
+ response: HTTPResponse = conn.getresponse()
+
+ response.release_conn()
+ assert pool.pool.qsize() == 0 # type: ignore[union-attr]
+
+
+def test_releases_conn(pool: HTTPConnectionPool) -> None:
+ conn = pool._get_conn()
+
+ method = "GET"
+ path = "/"
+
+ conn.request(method, path)
+
+ response: HTTPResponse = conn.getresponse()
+ # If these variables are set by the pool
+ # then the response can release the connection
+ # back into the pool.
+ response._pool = pool
+ response._connection = conn
+
+ response.release_conn()
+ assert pool.pool.qsize() == 1 # type: ignore[union-attr]
+
+
+def test_double_getresponse(pool: HTTPConnectionPool) -> None:
+ conn = pool._get_conn()
+
+ method = "GET"
+ path = "/"
+
+ conn.request(method, path)
+
+ _: HTTPResponse = conn.getresponse()
+
+    # Calling getresponse() twice should cause an error
+ with pytest.raises(ResponseNotReady):
+ conn.getresponse()
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -40,6 +40,7 @@
from urllib3 import HTTPConnectionPool, HTTPSConnectionPool, ProxyManager, util
from urllib3._collections import HTTPHeaderDict
from urllib3.connection import HTTPConnection, _get_default_user_agent
+from urllib3.connectionpool import _url_from_pool
from urllib3.exceptions import (
MaxRetryError,
ProtocolError,
@@ -1791,7 +1792,7 @@ def _test_broken_header_parsing(
if (
"Failed to parse headers" in record.msg
and isinstance(record.args, tuple)
- and pool._absolute_url("/") == record.args[0]
+ and _url_from_pool(pool, "/") == record.args[0]
):
if (
unparsed_data_check is None
| HTTPConnection should return urllib3.HTTPResponse instead of http.client.HTTPResponse
Part of https://github.com/urllib3/urllib3/issues/1985
Currently our `urllib3.connection.HTTPConnection` classes directly return an `http.client.HTTPResponse`. This ties our APIs directly to `http.client`, which isn't desirable for future additions of new HTTP protocol implementations.
Instead, our `HTTPConnection` APIs should return an instance of `urllib3.response.HTTPResponse` directly, and that response should be forwarded upwards instead of doing the wrapping inside of `HTTPConnectionPool.urlopen`.
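In rough terms, the goal is for code like the following to hold once the change lands (a usage sketch mirroring the intended tests, with an illustrative host):
```python
from urllib3.connection import HTTPConnection
from urllib3.response import HTTPResponse

conn = HTTPConnection("example.com", port=80)
conn.request("GET", "/")

# getresponse() should now hand back urllib3's own response type
# instead of http.client.HTTPResponse.
response = conn.getresponse()
assert isinstance(response, HTTPResponse)

response.release_conn()
conn.close()
```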
#### Minimum requirements
- [x] Research and understand which exceptions can be raised at which stage from `HTTPConnection.urlopen` and `HTTPConnection.getresponse` in both urllib3 and http.client. This research should be posted into this issue.
- [x] Move the wrapping of `http.client.HTTPResponse` from `HTTPConnectionPool` to `HTTPConnection`
- [x] Update type hints for `HTTPConnection`
- [x] Add test cases for `HTTPConnection` directly returning a `urllib3.response.HTTPResponse`.
- [x] Change existing test cases for the new logic.
| @sethmlarson So the bulk of the work is in overriding `http.client.HTTPConnection.getresponse()` inside of `urllib3.connection.HTTPConnection` by adding the said `getresponse()` method. And inside of that method we should wrap the `http.client.HTTPResponse` as a `urllib3.response.HTTPResponse`. And the bulk of that logic already exists inside of `HTTPConnectionPool`?
Just want to make sure I'm on the right path. I think I can implement this even though I'm pretty unfamiliar with the codebases.
@shadycuz It sounds like you're on the right path! | 2022-06-21T17:14:54Z | [] | [] |
urllib3/urllib3 | 2,656 | urllib3__urllib3-2656 | [
"2589",
"2589"
] | 9e0e698278cbc457ee7729f87fb93ab834ad62b1 | diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -1020,7 +1020,7 @@ def _new_conn(self) -> HTTPConnection:
)
if not self.ConnectionCls or self.ConnectionCls is DummyConnection: # type: ignore[comparison-overlap]
- raise SSLError(
+ raise ImportError(
"Can't connect to HTTPS URL because the SSL module is not available."
)
| diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -376,11 +376,10 @@ def test_verified_without_ca_certs(self) -> None:
def test_no_ssl(self) -> None:
with HTTPSConnectionPool(self.host, self.port) as pool:
pool.ConnectionCls = None # type: ignore[assignment]
- with pytest.raises(SSLError):
+ with pytest.raises(ImportError):
pool._new_conn()
- with pytest.raises(MaxRetryError) as cm:
+ with pytest.raises(ImportError):
pool.request("GET", "/", retries=0)
- assert isinstance(cm.value.reason, SSLError)
def test_unverified_ssl(self) -> None:
"""Test that bare HTTPSConnection can connect, make requests"""
| Retry retries on fruitless ssl ImportError
### Subject
Describe the issue here.
### Environment
Describe your environment.
At least, paste here the output of:
```python
import platform
import urllib3
print("OS", platform.platform())
print("Python", platform.python_version())
print("urllib3", urllib3.__version__)
```
### Steps to Reproduce
```
Python 3.10.4 (main, Mar 24 2022, 16:12:56) [GCC 9.4.0] on linux
Type "help", "copyright", "credits" or "license" for more information.
>>> import sys
>>> sys.modules["ssl"] = None
>>> import requests
>>> requests.get("https://google.com")
Traceback (most recent call last):
File "/home/graingert/.virtualenvs/testing310/lib/python3.10/site-packages/urllib3/connectionpool.py", line 692, in urlopen
conn = self._get_conn(timeout=pool_timeout)
File "/home/graingert/.virtualenvs/testing310/lib/python3.10/site-packages/urllib3/connectionpool.py", line 281, in _get_conn
return conn or self._new_conn()
File "/home/graingert/.virtualenvs/testing310/lib/python3.10/site-packages/urllib3/connectionpool.py", line 1009, in _new_conn
raise SSLError(
urllib3.exceptions.SSLError: Can't connect to HTTPS URL because the SSL module is not available.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/graingert/.virtualenvs/testing310/lib/python3.10/site-packages/requests/adapters.py", line 440, in send
resp = conn.urlopen(
File "/home/graingert/.virtualenvs/testing310/lib/python3.10/site-packages/urllib3/connectionpool.py", line 785, in urlopen
retries = retries.increment(
File "/home/graingert/.virtualenvs/testing310/lib/python3.10/site-packages/urllib3/util/retry.py", line 592, in increment
raise MaxRetryError(_pool, url, error or ResponseError(cause))
urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='google.com', port=443): Max retries exceeded with url: / (Caused by SSLError("Can't connect to HTTPS URL because the SSL module is not available."))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/graingert/.virtualenvs/testing310/lib/python3.10/site-packages/requests/api.py", line 75, in get
return request('get', url, params=params, **kwargs)
File "/home/graingert/.virtualenvs/testing310/lib/python3.10/site-packages/requests/api.py", line 61, in request
return session.request(method=method, url=url, **kwargs)
File "/home/graingert/.virtualenvs/testing310/lib/python3.10/site-packages/requests/sessions.py", line 529, in request
resp = self.send(prep, **send_kwargs)
File "/home/graingert/.virtualenvs/testing310/lib/python3.10/site-packages/requests/sessions.py", line 645, in send
r = adapter.send(request, **kwargs)
File "/home/graingert/.virtualenvs/testing310/lib/python3.10/site-packages/requests/adapters.py", line 517, in send
raise SSLError(e, request=request)
requests.exceptions.SSLError: HTTPSConnectionPool(host='google.com', port=443): Max retries exceeded with url: / (Caused by SSLError("Can't connect to HTTPS URL because the SSL module is not available."))
```
### Expected Behavior
only one attempt
### Actual Behavior
^
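For context, a simplified illustration of why the raised type matters here (the patch above switches the raise to `ImportError`): urllib3 treats `SSLError` as retryable, whereas an `ImportError` propagates immediately. The class below is a sketch, not the real pool.
```python
class HTTPSConnectionPoolSketch:
    ConnectionCls = None  # what the pool looks like when ssl is unavailable

    def _new_conn(self):
        if self.ConnectionCls is None:
            # Raising ImportError instead of urllib3's SSLError means the
            # retry machinery never catches it, so a missing ssl module
            # surfaces after a single attempt.
            raise ImportError(
                "Can't connect to HTTPS URL because the SSL module is not available."
            )


pool = HTTPSConnectionPoolSketch()
try:
    pool._new_conn()
except ImportError as e:
    print(e)  # fails fast, with no retries consumed
```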
 | Yep, agreed we should be avoiding this if possible. I think the retry behavior comes into play because we're [raising an SSLError](https://github.com/urllib3/urllib3/blob/main/src/urllib3/connectionpool.py#L1022-L1025) which is generally retryable.
We might consider changing this to a more specific error that inherits from SSLError for backwards compatibility. That would allow us to control [the flow](https://github.com/urllib3/urllib3/blob/main/src/urllib3/connectionpool.py#L764-L800), and bypass retries like we do for the [`EmptyPoolError` case](https://github.com/urllib3/urllib3/blob/main/src/urllib3/connectionpool.py#L758-L762).
In addition, `urllib3.exceptions.SSLError: Can't connect to HTTPS URL because the SSL module is not available.` hides the original error and resulted in a `pip` user not receiving sufficient feedback to resolve their Python installation issue.
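(A minimal sketch of the idea above, using a hypothetical exception name: a dedicated subclass keeps existing `except SSLError` handlers working while letting `urlopen()` skip the retry machinery for this one case.)
```python
from urllib3.exceptions import SSLError


class SSLModuleMissingError(SSLError):  # hypothetical name, not part of urllib3
    """Raised when the interpreter was built without the ssl module."""


# Inside HTTPSConnectionPool.urlopen(), before the generic retry handling,
# this error would be re-raised immediately instead of being retried:
#
#     except SSLModuleMissingError:
#         raise  # retrying cannot make the ssl module importable
```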
| 2022-06-25T07:22:57Z | [] | [] |
urllib3/urllib3 | 2,657 | urllib3__urllib3-2657 | [
"2513"
] | 72446d421c3b4faeaba392a3e2775c41dd25b7eb | diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -2,6 +2,7 @@
import json as _json
import logging
import re
+import sys
import zlib
from contextlib import contextmanager
from http.client import HTTPMessage as _HttplibHTTPMessage
@@ -44,6 +45,7 @@
except (AttributeError, ImportError, ValueError): # Defensive:
zstd = None
+from . import util
from ._collections import HTTPHeaderDict
from .connection import _TYPE_BODY, BaseSSLError, HTTPConnection, HTTPException
from .exceptions import (
@@ -665,6 +667,54 @@ def _error_catcher(self) -> Generator[None, None, None]:
if self._original_response and self._original_response.isclosed():
self.release_conn()
+ def _fp_read(self, amt: Optional[int] = None) -> bytes:
+ """
+ Read a response with the thought that reading the number of bytes
+ larger than can fit in a 32-bit int at a time via SSL in some
+ known cases leads to an overflow error that has to be prevented
+ if `amt` or `self.length_remaining` indicate that a problem may
+ happen.
+
+ The known cases:
+ * 3.8 <= CPython < 3.9.7 because of a bug
+ https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
+ * urllib3 injected with pyOpenSSL-backed SSL-support.
+ * CPython < 3.10 only when `amt` does not fit 32-bit int.
+ """
+ assert self._fp
+ c_int_max = 2**31 - 1
+ if (
+ (
+ (amt and amt > c_int_max)
+ or (self.length_remaining and self.length_remaining > c_int_max)
+ )
+ and not util.IS_SECURETRANSPORT
+ and (util.IS_PYOPENSSL or sys.version_info < (3, 10))
+ ):
+ buffer = io.BytesIO()
+ # Besides `max_chunk_amt` being a maximum chunk size, it
+ # affects memory overhead of reading a response by this
+ # method in CPython.
+ # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
+ # chunk size that does not lead to an overflow error, but
+ # 256 MiB is a compromise.
+ max_chunk_amt = 2**28
+ while amt is None or amt != 0:
+ if amt is not None:
+ chunk_amt = min(amt, max_chunk_amt)
+ amt -= chunk_amt
+ else:
+ chunk_amt = max_chunk_amt
+ data = self._fp.read(chunk_amt)
+ if not data:
+ break
+ buffer.write(data)
+ del data # to reduce peak memory usage by `max_chunk_amt`.
+ return buffer.getvalue()
+ else:
+ # StringIO doesn't like amt=None
+ return self._fp.read(amt) if amt is not None else self._fp.read()
+
def read(
self,
amt: Optional[int] = None,
@@ -702,13 +752,11 @@ def read(
fp_closed = getattr(self._fp, "closed", False)
with self._error_catcher():
+ data = self._fp_read(amt) if not fp_closed else b""
if amt is None:
- # cStringIO doesn't like amt=None
- data = self._fp.read() if not fp_closed else b""
flush_decoder = True
else:
cache_content = False
- data = self._fp.read(amt) if not fp_closed else b""
if (
amt != 0 and not data
): # Platform-specific: Buggy versions of Python.
| diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -8,6 +8,7 @@
import shutil
import socket
import ssl
+import sys
import tempfile
import time
from collections import OrderedDict
@@ -1491,6 +1492,63 @@ def socket_handler(listener: socket.socket) -> None:
pool.request("GET", "/", retries=False, timeout=LONG_TIMEOUT)
assert server_closed.wait(LONG_TIMEOUT), "The socket was not terminated"
+ # SecureTransport can read only small pieces of data at the moment.
+ # https://github.com/urllib3/urllib3/pull/2674
+ @notSecureTransport()
+ @pytest.mark.skipif(
+ os.environ.get("CI") == "true" and sys.implementation.name == "pypy",
+ reason="too slow to run in CI",
+ )
+ @pytest.mark.parametrize(
+ "preload_content,read_amt", [(True, None), (False, None), (False, 2**31)]
+ )
+ def test_requesting_large_resources_via_ssl(
+ self, preload_content: bool, read_amt: Optional[int]
+ ) -> None:
+ """
+ Ensure that it is possible to read 2 GiB or more via an SSL
+ socket.
+ https://github.com/urllib3/urllib3/issues/2513
+ """
+ content_length = 2**31 # (`int` max value in C) + 1.
+ ssl_ready = Event()
+
+ def socket_handler(listener: socket.socket) -> None:
+ sock = listener.accept()[0]
+ ssl_sock = ssl.wrap_socket(
+ sock,
+ server_side=True,
+ keyfile=DEFAULT_CERTS["keyfile"],
+ certfile=DEFAULT_CERTS["certfile"],
+ ca_certs=DEFAULT_CA,
+ )
+ ssl_ready.set()
+
+ while not ssl_sock.recv(65536).endswith(b"\r\n\r\n"):
+ continue
+
+ ssl_sock.send(
+ b"HTTP/1.1 200 OK\r\n"
+ b"Content-Type: text/plain\r\n"
+ b"Content-Length: %d\r\n\r\n" % content_length
+ )
+
+ chunks = 2
+ for i in range(chunks):
+ ssl_sock.sendall(bytes(content_length // chunks))
+
+ ssl_sock.close()
+ sock.close()
+
+ self._start_server(socket_handler)
+ ssl_ready.wait(5)
+ with HTTPSConnectionPool(
+ self.host, self.port, ca_certs=DEFAULT_CA, retries=False
+ ) as pool:
+ response = pool.request("GET", "/", preload_content=preload_content)
+ data = response.data if preload_content else response.read(read_amt)
+ assert len(data) == content_length
+
class TestErrorWrapping(SocketDummyServerTestCase):
def test_bad_statusline(self) -> None:
| OverflowError: signed integer is greater than maximum
Both of the calls below result in an `OverflowError` being raised because the `Content-Length` is larger than 3 GB, and OpenSSL only supports reading (2**31)-1 bytes in a single read call.
```python
url = "https://dailymed-data.nlm.nih.gov/public-release-files/dm_spl_release_human_rx_part1.zip"
resp = http.request("GET", url)
resp.data
resp = http.request("GET", url, preload_content=False)
list(resp.stream(None))
```
```
Traceback (most recent call last):
File "/home/sethmlarson/urllib3/testme.py", line 5, in <module>
resp = http.request("GET", "https://dailymed-data.nlm.nih.gov/public-release-files/dm_spl_release_human_rx_part1.zip")
File "/home/sethmlarson/urllib3/src/urllib3/__init__.py", line 112, in request
return _DEFAULT_POOL.request(
File "/home/sethmlarson/urllib3/src/urllib3/_request_methods.py", line 109, in request
return self.request_encode_url(
File "/home/sethmlarson/urllib3/src/urllib3/_request_methods.py", line 142, in request_encode_url
return self.urlopen(method, url, **extra_kw)
File "/home/sethmlarson/urllib3/src/urllib3/poolmanager.py", line 443, in urlopen
response = conn.urlopen(method, u.request_uri, **kw)
File "/home/sethmlarson/urllib3/src/urllib3/connectionpool.py", line 744, in urlopen
response = self.ResponseCls.from_httplib(
File "/home/sethmlarson/urllib3/src/urllib3/response.py", line 748, in from_httplib
resp = ResponseCls(
File "/home/sethmlarson/urllib3/src/urllib3/response.py", line 467, in __init__
self._body = self.read(decode_content=decode_content)
File "/home/sethmlarson/urllib3/src/urllib3/response.py", line 664, in read
data = self._fp.read() if not fp_closed else b""
File "/home/sethmlarson/.pyenv/versions/3.8.6/lib/python3.8/http/client.py", line 471, in read
s = self._safe_read(self.length)
File "/home/sethmlarson/.pyenv/versions/3.8.6/lib/python3.8/http/client.py", line 612, in _safe_read
data = self.fp.read(amt)
File "/home/sethmlarson/.pyenv/versions/3.8.6/lib/python3.8/socket.py", line 669, in readinto
return self._sock.recv_into(b)
File "/home/sethmlarson/.pyenv/versions/3.8.6/lib/python3.8/ssl.py", line 1241, in recv_into
return self.read(nbytes, buffer)
File "/home/sethmlarson/.pyenv/versions/3.8.6/lib/python3.8/ssl.py", line 1099, in read
return self._sslobj.read(len, buffer)
OverflowError: signed integer is greater than maximum
```
The fix would likely need to go into `urllib3.response.HTTPResponse` to limit the amount of data requested in a single call to `http.client.HTTPResponse.read(amt)`.
Originally reported in https://github.com/psf/requests/issues/5969
### Minimum requirements
:dollar: **You can get paid to complete this issue! [Please read the docs for more information](https://urllib3.readthedocs.io/en/latest/contributing.html#getting-paid-for-your-contributions).**
- [x] Ensure that any call to `urllib3.response.HTTPResponse.read(...)` never results in a call to `.read(n)` with a value greater than `n=(2**31)-1`. Behavior should not change for users; you should buffer bytes into a `bytearray` and then return the stitched-together data when necessary (see the sketch after this list).
- [x] Test cases for streaming chunk sizes, `.read(2**31)`, `.read(None)`, `.read()`, `preload_content=True`
- [x] Merge fix into both `main` and `1.26.x` branches
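(For illustration, a minimal standalone sketch of that buffering approach, independent of urllib3's internals: never request more than `(2**31)-1` bytes from the SSL layer in a single call and stitch the chunks back together afterwards.)
```python
import io


def read_all(fp, amt=None, max_chunk=2**28):
    """Read ``amt`` bytes (or until EOF if ``amt`` is None) in small chunks."""
    # 2**28 (256 MiB) per chunk stays well under the (2**31)-1 OpenSSL limit.
    buffer = io.BytesIO()
    while amt is None or amt > 0:
        chunk = max_chunk if amt is None else min(amt, max_chunk)
        data = fp.read(chunk)
        if not data:  # EOF
            break
        if amt is not None:
            amt -= len(data)
        buffer.write(data)
    return buffer.getvalue()
```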
 | Thanks for bringing this up! Excited to see a solution, we've been hitting this as well. In the meantime, does anyone happen to have any reasonable workarounds?
The best solution is to stream the response data instead of loading all at once. You'll likely want to do this anyways (even with the fix to this issue) as loading 3GB into memory all at once isn't likely to be performant.
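(For example, a sketch that streams the body to disk in 1 MiB chunks, using the URL from the report above:)
```python
import urllib3

http = urllib3.PoolManager()
url = "https://dailymed-data.nlm.nih.gov/public-release-files/dm_spl_release_human_rx_part1.zip"
resp = http.request("GET", url, preload_content=False)
with open("dm_spl_release_human_rx_part1.zip", "wb") as f:
    for chunk in resp.stream(2**20):  # read 1 MiB at a time
        f.write(chunk)
resp.release_conn()
```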
I've looked a bit into the issue and IMHO changing the logic of `HTTPResponse.read()` would be wrong.
First, it's currently possible to call `.read()` with `amt` larger than `2**31` - if you're using HTTP instead of HTTPS. For instance, if you replace the URL in the OP with `url = "http://speed.hetzner.de/10GB.bin"`, things seem to work as intended. And `HTTPResponse.read()` has no way of guessing what the underlying connection supports.
I guess that the issue here is mainly due to the obscure error message. From my (limited) understanding of the respective scopes of urllib3 and requests, I'd say that `urllib3.request('GET', HUGE_FILE)` is user error, unlike `requests.get(HUGE_FILE)` which should just work. Additionally, the behaviour of `.stream(None)` is another source of confusion, since it just wraps the result of `.read()` in a one-element iterator instead of acting like `.stream()` as I would have expected.
When CPython is used, the error should only happen since 3.8.0a4 (https://github.com/python/cpython/issues/80231, https://github.com/python/cpython/commit/d6bf6f2d0c83f0c64ce86e7b9340278627798090), it was fixed in 3.9.7 (https://github.com/python/cpython/issues/87019, https://github.com/python/cpython/commit/153365d864c411f6fb523efa752ccb3497d815ca), the fix was not backported to 3.8.
In the course of https://github.com/python/cpython/issues/87020, [one more regression](https://twitter.com/geofft/status/1417167982665551877) was introduced in 3.10.0b1 (https://github.com/python/cpython/commit/89d1550d14ba689af12eeb726e4ff8ce73cee7e1) and fixed in 3.10.0rc1 (https://github.com/python/cpython/commit/5ec275758dbc307e9838e7038bfc3b5390950ea7). | 2022-06-25T17:00:22Z | [] | [] |
urllib3/urllib3 | 2,669 | urllib3__urllib3-2669 | [
"2242"
] | 73a7ffce6173da62f731b2f529eb7bd9c5979871 | diff --git a/src/urllib3/_collections.py b/src/urllib3/_collections.py
--- a/src/urllib3/_collections.py
+++ b/src/urllib3/_collections.py
@@ -174,14 +174,17 @@ class HTTPHeaderDictItemView(Set[Tuple[str, str]]):
>>> d['X-Header-Name']
'Value1, Value2, Value3'
- However, if we iterate over an HTTPHeaderDict's items, we want to get a
- distinct item for every different value of a header:
+ However, if we iterate over an HTTPHeaderDict's items, we will optionally combine
+ these values based on whether combine=True was called when building up the dictionary
+ >>> d = HTTPHeaderDict({"A": "1", "B": "foo"})
+ >>> d.add("A", "2", combine=True)
+ >>> d.add("B", "bar")
>>> list(d.items())
[
- ('X-Header-Name', 'Value1')
- ('X-Header-Name', 'Value2')
- ('X-Header-Name', 'Value3')
+ ('A', '1, 2'),
+ ('B', 'foo'),
+ ('B', 'bar'),
]
This class conforms to the interface required by the MutableMapping ABC while
@@ -273,6 +276,9 @@ def __contains__(self, key: object) -> bool:
return key.lower() in self._container
return False
+ def setdefault(self, key: str, default: str = "") -> str:
+ return super().setdefault(key, default)
+
def __eq__(self, other: object) -> bool:
maybe_constructable = ensure_can_construct_http_header_dict(other)
if maybe_constructable is None:
@@ -301,14 +307,24 @@ def discard(self, key: str) -> None:
except KeyError:
pass
- def add(self, key: str, val: str) -> None:
+ def add(self, key: str, val: str, *, combine: bool = False) -> None:
"""Adds a (name, value) pair, doesn't overwrite the value if it already
exists.
+ If this is called with combine=True, instead of adding a new header value
+ as a distinct item during iteration, this will instead append the value to
+ any existing header value with a comma. If no existing header value exists
+ for the key, then the value will simply be added, ignoring the combine parameter.
+
>>> headers = HTTPHeaderDict(foo='bar')
>>> headers.add('Foo', 'baz')
>>> headers['foo']
'bar, baz'
+ >>> list(headers.items())
+ [('foo', 'bar'), ('foo', 'baz')]
+ >>> headers.add('foo', 'quz', combine=True)
+ >>> list(headers.items())
+        [('foo', 'bar'), ('foo', 'baz, quz')]
"""
# avoid a bytes/str comparison by decoding before httplib
if isinstance(key, bytes):
@@ -318,7 +334,13 @@ def add(self, key: str, val: str) -> None:
# Keep the common case aka no item present as fast as possible
vals = self._container.setdefault(key_lower, new_vals)
if new_vals is not vals:
- vals.append(val)
+ # if there are values here, then there is at least the initial
+ # key/value pair
+ assert len(vals) >= 2
+ if combine:
+ vals[-1] = vals[-1] + ", " + val
+ else:
+ vals.append(val)
def extend(self, *args: ValidHTTPHeaderSource, **kwargs: str) -> None:
"""Generic import function for any type of header-like object.
diff --git a/src/urllib3/_request_methods.py b/src/urllib3/_request_methods.py
--- a/src/urllib3/_request_methods.py
+++ b/src/urllib3/_request_methods.py
@@ -2,6 +2,7 @@
from typing import Any, Dict, Mapping, Optional, Sequence, Tuple, Union
from urllib.parse import urlencode
+from ._collections import HTTPHeaderDict
from .connection import _TYPE_BODY
from .filepost import _TYPE_FIELDS, encode_multipart_formdata
from .response import BaseHTTPResponse
@@ -188,7 +189,7 @@ def request_encode_body(
if headers is None:
headers = self.headers
- extra_kw: Dict[str, Any] = {"headers": {}}
+ extra_kw: Dict[str, Any] = {"headers": HTTPHeaderDict(headers)}
body: Union[bytes, str]
if fields:
@@ -208,9 +209,8 @@ def request_encode_body(
)
extra_kw["body"] = body
- extra_kw["headers"] = {"Content-Type": content_type}
+ extra_kw["headers"].setdefault("Content-Type", content_type)
- extra_kw["headers"].update(headers)
extra_kw.update(urlopen_kw)
return self.urlopen(method, url, **extra_kw)
diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -73,7 +73,7 @@ class BaseSSLError(BaseException): # type: ignore[no-redef]
# When it comes time to update this value as a part of regular maintenance
# (ie test_recent_date is failing) update it to ~6 months before the current date.
-RECENT_DATE = datetime.date(2020, 7, 1)
+RECENT_DATE = datetime.date(2022, 1, 1)
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
| diff --git a/test/test_collections.py b/test/test_collections.py
--- a/test/test_collections.py
+++ b/test/test_collections.py
@@ -155,6 +155,14 @@ def test_create_from_kwargs(self) -> None:
assert len(h) == 4
assert "ab" in h
+ def test_setdefault(self) -> None:
+ h = HTTPHeaderDict(a="1")
+ assert h.setdefault("A", "3") == "1"
+ assert h.setdefault("b", "2") == "2"
+ assert h.setdefault("c") == ""
+ assert h["c"] == ""
+ assert h["b"] == "2"
+
def test_create_from_dict(self) -> None:
h = HTTPHeaderDict(dict(ab="1", cd="2", ef="3", gh="4"))
assert len(h) == 4
@@ -252,6 +260,34 @@ def test_extend_from_container(self, d: HTTPHeaderDict) -> None:
assert d["e"] == "foofoo"
assert len(d) == 2
+ def test_header_repeat(self, d: HTTPHeaderDict) -> None:
+ d["other-header"] = "hello"
+ d.add("other-header", "world")
+
+ assert list(d.items()) == [
+ ("Cookie", "foo"),
+ ("Cookie", "bar"),
+ ("other-header", "hello"),
+ ("other-header", "world"),
+ ]
+
+ d.add("other-header", "!", combine=True)
+ expected_results = [
+ ("Cookie", "foo"),
+ ("Cookie", "bar"),
+ ("other-header", "hello"),
+ ("other-header", "world, !"),
+ ]
+
+ assert list(d.items()) == expected_results
+        # make sure the values persist over copies
+ assert list(d.copy().items()) == expected_results
+
+ other_dict = HTTPHeaderDict()
+ # we also need for extensions to properly maintain results
+ other_dict.extend(d)
+ assert list(other_dict.items()) == expected_results
+
def test_extend_from_headerdict(self, d: HTTPHeaderDict) -> None:
h = HTTPHeaderDict(Cookie="foo", e="foofoo")
d.extend(h)
diff --git a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py
--- a/test/with_dummyserver/test_poolmanager.py
+++ b/test/with_dummyserver/test_poolmanager.py
@@ -380,6 +380,46 @@ def test_headers_http_header_dict(self) -> None:
["Extra", "extra"],
]
+ def test_headers_http_multi_header_multipart(self) -> None:
+ headers = HTTPHeaderDict()
+ headers.add("Multi", "1")
+ headers.add("Multi", "2")
+ old_headers = headers.copy()
+
+ with PoolManager(headers=headers) as http:
+ r = http.request(
+ "POST",
+ f"{self.base_url}/multi_headers",
+ fields={"k": "v"},
+ multipart_boundary="b",
+ encode_multipart=True,
+ )
+ returned_headers = r.json()["headers"]
+ assert returned_headers[4:] == [
+ ["Multi", "1"],
+ ["Multi", "2"],
+ ["Content-Type", "multipart/form-data; boundary=b"],
+ ]
+ # Assert that the previous headers weren't modified.
+ assert headers == old_headers
+
+ # Set a default value for the Content-Type
+ headers["Content-Type"] = "multipart/form-data; boundary=b; field=value"
+ r = http.request(
+ "POST",
+ f"{self.base_url}/multi_headers",
+ fields={"k": "v"},
+ multipart_boundary="b",
+ encode_multipart=True,
+ )
+ returned_headers = r.json()["headers"]
+ assert returned_headers[4:] == [
+ ["Multi", "1"],
+ ["Multi", "2"],
+ # Uses the set value, not the one that would be generated.
+ ["Content-Type", "multipart/form-data; boundary=b; field=value"],
+ ]
+
def test_body(self) -> None:
with PoolManager() as http:
r = http.request("POST", f"{self.base_url}/echo", body=b"test")
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -1643,6 +1643,75 @@ def socket_handler(listener: socket.socket) -> None:
]
assert expected_response_headers == actual_response_headers
+ @pytest.mark.parametrize(
+ "method_type, body_type",
+ [
+ ("GET", None),
+ ("POST", None),
+ ("POST", "bytes"),
+ ("POST", "bytes-io"),
+ ],
+ )
+ def test_headers_sent_with_add(
+ self, method_type: str, body_type: Optional[str]
+ ) -> None:
+ """
+ Confirm that when adding headers with combine=True that we simply append to the
+ most recent value, rather than create a new header line.
+ """
+ body: Union[None, bytes, io.BytesIO]
+ if body_type is None:
+ body = None
+ elif body_type == "bytes":
+ body = b"my-body"
+ elif body_type == "bytes-io":
+ body = io.BytesIO(b"bytes-io-body")
+ body.seek(0, 0)
+ else:
+            raise ValueError("Unknown body type")
+
+ buffer: bytes = b""
+
+ def socket_handler(listener: socket.socket) -> None:
+ nonlocal buffer
+ sock = listener.accept()[0]
+ sock.settimeout(0)
+
+ start = time.time()
+ while time.time() - start < (LONG_TIMEOUT / 2):
+ try:
+ buffer += sock.recv(65536)
+ except OSError:
+ continue
+
+ sock.sendall(
+ b"HTTP/1.1 200 OK\r\n"
+ b"Server: example.com\r\n"
+ b"Content-Length: 0\r\n\r\n"
+ )
+ sock.close()
+
+ self._start_server(socket_handler)
+
+ headers = HTTPHeaderDict()
+ headers.add("A", "1")
+ headers.add("C", "3")
+ headers.add("B", "2")
+ headers.add("B", "3")
+ headers.add("A", "4", combine=False)
+ headers.add("C", "5", combine=True)
+ headers.add("C", "6")
+
+ with HTTPConnectionPool(self.host, self.port, retries=False) as pool:
+ r = pool.request(
+ method_type,
+ "/",
+ body=body,
+ headers=headers,
+ )
+ assert r.status == 200
+ assert b"A: 1\r\nA: 4\r\nC: 3, 5\r\nC: 6\r\nB: 2\r\nB: 3" in buffer
+
class TestBrokenHeaders(SocketDummyServerTestCase):
def _test_broken_header_parsing(
| Allow using HTTPHeaderDict and multiple header keys in requests
### Context
What are you trying to do?
I am trying to pass repeated headers such as the `Impersonate-Group` header used by the kubernetes API. (https://kubernetes.io/docs/reference/access-authn-authz/authentication/#user-impersonation)
How do you expect to do it?
I would like to be able to call the urllib3 request method and pass the parameter `headers` as an HTTPHeaderDict.
Is it something you currently cannot do?
It is nearly supported. It is broken for requests with request body because of the implementation of `request_encode_body`:
https://github.com/urllib3/urllib3/blob/main/src/urllib3/request.py#L174
```
extra_kw: Dict[str, Any] = {"headers": {}}
```
https://github.com/urllib3/urllib3/blob/main/src/urllib3/request.py#L196
```
extra_kw["headers"].update(headers)
```
These lines force headers to be a plain Python dictionary even when an HTTPHeaderDict is passed. Hacking on the installed library, I was able to get it fully functional by changing the logic to preserve the type (sketched below).
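(A sketch of that change, which is also the approach the patch above takes: build an `HTTPHeaderDict` up front instead of a plain dict. `headers` and `content_type` are the local names from `request_encode_body`.)
```python
from urllib3._collections import HTTPHeaderDict

extra_kw = {"headers": HTTPHeaderDict(headers)}
# ...and later, only set the generated Content-Type if one wasn't provided:
extra_kw["headers"].setdefault("Content-Type", content_type)
```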
Is this related to an existing issue/problem?
Not that I am aware of.
### Alternatives
Can you achieve the same result doing it in an alternative way?
Not that I know of
Is the alternative considerable?
### Duplicate
Has the feature been requested before?
If so, please provide a link to the issue.
Not that I know of.
### Contribution
Would you be willing to submit a PR?
_(Help can be provided if you need assistance submitting a PR)_
Yes
### Minimum requirements
:moneybag: **You can get paid to complete this issue! [Please read the docs for more information](https://urllib3.readthedocs.io/en/latest/contributing.html#getting-paid-for-your-contributions).**
- [x] For all methods (`GET`, `POST`, etc) and all body values (`None`, `b"..."`, `io.BytesIO(...)`) the behavior of headers is consistent.
- [x] Add a new parameter `combine` with a default value of `False` to `HTTPHeaderDict.add` (see the sketch after this list)
- [ ] If the parameter is `False` then headers names and values are added as separate items in the request. (ie `A: 1\r\nA: 2\r\nA: 3`)
- [ ] If parameter is `True` then the value should be combined with a comma+space (ie `A: 1, 2, 3`).
- [x] `HTTPHeaderDict.itermerged` should iterate with all values merged.
- [x] `HTTPHeaderDict.items` should iterate with values as they were added
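(A short sketch of the requested semantics, matching the docstring and tests in the patch above:)
```python
from urllib3._collections import HTTPHeaderDict

headers = HTTPHeaderDict()
headers.add("Impersonate-Group", "developers")  # separate line on the wire
headers.add("Impersonate-Group", "admins")      # another separate line
headers.add("Cache-Control", "public")
headers.add("Cache-Control", "max-age=604800", combine=True)  # folded

list(headers.items())
# [('Impersonate-Group', 'developers'),
#  ('Impersonate-Group', 'admins'),
#  ('Cache-Control', 'public, max-age=604800')]
```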
| +1, I believe this used to be a feature we supported at one point! Specifically, you could pass in a list of tuples which `dict.update(...)` accepted.
If the only thing blocking this is the new type annotations, makes sense to fix that. Let's wait for Seth/Quentin to give guidance on the best way to move forward though. :)
So `HTTPHeaderDict` for requests is an interesting one, because it makes total sense to use but it really wasn't meant to be a public API in v1.x. Users are still able to import it from `urllib3.connection` and `urllib3.response` and use it without knowing it isn't a public API though. But now when you try to pass that `HTTPHeaderDict` as request headers in v1.x it... almost works? And that's an unfortunate thing for us cuz now our users are using something that isn't tested anywhere!
So starting in v2.0 we're making `HTTPHeaderDict` a first-class citizen and actually the recommended way of managing headers for both requests and responses.
Basically all of this is to say we don't support duplicate headers with any released version, sorry! My recommendation is to try using commas to combine the header values. You can also use an `HTTPHeaderDict` to do this for you like so:
```python
header_dict = HTTPHeaderDict()
header_dict.add("Header", "1")
header_dict.add("Header", "2")
headers = dict(header_dict.itermerged())
# headers == {"Header": "1, 2"}
```
(Btw the `main` branch that you linked to is for v2 development and hasn't been released yet, we have a `1.26.x` branch for `v1.x` releases)
(Are we sure this works correctly in the main branch? We didn't add tests when documenting it for requests)
@pquentin Doesn't work in the main branch, just signaled the intent but should probably also create an issue to track it too :)
Going to use this issue as it contains all the discussion, I'll rename and add to the v2 milestone.
Thank you for your attention on this issue.
@sethmlarson I tried using commas in my values, but unfortunately the kubernetes API (and many others, it seems) does not follow RFC 7230 https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.2
"A sender MUST NOT generate multiple header fields with the same field name in a message unless either the entire field value for that header field is defined as a comma-separated list [i.e., #(values)] or the header field is a well-known exception (as noted below)."
So it seems to me the current behavior of urllib3 is technically correct, but unfortunately prevents communicating with some apis.
I've discussed the issue of allowing multiple headers with a few maintainers. There's no question that this is something we want to support, given the real-world use cases that need it.
We currently have two proposals, both handling overwrite, sending multiple headers, and folding. Suppose we want to send these headers:
```
Impersonate-User: [email protected]
Impersonate-Group: developers
Impersonate-Group: admins
Cache-Control: public, max-age=604800
```
It seems to me that neither of those proposals is breaking, so this change does not have to be in 2.0?
## First proposal: `add(fold: bool)`
```python3
headers = HTTPHeaderDict()
headers["Impersonate-User"] = "[email protected]"
headers["Impersonate-User"] = "[email protected]"
headers.add("Cache-Control", "public")
headers.add("Cache-Control", "max-age=604800")
headers.add("Impersonate-Group", "developers", fold=False)
headers.add("Impersonate-Group", "admins", fold=False)
```
## Second proposal: `add(on_duplicate=overwrite|fold|sendmultiple)`
(Using strings in this example but it would probably be an enum instead)
```python3
headers = HTTPHeaderDict()
# we now have two ways to overwrite
headers["Impersonate-User"] = "[email protected]"
headers.add("Impersonate-User", "[email protected]", on_duplicate="overwrite")
headers["Impersonate-User"] = "[email protected]"
# on_duplicate=fold is the default behavior
headers.add("Cache-Control", "public")
headers.add("Cache-Control", "max-age=604800")
headers.add("Impersonate-Group", "developers", on_duplicate="sendmultiple")
headers.add("Impersonate-Group", "admins", on_duplicate="sendmultiple")
```
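(If the enum route were taken, a purely illustrative sketch of what the parameter's type could look like; this is not what was merged:)
```python3
import enum


class OnDuplicate(enum.Enum):
    OVERWRITE = "overwrite"
    FOLD = "fold"
    SEND_MULTIPLE = "sendmultiple"
```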
So I agree that these are both backwards compatible. I do think @sethmlarson had a good point that `__setitem__` already works as `add(header_name, header_value, on_duplicate="overwrite")`, but here's the thing: I think `__setitem__` could be confusing to have to switch to. I agree that one obvious way to do something is great, and I'm also a fan of "Oh shit, I didn't realize I was doing the wrong thing here; it's easier to add a parameter to get the right behaviour and iterate until I find the right output".
Part of me wants a "fourth" option if we were to go with `on_duplicate` as well but I don't know that it matters for `HTTPHeaderDict` because the primary use for that case is as a server, not so much a client. I'll explain it anyway as food for thought.
Imagine that the default for `on_duplicate` is `auto`. That would mean that the subset of headers we know are absolutely safe to send as multiple lines, and that should preferably not be sent as a folded header (`Set-Cookie` is the most common of these), get sent separately, while for other headers `fold` remains the right "automatic" behaviour.
If our header dict were used more widely, I'd be arguing for `auto` but I'm too sleep deprived to think of a client case that's similar that would require us to be smart for the user.
@haikuginger @shazow What are your thoughts on those APIs?
One suggestion I would consider is changing the term "fold" to "combine", since that's the term RFC 7230 uses and I think "fold" is not very beginner-programmer friendly (more of a functional programming term?).
If I was picking terms from scratch, my gut is to go with "replace", "combine", "separate".
If we wanted to go with UPSERT terminology, it's ignore/insert/update/merge. (Worth adding a 4th `ignore` option? What about a 5th `unique` option that throws?)
Other options I can think of that aren't great:
- Could have more `add`-like methods which have different names, such as `add_combined` or something like `append` or `insert`.
- Could have different HTTPHeaderDict implementations with different behaviours. (Is it easy to bring your own implementation right now, or are we forcing people to use ours?)
In the spirit of making progress on this, my vote would be doing either of the current `add` proposals and renaming `fold` to `combine`. :)
Using `combine=False` instead of `fold=False` works for me too. This is the verb that RFC 7230 uses so is consistent there too. Great suggestion, @shazow!
Haha, ended up going down a rabbit hole on this one trying to find an interface that suits the more complicated cases that `Dict` and `List[Tuple[str, str]]` are insufficient for. If I were doing it from scratch (which we can think about for 2.0, because we're breaking a lot of stuff!), I might consider something like this:
```python3
from typing import List, Protocol, Tuple, Union, overload


class SingleValueHeader:
value: str
def try_add_value(self, value: str) -> bool:
return False
    def serialize(self) -> str:  # renamed to match the HttpHeader protocol below
        return self.value
class MultiValueHeader:
values: List[str]
def try_add_value(self, value: str) -> bool:
self.values.append(value)
return True
    def serialize(self) -> str:  # renamed to match the HttpHeader protocol below
        return ','.join(self.values)
class HttpHeader(Protocol):
def try_add_value(self, value: str) -> bool:
"""
Returns True if the value was successfully merged into this header;
returns False if this header cannot accept additional values
"""
def serialize(self) -> str:
"""
Return the string that this header should be serialized to in a request.
"""
class RequestHeaders:
def all_by_name(self, name: str) -> List[HttpHeader]:
"""
Return an iterable of all the `HttpHeader` entities going by the given
name included in this collection of headers
"""
def set_header(self, name: str, header: Union[str, HttpHeader]) -> None:
"""
Ensures there is exactly one header with the provided name, and it has
exactly the single serializable value provided (whether that value is
a single string or a collection of strings that will be concatenated
together when placing the request)
"""
def serializable_kv_pairs(self) -> List[Tuple[str, str]]:
"""
Return a list of string-string pairs that can be safely transmitted
over the wire as part of an HTTP request
"""
@overload # When an HttpHeader object is passed, `mergeable` is disregarded
def add_header(self, name: str, header: HttpHeader) -> None: ...
@overload
def add_header(self, name: str, header: str, mergeable: bool = False) -> None: ...
def add_header(
self,
name: str,
header: Union[str, HttpHeader],
mergeable: bool = False
) -> None:
"""
In addition to any other headers that will be included in this request,
send this header with the value specified; send it in addition to any
headers that bear the same name but will be sent separately.
If `mergeable` is True, the header will be created in such a way that
`add_header_value` may add additional contents to it later on, but only
if the value passed is a plain string; if it's `HttpHeader`, then trust
it to handle its own business when `try_add_value` is called.
"""
def add_header_value(self, name: str, value: str, mergeable: bool = False) -> None:
"""
Ensure that the provided value is represented in the set of headers
that will be serialized for this request.
"""
if mergeable:
# if this value can share a header with other values, get any existing
# headers we might add this value to
potential_existing_headers = self.all_by_name(name)
# iterate over those headers...
for header in potential_existing_headers:
# ...trying to add the value to each in turn...
if header.try_add_value(value):
# and finishing if we did it!
return
# if we didn't find a header we could add this value to, or if it can't
# be merged with others, create a new header that can accommodate it,
# making sure that the new header follows this value's merge rules.
return self.add_header(name, value, mergeable=mergeable)
```
I think the trick here is that for the case where we're adding values, we want to make sure that each value for a given name agrees with other already-added values about whether they can be merged together. That means that for the complex cases, we need to carry around a bit more than just strings.
I tried to implement the API described [in this comment](https://github.com/urllib3/urllib3/issues/2242#issuecomment-1023764816) in PR #2652.
The biggest messiness comes from backwards compatibility for the object, in particular around the `items` method. I ended up with just one toggle to get this to work, but we could go for zero if we are OK with the break on that specific method.
I would appreciate reviews on that PR to see if that makes sense. | 2022-07-06T08:47:29Z | [] | [] |
urllib3/urllib3 | 2,702 | urllib3__urllib3-2702 | [
"2409"
] | ccbb9051f5f4c7078fca0fcd488b91f44dc57ca0 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -84,6 +84,8 @@ class BaseSSLError(BaseException): # type: ignore[no-redef]
class ProxyConfig(NamedTuple):
ssl_context: Optional["ssl.SSLContext"]
use_forwarding_for_https: bool
+ assert_hostname: Union[None, str, "Literal[False]"]
+ assert_fingerprint: Optional[str]
class HTTPConnection(_HTTPConnection):
@@ -541,9 +543,9 @@ def _connect_tls_proxy(self, hostname: str, sock: socket.socket) -> "ssl.SSLSock
ca_cert_data=self.ca_cert_data,
server_hostname=hostname,
ssl_context=ssl_context,
+ assert_hostname=proxy_config.assert_hostname,
+ assert_fingerprint=proxy_config.assert_fingerprint,
# Features that aren't implemented for proxies yet:
- assert_fingerprint=None,
- assert_hostname=None,
cert_file=None,
key_file=None,
key_password=None,
diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py
--- a/src/urllib3/poolmanager.py
+++ b/src/urllib3/poolmanager.py
@@ -514,6 +514,12 @@ class ProxyManager(PoolManager):
private. IP address, target hostname, SNI, and port are always visible
to an HTTPS proxy even when this flag is disabled.
+ :param proxy_assert_hostname:
+ The hostname of the certificate to verify against.
+
+ :param proxy_assert_fingerprint:
+ The fingerprint of the certificate to verify against.
+
Example:
.. code-block:: python
@@ -544,6 +550,8 @@ def __init__(
proxy_headers: Optional[Mapping[str, str]] = None,
proxy_ssl_context: Optional["ssl.SSLContext"] = None,
use_forwarding_for_https: bool = False,
+ proxy_assert_hostname: Union[None, str, "Literal[False]"] = None,
+ proxy_assert_fingerprint: Optional[str] = None,
**connection_pool_kw: Any,
) -> None:
@@ -563,7 +571,12 @@ def __init__(
self.proxy = proxy
self.proxy_headers = proxy_headers or {}
self.proxy_ssl_context = proxy_ssl_context
- self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
+ self.proxy_config = ProxyConfig(
+ proxy_ssl_context,
+ use_forwarding_for_https,
+ proxy_assert_hostname,
+ proxy_assert_fingerprint,
+ )
connection_pool_kw["_proxy"] = self.proxy
connection_pool_kw["_proxy_headers"] = self.proxy_headers
| diff --git a/test/conftest.py b/test/conftest.py
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -3,7 +3,6 @@
import socket
import ssl
import sys
-import threading
from pathlib import Path
from typing import AbstractSet, Any, Dict, Generator, NamedTuple, Optional, Tuple
@@ -13,7 +12,7 @@
from dummyserver.handlers import TestingApp
from dummyserver.proxy import ProxyHandler
-from dummyserver.server import HAS_IPV6, run_tornado_app
+from dummyserver.server import HAS_IPV6, run_loop_in_thread, run_tornado_app
from dummyserver.testcase import HTTPSDummyServerTestCase
from urllib3.util import ssl_
@@ -59,18 +58,13 @@ def run_server_in_thread(
scheme: str, host: str, tmpdir: Path, ca: trustme.CA, server_cert: trustme.LeafCert
) -> Generator[ServerConfig, None, None]:
ca_cert_path = str(tmpdir / "ca.pem")
- server_cert_path = str(tmpdir / "server.pem")
- server_key_path = str(tmpdir / "server.key")
ca.cert_pem.write_to_path(ca_cert_path)
- server_cert.private_key_pem.write_to_path(server_key_path)
- server_cert.cert_chain_pems[0].write_to_path(server_cert_path)
- server_certs = {"keyfile": server_key_path, "certfile": server_cert_path}
+ server_certs = _write_cert_to_dir(server_cert, tmpdir)
io_loop = ioloop.IOLoop.current()
app = web.Application([(r".*", TestingApp)])
server, port = run_tornado_app(app, io_loop, server_certs, scheme, host)
- server_thread = threading.Thread(target=io_loop.start)
- server_thread.start()
+ server_thread = run_loop_in_thread(io_loop)
yield ServerConfig("https", host, port, ca_cert_path)
@@ -105,8 +99,7 @@ def run_server_and_proxy_in_thread(
)
proxy_config = ServerConfig(proxy_scheme, proxy_host, proxy_port, ca_cert_path)
- server_thread = threading.Thread(target=io_loop.start)
- server_thread.start()
+ loop_thread = run_loop_in_thread(io_loop)
yield (proxy_config, server_config)
@@ -114,7 +107,7 @@ def run_server_and_proxy_in_thread(
io_loop.add_callback(proxy_app.stop)
io_loop.add_callback(io_loop.stop)
- server_thread.join()
+ loop_thread.join()
@pytest.fixture(params=["localhost", "127.0.0.1", "::1"])
@@ -162,6 +155,21 @@ def no_san_server_with_different_commmon_name(
yield cfg
[email protected]
+def san_proxy_with_server(
+ loopback_host: str, tmp_path_factory: pytest.TempPathFactory
+) -> Generator[Tuple[ServerConfig, ServerConfig], None, None]:
+ tmpdir = tmp_path_factory.mktemp("certs")
+ ca = trustme.CA()
+ proxy_cert = ca.issue_cert(loopback_host)
+ server_cert = ca.issue_cert("localhost")
+
+ with run_server_and_proxy_in_thread(
+ "https", loopback_host, tmpdir, ca, proxy_cert, server_cert
+ ) as cfg:
+ yield cfg
+
+
@pytest.fixture
def no_san_proxy_with_server(
tmp_path_factory: pytest.TempPathFactory,
diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -799,7 +799,12 @@ def test_connection_requires_http_tunnel_no_proxy(self) -> None:
def test_connection_requires_http_tunnel_http_proxy(self) -> None:
proxy = parse_url("http://proxy:8080")
- proxy_config = ProxyConfig(ssl_context=None, use_forwarding_for_https=False)
+ proxy_config = ProxyConfig(
+ ssl_context=None,
+ use_forwarding_for_https=False,
+ assert_hostname=None,
+ assert_fingerprint=None,
+ )
destination_scheme = "http"
assert not connection_requires_http_tunnel(
proxy, proxy_config, destination_scheme
@@ -810,7 +815,12 @@ def test_connection_requires_http_tunnel_http_proxy(self) -> None:
def test_connection_requires_http_tunnel_https_proxy(self) -> None:
proxy = parse_url("https://proxy:8443")
- proxy_config = ProxyConfig(ssl_context=None, use_forwarding_for_https=False)
+ proxy_config = ProxyConfig(
+ ssl_context=None,
+ use_forwarding_for_https=False,
+ assert_hostname=None,
+ assert_fingerprint=None,
+ )
destination_scheme = "http"
assert not connection_requires_http_tunnel(
proxy, proxy_config, destination_scheme
diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -1,4 +1,8 @@
+import binascii
+import hashlib
+import ipaddress
import os.path
+import pathlib
import shutil
import socket
import ssl
@@ -651,6 +655,93 @@ def test_basic_ipv6_proxy(self) -> None:
class TestHTTPSProxyVerification:
+ @staticmethod
+ def _get_proxy_fingerprint_md5(ca_path: str) -> str:
+ proxy_pem_path = pathlib.Path(ca_path).parent / "proxy.pem"
+ proxy_der = ssl.PEM_cert_to_DER_cert(proxy_pem_path.read_text())
+ proxy_hashed = hashlib.md5(proxy_der).digest()
+ fingerprint = binascii.hexlify(proxy_hashed).decode("ascii")
+ return fingerprint
+
+ @staticmethod
+ def _get_certificate_formatted_proxy_host(host: str) -> str:
+ try:
+ addr = ipaddress.ip_address(host)
+ except ValueError:
+ return host
+
+ if addr.version != 6:
+ return host
+
+ # Transform ipv6 like '::1' to 0:0:0:0:0:0:0:1 via '0000:0000:0000:0000:0000:0000:0000:0001'
+ return addr.exploded.replace("0000", "0").replace("000", "")
+
+ def test_https_proxy_assert_fingerprint_md5(
+ self, no_san_proxy_with_server: Tuple[ServerConfig, ServerConfig]
+ ) -> None:
+ proxy, server = no_san_proxy_with_server
+ proxy_url = f"https://{proxy.host}:{proxy.port}"
+ destination_url = f"https://{server.host}:{server.port}"
+
+ proxy_fingerprint = self._get_proxy_fingerprint_md5(proxy.ca_certs)
+ with proxy_from_url(
+ proxy_url,
+ ca_certs=proxy.ca_certs,
+ proxy_assert_fingerprint=proxy_fingerprint,
+ ) as https:
+ https.request("GET", destination_url)
+
+ def test_https_proxy_assert_fingerprint_md5_non_matching(
+ self, no_san_proxy_with_server: Tuple[ServerConfig, ServerConfig]
+ ) -> None:
+ proxy, server = no_san_proxy_with_server
+ proxy_url = f"https://{proxy.host}:{proxy.port}"
+ destination_url = f"https://{server.host}:{server.port}"
+
+ proxy_fingerprint = self._get_proxy_fingerprint_md5(proxy.ca_certs)
+ new_char = "b" if proxy_fingerprint[5] == "a" else "a"
+ proxy_fingerprint = proxy_fingerprint[:5] + new_char + proxy_fingerprint[6:]
+
+ with proxy_from_url(
+ proxy_url,
+ ca_certs=proxy.ca_certs,
+ proxy_assert_fingerprint=proxy_fingerprint,
+ ) as https:
+ with pytest.raises(MaxRetryError) as e:
+ https.request("GET", destination_url)
+
+ assert "Fingerprints did not match" in str(e)
+
+ def test_https_proxy_assert_hostname(
+ self, san_proxy_with_server: Tuple[ServerConfig, ServerConfig]
+ ) -> None:
+ proxy, server = san_proxy_with_server
+ destination_url = f"https://{server.host}:{server.port}"
+
+ with proxy_from_url(
+ proxy.base_url, ca_certs=proxy.ca_certs, proxy_assert_hostname=proxy.host
+ ) as https:
+ https.request("GET", destination_url)
+
+ def test_https_proxy_assert_hostname_non_matching(
+ self, san_proxy_with_server: Tuple[ServerConfig, ServerConfig]
+ ) -> None:
+ proxy, server = san_proxy_with_server
+ destination_url = f"https://{server.host}:{server.port}"
+
+ proxy_hostname = "example.com"
+ with proxy_from_url(
+ proxy.base_url,
+ ca_certs=proxy.ca_certs,
+ proxy_assert_hostname=proxy_hostname,
+ ) as https:
+ with pytest.raises(MaxRetryError) as e:
+ https.request("GET", destination_url)
+
+ proxy_host = self._get_certificate_formatted_proxy_host(proxy.host)
+ msg = f"hostname \\'{proxy_hostname}\\' doesn\\'t match \\'{proxy_host}\\'"
+ assert msg in str(e)
+
def test_https_proxy_hostname_verification(
self, no_localhost_san_server: ServerConfig
) -> None:
| Add support for assert_proxy_cert_fingerprint and assert_proxy_hostname
For v2, it would be nice to expand the ways we can match an HTTPS proxy certificate. After #2407 is merged we'll only have hostname verification, but it would be ideal to be consistent with what we already support for HTTPS destinations.
### Minimum requirements
:moneybag: **You can get paid to complete this issue! [Please read the docs for more information](https://urllib3.readthedocs.io/en/latest/contributing.html#getting-paid-for-your-contributions).**
* [x] Add new parameter ``assert_proxy_cert_fingerprint``
* [x] Add new parameter ``assert_proxy_hostname`` (usage for both parameters is sketched after this list)
* [x] Add test cases for both parameters with similar rigor to the parameters `assert_cert_fingerprint` and `assert_hostname` tests.
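(For illustration, a sketch of the resulting usage. Note that the parameters ended up being named ``proxy_assert_hostname`` and ``proxy_assert_fingerprint`` in the patch above; the proxy URL, CA path, and fingerprint below are placeholders.)
```python
import urllib3

# Verify the proxy certificate against an explicit hostname...
proxies = urllib3.ProxyManager(
    "https://proxy.example.com:8443",
    ca_certs="/path/to/proxy-ca.pem",
    proxy_assert_hostname="proxy.example.com",
)

# ...or pin the proxy certificate by its fingerprint instead.
proxies = urllib3.ProxyManager(
    "https://proxy.example.com:8443",
    ca_certs="/path/to/proxy-ca.pem",
    proxy_assert_fingerprint="a6:2f:...:d4",  # placeholder fingerprint
)
```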
| Hey there!
I'm Nishanth, and I was hoping I could work on this issue. I have a few doubts that I hope can be clarified:
1. I'd like to get more clarification on what you mean by parameter. Does it mean a function parameter when we initialize a proxy object or is it something else?
2. Are there example tests that I could look at? I did look at the test folder, but wanted to confirm if I'm going in the right direction by looking at the proxymanager's test
The parameters would go on `ProxyManager.__init__` and get put into the `ProxyConfig` object with other proxy HTTPS config parameters.
For finding tests you can search for `assert_hostname` and `assert_cert_fingerprint` in the `test/` folder and see what's already there. | 2022-08-06T17:11:35Z | [] | [] |
urllib3/urllib3 | 2,777 | urllib3__urllib3-2777 | [
"2772"
] | b5d20d9a11e23d8b2c841c88d7ccfeddfbca721f | diff --git a/dummyserver/server.py b/dummyserver/server.py
--- a/dummyserver/server.py
+++ b/dummyserver/server.py
@@ -4,6 +4,11 @@
Dummy server used for unit testing.
"""
+from __future__ import annotations
+
+import asyncio
+import concurrent.futures
+import contextlib
import logging
import os
import socket
@@ -11,8 +16,9 @@
import sys
import threading
import warnings
+from collections.abc import Coroutine, Generator
from datetime import datetime
-from typing import Any, Callable, Dict, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Callable, TypeVar
import tornado.httpserver
import tornado.ioloop
@@ -25,10 +31,15 @@
from urllib3.exceptions import HTTPWarning
from urllib3.util import ALPN_PROTOCOLS, resolve_cert_reqs, resolve_ssl_version
+if TYPE_CHECKING:
+ from typing_extensions import ParamSpec
+
+ P = ParamSpec("P")
+
log = logging.getLogger(__name__)
CERTS_PATH = os.path.join(os.path.dirname(__file__), "certs")
-DEFAULT_CERTS: Dict[str, Any] = {
+DEFAULT_CERTS: dict[str, Any] = {
"certfile": os.path.join(CERTS_PATH, "server.crt"),
"keyfile": os.path.join(CERTS_PATH, "server.key"),
"cert_reqs": ssl.CERT_OPTIONAL,
@@ -106,7 +117,7 @@ def __init__(
self,
socket_handler: Callable[[socket.socket], None],
host: str = "localhost",
- ready_event: Optional[threading.Event] = None,
+ ready_event: threading.Event | None = None,
) -> None:
super().__init__()
self.daemon = True
@@ -144,7 +155,7 @@ def ssl_options_to_context( # type: ignore[no-untyped-def]
certfile=None,
server_side=None,
cert_reqs=None,
- ssl_version: Optional[Union[str, int]] = None,
+ ssl_version: str | int | None = None,
ca_certs=None,
do_handshake_on_connect=None,
suppress_ragged_eofs=None,
@@ -172,20 +183,18 @@ def ssl_options_to_context( # type: ignore[no-untyped-def]
return ctx
-def run_tornado_app( # type: ignore[no-untyped-def]
+def run_tornado_app(
app: tornado.web.Application,
- io_loop: tornado.ioloop.IOLoop,
- certs,
+ certs: dict[str, Any] | None,
scheme: str,
host: str,
-) -> Tuple[tornado.httpserver.HTTPServer, int]:
- assert io_loop == tornado.ioloop.IOLoop.current()
-
+) -> tuple[tornado.httpserver.HTTPServer, int]:
# We can't use fromtimestamp(0) because of CPython issue 29097, so we'll
# just construct the datetime object directly.
app.last_req = datetime(1970, 1, 1) # type: ignore[attr-defined]
if scheme == "https":
+ assert certs is not None
ssl_opts = ssl_options_to_context(**certs)
http_server = tornado.httpserver.HTTPServer(app, ssl_options=ssl_opts)
else:
@@ -197,31 +206,11 @@ def run_tornado_app( # type: ignore[no-untyped-def]
return http_server, port
-def run_loop_in_thread(io_loop: tornado.ioloop.IOLoop) -> threading.Thread:
- t = threading.Thread(target=io_loop.start)
- t.start()
- return t
-
-
-def get_unreachable_address() -> Tuple[str, int]:
+def get_unreachable_address() -> tuple[str, int]:
# reserved as per rfc2606
return ("something.invalid", 54321)
-if __name__ == "__main__":
- # For debugging dummyserver itself - python -m dummyserver.server
- from .handlers import TestingApp
-
- host = "127.0.0.1"
-
- io_loop = tornado.ioloop.IOLoop.current()
- app = tornado.web.Application([(r".*", TestingApp)])
- server, port = run_tornado_app(app, io_loop, None, "http", host)
- server_thread = run_loop_in_thread(io_loop)
-
- print(f"Listening on http://{host}:{port}")
-
-
def encrypt_key_pem(private_key_pem: trustme.Blob, password: bytes) -> trustme.Blob:
private_key = serialization.load_pem_private_key(
private_key_pem.bytes(), password=None, backend=default_backend()
@@ -232,3 +221,77 @@ def encrypt_key_pem(private_key_pem: trustme.Blob, password: bytes) -> trustme.B
serialization.BestAvailableEncryption(password),
)
return trustme.Blob(encrypted_key)
+
+
+R = TypeVar("R")
+
+
+def _run_and_close_tornado(
+ async_fn: Callable[P, Coroutine[Any, Any, R]], *args: P.args, **kwargs: P.kwargs
+) -> R:
+ tornado_loop = None
+
+ async def inner_fn() -> R:
+ nonlocal tornado_loop
+ tornado_loop = tornado.ioloop.IOLoop.current()
+ return await async_fn(*args, **kwargs)
+
+ try:
+ return asyncio.run(inner_fn())
+ finally:
+ tornado_loop.close(all_fds=True) # type: ignore[union-attr]
+
+
[email protected]
+def run_loop_in_thread() -> Generator[tornado.ioloop.IOLoop, None, None]:
+ loop_started: concurrent.futures.Future[
+ tuple[tornado.ioloop.IOLoop, asyncio.Event]
+ ] = concurrent.futures.Future()
+ with concurrent.futures.ThreadPoolExecutor(
+ 1, thread_name_prefix="test IOLoop"
+ ) as tpe:
+
+ async def run() -> None:
+ io_loop = tornado.ioloop.IOLoop.current()
+ stop_event = asyncio.Event()
+ loop_started.set_result((io_loop, stop_event))
+ await stop_event.wait()
+
+ # run asyncio.run in a thread and collect exceptions from *either*
+ # the loop failing to start, or failing to close
+ ran = tpe.submit(_run_and_close_tornado, run) # type: ignore[arg-type]
+ for f in concurrent.futures.as_completed((loop_started, ran)): # type: ignore[misc]
+ if f is loop_started:
+ io_loop, stop_event = loop_started.result()
+ try:
+ yield io_loop
+ finally:
+ io_loop.add_callback(stop_event.set)
+
+ elif f is ran:
+ # if this is the first iteration the loop failed to start
+ # if it's the second iteration the loop has finished or
+ # the loop failed to close and we need to raise the exception
+ ran.result()
+ return
+
+
+def main() -> int:
+ # For debugging dummyserver itself - python -m dummyserver.server
+ from .handlers import TestingApp
+
+ host = "127.0.0.1"
+
+ async def amain() -> int:
+ app = tornado.web.Application([(r".*", TestingApp)])
+ server, port = run_tornado_app(app, None, "http", host)
+
+ print(f"Listening on http://{host}:{port}")
+ await asyncio.Event().wait()
+ return 0
+
+ return asyncio.run(amain())
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/dummyserver/testcase.py b/dummyserver/testcase.py
--- a/dummyserver/testcase.py
+++ b/dummyserver/testcase.py
@@ -1,7 +1,8 @@
+import asyncio
+import contextlib
import socket
import ssl
import threading
-from contextlib import contextmanager
from typing import Any, Callable, ClassVar, Dict, Generator, Iterable, Optional, Union
import pytest
@@ -160,21 +161,25 @@ class HTTPDummyServerTestCase:
server: ClassVar[httpserver.HTTPServer]
port: ClassVar[int]
server_thread: ClassVar[threading.Thread]
+ _stack: ClassVar[contextlib.ExitStack]
@classmethod
def _start_server(cls) -> None:
- cls.io_loop = ioloop.IOLoop.current()
- app = web.Application([(r".*", TestingApp)])
- cls.server, cls.port = run_tornado_app(
- app, cls.io_loop, cls.certs, cls.scheme, cls.host
- )
- cls.server_thread = run_loop_in_thread(cls.io_loop)
+ with contextlib.ExitStack() as stack:
+ io_loop = stack.enter_context(run_loop_in_thread())
+
+ async def run_app() -> None:
+ app = web.Application([(r".*", TestingApp)])
+ cls.server, cls.port = run_tornado_app(
+ app, cls.certs, cls.scheme, cls.host
+ )
+
+ asyncio.run_coroutine_threadsafe(run_app(), io_loop.asyncio_loop).result() # type: ignore[attr-defined]
+ cls._stack = stack.pop_all()
@classmethod
def _stop_server(cls) -> None:
- cls.io_loop.add_callback(cls.server.stop)
- cls.io_loop.add_callback(cls.io_loop.stop)
- cls.server_thread.join()
+ cls._stack.close()
@classmethod
def setup_class(cls) -> None:
@@ -224,44 +229,43 @@ class HTTPDummyProxyTestCase:
bad_ca_path: ClassVar[str] = ""
server_thread: ClassVar[threading.Thread]
+ _stack: ClassVar[contextlib.ExitStack]
@classmethod
def setup_class(cls) -> None:
- cls.io_loop = ioloop.IOLoop.current()
-
- app = web.Application([(r".*", TestingApp)])
- cls.http_server, cls.http_port = run_tornado_app(
- app, cls.io_loop, None, "http", cls.http_host
- )
-
- app = web.Application([(r".*", TestingApp)])
- cls.https_server, cls.https_port = run_tornado_app(
- app, cls.io_loop, cls.https_certs, "https", cls.http_host
- )
-
- app = web.Application([(r".*", ProxyHandler)])
- cls.proxy_server, cls.proxy_port = run_tornado_app(
- app, cls.io_loop, None, "http", cls.proxy_host
- )
-
- upstream_ca_certs = cls.https_certs.get("ca_certs")
- app = web.Application(
- [(r".*", ProxyHandler)], upstream_ca_certs=upstream_ca_certs
- )
- cls.https_proxy_server, cls.https_proxy_port = run_tornado_app(
- app, cls.io_loop, cls.https_certs, "https", cls.proxy_host
- )
-
- cls.server_thread = run_loop_in_thread(cls.io_loop)
+ with contextlib.ExitStack() as stack:
+ io_loop = stack.enter_context(run_loop_in_thread())
+
+ async def run_app() -> None:
+ app = web.Application([(r".*", TestingApp)])
+ cls.http_server, cls.http_port = run_tornado_app(
+ app, None, "http", cls.http_host
+ )
+
+ app = web.Application([(r".*", TestingApp)])
+ cls.https_server, cls.https_port = run_tornado_app(
+ app, cls.https_certs, "https", cls.http_host
+ )
+
+ app = web.Application([(r".*", ProxyHandler)])
+ cls.proxy_server, cls.proxy_port = run_tornado_app(
+ app, None, "http", cls.proxy_host
+ )
+
+ upstream_ca_certs = cls.https_certs.get("ca_certs")
+ app = web.Application(
+ [(r".*", ProxyHandler)], upstream_ca_certs=upstream_ca_certs
+ )
+ cls.https_proxy_server, cls.https_proxy_port = run_tornado_app(
+ app, cls.https_certs, "https", cls.proxy_host
+ )
+
+ asyncio.run_coroutine_threadsafe(run_app(), io_loop.asyncio_loop).result() # type: ignore[attr-defined]
+ cls._stack = stack.pop_all()
@classmethod
def teardown_class(cls) -> None:
- cls.io_loop.add_callback(cls.http_server.stop)
- cls.io_loop.add_callback(cls.https_server.stop)
- cls.io_loop.add_callback(cls.proxy_server.stop)
- cls.io_loop.add_callback(cls.https_proxy_server.stop)
- cls.io_loop.add_callback(cls.io_loop.stop)
- cls.server_thread.join()
+ cls._stack.close()
@pytest.mark.skipif(not HAS_IPV6, reason="IPv6 not available")
@@ -294,7 +298,7 @@ class ConnectionMarker:
MARK_FORMAT = b"$#MARK%04x*!"
@classmethod
- @contextmanager
+ @contextlib.contextmanager
def mark(cls, monkeypatch: pytest.MonkeyPatch) -> Generator[None, None, None]:
"""
Mark connections under in that context.
| diff --git a/test/conftest.py b/test/conftest.py
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -1,14 +1,13 @@
+import asyncio
import contextlib
-import platform
import socket
import ssl
-import sys
from pathlib import Path
from typing import AbstractSet, Any, Dict, Generator, NamedTuple, Optional, Tuple
import pytest
import trustme
-from tornado import ioloop, web
+from tornado import web
from dummyserver.handlers import TestingApp
from dummyserver.proxy import ProxyHandler
@@ -19,15 +18,6 @@
from .tz_stub import stub_timezone_ctx
-# The Python 3.8+ default loop on Windows breaks Tornado
[email protected](scope="session", autouse=True)
-def configure_windows_event_loop() -> None:
- if sys.version_info >= (3, 8) and platform.system() == "Windows":
- import asyncio
-
- asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) # type: ignore[attr-defined]
-
-
class ServerConfig(NamedTuple):
scheme: str
host: str
@@ -61,16 +51,17 @@ def run_server_in_thread(
ca.cert_pem.write_to_path(ca_cert_path)
server_certs = _write_cert_to_dir(server_cert, tmpdir)
- io_loop = ioloop.IOLoop.current()
- app = web.Application([(r".*", TestingApp)])
- server, port = run_tornado_app(app, io_loop, server_certs, scheme, host)
- server_thread = run_loop_in_thread(io_loop)
+ with run_loop_in_thread() as io_loop:
- yield ServerConfig("https", host, port, ca_cert_path)
+ async def run_app() -> int:
+ app = web.Application([(r".*", TestingApp)])
+ server, port = run_tornado_app(app, server_certs, scheme, host)
+ return port
- io_loop.add_callback(server.stop)
- io_loop.add_callback(io_loop.stop)
- server_thread.join()
+ port = asyncio.run_coroutine_threadsafe(
+ run_app(), io_loop.asyncio_loop # type: ignore[attr-defined]
+ ).result()
+ yield ServerConfig("https", host, port, ca_cert_path)
@contextlib.contextmanager
@@ -88,26 +79,26 @@ def run_server_and_proxy_in_thread(
server_certs = _write_cert_to_dir(server_cert, tmpdir)
proxy_certs = _write_cert_to_dir(proxy_cert, tmpdir, "proxy")
- io_loop = ioloop.IOLoop.current()
- app = web.Application([(r".*", TestingApp)])
- server_app, port = run_tornado_app(app, io_loop, server_certs, "https", "localhost")
- server_config = ServerConfig("https", "localhost", port, ca_cert_path)
-
- proxy = web.Application([(r".*", ProxyHandler)])
- proxy_app, proxy_port = run_tornado_app(
- proxy, io_loop, proxy_certs, proxy_scheme, proxy_host
- )
- proxy_config = ServerConfig(proxy_scheme, proxy_host, proxy_port, ca_cert_path)
+ with run_loop_in_thread() as io_loop:
- loop_thread = run_loop_in_thread(io_loop)
+ async def run_app() -> Tuple[ServerConfig, ServerConfig]:
+ app = web.Application([(r".*", TestingApp)])
+ server_app, port = run_tornado_app(app, server_certs, "https", "localhost")
+ server_config = ServerConfig("https", "localhost", port, ca_cert_path)
- yield (proxy_config, server_config)
-
- io_loop.add_callback(server_app.stop)
- io_loop.add_callback(proxy_app.stop)
- io_loop.add_callback(io_loop.stop)
+ proxy = web.Application([(r".*", ProxyHandler)])
+ proxy_app, proxy_port = run_tornado_app(
+ proxy, proxy_certs, proxy_scheme, proxy_host
+ )
+ proxy_config = ServerConfig(
+ proxy_scheme, proxy_host, proxy_port, ca_cert_path
+ )
+ return proxy_config, server_config
- loop_thread.join()
+ proxy_config, server_config = asyncio.run_coroutine_threadsafe(
+ run_app(), io_loop.asyncio_loop # type: ignore[attr-defined]
+ ).result()
+ yield (proxy_config, server_config)
@pytest.fixture(params=["localhost", "127.0.0.1", "::1"])
| Fix "DeprecationWarning: There is no current event loop" in test suite
Starting with Python 3.10, using `asyncio.get_event_loop()` in a synchronous context is [deprecated in favor of `asyncio.run`](https://bugs.python.org/issue39529) and will stop working eventually. This is why we get a warning when calling `tornado.ioloop.IOLoop.current()` in our test suite: it calls `asyncio.get_event_loop()` under the hood.
Tornado has always encouraged users to do what we do: set up the server synchronously, and then start it in a background thread. This was quite cool as we did not have to worry about async at all. To address the Python 3.10 deprecation, [Tornado 6.2](https://www.tornadoweb.org/en/stable/releases/v6.2.0.html) now reluctantly encourages calling `asyncio.run()` and doing the setup in async code. However, just making `run_tornado_app` async won't be enough in our case because our synchronous test code needs to know the port number of the server. And retrieving that value from the background thread is likely to be annoying.
What we can do instead is partially set up the server synchronously (and thus get the port number) and then finalize setup in async code. It does require some synchronization, but I now have a working proof-of-concept that passes even with `-Werror`:
```python3
import asyncio
import threading
import tornado.web
import urllib3
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
class TornadoServerThread(threading.Thread):
def __init__(self):
super().__init__()
app = tornado.web.Application([(r"/", MainHandler)])
self.server = tornado.httpserver.HTTPServer(app)
self.sockets = tornado.netutil.bind_sockets(None, address="localhost")
self.port = self.sockets[0].getsockname()[1]
self.started = threading.Event()
self.stopped = asyncio.Event()
async def main(self):
self._loop = asyncio.get_event_loop()
self.server.add_sockets(self.sockets)
self.started.set()
await self.stopped.wait()
def run(self):
print("run")
asyncio.run(self.main())
def stop(self):
print("stop")
self._loop.call_soon_threadsafe(self.stopped.set)
t = TornadoServerThread()
t.start()
t.started.wait()
print(t.port)
print(urllib3.request("GET", f"http://localhost:{t.port}").data)
t.stop()
```
(I initially tried to use `IOLoop(make_current=False)` as suggested in https://github.com/tornadoweb/tornado/issues/3156 but did not manage to make it work without warnings.)
This issue is about taking this proof-of-concept (or coming up with a better one) and using it in urllib3's test suite. We should then verify that running with Python 3.10 and Tornado 6.2 does not raise any deprecation warning.
| Will need to be something like this instead:
```python
import asyncio
import threading
import tornado.web
import urllib3
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
class TornadoServerThread(threading.Thread):
def __init__(self):
super().__init__()
self.started = threading.Event()
async def main(self):
app = tornado.web.Application([(r"/", MainHandler)])
self.server = tornado.httpserver.HTTPServer(app)
self.sockets = tornado.netutil.bind_sockets(None, address="localhost")
self.port = self.sockets[0].getsockname()[1]
self._loop = asyncio.get_running_loop()
self.server.add_sockets(self.sockets)
self.started.set()
self.stopped = asyncio.Event()
await self.stopped.wait()
def run(self):
print("run")
asyncio.run(self.main())
def stop(self):
print("stop")
self._loop.call_soon_threadsafe(self.stopped.set)
self.join()
```
@graingert Right, putting more code in the main function is likely more elegant. (I would still initialize `self.stopped` in the constructor though.) But why do you say it *needs* to be like this?
`asyncio.Event()` calls get_event_loop prior to 3.10
But that's OK, the warnings only happen with Python 3.10+
But you want the code to work on all python versions
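For illustration, a hedged sketch of what goes wrong on Python 3.9 and earlier if the event is created eagerly in the constructor (on 3.9, `asyncio.Event()` captures the current event loop at construction time; on 3.10+ it binds lazily):
```python
import asyncio
import threading

stopped = asyncio.Event()  # on 3.9 this binds the main thread's loop

def run():
    async def main():
        # the worker's loop awaits a future attached to the *main* loop:
        # RuntimeError: ... got Future ... attached to a different loop
        await stopped.wait()

    asyncio.run(main())  # asyncio.run() creates a fresh loop in this thread

threading.Thread(target=run).start()
```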
Oh, I get it now! On older versions we would have two different loops which will break. Thanks for the help and patience. :+1: | 2022-11-09T12:33:10Z | [] | [] |
urllib3/urllib3 | 2,795 | urllib3__urllib3-2795 | [
"1973"
] | f60a01b5bb73122791d2a562b4bd74af6f9eb8b8 | diff --git a/src/urllib3/_version.py b/src/urllib3/_version.py
--- a/src/urllib3/_version.py
+++ b/src/urllib3/_version.py
@@ -1,2 +1,2 @@
# This file is protected via CODEOWNERS
-__version__ = "2.0.0.dev0"
+__version__ = "2.0.0a1"
diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py
--- a/src/urllib3/poolmanager.py
+++ b/src/urllib3/poolmanager.py
@@ -324,7 +324,7 @@ def connection_from_context(
if "strict" in request_context:
warnings.warn(
"The 'strict' parameter is no longer needed on Python 3+. "
- "This will raise an error in urllib3 v3.0.0.",
+ "This will raise an error in urllib3 v2.1.0.",
DeprecationWarning,
)
request_context.pop("strict")
| diff --git a/test/test_poolmanager.py b/test/test_poolmanager.py
--- a/test/test_poolmanager.py
+++ b/test/test_poolmanager.py
@@ -274,7 +274,7 @@ def test_connection_from_context_strict_param(
msg = (
"The 'strict' parameter is no longer needed on Python 3+. "
- "This will raise an error in urllib3 v3.0.0."
+ "This will raise an error in urllib3 v2.1.0."
)
record = records[0]
assert isinstance(record.message, Warning)
| Write a Migration Guide for v1.26.x -> v2.0.0
Can be a part of the v2.0.0 Roadmap documentation. Would be good to detail the specific steps and causes of all DeprecationWarnings and be URL-linkable so we can point to these docs within the actual warnings.
| This doesn't seem realistic right now, going to close this one until we can get volunteers.
I do think we need something like that, but it can live in the breaking section of the changelog
Always read issue titles closely before closing. Thought this was the docs translation issue.
Useful migration guides that can be used for inspiration:
* https://www.elastic.co/guide/en/elasticsearch/client/python-api/master/migration.html
* https://esrally.readthedocs.io/en/stable/migrate.html | 2022-11-11T04:00:26Z | [] | [] |
urllib3/urllib3 | 2,798 | urllib3__urllib3-2798 | [
"2128"
] | 9763f09d2ca93c2e10150bfddba341b6a54993ad | diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -1,6 +1,7 @@
import os
import shutil
import subprocess
+import sys
import nox
@@ -31,6 +32,12 @@ def tests_impl(
# Print OpenSSL information.
session.run("python", "-m", "OpenSSL.debug")
+ memray_supported = True
+ if sys.implementation.name != "cpython" or sys.version_info < (3, 8):
+ memray_supported = False # pytest-memray requires CPython 3.8+
+ elif sys.platform == "win32":
+ memray_supported = False
+
# Inspired from https://hynek.me/articles/ditch-codecov-python/
# We use parallel mode and then combine in a later CI step
session.run(
@@ -42,6 +49,7 @@ def tests_impl(
"--parallel-mode",
"-m",
"pytest",
+ *("--memray", "--hide-memray-summary") if memray_supported else (),
"-r",
"a",
f"--color={'yes' if 'GITHUB_ACTIONS' in os.environ else 'auto'}",
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -1,3 +1,4 @@
+import collections
import io
import json as _json
import logging
@@ -11,6 +12,7 @@
from typing import (
TYPE_CHECKING,
Any,
+ Deque,
Generator,
Iterator,
List,
@@ -223,6 +225,63 @@ def _get_decoder(mode: str) -> ContentDecoder:
return DeflateDecoder()
+class BytesQueueBuffer:
+ """Memory-efficient bytes buffer
+
+ To return decoded data in read() and still follow the BufferedIOBase API, we need a
+ buffer to always return the correct amount of bytes.
+
+ This buffer should be filled using calls to put()
+
+ Our maximum memory usage is determined by the sum of the size of:
+
+ * self.buffer, which contains the full data
+ * the largest chunk that we will copy in get()
+
+ The worst case scenario is a single chunk, in which case we'll make a full copy of
+ the data inside get().
+ """
+
+ def __init__(self) -> None:
+ self.buffer: Deque[bytes] = collections.deque()
+ self._size: int = 0
+
+ def __len__(self) -> int:
+ return self._size
+
+ def put(self, data: bytes) -> None:
+ self.buffer.append(data)
+ self._size += len(data)
+
+ def get(self, n: int) -> bytes:
+ if not self.buffer:
+ raise RuntimeError("buffer is empty")
+ elif n < 0:
+ raise ValueError("n should be > 0")
+
+ fetched = 0
+ ret = io.BytesIO()
+ while fetched < n:
+ remaining = n - fetched
+ chunk = self.buffer.popleft()
+ chunk_length = len(chunk)
+ if remaining < chunk_length:
+ left_chunk, right_chunk = chunk[:remaining], chunk[remaining:]
+ ret.write(left_chunk)
+ self.buffer.appendleft(right_chunk)
+ self._size -= remaining
+ break
+ else:
+ ret.write(chunk)
+ self._size -= chunk_length
+ fetched += chunk_length
+
+ if not self.buffer:
+ break
+
+ return ret.getvalue()
+
+
class BaseHTTPResponse(io.IOBase):
CONTENT_DECODERS = ["gzip", "deflate"]
if brotli is not None:
@@ -512,6 +571,9 @@ def __init__(
# Determine length of response
self.length_remaining = self._init_length(request_method)
+ # Used to return the correct amount of bytes for partial read()s
+ self._decoded_buffer = BytesQueueBuffer()
+
# If requested, preload the body.
if preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
@@ -720,6 +782,48 @@ def _fp_read(self, amt: Optional[int] = None) -> bytes:
# StringIO doesn't like amt=None
return self._fp.read(amt) if amt is not None else self._fp.read()
+ def _raw_read(
+ self,
+ amt: Optional[int] = None,
+ ) -> bytes:
+ """
+ Reads `amt` of bytes from the socket.
+ """
+ if self._fp is None:
+ return None # type: ignore[return-value]
+
+ fp_closed = getattr(self._fp, "closed", False)
+
+ with self._error_catcher():
+ data = self._fp_read(amt) if not fp_closed else b""
+ if amt is not None and amt != 0 and not data:
+ # Platform-specific: Buggy versions of Python.
+ # Close the connection when no data is returned
+ #
+ # This is redundant to what httplib/http.client _should_
+ # already do. However, versions of python released before
+ # December 15, 2012 (http://bugs.python.org/issue16298) do
+ # not properly close the connection in all cases. There is
+ # no harm in redundantly calling close.
+ self._fp.close()
+ if (
+ self.enforce_content_length
+ and self.length_remaining is not None
+ and self.length_remaining != 0
+ ):
+ # This is an edge case that httplib failed to cover due
+ # to concerns of backward compatibility. We're
+ # addressing it here to make sure IncompleteRead is
+ # raised during streaming, so all calls with incorrect
+ # Content-Length are caught.
+ raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
+
+ if data:
+ self._fp_bytes_read += len(data)
+ if self.length_remaining is not None:
+ self.length_remaining -= len(data)
+ return data
+
def read(
self,
amt: Optional[int] = None,
@@ -750,51 +854,43 @@ def read(
if decode_content is None:
decode_content = self.decode_content
- if self._fp is None:
- return None # type: ignore[return-value]
+ if amt is not None:
+ cache_content = False
- flush_decoder = False
- fp_closed = getattr(self._fp, "closed", False)
+ if len(self._decoded_buffer) >= amt:
+ return self._decoded_buffer.get(amt)
- with self._error_catcher():
- data = self._fp_read(amt) if not fp_closed else b""
- if amt is None:
- flush_decoder = True
- else:
- cache_content = False
- if (
- amt != 0 and not data
- ): # Platform-specific: Buggy versions of Python.
- # Close the connection when no data is returned
- #
- # This is redundant to what httplib/http.client _should_
- # already do. However, versions of python released before
- # December 15, 2012 (http://bugs.python.org/issue16298) do
- # not properly close the connection in all cases. There is
- # no harm in redundantly calling close.
- self._fp.close()
- flush_decoder = True
- if (
- self.enforce_content_length
- and self.length_remaining is not None
- and self.length_remaining != 0
- ):
- # This is an edge case that httplib failed to cover due
- # to concerns of backward compatibility. We're
- # addressing it here to make sure IncompleteRead is
- # raised during streaming, so all calls with incorrect
- # Content-Length are caught.
- raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
+ data = self._raw_read(amt)
- if data:
- self._fp_bytes_read += len(data)
- if self.length_remaining is not None:
- self.length_remaining -= len(data)
+ flush_decoder = False
+ if amt is None:
+ flush_decoder = True
+ elif amt != 0 and not data:
+ flush_decoder = True
- data = self._decode(data, decode_content, flush_decoder)
+ if not data and len(self._decoded_buffer) == 0:
+ return data
+ if amt is None:
+ data = self._decode(data, decode_content, flush_decoder)
if cache_content:
self._body = data
+ else:
+ # do not waste memory on buffer when not decoding
+ if not decode_content:
+ return data
+
+ decoded_data = self._decode(data, decode_content, flush_decoder)
+ self._decoded_buffer.put(decoded_data)
+
+ while len(self._decoded_buffer) < amt and data:
+ # TODO make sure to initially read enough data to get past the headers
+ # For example, the GZ file header takes 10 bytes, we don't want to read
+ # it one byte at a time
+ data = self._raw_read(amt)
+ decoded_data = self._decode(data, decode_content, flush_decoder)
+ self._decoded_buffer.put(decoded_data)
+ data = self._decoded_buffer.get(amt)
return data
| diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -2,6 +2,7 @@
import http.client as httplib
import socket
import ssl
+import sys
import zlib
from base64 import b64decode
from http.client import IncompleteRead as httplib_IncompleteRead
@@ -24,6 +25,7 @@
)
from urllib3.response import ( # type: ignore[attr-defined]
BaseHTTPResponse,
+ BytesQueueBuffer,
HTTPResponse,
brotli,
zstd,
@@ -31,6 +33,56 @@
from urllib3.util.response import is_fp_closed
from urllib3.util.retry import RequestHistory, Retry
+
+class TestBytesQueueBuffer:
+ def test_single_chunk(self) -> None:
+ buffer = BytesQueueBuffer()
+ assert len(buffer) == 0
+ with pytest.raises(RuntimeError, match="buffer is empty"):
+ assert buffer.get(10)
+
+ buffer.put(b"foo")
+ with pytest.raises(ValueError, match="n should be > 0"):
+ buffer.get(-1)
+
+ assert buffer.get(1) == b"f"
+ assert buffer.get(2) == b"oo"
+ with pytest.raises(RuntimeError, match="buffer is empty"):
+ assert buffer.get(10)
+
+ def test_read_too_much(self) -> None:
+ buffer = BytesQueueBuffer()
+ buffer.put(b"foo")
+ assert buffer.get(100) == b"foo"
+
+ def test_multiple_chunks(self) -> None:
+ buffer = BytesQueueBuffer()
+ buffer.put(b"foo")
+ buffer.put(b"bar")
+ buffer.put(b"baz")
+ assert len(buffer) == 9
+
+ assert buffer.get(1) == b"f"
+ assert len(buffer) == 8
+ assert buffer.get(4) == b"ooba"
+ assert len(buffer) == 4
+ assert buffer.get(4) == b"rbaz"
+ assert len(buffer) == 0
+
+ @pytest.mark.skipif(
+ sys.version_info < (3, 8), reason="pytest-memray requires Python 3.8+"
+ )
+ @pytest.mark.limit_memory("12.5 MB") # assert that we're not doubling memory usage
+ def test_memory_usage(self) -> None:
+ # Allocate 10 1MiB chunks
+ buffer = BytesQueueBuffer()
+ for i in range(10):
+ # This allocates 2MiB, putting the max at around 12MiB. Not sure why.
+ buffer.put(bytes(2**20))
+
+ assert len(buffer.get(10 * 2**20)) == 10 * 2**20
+
+
# A known random (i.e, not-too-compressible) payload generated with:
# "".join(random.choice(string.printable) for i in range(512))
# .encode("zlib").encode("base64")
@@ -71,9 +123,19 @@ def test_getheader(self) -> None:
class TestResponse:
def test_cache_content(self) -> None:
r = HTTPResponse(b"foo")
+ assert r._body == b"foo"
assert r.data == b"foo"
assert r._body == b"foo"
+ def test_cache_content_preload_false(self) -> None:
+ fp = BytesIO(b"foo")
+ r = HTTPResponse(fp, preload_content=False)
+
+ assert not r._body
+ assert r.data == b"foo"
+ assert r._body == b"foo"
+ assert r.data == b"foo"
+
def test_default(self) -> None:
r = HTTPResponse()
assert r.data is None
@@ -137,13 +199,7 @@ def test_chunked_decoding_deflate(self) -> None:
fp, headers={"content-encoding": "deflate"}, preload_content=False
)
- assert r.read(3) == b""
- # Buffer in case we need to switch to the raw stream
- assert r._decoder is not None
- assert r._decoder._data is not None # type: ignore[attr-defined]
assert r.read(1) == b"f"
- # Now that we've decoded data, we just stream through the decoder
- assert r._decoder._data is None # type: ignore[attr-defined]
assert r.read(2) == b"oo"
assert r.read() == b""
assert r.read() == b""
@@ -158,11 +214,7 @@ def test_chunked_decoding_deflate2(self) -> None:
fp, headers={"content-encoding": "deflate"}, preload_content=False
)
- assert r.read(1) == b""
assert r.read(1) == b"f"
- # Once we've decoded data, we just stream to the decoder; no buffering
- assert r._decoder is not None
- assert r._decoder._data is None # type: ignore[attr-defined]
assert r.read(2) == b"oo"
assert r.read() == b""
assert r.read() == b""
@@ -177,7 +229,6 @@ def test_chunked_decoding_gzip(self) -> None:
fp, headers={"content-encoding": "gzip"}, preload_content=False
)
- assert r.read(11) == b""
assert r.read(1) == b"f"
assert r.read(2) == b"oo"
assert r.read() == b""
@@ -324,6 +375,23 @@ def test_multi_decoding_gzip_gzip(self) -> None:
assert r.data == b"foo"
+ def test_read_multi_decoding_deflate_deflate(self) -> None:
+ msg = b"foobarbaz" * 42
+ data = zlib.compress(zlib.compress(msg))
+
+ fp = BytesIO(data)
+ r = HTTPResponse(
+ fp, headers={"content-encoding": "deflate, deflate"}, preload_content=False
+ )
+
+ assert r.read(3) == b"foo"
+ assert r.read(3) == b"bar"
+ assert r.read(3) == b"baz"
+ assert r.read(9) == b"foobarbaz"
+ assert r.read(9 * 3) == b"foobarbaz" * 3
+ assert r.read(9 * 37) == b"foobarbaz" * 37
+ assert r.read() == b""
+
def test_body_blob(self) -> None:
resp = HTTPResponse(b"foo")
assert resp.data == b"foo"
@@ -527,8 +595,8 @@ def test_gzipped_streaming(self) -> None:
)
stream = resp.stream(2)
- assert next(stream) == b"f"
- assert next(stream) == b"oo"
+ assert next(stream) == b"fo"
+ assert next(stream) == b"o"
with pytest.raises(StopIteration):
next(stream)
@@ -557,6 +625,7 @@ def test_deflate_streaming_tell_intermediate_point(self) -> None:
# Ensure that ``tell()`` returns the correct number of bytes when
# part-way through streaming compressed content.
NUMBER_OF_READS = 10
+ PART_SIZE = 64
class MockCompressedDataReading(BytesIO):
"""
@@ -585,7 +654,7 @@ def read(self, _: int) -> bytes: # type: ignore[override]
resp = HTTPResponse(
fp, headers={"content-encoding": "deflate"}, preload_content=False
)
- stream = resp.stream()
+ stream = resp.stream(PART_SIZE)
parts_positions = [(part, resp.tell()) for part in stream]
end_of_stream = resp.tell()
@@ -600,12 +669,28 @@ def read(self, _: int) -> bytes: # type: ignore[override]
assert uncompressed_data == payload
# Check that the positions in the stream are correct
- expected = [(i + 1) * payload_part_size for i in range(NUMBER_OF_READS)]
- assert expected == list(positions)
+ # It is difficult to determine programatically what the positions
+ # returned by `tell` will be because the `HTTPResponse.read` method may
+ # call socket `read` a couple of times if it doesn't have enough data
+ # in the buffer or not call socket `read` at all if it has enough. All
+ # this depends on the message, how it was compressed, what is
+ # `PART_SIZE` and `payload_part_size`.
+ # So for simplicity the expected values are hardcoded.
+ expected = (92, 184, 230, 276, 322, 368, 414, 460)
+ assert expected == positions
# Check that the end of the stream is in the correct place
assert len(ZLIB_PAYLOAD) == end_of_stream
+ # Check that all parts have expected length
+ expected_last_part_size = len(uncompressed_data) % PART_SIZE
+ whole_parts = len(uncompressed_data) // PART_SIZE
+ if expected_last_part_size == 0:
+ expected_lengths = [PART_SIZE] * whole_parts
+ else:
+ expected_lengths = [PART_SIZE] * whole_parts + [expected_last_part_size]
+ assert expected_lengths == [len(part) for part in parts]
+
def test_deflate_streaming(self) -> None:
data = zlib.compress(b"foo")
@@ -615,8 +700,8 @@ def test_deflate_streaming(self) -> None:
)
stream = resp.stream(2)
- assert next(stream) == b"f"
- assert next(stream) == b"oo"
+ assert next(stream) == b"fo"
+ assert next(stream) == b"o"
with pytest.raises(StopIteration):
next(stream)
@@ -631,8 +716,8 @@ def test_deflate2_streaming(self) -> None:
)
stream = resp.stream(2)
- assert next(stream) == b"f"
- assert next(stream) == b"oo"
+ assert next(stream) == b"fo"
+ assert next(stream) == b"o"
with pytest.raises(StopIteration):
next(stream)
@@ -644,6 +729,38 @@ def test_empty_stream(self) -> None:
with pytest.raises(StopIteration):
next(stream)
+ @pytest.mark.parametrize(
+ "preload_content, amt",
+ [(True, None), (False, None), (False, 10 * 2**20)],
+ )
+ @pytest.mark.limit_memory("25 MB")
+ def test_buffer_memory_usage_decode_one_chunk(
+ self, preload_content: bool, amt: int
+ ) -> None:
+ content_length = 10 * 2**20 # 10 MiB
+ fp = BytesIO(zlib.compress(bytes(content_length)))
+ resp = HTTPResponse(
+ fp,
+ preload_content=preload_content,
+ headers={"content-encoding": "deflate"},
+ )
+ data = resp.data if preload_content else resp.read(amt)
+ assert len(data) == content_length
+
+ @pytest.mark.parametrize(
+ "preload_content, amt",
+ [(True, None), (False, None), (False, 10 * 2**20)],
+ )
+ @pytest.mark.limit_memory("10.5 MB")
+ def test_buffer_memory_usage_no_decoding(
+ self, preload_content: bool, amt: int
+ ) -> None:
+ content_length = 10 * 2**20 # 10 MiB
+ fp = BytesIO(bytes(content_length))
+ resp = HTTPResponse(fp, preload_content=preload_content, decode_content=False)
+ data = resp.data if preload_content else resp.read(amt)
+ assert len(data) == content_length
+
def test_length_no_header(self) -> None:
fp = BytesIO(b"12345")
resp = HTTPResponse(fp, preload_content=False)
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -1920,15 +1920,8 @@ def socket_handler(listener: socket.socket) -> None:
"GET", url="/", preload_content=False, enforce_content_length=True
)
data = get_response.stream(100)
- # Read "good" data before we try to read again.
- # This won't trigger till generator is exhausted.
- next(data)
- try:
+ with pytest.raises(ProtocolError, match="12 bytes read, 10 more expected"):
next(data)
- assert False
- except ProtocolError as e:
- assert "12 bytes read, 10 more expected" in str(e)
-
done_event.set()
def test_enforce_content_length_no_body(self) -> None:
| Standardize HTTPResponse.read(X) behavior regardless of compression
Currently, calls to `HTTPResponse.read()` with a specified amount can return varying amounts of data depending on HTTP compression. This makes using `HTTPResponse` with other `io` primitives like `io.BufferedReader()` difficult, as they assume `.read(X)` won't return more bytes than `X`.
Related issue: https://github.com/urllib3/urllib3/issues/709
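For illustration, a rough sketch of the inconsistency (hedged: whether the second response is actually compressed depends on the server honoring `Accept-Encoding`, and the exact byte counts vary):
```python
import urllib3

http = urllib3.PoolManager()

# Identity (uncompressed) response: read(10) returns at most 10 bytes.
resp = http.request("GET", "https://example.com", preload_content=False)
assert len(resp.read(10)) <= 10

# Compressed response: read(10) currently pulls 10 bytes off the wire and
# returns whatever they decompress to, possibly far more than 10 bytes.
resp = http.request(
    "GET",
    "https://example.com",
    headers={"Accept-Encoding": "gzip"},
    preload_content=False,
)
chunk = resp.read(10)  # len(chunk) is unpredictable here
```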
### Minimum requirements
:dollar: **You can get paid to complete this issue! [Please read the docs for more information](https://urllib3.readthedocs.io/en/latest/contributing.html#getting-paid-for-your-contributions).**
- [ ] Calling `HTTPResponse.read(x)` should result in the same behavior regardless of whether the response is compressed or not.
- [ ] Test cases for `decode_content=True/False`, `MultiDecoder` (more than one `Content-Encoding`), all the different Content-Encodings and `identity`. Test when `.stream()`-ing from the `HTTPResponse` or in manual `.read()`.
- [ ] May need to buffer output across calls to `.read()` and `decompress` until there's at least `n` bytes available to be read.
| Part of me genuinely wonders if we need a separate collaborator that wraps an `HTTPResponse` to do the decoding, rather than having the `HTTPResponse` do it for us. I'm thinking along the lines of if someone gives us `decode_content=True` it returns a `DecodedHTTPResponse` which has the original and does the buffer magic for us to ensure we don't return more than `N` and don't lose data in the interim.
Certainly could work! Are there many users of decode_content=False? I can only remember seeing pip use that.
Betamax uses it to preserve the exact body bytes while recording interactions (request-response). I think openstack/glance used it a bunch as they were moving around compressed data they didn't want uncompressed. I don't think it's a frequent use-case, but I do think it's worth supporting, and branching logic in `read()` or elsewhere could get very tricky.
The other thing we could do is unconditionally read into a `BytesIO` buffer and store the content twice, but that seems undesirable. Also, managing that memory effectively isn't the easiest thing to understand. We could use a ring buffer here since we don't care about overwriting old data and probably don't want to have a copy of the data in memory for the user (that's Requests' job) ;)
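A minimal sketch of that idea, using a deque of chunks rather than a true ring buffer; this mirrors what the patch above ends up implementing as `BytesQueueBuffer`, minus size tracking and error handling (the class name here is hypothetical):
```python
import collections
import io

class ChunkBuffer:
    """Queue decoded chunks; get(n) hands back up to n bytes without
    re-copying whatever stays buffered for the next call."""

    def __init__(self):
        self.chunks = collections.deque()

    def put(self, data):
        self.chunks.append(data)

    def get(self, n):
        out = io.BytesIO()
        while self.chunks and out.tell() < n:
            chunk = self.chunks.popleft()
            remaining = n - out.tell()
            if len(chunk) > remaining:
                # push the unread tail back to the front for the next get()
                self.chunks.appendleft(chunk[remaining:])
                chunk = chunk[:remaining]
            out.write(chunk)
        return out.getvalue()
```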
> May need to buffer output across calls to .read() and decompress until there's at least n bytes available to be read
Does it really need to return at least n bytes? Or use a more usual socket behavior: return at least 1 byte? | 2022-11-11T07:46:40Z | [] | [] |
urllib3/urllib3 | 2,817 | urllib3__urllib3-2817 | [
"2800"
] | 342db499634e018f8ba6ace1fc58297f40d17fba | diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -317,6 +317,7 @@ def __init__(
self.version = version
self.reason = reason
self.decode_content = decode_content
+ self._has_decoded_content = False
self._request_url: Optional[str] = request_url
self.retries = retries
@@ -436,11 +437,17 @@ def _decode(
Decode the data passed in and potentially flush the decoder.
"""
if not decode_content:
+ if self._has_decoded_content:
+ raise RuntimeError(
+ "Calling read(decode_content=False) is not supported after "
+ "read(decode_content=True) was called."
+ )
return data
try:
if self._decoder:
data = self._decoder.decompress(data)
+ self._has_decoded_content = True
except self.DECODER_ERROR_CLASSES as e:
content_encoding = self.headers.get("content-encoding", "").lower()
raise DecodeError(
@@ -891,6 +898,11 @@ def read(
else:
# do not waste memory on buffer when not decoding
if not decode_content:
+ if self._has_decoded_content:
+ raise RuntimeError(
+ "Calling read(decode_content=False) is not supported after "
+ "read(decode_content=True) was called."
+ )
return data
decoded_data = self._decode(data, decode_content, flush_decoder)
| diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -565,6 +565,46 @@ def test_io_not_autoclose_textiowrapper(self) -> None:
with pytest.raises(ValueError, match="I/O operation on closed file.?"):
next(reader)
+ def test_read_with_illegal_mix_decode_toggle(self) -> None:
+ data = zlib.compress(b"foo")
+
+ fp = BytesIO(data)
+
+ resp = HTTPResponse(
+ fp, headers={"content-encoding": "deflate"}, preload_content=False
+ )
+
+ assert resp.read(1) == b"f"
+
+ with pytest.raises(
+ RuntimeError,
+ match=(
+ r"Calling read\(decode_content=False\) is not supported after "
+ r"read\(decode_content=True\) was called"
+ ),
+ ):
+ resp.read(1, decode_content=False)
+
+ with pytest.raises(
+ RuntimeError,
+ match=(
+ r"Calling read\(decode_content=False\) is not supported after "
+ r"read\(decode_content=True\) was called"
+ ),
+ ):
+ resp.read(decode_content=False)
+
+ def test_read_with_mix_decode_toggle(self) -> None:
+ data = zlib.compress(b"foo")
+
+ fp = BytesIO(data)
+
+ resp = HTTPResponse(
+ fp, headers={"content-encoding": "deflate"}, preload_content=False
+ )
+ assert resp.read(2, decode_content=False) is not None
+ assert resp.read(1, decode_content=True) == b"f"
+
def test_streaming(self) -> None:
fp = BytesIO(b"foo")
resp = HTTPResponse(fp, preload_content=False)
| Should we prevent `read(decode_content=True)` followed by `read(decode_content=False)`?
`BaseHTTPResponse.read()` has a `decode_content` parameter to let the user choose between... decoding the content or not. (I'm not sure why the word is "decode" because what we're really doing is decompressing. Anyway.)
Until #2712 and #2798, issuing read() calls with differing values of decode_content would be really weird but you would not lose any data. Now that we have a buffer, issuing `decode_content=False` after having issued `decode_content=True` will cause bugs:
* if you have enough decoded data in the buffer for a read(amt, decode_content=False) you will get decoded data
* if you don't have enough data, the existing data in the buffer will be silently ignored and lost
I don't think there's a use case here to support, but should we actively prevent this footgun?
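A minimal sketch of the footgun, using an in-memory response the way the test suite does (the payload is arbitrary):
```python
import zlib
from io import BytesIO
from urllib3.response import HTTPResponse

fp = BytesIO(zlib.compress(b"foobar"))
r = HTTPResponse(
    fp, headers={"content-encoding": "deflate"}, preload_content=False
)

r.read(1, decode_content=True)   # decodes; leftover decoded bytes are buffered
r.read(1, decode_content=False)  # footgun: may serve *decoded* bytes from the
                                 # buffer, or silently drop them when refilling
```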
### Minimum requirements
:dollar: **You can get paid to complete this issue! [Please read the docs for more information](https://urllib3.readthedocs.io/en/latest/contributing.html#getting-paid-for-your-contributions).**
- [ ] Calling `HTTPResponse.read(decode_content=True)` **or** `HTTPResponse.read(decode_content=False)` should continue working as before.
- [ ] Calling `HTTPResponse.read(decode_content=True)` **followed by** `HTTPResponse.read(decode_content=False)` should raise a RuntimeError. The other way is fine.
- [ ] The requests and botocore integration tests should continue to pass.
I agree this isn't a use-case we should support. Too many chances for data to get stuck in a weird half-decoded, half-not-decoded state, and we don't have a good solution for someone who wants both, likely for bad reasons. Users who really want to do something like this can handle decompression on their own. Let's make calling back-and-forth like this a `RuntimeError`?
I tried it naively in [`f894e83` (#2798)](https://github.com/urllib3/urllib3/pull/2798/commits/f894e8367a0e0710efb6e000326f6213ba1fca50), but that broke the requests integration tests: https://github.com/urllib3/urllib3/actions/runs/3452169720/jobs/5761894025. @sethmlarson then had the following comment:
> @pquentin I wonder if we need to change the call to read() with decode_content=False inside of the "drain" method on HTTPResponse? I know Requests allows for raw data access. | 2022-11-17T00:41:43Z | [] | [] |
urllib3/urllib3 | 2,821 | urllib3__urllib3-2821 | [
"2819"
] | 8b8e4b5a148d0eb706daf5ac48b4423b434495f5 | diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -862,7 +862,7 @@ def _is_ssl_error_message_from_http_proxy(ssl_error):
)
# Check if we should retry the HTTP response.
- has_retry_after = bool(response.getheader("Retry-After"))
+ has_retry_after = bool(response.headers.get("Retry-After"))
if retries.is_retry(method, response.status, has_retry_after):
try:
retries = retries.increment(method, url, response=response, _pool=self)
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -3,6 +3,7 @@
import io
import logging
import sys
+import warnings
import zlib
from contextlib import contextmanager
from socket import error as SocketError
@@ -663,9 +664,21 @@ def from_httplib(ResponseCls, r, **response_kw):
# Backwards-compatibility methods for http.client.HTTPResponse
def getheaders(self):
+ warnings.warn(
+ "HTTPResponse.getheaders() is deprecated and will be removed "
+ "in urllib3 v2.1.0. Instead access HTTResponse.headers directly.",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
return self.headers
def getheader(self, name, default=None):
+ warnings.warn(
+ "HTTPResponse.getheader() is deprecated and will be removed "
+ "in urllib3 v2.1.0. Instead use HTTResponse.headers.get(name, default).",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
return self.headers.get(name, default)
# Backwards compatibility for http.cookiejar
diff --git a/src/urllib3/util/retry.py b/src/urllib3/util/retry.py
--- a/src/urllib3/util/retry.py
+++ b/src/urllib3/util/retry.py
@@ -394,7 +394,7 @@ def parse_retry_after(self, retry_after):
def get_retry_after(self, response):
"""Get the value of Retry-After in seconds."""
- retry_after = response.getheader("Retry-After")
+ retry_after = response.headers.get("Retry-After")
if retry_after is None:
return None
| diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -13,6 +13,7 @@
import pytest
import six
+from urllib3._collections import HTTPHeaderDict
from urllib3.exceptions import (
DecodeError,
IncompleteRead,
@@ -57,12 +58,20 @@ class TestLegacyResponse(object):
def test_getheaders(self):
headers = {"host": "example.com"}
r = HTTPResponse(headers=headers)
- assert r.getheaders() == headers
+ with pytest.warns(
+ DeprecationWarning,
+ match=r"HTTPResponse.getheaders\(\) is deprecated",
+ ):
+ assert r.getheaders() == HTTPHeaderDict(headers)
def test_getheader(self):
headers = {"host": "example.com"}
r = HTTPResponse(headers=headers)
- assert r.getheader("host") == "example.com"
+ with pytest.warns(
+ DeprecationWarning,
+ match=r"HTTPResponse.getheader\(\) is deprecated",
+ ):
+ assert r.getheader("host") == "example.com"
class TestResponse(object):
| Backport #2814 to 1.26.x
https://github.com/urllib3/urllib3/pull/2814 needs to be backported to 1.26.x
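For reference, the deprecated calls and their replacements, as named in the warnings added by the patch above (here `response` stands for any `urllib3.response.HTTPResponse`):
```python
# Deprecated (to be removed in urllib3 v2.1.0):
value = response.getheader("Retry-After")
headers = response.getheaders()

# Replacements suggested by the warnings:
value = response.headers.get("Retry-After")
headers = response.headers
```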
| 2022-11-20T12:11:34Z | [] | [] |
|
urllib3/urllib3 | 2,840 | urllib3__urllib3-2840 | [
"2839"
] | 0612a53f48f72ab6e9ef58ccf1d6a25b847152fc | diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -545,7 +545,7 @@ def _make_request(
response._pool = self # type: ignore[attr-defined]
log.debug(
- '%s://%s:%s "%s %s %s" %s',
+ '%s://%s:%s "%s %s %s" %s %s',
self.scheme,
self.host,
self.port,
| diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -1,9 +1,7 @@
from __future__ import annotations
import io
-import logging
import socket
-import sys
import time
import typing
import warnings
@@ -39,10 +37,6 @@
pytestmark = pytest.mark.flaky
-log = logging.getLogger("urllib3.connectionpool")
-log.setLevel(logging.NOTSET)
-log.addHandler(logging.StreamHandler(sys.stdout))
-
def wait_for_socket(ready_event: Event) -> None:
ready_event.wait()
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -1,7 +1,6 @@
from __future__ import annotations
import datetime
-import logging
import os.path
import shutil
import ssl
@@ -52,11 +51,6 @@
pytestmark = pytest.mark.flaky
-log = logging.getLogger("urllib3.connectionpool")
-log.setLevel(logging.NOTSET)
-log.addHandler(logging.StreamHandler(sys.stdout))
-
-
TLSv1_CERTS = DEFAULT_CERTS.copy()
TLSv1_CERTS["ssl_version"] = getattr(ssl, "PROTOCOL_TLSv1", None)
| 2.0.0a2: Logging error with `StreamHandler` / `urllib3.add_stderr_logger`
### Subject
When using `urllib3.add_stderr_logger` (or using a StreamHandler), the formatting for the message to print out the request details fails.
This happens on the current main (2.0.0a2), release 2.0.0a2 and 2.0.0a1. 1.26.13 works fine.
### Environment
Describe your environment.
At least, paste here the output of:
```python
import platform
import urllib3
print("OS", platform.platform())
print("Python", platform.python_version())
print("urllib3", urllib3.__version__)
```
```
OS Linux-5.15.72-1-lts-x86_64-with-glibc2.35
Python 3.8.14
urllib3 2.0.0a2
```
### Steps to Reproduce
```python
import urllib3
urllib3.add_stderr_logger()
pool = urllib3.PoolManager()
pool.request('GET', 'https://github.com/urllib3/urllib3')
```
### Expected Behavior
No logging error
### Actual Behavior
```
(venv) [dev@dev-vm urllib3-test]$ python urllib3_test.py
2022-11-30 15:34:40,252 DEBUG Added a stderr logging handler to logger: urllib3
2022-11-30 15:34:40,252 DEBUG Starting new HTTPS connection (1): github.com:443
--- Logging error ---
Traceback (most recent call last):
File "/home/dev/.pyenv/versions/3.8.14/lib/python3.8/logging/__init__.py", line 1085, in emit
msg = self.format(record)
File "/home/dev/.pyenv/versions/3.8.14/lib/python3.8/logging/__init__.py", line 929, in format
return fmt.format(record)
File "/home/dev/.pyenv/versions/3.8.14/lib/python3.8/logging/__init__.py", line 668, in format
record.message = record.getMessage()
File "/home/dev/.pyenv/versions/3.8.14/lib/python3.8/logging/__init__.py", line 373, in getMessage
msg = msg % self.args
TypeError: not all arguments converted during string formatting
Call stack:
File "urllib3_test.py", line 7, in <module>
pool.request('GET', 'https://github.com/urllib3/urllib3')
File "/home/dev/urllib3-test/venv/lib/python3.8/site-packages/urllib3/_request_methods.py", line 110, in request
return self.request_encode_url(
File "/home/dev/urllib3-test/venv/lib/python3.8/site-packages/urllib3/_request_methods.py", line 143, in request_encode_url
return self.urlopen(method, url, **extra_kw)
File "/home/dev/urllib3-test/venv/lib/python3.8/site-packages/urllib3/poolmanager.py", line 433, in urlopen
response = conn.urlopen(method, u.request_uri, **kw)
File "/home/dev/urllib3-test/venv/lib/python3.8/site-packages/urllib3/connectionpool.py", line 791, in urlopen
response = self._make_request(
File "/home/dev/urllib3-test/venv/lib/python3.8/site-packages/urllib3/connectionpool.py", line 547, in _make_request
log.debug(
Message: '%s://%s:%s "%s %s %s" %s'
Arguments: ('https', 'github.com', 443, 'GET', '/urllib3/urllib3', 'HTTP/1.1', 200, None)
```
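The root cause is a printf-style argument-count mismatch in the `log.debug()` call (seven `%s` placeholders, eight arguments), which only surfaces once a handler actually formats the record. A standalone sketch of the same failure (logger name is arbitrary):
```python
import logging
import sys

logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
log = logging.getLogger("demo")

# 7 placeholders, 8 arguments -> "--- Logging error ---" at emit time
log.debug(
    '%s://%s:%s "%s %s %s" %s',
    "https", "github.com", 443, "GET", "/urllib3/urllib3", "HTTP/1.1", 200, None,
)
```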
| 2022-11-30T06:26:09Z | [] | [] |
|
urllib3/urllib3 | 2,843 | urllib3__urllib3-2843 | [
"2830"
] | 5d93c9c6c01efc469200044027c21c6e661f2a99 | diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -56,7 +56,6 @@ def tests_impl(
"-ra",
f"--color={'yes' if 'GITHUB_ACTIONS' in os.environ else 'auto'}",
"--tb=native",
- "--no-success-flaky-report",
"--durations=10",
"--strict-config",
"--strict-markers",
| diff --git a/test/with_dummyserver/test_chunked_transfer.py b/test/with_dummyserver/test_chunked_transfer.py
--- a/test/with_dummyserver/test_chunked_transfer.py
+++ b/test/with_dummyserver/test_chunked_transfer.py
@@ -13,9 +13,6 @@
from urllib3.util import SKIP_HEADER
from urllib3.util.retry import Retry
-# Retry failed tests
-pytestmark = pytest.mark.flaky
-
class TestChunkedTransfer(SocketDummyServerTestCase):
def start_chunked_handler(self) -> None:
diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -35,8 +35,6 @@
from .. import INVALID_SOURCE_ADDRESSES, TARPIT_HOST, VALID_SOURCE_ADDRESSES
from ..port_helpers import find_unused_port
-pytestmark = pytest.mark.flaky
-
def wait_for_socket(ready_event: Event) -> None:
ready_event.wait()
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -47,10 +47,6 @@
from .. import has_alpn
-# Retry failed tests
-pytestmark = pytest.mark.flaky
-
-
TLSv1_CERTS = DEFAULT_CERTS.copy()
TLSv1_CERTS["ssl_version"] = getattr(ssl, "PROTOCOL_TLSv1", None)
diff --git a/test/with_dummyserver/test_no_ssl.py b/test/with_dummyserver/test_no_ssl.py
--- a/test/with_dummyserver/test_no_ssl.py
+++ b/test/with_dummyserver/test_no_ssl.py
@@ -5,16 +5,11 @@
"""
from __future__ import annotations
-import pytest
-
import urllib3
from dummyserver.testcase import HTTPDummyServerTestCase, HTTPSDummyServerTestCase
from ..test_no_ssl import TestWithoutSSL
-# Retry failed tests
-pytestmark = pytest.mark.flaky
-
class TestHTTPWithoutSSL(HTTPDummyServerTestCase, TestWithoutSSL):
def test_simple(self) -> None:
diff --git a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py
--- a/test/with_dummyserver/test_poolmanager.py
+++ b/test/with_dummyserver/test_poolmanager.py
@@ -14,9 +14,6 @@
from urllib3.poolmanager import PoolManager
from urllib3.util.retry import Retry
-# Retry failed tests
-pytestmark = pytest.mark.flaky
-
class TestPoolManager(HTTPDummyServerTestCase):
@classmethod
diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -37,9 +37,6 @@
from .. import TARPIT_HOST, requires_network
-# Retry failed tests
-pytestmark = pytest.mark.flaky
-
class TestHTTPProxyManager(HTTPDummyProxyTestCase):
@classmethod
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -60,9 +60,6 @@
else:
StrOrBytesPath = object
-# Retry failed tests
-pytestmark = pytest.mark.flaky
-
class TestCookies(SocketDummyServerTestCase):
def test_multi_setcookie(self) -> None:
| flaky and pytest-memray incompatible
### Subject
```
______________________________________________________________________________________________________ TestHTTPProxyManager.test_forwarding_proxy_request_timeout[https-https-True] ______________________________________________________________________________________________________
Traceback (most recent call last):
File "/home/graingert/projects/urllib3/.nox/test-3-11/lib/python3.11/site-packages/pytest_memray/plugin.py", line 122, in wrapper
result: object | None = func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "/home/graingert/projects/urllib3/.nox/test-3-11/lib/python3.11/site-packages/pytest_memray/plugin.py", line 121, in wrapper
with Tracker(result_file):
File "src/memray/_memray.pyx", line 404, in memray._memray.Tracker.__enter__
RuntimeError: No more than one Tracker instance can be active at the same time
```
caused by a flaky test:
```
===Flaky Test Report===
test_forwarding_proxy_request_timeout[https-https-True] failed (1 runs remaining out of 2).
<class 'AssertionError'>
assert <class 'urllib3.exceptions.ProxyError'> == ReadTimeoutError
+ where <class 'urllib3.exceptions.ProxyError'> = type(ProxyError('Unable to connect to proxy', ReadTimeoutError("HTTPSConnectionPool(host='240.0.0.0', port=443): Read timed out. (read timeout=0.01)")))
+ where ProxyError('Unable to connect to proxy', ReadTimeoutError("HTTPSConnectionPool(host='240.0.0.0', port=443): Read timed out. (read timeout=0.01)")) = MaxRetryError('HTTPSConnectionPool(host=\'240.0.0.0\', port=443): Max retries exceeded with url: https://240.0.0.0 (Caused by ProxyError(\'Unable to connect to proxy\', ReadTimeoutError("HTTPSConnectionPool(host=\'240.0.0.0\', port=443): Read timed out. (read timeout=0.01)")))').reason
+ where MaxRetryError('HTTPSConnectionPool(host=\'240.0.0.0\', port=443): Max retries exceeded with url: https://240.0.0.0 (Caused by ProxyError(\'Unable to connect to proxy\', ReadTimeoutError("HTTPSConnectionPool(host=\'240.0.0.0\', port=443): Read timed out. (read timeout=0.01)")))') = <ExceptionInfo MaxRetryError('HTTPSConnectionPool(host=\'240.0.0.0\', port=443): Max retries exceeded with url: https://240.0.0.0 (Ca...proxy\', ReadTimeoutError("HTTPSConnectionPool(host=\'240.0.0.0\', port=443): Read timed out. (read timeout=0.01)")))') tblen=10>.value
[<TracebackEntry /home/graingert/projects/urllib3/test/with_dummyserver/test_proxy_poolmanager.py:484>]
test_forwarding_proxy_request_timeout[https-https-True] failed; it passed 0 out of the required 1 times.
<class 'RuntimeError'>
No more than one Tracker instance can be active at the same time
[<TracebackEntry /home/graingert/projects/urllib3/.nox/test-3-11/lib/python3.11/site-packages/pytest_memray/plugin.py:122>, <TracebackEntry /home/graingert/projects/urllib3/.nox/test-3-11/lib/python3.11/site-packages/pytest_memray/plugin.py:121>, <TracebackEntry src/memray/_memray.pyx:404>]
```
see also https://github.com/bloomberg/pytest-memray/issues/53
| 2022-11-30T18:19:11Z | [] | [] |
|
urllib3/urllib3 | 2,859 | urllib3__urllib3-2859 | [
"2757"
] | c056eb3df6aae4d1dff0365baded46235d413520 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -5,6 +5,7 @@
import os
import re
import socket
+import sys
import typing
import warnings
from http.client import HTTPConnection as _HTTPConnection
@@ -76,6 +77,8 @@ class BaseSSLError(BaseException): # type: ignore[no-redef]
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
+_HAS_SYS_AUDIT = hasattr(sys, "audit")
+
class HTTPConnection(_HTTPConnection):
"""
@@ -216,6 +219,10 @@ def _new_conn(self) -> socket.socket:
self, f"Failed to establish a new connection: {e}"
) from e
+ # Audit hooks are only available in Python 3.8+
+ if _HAS_SYS_AUDIT:
+ sys.audit("http.client.connect", self, self.host, self.port)
+
return sock
def set_tunnel(
| diff --git a/test/with_dummyserver/test_connection.py b/test/with_dummyserver/test_connection.py
--- a/test/with_dummyserver/test_connection.py
+++ b/test/with_dummyserver/test_connection.py
@@ -1,7 +1,9 @@
from __future__ import annotations
+import sys
import typing
from http.client import ResponseNotReady
+from unittest import mock
import pytest
@@ -33,6 +35,19 @@ def test_returns_urllib3_HTTPResponse(pool: HTTPConnectionPool) -> None:
assert isinstance(response, HTTPResponse)
[email protected](not hasattr(sys, "audit"), reason="requires python 3.8+")
[email protected]("urllib3.connection.sys.audit")
+def test_audit_event(audit_mock: mock.Mock, pool: HTTPConnectionPool) -> None:
+ conn = pool._get_conn()
+ conn.request("GET", "/")
+ audit_mock.assert_any_call("http.client.connect", conn, conn.host, conn.port)
+ # Ensure the event is raised only once.
+ connect_events = [
+ call for call in audit_mock.mock_calls if call.args[0] == "http.client.connect"
+ ]
+ assert len(connect_events) == 1
+
+
def test_does_not_release_conn(pool: HTTPConnectionPool) -> None:
conn = pool._get_conn()
| urllib3 does not send http.client.connect audit events
### Subject
Despite being implemented in terms of `http.client`, urllib3 doesn't cause `http.client.connect` audit events to be emitted
### Steps to Reproduce
```python
import http.client
import sys
import urllib3
def _hook(event: str, args: tuple):
if event == "http.client.connect":
print(event, args)
if event == 'socket.connect':
print(event, args)
sys.addaudithook(_hook)
print('using http.client')
http.client.HTTPConnection('www.python.org').connect()
print()
print('using urllib3')
http = urllib3.PoolManager()
resp = http.request("GET", "https://www.python.org")
```
```
using http.client
http.client.connect (<http.client.HTTPConnection object at 0x7f1303983fd0>, 'www.python.org', 80)
socket.connect (<socket.socket fd=4, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('0.0.0.0', 0)>, ('199.232.36.223', 80))
using urllib3
socket.connect (<socket.socket fd=4, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('0.0.0.0', 0)>, ('199.232.36.223', 443))
```
This is because the urllib3 `HTTPConnection` object overrides `connect`, which is where the audit events are sent from.
https://github.com/python/cpython/blob/e87ada48a9e5d9d03f9759138869216df0d7383a/Lib/http/client.py#L954
### Expected Behavior
`http.client.connect` audit events should be emitted
### Actual Behavior
They aren't
If we don't use http.client but only try to provide a similar API, not a fully complete implementation, why must we do what only standard libraries do?
My expectation that urllib3 would send this audit event is this sentence in the API docs:
> Based on http.client.HTTPConnection
To me, this implies that it is a superset of the functionality present in http.client.HTTPConnection.
Regardless of implementation details, it would be useful for urllib3 to send `http.client.connect` audit events - or less preferably, a different audit event.
> My expectation that urllib3 would send this audit event is this sentence in the API docs:
>
> > Based on http.client.HTTPConnection
>
> To me, this implies that it is a superset of the functionality present in http.client.HTTPConnection.
>
> Regardless of implementation details, it would be useful for urllib3 to send `http.client.connect` audit events - or less preferably, a different audit event.
>
If we're not using HTTPConnection.connect, why should it emit? Some things may be the same or overlap, but that isn't guaranteed to happen. Further, this library shouldn't be manufacturing events another library presently generates as that library may change them without notice. We also don't want to rely on the standard library's connect method.
I'd also argue that http.client is the namespace that's owned by the CPython core developers; no one should be reusing it. If we emitted anything to the audit events (which arguably isn't intended for libraries outside the stdlib) it would be under a namespace we ostensibly can claim to own.
> If we're not using HTTPConnection.connect
This is an implementation detail that doesn't excuse the inconsistency.
You are deriving behavior from `http.client.HTTPConnection` and then breaking the public contract it provides.
This is further evidenced by the fact that `urllib3` DOES cause `http.client.send` audits to be emitted.
```python
import http.client
import sys
import urllib3
def _hook(event: str, args: tuple):
if event == "http.client.connect":
print(event, args)
if event == "http.client.send":
print(event, args)
if event == 'socket.connect':
print(event, args)
sys.addaudithook(_hook)
print('using http.client')
con = http.client.HTTPConnection('www.python.org')
con.connect()
con.request('GET', '/')
print()
print('using urllib3')
http = urllib3.PoolManager()
resp = http.request("GET", "https://www.python.org")
```
```
using http.client
http.client.connect (<http.client.HTTPConnection object at 0x7fabc2dfffd0>, 'www.python.org', 80)
socket.connect (<socket.socket fd=4, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('0.0.0.0', 0)>, ('199.232.36.223', 80))
http.client.send (<http.client.HTTPConnection object at 0x7fabc2dfffd0>, b'GET / HTTP/1.1\r\nHost: www.python.org\r\nAccept-Encoding: identity\r\n\r\n')
using urllib3
socket.connect (<socket.socket fd=5, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('0.0.0.0', 0)>, ('199.232.36.223', 443))
http.client.send (<urllib3.connection.HTTPSConnection object at 0x7fabc24c2ce0>, b'GET / HTTP/1.1\r\nHost: www.python.org\r\nAccept-Encoding: identity\r\nUser-Agent: python-urllib3/1.26.9\r\n\r\n')
```
Is this behavior guaranteed? subject to breakage in the future if urllib3 decides to override the [`send` method](https://github.com/python/cpython/blob/e87ada48a9e5d9d03f9759138869216df0d7383a/Lib/http/client.py#L1010)?
> Further, this library shouldn't be manufacturing events another library presently generates as that library may change them without notice.
According to the [docs](https://docs.python.org/3/library/sys.html#sys.audit) "The number and types of arguments for a given event are considered a public and stable API and should not be modified between releases". `http.client.connect` is well defined and not subject to arbitrary change.
>which arguably isn't intended for libraries outside the stdlib
I can find no evidence of this anywhere in the documentation.
For some context, this is presenting real-world problems for an application in my day job.
We would like to have some stronger guarantees that after a certain point in the application lifecycle there are no connections being made to certain hosts within our organization. We have implemented this using `http.client.connect` audit hooks. Due to this bug, this only works some of the time - some requests are made using libraries implemented in urllib3, and some using `http.client`.
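For context, such a guard might look roughly like this (the host suffix and the error raised are hypothetical; the `http.client.connect` event carries `(connection, host, port)`):
```python
import sys

def forbid_late_connections(event, args):
    if event == "http.client.connect":
        _conn, host, port = args
        if host.endswith(".corp.example"):  # hypothetical internal suffix
            raise RuntimeError(f"connection to {host}:{port} is forbidden")

sys.addaudithook(forbid_late_connections)
```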
Thanks for bringing this to our attention, I hadn't thought of audithooks. We're in a bit of a tough spot as our use of http.client is historical and (if all goes to plan) something that we'd like to move away from in the future.
Here's what I'm thinking:
- Let's match the standard library audithooks in 1.26.x and have a test case that compares what is emitted using http.client directly to what we emit.
- Document that audithooks for http.client only fire for the HTTPConnection based on the stdlib http.client which in 2.0 and onwards isn't guaranteed to be the default.
- Investigate adding out oen audithook events that we can make better guarantees for?
How does that sound? Also @apmorton would you be interested in contributing work towards this if we decide to move forward?
That all sounds reasonable to me - I'd be happy to contribute work on this issue. | 2022-12-23T03:46:57Z | [] | [] |
urllib3/urllib3 | 2,864 | urllib3__urllib3-2864 | [
"2850"
] | 61e86ea7c4ccc7c2a688cf06d1b76821d4ac5c1f | diff --git a/src/urllib3/util/url.py b/src/urllib3/util/url.py
--- a/src/urllib3/util/url.py
+++ b/src/urllib3/util/url.py
@@ -63,7 +63,7 @@
BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
-_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*([0-9]{0,5}))?$") % (
+_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*?(|0|[1-9][0-9]{0,4}))?$") % (
REG_NAME_PAT,
IPV4_PAT,
IPV6_ADDRZ_PAT,
| diff --git a/test/test_util.py b/test/test_util.py
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -351,6 +351,13 @@ def test_parse_url_remove_leading_zeros(self):
url = parse_url("https://example.com:0000000000080")
assert url.port == 80
+ def test_parse_url_only_zeros(self):
+ url = parse_url("https://example.com:0")
+ assert url.port == 0
+
+ url = parse_url("https://example.com:000000000000")
+ assert url.port == 0
+
def test_Url_str(self):
U = Url("http", host="google.com")
assert str(U) == U.url
| `parse_url` with `:0` returns `port=None`
It should return `port=0` instead.
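A minimal reproduction against a urllib3 version without this fix (the comments show the observed behavior):
```
from urllib3.util import parse_url

print(parse_url("https://example.com:0").port)   # None, but should be 0
print(parse_url("https://example.com:80").port)  # 80, as expected
```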
| @venthur Would you mind backporting #2849 to 1.26.x too?
@pquentin I can take the backporting change.
Sure, please go ahead! | 2022-12-27T16:48:54Z | [] | [] |
urllib3/urllib3 | 2,992 | urllib3__urllib3-2992 | [
"2999"
] | e5a5dfcc4ce49b7857e2ebc6158549f01e7c5439 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -784,36 +784,42 @@ def _ssl_wrap_socket_and_match_hostname(
tls_in_tls=tls_in_tls,
)
- if assert_fingerprint:
- _assert_fingerprint(ssl_sock.getpeercert(binary_form=True), assert_fingerprint)
- elif (
- context.verify_mode != ssl.CERT_NONE
- and not context.check_hostname
- and assert_hostname is not False
- ):
- cert: _TYPE_PEER_CERT_RET_DICT = ssl_sock.getpeercert() # type: ignore[assignment]
-
- # Need to signal to our match_hostname whether to use 'commonName' or not.
- # If we're using our own constructed SSLContext we explicitly set 'False'
- # because PyPy hard-codes 'True' from SSLContext.hostname_checks_common_name.
- if default_ssl_context:
- hostname_checks_common_name = False
- else:
- hostname_checks_common_name = (
- getattr(context, "hostname_checks_common_name", False) or False
+ try:
+ if assert_fingerprint:
+ _assert_fingerprint(
+ ssl_sock.getpeercert(binary_form=True), assert_fingerprint
+ )
+ elif (
+ context.verify_mode != ssl.CERT_NONE
+ and not context.check_hostname
+ and assert_hostname is not False
+ ):
+ cert: _TYPE_PEER_CERT_RET_DICT = ssl_sock.getpeercert() # type: ignore[assignment]
+
+ # Need to signal to our match_hostname whether to use 'commonName' or not.
+ # If we're using our own constructed SSLContext we explicitly set 'False'
+ # because PyPy hard-codes 'True' from SSLContext.hostname_checks_common_name.
+ if default_ssl_context:
+ hostname_checks_common_name = False
+ else:
+ hostname_checks_common_name = (
+ getattr(context, "hostname_checks_common_name", False) or False
+ )
+
+ _match_hostname(
+ cert,
+ assert_hostname or server_hostname, # type: ignore[arg-type]
+ hostname_checks_common_name,
)
- _match_hostname(
- cert,
- assert_hostname or server_hostname, # type: ignore[arg-type]
- hostname_checks_common_name,
+ return _WrappedAndVerifiedSocket(
+ socket=ssl_sock,
+ is_verified=context.verify_mode == ssl.CERT_REQUIRED
+ or bool(assert_fingerprint),
)
-
- return _WrappedAndVerifiedSocket(
- socket=ssl_sock,
- is_verified=context.verify_mode == ssl.CERT_REQUIRED
- or bool(assert_fingerprint),
- )
+ except BaseException:
+ ssl_sock.close()
+ raise
def _match_hostname(
| diff --git a/test/test_connection.py b/test/test_connection.py
--- a/test/test_connection.py
+++ b/test/test_connection.py
@@ -17,7 +17,8 @@
_url_from_connection,
_wrap_proxy_error,
)
-from urllib3.exceptions import HTTPError, ProxyError
+from urllib3.exceptions import HTTPError, ProxyError, SSLError
+from urllib3.util import ssl_
from urllib3.util.ssl_match_hostname import (
CertificateError as ImplementationCertificateError,
)
@@ -235,3 +236,32 @@ def test_getresponse_requires_reponseoptions(self) -> None:
# Should error if a request has not been sent
with pytest.raises(ResponseNotReady):
conn.getresponse()
+
+ def test_assert_fingerprint_closes_socket(self) -> None:
+ context = mock.create_autospec(ssl_.SSLContext)
+ context.wrap_socket.return_value.getpeercert.return_value = b"fake cert"
+ conn = HTTPSConnection(
+ "google.com",
+ port=443,
+ assert_fingerprint="AA:AA:AA:AA:AA:AAAA:AA:AAAA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA",
+ ssl_context=context,
+ )
+ with mock.patch.object(conn, "_new_conn"):
+ with pytest.raises(SSLError):
+ conn.connect()
+
+ context.wrap_socket.return_value.close.assert_called_once_with()
+
+ def test_assert_hostname_closes_socket(self) -> None:
+ context = mock.create_autospec(ssl_.SSLContext)
+ context.wrap_socket.return_value.getpeercert.return_value = {
+ "subjectAltName": (("DNS", "google.com"),)
+ }
+ conn = HTTPSConnection(
+ "google.com", port=443, assert_hostname="example.com", ssl_context=context
+ )
+ with mock.patch.object(conn, "_new_conn"):
+ with pytest.raises(ImplementationCertificateError):
+ conn.connect()
+
+ context.wrap_socket.return_value.close.assert_called_once_with()
| Potential regression in 2.0.0 (commit ea96fde9450b2d0d233f0414c7be0a75148fae8b)
Previously the test `test_ssl_cert_pinning_fails` would succeed in 1.26.15, but fails by hanging in 2.0.0:
Ref: https://github.com/elastic/apm-agent-python/blob/main/tests/transports/test_urllib3.py#L256
From bisecting, the offending commit is: https://github.com/urllib3/urllib3/commit/ea96fde9450b2d0d233f0414c7be0a75148fae8b
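A self-contained sketch of the same scenario outside the test suite; the host is a placeholder and the fingerprint is deliberately bogus so that pinning must fail:
```
import urllib3

pool = urllib3.HTTPSConnectionPool(
    "example.com",
    443,
    assert_fingerprint="AA" * 32,  # 64 hex chars: an unmatchable SHA-256 pin
)
try:
    pool.request("GET", "/", retries=False)
except urllib3.exceptions.SSLError as exc:
    print("pinning rejected the certificate:", exc)
```
On 1.26.15 this raises promptly; on 2.0.0 it can hang instead, because the socket wrapped during the failed verification is never closed.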
| 2023-04-25T09:33:32Z | [] | [] |
|
urllib3/urllib3 | 3,022 | urllib3__urllib3-3022 | [
"3008"
] | be5e03b940c301f057e45b22ce5a7022071a3361 | diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -169,10 +169,15 @@ def __init__(self) -> None:
def decompress(self, data: bytes) -> bytes:
if not data:
return b""
- return self._obj.decompress(data) # type: ignore[no-any-return]
+ data_parts = [self._obj.decompress(data)]
+ while self._obj.eof and self._obj.unused_data:
+ unused_data = self._obj.unused_data
+ self._obj = zstd.ZstdDecompressor().decompressobj()
+ data_parts.append(self._obj.decompress(unused_data))
+ return b"".join(data_parts)
def flush(self) -> bytes:
- ret = self._obj.flush()
+ ret = self._obj.flush() # note: this is a no-op
if not self._obj.eof:
raise DecodeError("Zstandard data is incomplete")
return ret # type: ignore[no-any-return]
| diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -332,6 +332,25 @@ def test_decode_zstd(self) -> None:
r = HTTPResponse(fp, headers={"content-encoding": "zstd"})
assert r.data == b"foo"
+ @onlyZstd()
+ def test_decode_multiframe_zstd(self) -> None:
+ data = (
+ # Zstandard frame
+ zstd.compress(b"foo")
+ # skippable frame (must be ignored)
+ + bytes.fromhex(
+ "50 2A 4D 18" # Magic_Number (little-endian)
+ "07 00 00 00" # Frame_Size (little-endian)
+ "00 00 00 00 00 00 00" # User_Data
+ )
+ # Zstandard frame
+ + zstd.compress(b"bar")
+ )
+
+ fp = BytesIO(data)
+ r = HTTPResponse(fp, headers={"content-encoding": "zstd"})
+ assert r.data == b"foobar"
+
@onlyZstd()
def test_chunked_decoding_zstd(self) -> None:
data = zstd.compress(b"foobarbaz")
| Partial responses when using chunked encoding + zstd compression
### Subject
I am testing zstd compression with [bazel-remote](https://github.com/buchgr/bazel-remote/). As background, bazel-remote provides content-addressable storage under the /cas/ folder and storage of encoded build action data under the /ac/ folder. For the purposes of this bug report, I am only doing /cas/ downloads. So bazel-remote is used for basic PUT and GET of binary data.
Without sending request headers saying that I support zstd compression, I don't receive chunked encoding, and response.data contains the full binary data I expect. When I do send those headers, the server's response indicates chunked encoding, and len(response.data) is always 1048576 bytes for files larger than that.
### Environment
OS Windows-10-10.0.22621-SP0
Python 3.10.11
urllib3 2.0.1
pip freeze results:
appdirs==1.4.4
certifi==2022.9.24
charset-normalizer==2.1.1
colorama==0.4.6
flake8==5.0.4
idna==3.4
mccabe==0.7.0
numpy==1.23.4
pycodestyle==2.9.1
pydiffx==1.1
pyflakes==2.5.0
python-gitlab==3.12.0
RBTools==4.0
requests==2.28.1
requests-toolbelt==0.10.1
six==1.16.0
texttable==1.6.7
tqdm==4.64.1
typing_extensions==4.4.0
urllib3==2.0.1
zstandard==0.21.0
### Steps to Reproduce
1. I deployed bazel-remote and did a PUT of a file larger than 1048576 bytes into /cas/<sha_256_hash_of_the_file_contents>
2. I ran the following code:
```
import urllib3
server_url = 'http://<server_name_redacted>:8000'
cas_url = '/cas/d1dcece4b328f7a12d86dd77cb0373496da2dd3971db2f3e3a29612f831a9738'
headers = urllib3.make_headers(accept_encoding=True)
response = urllib3.request('GET', server_url + cas_url, headers=headers)
print('Status: %d' % response.status)
print('Info: %s' % response.info())
print('Got %d back' % len(response.data))
```
### Expected Behavior
Script should print: Got 1308160 back.
### Actual Behavior
Script prints: Got 1048576 back
### Notes
If I set headers to None, the script outputs the following:
```
Status: 200
Info: HTTPHeaderDict({'Content-Length': '1308160', 'Content-Type': 'application/octet-stream', 'Date': 'Tue, 02 May 2023 12:47:40 GMT'})
Got 1308160 back
```
If I initialize headers to support compression, the script outputs the following:
```
Status: 200
Info: HTTPHeaderDict({'Content-Encoding': 'zstd', 'Content-Type': 'application/octet-stream', 'Date': 'Tue, 02 May 2023 12:48:24 GMT', 'Transfer-Encoding': 'chunked'})
Got 1048576 back
```
| @mamico @indygreg Any idea what is going on here? (@grossag will soon report more data that seems to show that with the way we use python-zstandard, we don't decompress everything.)
Some updates from my current investigation:
curl 8.0.1 works and gives me a valid output .dll:
```
C:\Utils\temp>c:\utils\libcurl\bin\curl -w "We downloaded %{size_download} bytes\n" -D - --compressed <server_address_redacted>/cas/d1dcece4b328f7a12d86dd77cb0373496da2dd3971db2f3e3a29612f831a9738 -o C:\utils\temp\a.dll
% Total % Received % Xferd Average Speed Time Time Time Current
Dload Upload Total Spent Left Speed
0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0HTTP/1.1 200 OK
Content-Encoding: zstd
Content-Type: application/octet-stream
Date: Tue, 02 May 2023 13:36:04 GMT
Transfer-Encoding: chunked
100 303k 0 303k 0 0 555k 0 --:--:-- --:--:-- --:--:-- 559k
We downloaded 311215 bytes
```
Both urllib3 and curl are downloading 311215 bytes, which seems right. But if I add this to response.py:
```
print('Decoding %d bytes' % len(data))
data = self._decoder.decompress(data)
print('Decoded into %d bytes' % len(data))
```
I see:
```
Decoding 311215 bytes
Decoded into 1048576 bytes
```
So the source data is correctly 311215 bytes but zstd's decompress API is only giving a chunk. This data size of 1048576 seems to match some magic numbers in python-zstandard's cffi backend.
It does seem to be related to how python-zstandard is used, or a bug inside python-zstandard itself. Inside urllib3's ZstdDecoder.decompress(), I added this code:
```
with open(r'C:\utils\temp\a.dll.zstd', 'wb') as f:
f.write(data)
```
and then I ran:
```
C:\Utils\temp>win64_vc140\bin\zstd.exe -d a.dll.zstd -o manual.dll
a.dll.zstd : 1308160 bytes
```
and ended up with a correct file manual.dll.
I also added the following code inside of decompress():
```
print('approach 1: decompressed into %d bytes' %
len(zstd.decompress(data, max_output_size=9999999999)))
print('approach 2: decompressed into %d bytes' %
len(zstd.ZstdDecompressor().decompress(data, max_output_size=9999999999)))
```
and got this output every time:
```
approach 1: decompressed into 1048576 bytes
approach 2: decompressed into 1048576 bytes
```
(which is notably less than the expected 1308160 bytes that zstd.exe produces)
[text.txt.zip](https://github.com/urllib3/urllib3/files/11374676/text.txt.zip)
I have attached a zip of text.txt.zstd because GitHub doesn't support attaching .zstd files. This file contains the raw data passed to ZstdDecoder.decompress(). Extract text.txt.zstd from the zip, then run:
```
import zstandard as zstd
with open(r'C:\utils\temp\text.txt.zstd', 'rb') as f:
data = f.read()
print('approach 1: decompressed into %d bytes' %
len(zstd.decompress(data)))
print('approach 2: decompressed into %d bytes' %
len(zstd.ZstdDecompressor().decompress(data)))
print('approach 3: decompressed into %d bytes' %
len(zstd.ZstdDecompressor().decompressobj().decompress(data)))
```
This produces:
```
approach 1: decompressed into 1048576 bytes
approach 2: decompressed into 1048576 bytes
approach 3: decompressed into 1048576 bytes
```
Whereas `zstd.exe text.txt.zstd -d -o hi.txt` produces 8724469 bytes and a valid text file.
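For reference, the fix that landed (the patch shown earlier in this entry) restarts a decompressor whenever a finished frame leaves unused input behind. The same loop can be run standalone against the attached file; a sketch, assuming a python-zstandard version whose decompression objects expose `eof` and `unused_data`:
```
import zstandard as zstd


def decompress_all_frames(data: bytes) -> bytes:
    # Decode every Zstandard frame in `data`, not just the first one;
    # a finished frame with leftover bytes means another frame follows.
    parts = []
    obj = zstd.ZstdDecompressor().decompressobj()
    parts.append(obj.decompress(data))
    while obj.eof and obj.unused_data:
        leftover = obj.unused_data
        obj = zstd.ZstdDecompressor().decompressobj()
        parts.append(obj.decompress(leftover))
    return b"".join(parts)


with open("text.txt.zstd", "rb") as f:
    print(len(decompress_all_frames(f.read())))  # expected: 8724469
```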
For context, we had a similar issue with all compressed HTTP responses being truncated when using the `.stream()` method: https://github.com/urllib3/urllib3/pull/3012 Perhaps this is something similar, and something to be aware of.
Ref for downstream issue: https://github.com/indygreg/python-zstandard/issues/196
@grossag Could you try again with the newest v2.0.2 urllib3 release?
I tried with 2.0.2 and could reproduce (which makes sense as we have a python-zstandard reproducer).
This is still an issue.
> I tried with 2.0.2 and could reproduce (which makes sense as we have a python-zstandard reproducer).
>
> This is still an issue.
+1. Tried and confirmed that this is still an issue. | 2023-05-08T19:15:30Z | [] | [] |
urllib3/urllib3 | 3,137 | urllib3__urllib3-3137 | [
"3077"
] | 9c2c2307dd1d6af504e09aac0326d86ee3597a0b | diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -55,7 +55,8 @@ def google_brotli(session):
# https://pypi.org/project/Brotli/ is the Google version of brotli, so
# install it separately and don't install our brotli extra (which installs
# brotlipy).
- session.install("brotli")
+ # https://github.com/google/brotli/issues/1074
+ session.install("brotli==1.0.9" if session.python == "2.7" else "brotli")
tests_impl(session, extras="socks,secure")
diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -437,7 +437,7 @@ def connect(self):
and self.ssl_version is None
and hasattr(self.sock, "version")
and self.sock.version() in {"TLSv1", "TLSv1.1"}
- ):
+ ): # Defensive:
warnings.warn(
"Negotiating TLSv1/TLSv1.1 by default is deprecated "
"and will be disabled in urllib3 v2.0.0. Connecting to "
| diff --git a/ci/run_tests.sh b/ci/run_tests.sh
--- a/ci/run_tests.sh
+++ b/ci/run_tests.sh
@@ -3,7 +3,7 @@
if [[ "${NOX_SESSION}" == "app_engine" ]]; then
export GAE_SDK_PATH=$HOME/.cache/google_appengine
- python2 -m pip install gcp-devrel-py-tools==0.0.16
+ python2.7 -m pip install gcp-devrel-py-tools==0.0.16
gcp-devrel-py-tools download-appengine-sdk "$(dirname ${GAE_SDK_PATH})"
fi
| [1.26] GHA Python 2 support ended on June 17th
Warning: The support for python 2.7 will be removed on June 19. Related issue: https://github.com/actions/setup-python/issues/672
| 2023-10-02T12:55:07Z | [] | [] |
|
urllib3/urllib3 | 3,146 | urllib3__urllib3-3146 | [
"2681"
] | ae0668899a144edb06c4d2ca9cec96b760a93e6a | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -757,10 +757,9 @@ def _ssl_wrap_socket_and_match_hostname(
):
context.check_hostname = False
- # Try to load OS default certs if none are given.
- # We need to do the hasattr() check for our custom
- # pyOpenSSL and SecureTransport SSLContext objects
- # because neither support load_default_certs().
+ # Try to load OS default certs if none are given. We need to do the hasattr() check
+ # for custom pyOpenSSL SSLContext objects because they don't support
+ # load_default_certs().
if (
not ca_certs
and not ca_cert_dir
diff --git a/src/urllib3/contrib/_securetransport/__init__.py b/src/urllib3/contrib/_securetransport/__init__.py
deleted file mode 100644
diff --git a/src/urllib3/contrib/_securetransport/bindings.py b/src/urllib3/contrib/_securetransport/bindings.py
deleted file mode 100644
--- a/src/urllib3/contrib/_securetransport/bindings.py
+++ /dev/null
@@ -1,430 +0,0 @@
-# type: ignore
-
-"""
-This module uses ctypes to bind a whole bunch of functions and constants from
-SecureTransport. The goal here is to provide the low-level API to
-SecureTransport. These are essentially the C-level functions and constants, and
-they're pretty gross to work with.
-
-This code is a bastardised version of the code found in Will Bond's oscrypto
-library. An enormous debt is owed to him for blazing this trail for us. For
-that reason, this code should be considered to be covered both by urllib3's
-license and by oscrypto's:
-
- Copyright (c) 2015-2016 Will Bond <[email protected]>
-
- Permission is hereby granted, free of charge, to any person obtaining a
- copy of this software and associated documentation files (the "Software"),
- to deal in the Software without restriction, including without limitation
- the rights to use, copy, modify, merge, publish, distribute, sublicense,
- and/or sell copies of the Software, and to permit persons to whom the
- Software is furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in
- all copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
- DEALINGS IN THE SOFTWARE.
-"""
-
-from __future__ import annotations
-
-import platform
-from ctypes import (
- CDLL,
- CFUNCTYPE,
- POINTER,
- c_bool,
- c_byte,
- c_char_p,
- c_int32,
- c_long,
- c_size_t,
- c_uint32,
- c_ulong,
- c_void_p,
-)
-from ctypes.util import find_library
-
-if platform.system() != "Darwin":
- raise ImportError("Only macOS is supported")
-
-version = platform.mac_ver()[0]
-version_info = tuple(map(int, version.split(".")))
-if version_info < (10, 8):
- raise OSError(
- f"Only OS X 10.8 and newer are supported, not {version_info[0]}.{version_info[1]}"
- )
-
-
-def load_cdll(name: str, macos10_16_path: str) -> CDLL:
- """Loads a CDLL by name, falling back to known path on 10.16+"""
- try:
- # Big Sur is technically 11 but we use 10.16 due to the Big Sur
- # beta being labeled as 10.16.
- path: str | None
- if version_info >= (10, 16):
- path = macos10_16_path
- else:
- path = find_library(name)
- if not path:
- raise OSError # Caught and reraised as 'ImportError'
- return CDLL(path, use_errno=True)
- except OSError:
- raise ImportError(f"The library {name} failed to load") from None
-
-
-Security = load_cdll(
- "Security", "/System/Library/Frameworks/Security.framework/Security"
-)
-CoreFoundation = load_cdll(
- "CoreFoundation",
- "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
-)
-
-
-Boolean = c_bool
-CFIndex = c_long
-CFStringEncoding = c_uint32
-CFData = c_void_p
-CFString = c_void_p
-CFArray = c_void_p
-CFMutableArray = c_void_p
-CFDictionary = c_void_p
-CFError = c_void_p
-CFType = c_void_p
-CFTypeID = c_ulong
-
-CFTypeRef = POINTER(CFType)
-CFAllocatorRef = c_void_p
-
-OSStatus = c_int32
-
-CFDataRef = POINTER(CFData)
-CFStringRef = POINTER(CFString)
-CFArrayRef = POINTER(CFArray)
-CFMutableArrayRef = POINTER(CFMutableArray)
-CFDictionaryRef = POINTER(CFDictionary)
-CFArrayCallBacks = c_void_p
-CFDictionaryKeyCallBacks = c_void_p
-CFDictionaryValueCallBacks = c_void_p
-
-SecCertificateRef = POINTER(c_void_p)
-SecExternalFormat = c_uint32
-SecExternalItemType = c_uint32
-SecIdentityRef = POINTER(c_void_p)
-SecItemImportExportFlags = c_uint32
-SecItemImportExportKeyParameters = c_void_p
-SecKeychainRef = POINTER(c_void_p)
-SSLProtocol = c_uint32
-SSLCipherSuite = c_uint32
-SSLContextRef = POINTER(c_void_p)
-SecTrustRef = POINTER(c_void_p)
-SSLConnectionRef = c_uint32
-SecTrustResultType = c_uint32
-SecTrustOptionFlags = c_uint32
-SSLProtocolSide = c_uint32
-SSLConnectionType = c_uint32
-SSLSessionOption = c_uint32
-
-
-try:
- Security.SecItemImport.argtypes = [
- CFDataRef,
- CFStringRef,
- POINTER(SecExternalFormat),
- POINTER(SecExternalItemType),
- SecItemImportExportFlags,
- POINTER(SecItemImportExportKeyParameters),
- SecKeychainRef,
- POINTER(CFArrayRef),
- ]
- Security.SecItemImport.restype = OSStatus
-
- Security.SecCertificateGetTypeID.argtypes = []
- Security.SecCertificateGetTypeID.restype = CFTypeID
-
- Security.SecIdentityGetTypeID.argtypes = []
- Security.SecIdentityGetTypeID.restype = CFTypeID
-
- Security.SecKeyGetTypeID.argtypes = []
- Security.SecKeyGetTypeID.restype = CFTypeID
-
- Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
- Security.SecCertificateCreateWithData.restype = SecCertificateRef
-
- Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
- Security.SecCertificateCopyData.restype = CFDataRef
-
- Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
- Security.SecCopyErrorMessageString.restype = CFStringRef
-
- Security.SecIdentityCreateWithCertificate.argtypes = [
- CFTypeRef,
- SecCertificateRef,
- POINTER(SecIdentityRef),
- ]
- Security.SecIdentityCreateWithCertificate.restype = OSStatus
-
- Security.SecKeychainCreate.argtypes = [
- c_char_p,
- c_uint32,
- c_void_p,
- Boolean,
- c_void_p,
- POINTER(SecKeychainRef),
- ]
- Security.SecKeychainCreate.restype = OSStatus
-
- Security.SecKeychainDelete.argtypes = [SecKeychainRef]
- Security.SecKeychainDelete.restype = OSStatus
-
- Security.SecPKCS12Import.argtypes = [
- CFDataRef,
- CFDictionaryRef,
- POINTER(CFArrayRef),
- ]
- Security.SecPKCS12Import.restype = OSStatus
-
- SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
- SSLWriteFunc = CFUNCTYPE(
- OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
- )
-
- Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
- Security.SSLSetIOFuncs.restype = OSStatus
-
- Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
- Security.SSLSetPeerID.restype = OSStatus
-
- Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
- Security.SSLSetCertificate.restype = OSStatus
-
- Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
- Security.SSLSetCertificateAuthorities.restype = OSStatus
-
- Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
- Security.SSLSetConnection.restype = OSStatus
-
- Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
- Security.SSLSetPeerDomainName.restype = OSStatus
-
- Security.SSLHandshake.argtypes = [SSLContextRef]
- Security.SSLHandshake.restype = OSStatus
-
- Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
- Security.SSLRead.restype = OSStatus
-
- Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
- Security.SSLWrite.restype = OSStatus
-
- Security.SSLClose.argtypes = [SSLContextRef]
- Security.SSLClose.restype = OSStatus
-
- Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
- Security.SSLGetNumberSupportedCiphers.restype = OSStatus
-
- Security.SSLGetSupportedCiphers.argtypes = [
- SSLContextRef,
- POINTER(SSLCipherSuite),
- POINTER(c_size_t),
- ]
- Security.SSLGetSupportedCiphers.restype = OSStatus
-
- Security.SSLSetEnabledCiphers.argtypes = [
- SSLContextRef,
- POINTER(SSLCipherSuite),
- c_size_t,
- ]
- Security.SSLSetEnabledCiphers.restype = OSStatus
-
- Security.SSLGetNumberEnabledCiphers.argtype = [SSLContextRef, POINTER(c_size_t)]
- Security.SSLGetNumberEnabledCiphers.restype = OSStatus
-
- Security.SSLGetEnabledCiphers.argtypes = [
- SSLContextRef,
- POINTER(SSLCipherSuite),
- POINTER(c_size_t),
- ]
- Security.SSLGetEnabledCiphers.restype = OSStatus
-
- Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
- Security.SSLGetNegotiatedCipher.restype = OSStatus
-
- Security.SSLGetNegotiatedProtocolVersion.argtypes = [
- SSLContextRef,
- POINTER(SSLProtocol),
- ]
- Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
-
- Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
- Security.SSLCopyPeerTrust.restype = OSStatus
-
- Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
- Security.SecTrustSetAnchorCertificates.restype = OSStatus
-
- Security.SecTrustSetAnchorCertificatesOnly.argstypes = [SecTrustRef, Boolean]
- Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
-
- Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
- Security.SecTrustEvaluate.restype = OSStatus
-
- Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
- Security.SecTrustGetCertificateCount.restype = CFIndex
-
- Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
- Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
-
- Security.SSLCreateContext.argtypes = [
- CFAllocatorRef,
- SSLProtocolSide,
- SSLConnectionType,
- ]
- Security.SSLCreateContext.restype = SSLContextRef
-
- Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
- Security.SSLSetSessionOption.restype = OSStatus
-
- Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
- Security.SSLSetProtocolVersionMin.restype = OSStatus
-
- Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
- Security.SSLSetProtocolVersionMax.restype = OSStatus
-
- try:
- Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
- Security.SSLSetALPNProtocols.restype = OSStatus
- except AttributeError:
- # Supported only in 10.12+
- pass
-
- Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
- Security.SecCopyErrorMessageString.restype = CFStringRef
-
- Security.SSLReadFunc = SSLReadFunc
- Security.SSLWriteFunc = SSLWriteFunc
- Security.SSLContextRef = SSLContextRef
- Security.SSLProtocol = SSLProtocol
- Security.SSLCipherSuite = SSLCipherSuite
- Security.SecIdentityRef = SecIdentityRef
- Security.SecKeychainRef = SecKeychainRef
- Security.SecTrustRef = SecTrustRef
- Security.SecTrustResultType = SecTrustResultType
- Security.SecExternalFormat = SecExternalFormat
- Security.OSStatus = OSStatus
-
- Security.kSecImportExportPassphrase = CFStringRef.in_dll(
- Security, "kSecImportExportPassphrase"
- )
- Security.kSecImportItemIdentity = CFStringRef.in_dll(
- Security, "kSecImportItemIdentity"
- )
-
- # CoreFoundation time!
- CoreFoundation.CFRetain.argtypes = [CFTypeRef]
- CoreFoundation.CFRetain.restype = CFTypeRef
-
- CoreFoundation.CFRelease.argtypes = [CFTypeRef]
- CoreFoundation.CFRelease.restype = None
-
- CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
- CoreFoundation.CFGetTypeID.restype = CFTypeID
-
- CoreFoundation.CFStringCreateWithCString.argtypes = [
- CFAllocatorRef,
- c_char_p,
- CFStringEncoding,
- ]
- CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
-
- CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
- CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
-
- CoreFoundation.CFStringGetCString.argtypes = [
- CFStringRef,
- c_char_p,
- CFIndex,
- CFStringEncoding,
- ]
- CoreFoundation.CFStringGetCString.restype = c_bool
-
- CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
- CoreFoundation.CFDataCreate.restype = CFDataRef
-
- CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
- CoreFoundation.CFDataGetLength.restype = CFIndex
-
- CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
- CoreFoundation.CFDataGetBytePtr.restype = c_void_p
-
- CoreFoundation.CFDictionaryCreate.argtypes = [
- CFAllocatorRef,
- POINTER(CFTypeRef),
- POINTER(CFTypeRef),
- CFIndex,
- CFDictionaryKeyCallBacks,
- CFDictionaryValueCallBacks,
- ]
- CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
-
- CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
- CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
-
- CoreFoundation.CFArrayCreate.argtypes = [
- CFAllocatorRef,
- POINTER(CFTypeRef),
- CFIndex,
- CFArrayCallBacks,
- ]
- CoreFoundation.CFArrayCreate.restype = CFArrayRef
-
- CoreFoundation.CFArrayCreateMutable.argtypes = [
- CFAllocatorRef,
- CFIndex,
- CFArrayCallBacks,
- ]
- CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
-
- CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
- CoreFoundation.CFArrayAppendValue.restype = None
-
- CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
- CoreFoundation.CFArrayGetCount.restype = CFIndex
-
- CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
- CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
-
- CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
- CoreFoundation, "kCFAllocatorDefault"
- )
- CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
- CoreFoundation, "kCFTypeArrayCallBacks"
- )
- CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
- CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
- )
- CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
- CoreFoundation, "kCFTypeDictionaryValueCallBacks"
- )
-
- CoreFoundation.CFTypeRef = CFTypeRef
- CoreFoundation.CFArrayRef = CFArrayRef
- CoreFoundation.CFStringRef = CFStringRef
- CoreFoundation.CFDictionaryRef = CFDictionaryRef
-
-except AttributeError:
- raise ImportError("Error initializing ctypes") from None
-
-
-class CFConst:
- """
- A class object that acts as essentially a namespace for CoreFoundation
- constants.
- """
-
- kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
diff --git a/src/urllib3/contrib/_securetransport/low_level.py b/src/urllib3/contrib/_securetransport/low_level.py
deleted file mode 100644
--- a/src/urllib3/contrib/_securetransport/low_level.py
+++ /dev/null
@@ -1,474 +0,0 @@
-"""
-Low-level helpers for the SecureTransport bindings.
-
-These are Python functions that are not directly related to the high-level APIs
-but are necessary to get them to work. They include a whole bunch of low-level
-CoreFoundation messing about and memory management. The concerns in this module
-are almost entirely about trying to avoid memory leaks and providing
-appropriate and useful assistance to the higher-level code.
-"""
-from __future__ import annotations
-
-import base64
-import ctypes
-import itertools
-import os
-import re
-import ssl
-import struct
-import tempfile
-import typing
-
-from .bindings import ( # type: ignore[attr-defined]
- CFArray,
- CFConst,
- CFData,
- CFDictionary,
- CFMutableArray,
- CFString,
- CFTypeRef,
- CoreFoundation,
- SecKeychainRef,
- Security,
-)
-
-# This regular expression is used to grab PEM data out of a PEM bundle.
-_PEM_CERTS_RE = re.compile(
- b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
-)
-
-
-def _cf_data_from_bytes(bytestring: bytes) -> CFData:
- """
- Given a bytestring, create a CFData object from it. This CFData object must
- be CFReleased by the caller.
- """
- return CoreFoundation.CFDataCreate(
- CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring)
- )
-
-
-def _cf_dictionary_from_tuples(
- tuples: list[tuple[typing.Any, typing.Any]]
-) -> CFDictionary:
- """
- Given a list of Python tuples, create an associated CFDictionary.
- """
- dictionary_size = len(tuples)
-
- # We need to get the dictionary keys and values out in the same order.
- keys = (t[0] for t in tuples)
- values = (t[1] for t in tuples)
- cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys)
- cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values)
-
- return CoreFoundation.CFDictionaryCreate(
- CoreFoundation.kCFAllocatorDefault,
- cf_keys,
- cf_values,
- dictionary_size,
- CoreFoundation.kCFTypeDictionaryKeyCallBacks,
- CoreFoundation.kCFTypeDictionaryValueCallBacks,
- )
-
-
-def _cfstr(py_bstr: bytes) -> CFString:
- """
- Given a Python binary data, create a CFString.
- The string must be CFReleased by the caller.
- """
- c_str = ctypes.c_char_p(py_bstr)
- cf_str = CoreFoundation.CFStringCreateWithCString(
- CoreFoundation.kCFAllocatorDefault,
- c_str,
- CFConst.kCFStringEncodingUTF8,
- )
- return cf_str
-
-
-def _create_cfstring_array(lst: list[bytes]) -> CFMutableArray:
- """
- Given a list of Python binary data, create an associated CFMutableArray.
- The array must be CFReleased by the caller.
-
- Raises an ssl.SSLError on failure.
- """
- cf_arr = None
- try:
- cf_arr = CoreFoundation.CFArrayCreateMutable(
- CoreFoundation.kCFAllocatorDefault,
- 0,
- ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
- )
- if not cf_arr:
- raise MemoryError("Unable to allocate memory!")
- for item in lst:
- cf_str = _cfstr(item)
- if not cf_str:
- raise MemoryError("Unable to allocate memory!")
- try:
- CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
- finally:
- CoreFoundation.CFRelease(cf_str)
- except BaseException as e:
- if cf_arr:
- CoreFoundation.CFRelease(cf_arr)
- raise ssl.SSLError(f"Unable to allocate array: {e}") from None
- return cf_arr
-
-
-def _cf_string_to_unicode(value: CFString) -> str | None:
- """
- Creates a Unicode string from a CFString object. Used entirely for error
- reporting.
-
- Yes, it annoys me quite a lot that this function is this complex.
- """
- value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
-
- string = CoreFoundation.CFStringGetCStringPtr(
- value_as_void_p, CFConst.kCFStringEncodingUTF8
- )
- if string is None:
- buffer = ctypes.create_string_buffer(1024)
- result = CoreFoundation.CFStringGetCString(
- value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8
- )
- if not result:
- raise OSError("Error copying C string from CFStringRef")
- string = buffer.value
- if string is not None:
- string = string.decode("utf-8")
- return string # type: ignore[no-any-return]
-
-
-def _assert_no_error(
- error: int, exception_class: type[BaseException] | None = None
-) -> None:
- """
- Checks the return code and throws an exception if there is an error to
- report
- """
- if error == 0:
- return
-
- cf_error_string = Security.SecCopyErrorMessageString(error, None)
- output = _cf_string_to_unicode(cf_error_string)
- CoreFoundation.CFRelease(cf_error_string)
-
- if output is None or output == "":
- output = f"OSStatus {error}"
-
- if exception_class is None:
- exception_class = ssl.SSLError
-
- raise exception_class(output)
-
-
-def _cert_array_from_pem(pem_bundle: bytes) -> CFArray:
- """
- Given a bundle of certs in PEM format, turns them into a CFArray of certs
- that can be used to validate a cert chain.
- """
- # Normalize the PEM bundle's line endings.
- pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
-
- der_certs = [
- base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
- ]
- if not der_certs:
- raise ssl.SSLError("No root certificates specified")
-
- cert_array = CoreFoundation.CFArrayCreateMutable(
- CoreFoundation.kCFAllocatorDefault,
- 0,
- ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
- )
- if not cert_array:
- raise ssl.SSLError("Unable to allocate memory!")
-
- try:
- for der_bytes in der_certs:
- certdata = _cf_data_from_bytes(der_bytes)
- if not certdata:
- raise ssl.SSLError("Unable to allocate memory!")
- cert = Security.SecCertificateCreateWithData(
- CoreFoundation.kCFAllocatorDefault, certdata
- )
- CoreFoundation.CFRelease(certdata)
- if not cert:
- raise ssl.SSLError("Unable to build cert object!")
-
- CoreFoundation.CFArrayAppendValue(cert_array, cert)
- CoreFoundation.CFRelease(cert)
- except Exception:
- # We need to free the array before the exception bubbles further.
- # We only want to do that if an error occurs: otherwise, the caller
- # should free.
- CoreFoundation.CFRelease(cert_array)
- raise
-
- return cert_array
-
-
-def _is_cert(item: CFTypeRef) -> bool:
- """
- Returns True if a given CFTypeRef is a certificate.
- """
- expected = Security.SecCertificateGetTypeID()
- return CoreFoundation.CFGetTypeID(item) == expected # type: ignore[no-any-return]
-
-
-def _is_identity(item: CFTypeRef) -> bool:
- """
- Returns True if a given CFTypeRef is an identity.
- """
- expected = Security.SecIdentityGetTypeID()
- return CoreFoundation.CFGetTypeID(item) == expected # type: ignore[no-any-return]
-
-
-def _temporary_keychain() -> tuple[SecKeychainRef, str]:
- """
- This function creates a temporary Mac keychain that we can use to work with
- credentials. This keychain uses a one-time password and a temporary file to
- store the data. We expect to have one keychain per socket. The returned
- SecKeychainRef must be freed by the caller, including calling
- SecKeychainDelete.
-
- Returns a tuple of the SecKeychainRef and the path to the temporary
- directory that contains it.
- """
- # Unfortunately, SecKeychainCreate requires a path to a keychain. This
- # means we cannot use mkstemp to use a generic temporary file. Instead,
- # we're going to create a temporary directory and a filename to use there.
- # This filename will be 8 random bytes expanded into base64. We also need
- # some random bytes to password-protect the keychain we're creating, so we
- # ask for 40 random bytes.
- random_bytes = os.urandom(40)
- filename = base64.b16encode(random_bytes[:8]).decode("utf-8")
- password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8
- tempdirectory = tempfile.mkdtemp()
-
- keychain_path = os.path.join(tempdirectory, filename).encode("utf-8")
-
- # We now want to create the keychain itself.
- keychain = Security.SecKeychainRef()
- status = Security.SecKeychainCreate(
- keychain_path, len(password), password, False, None, ctypes.byref(keychain)
- )
- _assert_no_error(status)
-
- # Having created the keychain, we want to pass it off to the caller.
- return keychain, tempdirectory
-
-
-def _load_items_from_file(
- keychain: SecKeychainRef, path: str
-) -> tuple[list[CFTypeRef], list[CFTypeRef]]:
- """
- Given a single file, loads all the trust objects from it into arrays and
- the keychain.
- Returns a tuple of lists: the first list is a list of identities, the
- second a list of certs.
- """
- certificates = []
- identities = []
- result_array = None
-
- with open(path, "rb") as f:
- raw_filedata = f.read()
-
- try:
- filedata = CoreFoundation.CFDataCreate(
- CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata)
- )
- result_array = CoreFoundation.CFArrayRef()
- result = Security.SecItemImport(
- filedata, # cert data
- None, # Filename, leaving it out for now
- None, # What the type of the file is, we don't care
- None, # what's in the file, we don't care
- 0, # import flags
- None, # key params, can include passphrase in the future
- keychain, # The keychain to insert into
- ctypes.byref(result_array), # Results
- )
- _assert_no_error(result)
-
- # A CFArray is not very useful to us as an intermediary
- # representation, so we are going to extract the objects we want
- # and then free the array. We don't need to keep hold of keys: the
- # keychain already has them!
- result_count = CoreFoundation.CFArrayGetCount(result_array)
- for index in range(result_count):
- item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index)
- item = ctypes.cast(item, CoreFoundation.CFTypeRef)
-
- if _is_cert(item):
- CoreFoundation.CFRetain(item)
- certificates.append(item)
- elif _is_identity(item):
- CoreFoundation.CFRetain(item)
- identities.append(item)
- finally:
- if result_array:
- CoreFoundation.CFRelease(result_array)
-
- CoreFoundation.CFRelease(filedata)
-
- return (identities, certificates)
-
-
-def _load_client_cert_chain(keychain: SecKeychainRef, *paths: str | None) -> CFArray:
- """
- Load certificates and maybe keys from a number of files. Has the end goal
- of returning a CFArray containing one SecIdentityRef, and then zero or more
- SecCertificateRef objects, suitable for use as a client certificate trust
- chain.
- """
- # Ok, the strategy.
- #
- # This relies on knowing that macOS will not give you a SecIdentityRef
- # unless you have imported a key into a keychain. This is a somewhat
- # artificial limitation of macOS (for example, it doesn't necessarily
- # affect iOS), but there is nothing inside Security.framework that lets you
- # get a SecIdentityRef without having a key in a keychain.
- #
- # So the policy here is we take all the files and iterate them in order.
- # Each one will use SecItemImport to have one or more objects loaded from
- # it. We will also point at a keychain that macOS can use to work with the
- # private key.
- #
- # Once we have all the objects, we'll check what we actually have. If we
- # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
- # we'll take the first certificate (which we assume to be our leaf) and
- # ask the keychain to give us a SecIdentityRef with that cert's associated
- # key.
- #
- # We'll then return a CFArray containing the trust chain: one
- # SecIdentityRef and then zero-or-more SecCertificateRef objects. The
- # responsibility for freeing this CFArray will be with the caller. This
- # CFArray must remain alive for the entire connection, so in practice it
- # will be stored with a single SSLSocket, along with the reference to the
- # keychain.
- certificates = []
- identities = []
-
- # Filter out bad paths.
- filtered_paths = (path for path in paths if path)
-
- try:
- for file_path in filtered_paths:
- new_identities, new_certs = _load_items_from_file(keychain, file_path)
- identities.extend(new_identities)
- certificates.extend(new_certs)
-
- # Ok, we have everything. The question is: do we have an identity? If
- # not, we want to grab one from the first cert we have.
- if not identities:
- new_identity = Security.SecIdentityRef()
- status = Security.SecIdentityCreateWithCertificate(
- keychain, certificates[0], ctypes.byref(new_identity)
- )
- _assert_no_error(status)
- identities.append(new_identity)
-
- # We now want to release the original certificate, as we no longer
- # need it.
- CoreFoundation.CFRelease(certificates.pop(0))
-
- # We now need to build a new CFArray that holds the trust chain.
- trust_chain = CoreFoundation.CFArrayCreateMutable(
- CoreFoundation.kCFAllocatorDefault,
- 0,
- ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
- )
- for item in itertools.chain(identities, certificates):
- # ArrayAppendValue does a CFRetain on the item. That's fine,
- # because the finally block will release our other refs to them.
- CoreFoundation.CFArrayAppendValue(trust_chain, item)
-
- return trust_chain
- finally:
- for obj in itertools.chain(identities, certificates):
- CoreFoundation.CFRelease(obj)
-
-
-TLS_PROTOCOL_VERSIONS = {
- "SSLv2": (0, 2),
- "SSLv3": (3, 0),
- "TLSv1": (3, 1),
- "TLSv1.1": (3, 2),
- "TLSv1.2": (3, 3),
-}
-
-
-def _build_tls_unknown_ca_alert(version: str) -> bytes:
- """
- Builds a TLS alert record for an unknown CA.
- """
- ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
- severity_fatal = 0x02
- description_unknown_ca = 0x30
- msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
- msg_len = len(msg)
- record_type_alert = 0x15
- record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
- return record
-
-
-class SecurityConst:
- """
- A class object that acts as essentially a namespace for Security constants.
- """
-
- kSSLSessionOptionBreakOnServerAuth = 0
-
- kSSLProtocol2 = 1
- kSSLProtocol3 = 2
- kTLSProtocol1 = 4
- kTLSProtocol11 = 7
- kTLSProtocol12 = 8
- # SecureTransport does not support TLS 1.3 even if there's a constant for it
- kTLSProtocol13 = 10
- kTLSProtocolMaxSupported = 999
-
- kSSLClientSide = 1
- kSSLStreamType = 0
-
- kSecFormatPEMSequence = 10
-
- kSecTrustResultInvalid = 0
- kSecTrustResultProceed = 1
- # This gap is present on purpose: this was kSecTrustResultConfirm, which
- # is deprecated.
- kSecTrustResultDeny = 3
- kSecTrustResultUnspecified = 4
- kSecTrustResultRecoverableTrustFailure = 5
- kSecTrustResultFatalTrustFailure = 6
- kSecTrustResultOtherError = 7
-
- errSSLProtocol = -9800
- errSSLWouldBlock = -9803
- errSSLClosedGraceful = -9805
- errSSLClosedNoNotify = -9816
- errSSLClosedAbort = -9806
-
- errSSLXCertChainInvalid = -9807
- errSSLCrypto = -9809
- errSSLInternal = -9810
- errSSLCertExpired = -9814
- errSSLCertNotYetValid = -9815
- errSSLUnknownRootCert = -9812
- errSSLNoRootCert = -9813
- errSSLHostNameMismatch = -9843
- errSSLPeerHandshakeFail = -9824
- errSSLPeerUserCancelled = -9839
- errSSLWeakPeerEphemeralDHKey = -9850
- errSSLServerAuthCompleted = -9841
- errSSLRecordOverflow = -9847
-
- errSecVerifyFailed = -67808
- errSecNoTrustSettings = -25263
- errSecItemNotFound = -25300
- errSecInvalidTrustSettings = -25262
diff --git a/src/urllib3/contrib/securetransport.py b/src/urllib3/contrib/securetransport.py
deleted file mode 100644
--- a/src/urllib3/contrib/securetransport.py
+++ /dev/null
@@ -1,913 +0,0 @@
-"""
-SecureTransport support for urllib3 via ctypes.
-
-This makes platform-native TLS available to urllib3 users on macOS without the
-use of a compiler. This is an important feature because the Python Package
-Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
-that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
-this is to give macOS users an alternative solution to the problem, and that
-solution is to use SecureTransport.
-
-We use ctypes here because this solution must not require a compiler. That's
-because pip is not allowed to require a compiler either.
-
-This is not intended to be a seriously long-term solution to this problem.
-The hope is that PEP 543 will eventually solve this issue for us, at which
-point we can retire this contrib module. But in the short term, we need to
-solve the impending tire fire that is Python on Mac without this kind of
-contrib module. So...here we are.
-
-To use this module, simply import and inject it::
-
- import urllib3.contrib.securetransport
- urllib3.contrib.securetransport.inject_into_urllib3()
-
-Happy TLSing!
-
-This code is a bastardised version of the code found in Will Bond's oscrypto
-library. An enormous debt is owed to him for blazing this trail for us. For
-that reason, this code should be considered to be covered both by urllib3's
-license and by oscrypto's:
-
-.. code-block::
-
- Copyright (c) 2015-2016 Will Bond <[email protected]>
-
- Permission is hereby granted, free of charge, to any person obtaining a
- copy of this software and associated documentation files (the "Software"),
- to deal in the Software without restriction, including without limitation
- the rights to use, copy, modify, merge, publish, distribute, sublicense,
- and/or sell copies of the Software, and to permit persons to whom the
- Software is furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in
- all copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
- DEALINGS IN THE SOFTWARE.
-"""
-
-from __future__ import annotations
-
-import contextlib
-import ctypes
-import errno
-import os.path
-import shutil
-import socket
-import ssl
-import struct
-import threading
-import typing
-import warnings
-import weakref
-from socket import socket as socket_cls
-
-from .. import util
-from ._securetransport.bindings import ( # type: ignore[attr-defined]
- CoreFoundation,
- Security,
-)
-from ._securetransport.low_level import (
- SecurityConst,
- _assert_no_error,
- _build_tls_unknown_ca_alert,
- _cert_array_from_pem,
- _create_cfstring_array,
- _load_client_cert_chain,
- _temporary_keychain,
-)
-
-warnings.warn(
- "'urllib3.contrib.securetransport' module is deprecated and will be removed "
- "in urllib3 v2.1.0. Read more in this issue: "
- "https://github.com/urllib3/urllib3/issues/2681",
- category=DeprecationWarning,
- stacklevel=2,
-)
-
-if typing.TYPE_CHECKING:
- from typing import Literal
-
-__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
-
-orig_util_SSLContext = util.ssl_.SSLContext
-
-# This dictionary is used by the read callback to obtain a handle to the
-# calling wrapped socket. This is a pretty silly approach, but for now it'll
-# do. I feel like I should be able to smuggle a handle to the wrapped socket
-# directly in the SSLConnectionRef, but for now this approach will work I
-# guess.
-#
-# We need to lock around this structure for inserts, but we don't do it for
-# reads/writes in the callbacks. The reasoning here goes as follows:
-#
-# 1. It is not possible to call into the callbacks before the dictionary is
-# populated, so once in the callback the id must be in the dictionary.
-# 2. The callbacks don't mutate the dictionary, they only read from it, and
-# so cannot conflict with any of the insertions.
-#
-# This is good: if we had to lock in the callbacks we'd drastically slow down
-# the performance of this code.
-_connection_refs: weakref.WeakValueDictionary[
- int, WrappedSocket
-] = weakref.WeakValueDictionary()
-_connection_ref_lock = threading.Lock()
-
-# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
-# for no better reason than we need *a* limit, and this one is right there.
-SSL_WRITE_BLOCKSIZE = 16384
-
-# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
-# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
-# TLSv1 to 1.2 are supported on macOS 10.8+
-_protocol_to_min_max = {
- util.ssl_.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), # type: ignore[attr-defined]
- util.ssl_.PROTOCOL_TLS_CLIENT: ( # type: ignore[attr-defined]
- SecurityConst.kTLSProtocol1,
- SecurityConst.kTLSProtocol12,
- ),
-}
-
-if hasattr(ssl, "PROTOCOL_SSLv2"):
- _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
- SecurityConst.kSSLProtocol2,
- SecurityConst.kSSLProtocol2,
- )
-if hasattr(ssl, "PROTOCOL_SSLv3"):
- _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
- SecurityConst.kSSLProtocol3,
- SecurityConst.kSSLProtocol3,
- )
-if hasattr(ssl, "PROTOCOL_TLSv1"):
- _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
- SecurityConst.kTLSProtocol1,
- SecurityConst.kTLSProtocol1,
- )
-if hasattr(ssl, "PROTOCOL_TLSv1_1"):
- _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
- SecurityConst.kTLSProtocol11,
- SecurityConst.kTLSProtocol11,
- )
-if hasattr(ssl, "PROTOCOL_TLSv1_2"):
- _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
- SecurityConst.kTLSProtocol12,
- SecurityConst.kTLSProtocol12,
- )
-
-
-_tls_version_to_st: dict[int, int] = {
- ssl.TLSVersion.MINIMUM_SUPPORTED: SecurityConst.kTLSProtocol1,
- ssl.TLSVersion.TLSv1: SecurityConst.kTLSProtocol1,
- ssl.TLSVersion.TLSv1_1: SecurityConst.kTLSProtocol11,
- ssl.TLSVersion.TLSv1_2: SecurityConst.kTLSProtocol12,
- ssl.TLSVersion.MAXIMUM_SUPPORTED: SecurityConst.kTLSProtocol12,
-}
-
-
-def inject_into_urllib3() -> None:
- """
- Monkey-patch urllib3 with SecureTransport-backed SSL-support.
- """
- util.SSLContext = SecureTransportContext # type: ignore[assignment]
- util.ssl_.SSLContext = SecureTransportContext # type: ignore[assignment]
- util.IS_SECURETRANSPORT = True
- util.ssl_.IS_SECURETRANSPORT = True
-
-
-def extract_from_urllib3() -> None:
- """
- Undo monkey-patching by :func:`inject_into_urllib3`.
- """
- util.SSLContext = orig_util_SSLContext
- util.ssl_.SSLContext = orig_util_SSLContext
- util.IS_SECURETRANSPORT = False
- util.ssl_.IS_SECURETRANSPORT = False
-
-
-def _read_callback(
- connection_id: int, data_buffer: int, data_length_pointer: bytearray
-) -> int:
- """
- SecureTransport read callback. This is called by ST to request that data
- be returned from the socket.
- """
- wrapped_socket = None
- try:
- wrapped_socket = _connection_refs.get(connection_id)
- if wrapped_socket is None:
- return SecurityConst.errSSLInternal
- base_socket = wrapped_socket.socket
-
- requested_length = data_length_pointer[0]
-
- timeout = wrapped_socket.gettimeout()
- error = None
- read_count = 0
-
- try:
- while read_count < requested_length:
- if timeout is None or timeout >= 0:
- if not util.wait_for_read(base_socket, timeout):
- raise OSError(errno.EAGAIN, "timed out")
-
- remaining = requested_length - read_count
- buffer = (ctypes.c_char * remaining).from_address(
- data_buffer + read_count
- )
- chunk_size = base_socket.recv_into(buffer, remaining)
- read_count += chunk_size
- if not chunk_size:
- if not read_count:
- return SecurityConst.errSSLClosedGraceful
- break
- except OSError as e:
- error = e.errno
-
- if error is not None and error != errno.EAGAIN:
- data_length_pointer[0] = read_count
- if error == errno.ECONNRESET or error == errno.EPIPE:
- return SecurityConst.errSSLClosedAbort
- raise
-
- data_length_pointer[0] = read_count
-
- if read_count != requested_length:
- return SecurityConst.errSSLWouldBlock
-
- return 0
- except Exception as e:
- if wrapped_socket is not None:
- wrapped_socket._exception = e
- return SecurityConst.errSSLInternal
-
-
-def _write_callback(
- connection_id: int, data_buffer: int, data_length_pointer: bytearray
-) -> int:
- """
- SecureTransport write callback. This is called by ST to request that data
- actually be sent on the network.
- """
- wrapped_socket = None
- try:
- wrapped_socket = _connection_refs.get(connection_id)
- if wrapped_socket is None:
- return SecurityConst.errSSLInternal
- base_socket = wrapped_socket.socket
-
- bytes_to_write = data_length_pointer[0]
- data = ctypes.string_at(data_buffer, bytes_to_write)
-
- timeout = wrapped_socket.gettimeout()
- error = None
- sent = 0
-
- try:
- while sent < bytes_to_write:
- if timeout is None or timeout >= 0:
- if not util.wait_for_write(base_socket, timeout):
- raise OSError(errno.EAGAIN, "timed out")
- chunk_sent = base_socket.send(data)
- sent += chunk_sent
-
- # This has some needless copying here, but I'm not sure there's
- # much value in optimising this data path.
- data = data[chunk_sent:]
- except OSError as e:
- error = e.errno
-
- if error is not None and error != errno.EAGAIN:
- data_length_pointer[0] = sent
- if error == errno.ECONNRESET or error == errno.EPIPE:
- return SecurityConst.errSSLClosedAbort
- raise
-
- data_length_pointer[0] = sent
-
- if sent != bytes_to_write:
- return SecurityConst.errSSLWouldBlock
-
- return 0
- except Exception as e:
- if wrapped_socket is not None:
- wrapped_socket._exception = e
- return SecurityConst.errSSLInternal
-
-
-# We need to keep these two objects references alive: if they get GC'd while
-# in use then SecureTransport could attempt to call a function that is in freed
-# memory. That would be...uh...bad. Yeah, that's the word. Bad.
-_read_callback_pointer = Security.SSLReadFunc(_read_callback)
-_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
-
-
-class WrappedSocket:
- """
- API-compatibility wrapper for Python's OpenSSL wrapped socket object.
- """
-
- def __init__(self, socket: socket_cls) -> None:
- self.socket = socket
- self.context = None
- self._io_refs = 0
- self._closed = False
- self._real_closed = False
- self._exception: Exception | None = None
- self._keychain = None
- self._keychain_dir: str | None = None
- self._client_cert_chain = None
-
- # We save off the previously-configured timeout and then set it to
- # zero. This is done because we use select and friends to handle the
- # timeouts, but if we leave the timeout set on the lower socket then
- # Python will "kindly" call select on that socket again for us. Avoid
- # that by forcing the timeout to zero.
- self._timeout = self.socket.gettimeout()
- self.socket.settimeout(0)
-
- @contextlib.contextmanager
- def _raise_on_error(self) -> typing.Generator[None, None, None]:
- """
- A context manager that can be used to wrap calls that do I/O from
- SecureTransport. If any of the I/O callbacks hit an exception, this
- context manager will correctly propagate the exception after the fact.
- This avoids silently swallowing those exceptions.
-
- It also correctly forces the socket closed.
- """
- self._exception = None
-
- # We explicitly don't catch around this yield because in the unlikely
- # event that an exception was hit in the block we don't want to swallow
- # it.
- yield
- if self._exception is not None:
- exception, self._exception = self._exception, None
- self._real_close()
- raise exception
-
- def _set_alpn_protocols(self, protocols: list[bytes] | None) -> None:
- """
- Sets up the ALPN protocols on the context.
- """
- if not protocols:
- return
- protocols_arr = _create_cfstring_array(protocols)
- try:
- result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
- _assert_no_error(result)
- finally:
- CoreFoundation.CFRelease(protocols_arr)
-
- def _custom_validate(self, verify: bool, trust_bundle: bytes | None) -> None:
- """
- Called when we have set custom validation. We do this in two cases:
- first, when cert validation is entirely disabled; and second, when
- using a custom trust DB.
- Raises an SSLError if the connection is not trusted.
- """
- # If we disabled cert validation, just say: cool.
- if not verify or trust_bundle is None:
- return
-
- successes = (
- SecurityConst.kSecTrustResultUnspecified,
- SecurityConst.kSecTrustResultProceed,
- )
- try:
- trust_result = self._evaluate_trust(trust_bundle)
- if trust_result in successes:
- return
- reason = f"error code: {int(trust_result)}"
- exc = None
- except Exception as e:
- # Do not trust on error
- reason = f"exception: {e!r}"
- exc = e
-
- # SecureTransport does not send an alert nor shuts down the connection.
- rec = _build_tls_unknown_ca_alert(self.version())
- self.socket.sendall(rec)
- # close the connection immediately
- # l_onoff = 1, activate linger
- # l_linger = 0, linger for 0 seoncds
- opts = struct.pack("ii", 1, 0)
- self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
- self._real_close()
- raise ssl.SSLError(f"certificate verify failed, {reason}") from exc
-
- def _evaluate_trust(self, trust_bundle: bytes) -> int:
- # We want data in memory, so load it up.
- if os.path.isfile(trust_bundle):
- with open(trust_bundle, "rb") as f:
- trust_bundle = f.read()
-
- cert_array = None
- trust = Security.SecTrustRef()
-
- try:
- # Get a CFArray that contains the certs we want.
- cert_array = _cert_array_from_pem(trust_bundle)
-
- # Ok, now the hard part. We want to get the SecTrustRef that ST has
- # created for this connection, shove our CAs into it, tell ST to
- # ignore everything else it knows, and then ask if it can build a
- # chain. This is a buuuunch of code.
- result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
- _assert_no_error(result)
- if not trust:
- raise ssl.SSLError("Failed to copy trust reference")
-
- result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
- _assert_no_error(result)
-
- result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
- _assert_no_error(result)
-
- trust_result = Security.SecTrustResultType()
- result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
- _assert_no_error(result)
- finally:
- if trust:
- CoreFoundation.CFRelease(trust)
-
- if cert_array is not None:
- CoreFoundation.CFRelease(cert_array)
-
- return trust_result.value # type: ignore[no-any-return]
-
- def handshake(
- self,
- server_hostname: bytes | str | None,
- verify: bool,
- trust_bundle: bytes | None,
- min_version: int,
- max_version: int,
- client_cert: str | None,
- client_key: str | None,
- client_key_passphrase: typing.Any,
- alpn_protocols: list[bytes] | None,
- ) -> None:
- """
- Actually performs the TLS handshake. This is run automatically by
- wrapped socket, and shouldn't be needed in user code.
- """
- # First, we do the initial bits of connection setup. We need to create
- # a context, set its I/O funcs, and set the connection reference.
- self.context = Security.SSLCreateContext(
- None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
- )
- result = Security.SSLSetIOFuncs(
- self.context, _read_callback_pointer, _write_callback_pointer
- )
- _assert_no_error(result)
-
- # Here we need to compute the handle to use. We do this by taking the
- # id of self modulo 2**31 - 1. If this is already in the dictionary, we
- # just keep incrementing by one until we find a free space.
- with _connection_ref_lock:
- handle = id(self) % 2147483647
- while handle in _connection_refs:
- handle = (handle + 1) % 2147483647
- _connection_refs[handle] = self
-
- result = Security.SSLSetConnection(self.context, handle)
- _assert_no_error(result)
-
- # If we have a server hostname, we should set that too.
- # RFC6066 Section 3 tells us not to use SNI when the host is an IP, but we have
- # to do it anyway to match server_hostname against the server certificate
- if server_hostname:
- if not isinstance(server_hostname, bytes):
- server_hostname = server_hostname.encode("utf-8")
-
- result = Security.SSLSetPeerDomainName(
- self.context, server_hostname, len(server_hostname)
- )
- _assert_no_error(result)
-
- # Setup the ALPN protocols.
- self._set_alpn_protocols(alpn_protocols)
-
- # Set the minimum and maximum TLS versions.
- result = Security.SSLSetProtocolVersionMin(self.context, min_version)
- _assert_no_error(result)
-
- result = Security.SSLSetProtocolVersionMax(self.context, max_version)
- _assert_no_error(result)
-
- # If there's a trust DB, we need to use it. We do that by telling
- # SecureTransport to break on server auth. We also do that if we don't
- # want to validate the certs at all: we just won't actually do any
- # authing in that case.
- if not verify or trust_bundle is not None:
- result = Security.SSLSetSessionOption(
- self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
- )
- _assert_no_error(result)
-
- # If there's a client cert, we need to use it.
- if client_cert:
- self._keychain, self._keychain_dir = _temporary_keychain()
- self._client_cert_chain = _load_client_cert_chain(
- self._keychain, client_cert, client_key
- )
- result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
- _assert_no_error(result)
-
- while True:
- with self._raise_on_error():
- result = Security.SSLHandshake(self.context)
-
- if result == SecurityConst.errSSLWouldBlock:
- raise socket.timeout("handshake timed out")
- elif result == SecurityConst.errSSLServerAuthCompleted:
- self._custom_validate(verify, trust_bundle)
- continue
- else:
- _assert_no_error(result)
- break
-
- def fileno(self) -> int:
- return self.socket.fileno()
-
- # Copy-pasted from Python 3.5 source code
- def _decref_socketios(self) -> None:
- if self._io_refs > 0:
- self._io_refs -= 1
- if self._closed:
- self.close()
-
- def recv(self, bufsiz: int) -> bytes:
- buffer = ctypes.create_string_buffer(bufsiz)
- bytes_read = self.recv_into(buffer, bufsiz)
- data = buffer[:bytes_read]
- return typing.cast(bytes, data)
-
- def recv_into(
- self, buffer: ctypes.Array[ctypes.c_char], nbytes: int | None = None
- ) -> int:
- # Read short on EOF.
- if self._real_closed:
- return 0
-
- if nbytes is None:
- nbytes = len(buffer)
-
- buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
- processed_bytes = ctypes.c_size_t(0)
-
- with self._raise_on_error():
- result = Security.SSLRead(
- self.context, buffer, nbytes, ctypes.byref(processed_bytes)
- )
-
- # There are some result codes that we want to treat as "not always
- # errors". Specifically, those are errSSLWouldBlock,
- # errSSLClosedGraceful, and errSSLClosedNoNotify.
- if result == SecurityConst.errSSLWouldBlock:
- # If we didn't process any bytes, then this was just a time out.
- # However, we can get errSSLWouldBlock in situations when we *did*
- # read some data, and in those cases we should just read "short"
- # and return.
- if processed_bytes.value == 0:
- # Timed out, no data read.
- raise socket.timeout("recv timed out")
- elif result in (
- SecurityConst.errSSLClosedGraceful,
- SecurityConst.errSSLClosedNoNotify,
- ):
- # The remote peer has closed this connection. We should do so as
- # well. Note that we don't actually return here because in
- # principle this could actually be fired along with return data.
- # It's unlikely though.
- self._real_close()
- else:
- _assert_no_error(result)
-
- # Ok, we read and probably succeeded. We should return whatever data
- # was actually read.
- return processed_bytes.value
-
- def settimeout(self, timeout: float) -> None:
- self._timeout = timeout
-
- def gettimeout(self) -> float | None:
- return self._timeout
-
- def send(self, data: bytes) -> int:
- processed_bytes = ctypes.c_size_t(0)
-
- with self._raise_on_error():
- result = Security.SSLWrite(
- self.context, data, len(data), ctypes.byref(processed_bytes)
- )
-
- if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
- # Timed out
- raise socket.timeout("send timed out")
- else:
- _assert_no_error(result)
-
- # We sent, and probably succeeded. Tell them how much we sent.
- return processed_bytes.value
-
- def sendall(self, data: bytes) -> None:
- total_sent = 0
- while total_sent < len(data):
- sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
- total_sent += sent
-
- def shutdown(self) -> None:
- with self._raise_on_error():
- Security.SSLClose(self.context)
-
- def close(self) -> None:
- self._closed = True
- # TODO: should I do clean shutdown here? Do I have to?
- if self._io_refs <= 0:
- self._real_close()
-
- def _real_close(self) -> None:
- self._real_closed = True
- if self.context:
- CoreFoundation.CFRelease(self.context)
- self.context = None
- if self._client_cert_chain:
- CoreFoundation.CFRelease(self._client_cert_chain)
- self._client_cert_chain = None
- if self._keychain:
- Security.SecKeychainDelete(self._keychain)
- CoreFoundation.CFRelease(self._keychain)
- shutil.rmtree(self._keychain_dir)
- self._keychain = self._keychain_dir = None
- return self.socket.close()
-
- def getpeercert(self, binary_form: bool = False) -> bytes | None:
- # Urgh, annoying.
- #
- # Here's how we do this:
- #
- # 1. Call SSLCopyPeerTrust to get hold of the trust object for this
- # connection.
- # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
- # 3. To get the CN, call SecCertificateCopyCommonName and process that
- # string so that it's of the appropriate type.
- # 4. To get the SAN, we need to do something a bit more complex:
- # a. Call SecCertificateCopyValues to get the data, requesting
- # kSecOIDSubjectAltName.
- # b. Mess about with this dictionary to try to get the SANs out.
- #
- # This is gross. Really gross. It's going to be a few hundred LoC extra
- # just to repeat something that SecureTransport can *already do*. So my
- # operating assumption at this time is that what we want to do is
- # instead to just flag to urllib3 that it shouldn't do its own hostname
- # validation when using SecureTransport.
- if not binary_form:
- raise ValueError("SecureTransport only supports dumping binary certs")
- trust = Security.SecTrustRef()
- certdata = None
- der_bytes = None
-
- try:
- # Grab the trust store.
- result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
- _assert_no_error(result)
- if not trust:
- # Probably we haven't done the handshake yet. No biggie.
- return None
-
- cert_count = Security.SecTrustGetCertificateCount(trust)
- if not cert_count:
- # Also a case that might happen if we haven't handshaked.
- # Handshook? Handshaken?
- return None
-
- leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
- assert leaf
-
- # Ok, now we want the DER bytes.
- certdata = Security.SecCertificateCopyData(leaf)
- assert certdata
-
- data_length = CoreFoundation.CFDataGetLength(certdata)
- data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
- der_bytes = ctypes.string_at(data_buffer, data_length)
- finally:
- if certdata:
- CoreFoundation.CFRelease(certdata)
- if trust:
- CoreFoundation.CFRelease(trust)
-
- return der_bytes
-
- def version(self) -> str:
- protocol = Security.SSLProtocol()
- result = Security.SSLGetNegotiatedProtocolVersion(
- self.context, ctypes.byref(protocol)
- )
- _assert_no_error(result)
- if protocol.value == SecurityConst.kTLSProtocol13:
- raise ssl.SSLError("SecureTransport does not support TLS 1.3")
- elif protocol.value == SecurityConst.kTLSProtocol12:
- return "TLSv1.2"
- elif protocol.value == SecurityConst.kTLSProtocol11:
- return "TLSv1.1"
- elif protocol.value == SecurityConst.kTLSProtocol1:
- return "TLSv1"
- elif protocol.value == SecurityConst.kSSLProtocol3:
- return "SSLv3"
- elif protocol.value == SecurityConst.kSSLProtocol2:
- return "SSLv2"
- else:
- raise ssl.SSLError(f"Unknown TLS version: {protocol!r}")
-
-
-def makefile(
- self: socket_cls,
- mode: (
- Literal["r"] | Literal["w"] | Literal["rw"] | Literal["wr"] | Literal[""]
- ) = "r",
- buffering: int | None = None,
- *args: typing.Any,
- **kwargs: typing.Any,
-) -> typing.BinaryIO | typing.TextIO:
- # We disable buffering with SecureTransport because it conflicts with
- # the buffering that ST does internally (see issue #1153 for more).
- buffering = 0
- return socket_cls.makefile(self, mode, buffering, *args, **kwargs)
-
-
-WrappedSocket.makefile = makefile # type: ignore[attr-defined]
-
-
-class SecureTransportContext:
- """
- I am a wrapper class for the SecureTransport library, to translate the
- interface of the standard library ``SSLContext`` object to calls into
- SecureTransport.
- """
-
- def __init__(self, protocol: int) -> None:
- self._minimum_version: int = ssl.TLSVersion.MINIMUM_SUPPORTED
- self._maximum_version: int = ssl.TLSVersion.MAXIMUM_SUPPORTED
- if protocol not in (None, ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS_CLIENT):
- self._min_version, self._max_version = _protocol_to_min_max[protocol]
-
- self._options = 0
- self._verify = False
- self._trust_bundle: bytes | None = None
- self._client_cert: str | None = None
- self._client_key: str | None = None
- self._client_key_passphrase = None
- self._alpn_protocols: list[bytes] | None = None
-
- @property
- def check_hostname(self) -> Literal[True]:
- """
- SecureTransport cannot have its hostname checking disabled. For more,
- see the comment on getpeercert() in this file.
- """
- return True
-
- @check_hostname.setter
- def check_hostname(self, value: typing.Any) -> None:
- """
- SecureTransport cannot have its hostname checking disabled. For more,
- see the comment on getpeercert() in this file.
- """
-
- @property
- def options(self) -> int:
- # TODO: Well, crap.
- #
- # So this is the bit of the code that is the most likely to cause us
- # trouble. Essentially we need to enumerate all of the SSL options that
- # users might want to use and try to see if we can sensibly translate
- # them, or whether we should just ignore them.
- return self._options
-
- @options.setter
- def options(self, value: int) -> None:
- # TODO: Update in line with above.
- self._options = value
-
- @property
- def verify_mode(self) -> int:
- return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE
-
- @verify_mode.setter
- def verify_mode(self, value: int) -> None:
- self._verify = value == ssl.CERT_REQUIRED
-
- def set_default_verify_paths(self) -> None:
- # So, this has to do something a bit weird. Specifically, what it does
- # is nothing.
- #
- # This means that, if we had previously had load_verify_locations
- # called, this does not undo that. We need to do that because it turns
- # out that the rest of the urllib3 code will attempt to load the
- # default verify paths if it hasn't been told about any paths, even if
- # the context itself was sometime earlier. We resolve that by just
- # ignoring it.
- pass
-
- def load_default_certs(self) -> None:
- return self.set_default_verify_paths()
-
- def set_ciphers(self, ciphers: typing.Any) -> None:
- raise ValueError("SecureTransport doesn't support custom cipher strings")
-
- def load_verify_locations(
- self,
- cafile: str | None = None,
- capath: str | None = None,
- cadata: bytes | None = None,
- ) -> None:
- # OK, we only really support cadata and cafile.
- if capath is not None:
- raise ValueError("SecureTransport does not support cert directories")
-
- # Raise if cafile does not exist.
- if cafile is not None:
- with open(cafile):
- pass
-
- self._trust_bundle = cafile or cadata # type: ignore[assignment]
-
- def load_cert_chain(
- self,
- certfile: str,
- keyfile: str | None = None,
- password: str | None = None,
- ) -> None:
- self._client_cert = certfile
- self._client_key = keyfile
- self._client_cert_passphrase = password
-
- def set_alpn_protocols(self, protocols: list[str | bytes]) -> None:
- """
- Sets the ALPN protocols that will later be set on the context.
-
- Raises a NotImplementedError if ALPN is not supported.
- """
- if not hasattr(Security, "SSLSetALPNProtocols"):
- raise NotImplementedError(
- "SecureTransport supports ALPN only in macOS 10.12+"
- )
- self._alpn_protocols = [util.util.to_bytes(p, "ascii") for p in protocols]
-
- def wrap_socket(
- self,
- sock: socket_cls,
- server_side: bool = False,
- do_handshake_on_connect: bool = True,
- suppress_ragged_eofs: bool = True,
- server_hostname: bytes | str | None = None,
- ) -> WrappedSocket:
- # So, what do we do here? Firstly, we assert some properties. This is a
- # stripped down shim, so there is some functionality we don't support.
- # See PEP 543 for the real deal.
- assert not server_side
- assert do_handshake_on_connect
- assert suppress_ragged_eofs
-
- # Ok, we're good to go. Now we want to create the wrapped socket object
- # and store it in the appropriate place.
- wrapped_socket = WrappedSocket(sock)
-
- # Now we can handshake
- wrapped_socket.handshake(
- server_hostname,
- self._verify,
- self._trust_bundle,
- _tls_version_to_st[self._minimum_version],
- _tls_version_to_st[self._maximum_version],
- self._client_cert,
- self._client_key,
- self._client_key_passphrase,
- self._alpn_protocols,
- )
- return wrapped_socket
-
- @property
- def minimum_version(self) -> int:
- return self._minimum_version
-
- @minimum_version.setter
- def minimum_version(self, minimum_version: int) -> None:
- self._minimum_version = minimum_version
-
- @property
- def maximum_version(self) -> int:
- return self._maximum_version
-
- @maximum_version.setter
- def maximum_version(self, maximum_version: int) -> None:
- self._maximum_version = maximum_version
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -767,13 +767,9 @@ def _fp_read(self, amt: int | None = None) -> bytes:
assert self._fp
c_int_max = 2**31 - 1
if (
- (
- (amt and amt > c_int_max)
- or (self.length_remaining and self.length_remaining > c_int_max)
- )
- and not util.IS_SECURETRANSPORT
- and (util.IS_PYOPENSSL or sys.version_info < (3, 10))
- ):
+ (amt and amt > c_int_max)
+ or (self.length_remaining and self.length_remaining > c_int_max)
+ ) and (util.IS_PYOPENSSL or sys.version_info < (3, 10)):
buffer = io.BytesIO()
# Besides `max_chunk_amt` being a maximum chunk size, it
# affects memory overhead of reading a response by this
diff --git a/src/urllib3/util/__init__.py b/src/urllib3/util/__init__.py
--- a/src/urllib3/util/__init__.py
+++ b/src/urllib3/util/__init__.py
@@ -8,7 +8,6 @@
from .ssl_ import (
ALPN_PROTOCOLS,
IS_PYOPENSSL,
- IS_SECURETRANSPORT,
SSLContext,
assert_fingerprint,
create_urllib3_context,
@@ -22,7 +21,6 @@
__all__ = (
"IS_PYOPENSSL",
- "IS_SECURETRANSPORT",
"SSLContext",
"ALPN_PROTOCOLS",
"Retry",
diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py
--- a/src/urllib3/util/ssl_.py
+++ b/src/urllib3/util/ssl_.py
@@ -16,7 +16,6 @@
SSLTransport = None
HAS_NEVER_CHECK_COMMON_NAME = False
IS_PYOPENSSL = False
-IS_SECURETRANSPORT = False
ALPN_PROTOCOLS = ["http/1.1"]
_TYPE_VERSION_INFO = typing.Tuple[int, int, int, str, int]
| diff --git a/test/__init__.py b/test/__init__.py
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -156,31 +156,6 @@ def notZstd() -> typing.Callable[[_TestFuncT], _TestFuncT]:
)
-# Hack to make pytest evaluate a condition at test runtime instead of collection time.
-def lazy_condition(condition: typing.Callable[[], bool]) -> bool:
- class LazyCondition:
- def __bool__(self) -> bool:
- return condition()
-
- return typing.cast(bool, LazyCondition())
-
-
-def onlySecureTransport() -> typing.Callable[[_TestFuncT], _TestFuncT]:
- """Runs this test when SecureTransport is in use."""
- return pytest.mark.skipif(
- lazy_condition(lambda: not ssl_.IS_SECURETRANSPORT),
- reason="Test only runs with SecureTransport",
- )
-
-
-def notSecureTransport() -> typing.Callable[[_TestFuncT], _TestFuncT]:
- """Skips this test when SecureTransport is in use."""
- return pytest.mark.skipif(
- lazy_condition(lambda: ssl_.IS_SECURETRANSPORT),
- reason="Test does not run with SecureTransport",
- )
-
-
_requires_network_has_route = None
@@ -217,15 +192,6 @@ def _has_route() -> bool:
)
-def requires_ssl_context_keyfile_password() -> (
- typing.Callable[[_TestFuncT], _TestFuncT]
-):
- return pytest.mark.skipif(
- lazy_condition(lambda: ssl_.IS_SECURETRANSPORT),
- reason="Test requires password parameter for SSLContext.load_cert_chain()",
- )
-
-
def resolvesLocalhostFQDN() -> typing.Callable[[_TestFuncT], _TestFuncT]:
"""Test requires successful resolving of 'localhost.'"""
return pytest.mark.skipif(
diff --git a/test/contrib/test_securetransport.py b/test/contrib/test_securetransport.py
deleted file mode 100644
--- a/test/contrib/test_securetransport.py
+++ /dev/null
@@ -1,68 +0,0 @@
-from __future__ import annotations
-
-import base64
-import contextlib
-import socket
-import ssl
-
-import pytest
-
-try:
- from urllib3.contrib.securetransport import WrappedSocket
-except ImportError:
- pass
-
-
-def setup_module() -> None:
- try:
- from urllib3.contrib.securetransport import inject_into_urllib3
-
- inject_into_urllib3()
- except ImportError as e:
- pytest.skip(f"Could not import SecureTransport: {repr(e)}")
-
-
-def teardown_module() -> None:
- try:
- from urllib3.contrib.securetransport import extract_from_urllib3
-
- extract_from_urllib3()
- except ImportError:
- pass
-
-
-from ..test_util import TestUtilSSL # noqa: E402, F401
-
-# SecureTransport does not support TLSv1.3
-# https://github.com/urllib3/urllib3/issues/1674
-from ..with_dummyserver.test_https import ( # noqa: E402, F401
- TestHTTPS,
- TestHTTPS_TLSv1,
- TestHTTPS_TLSv1_1,
- TestHTTPS_TLSv1_2,
-)
-from ..with_dummyserver.test_socketlevel import ( # noqa: E402, F401
- TestClientCerts,
- TestSNI,
- TestSocketClosing,
- TestSSL,
-)
-
-
-def test_no_crash_with_empty_trust_bundle() -> None:
- with contextlib.closing(socket.socket()) as s:
- ws = WrappedSocket(s)
- with pytest.raises(ssl.SSLError):
- ws._custom_validate(True, b"")
-
-
-def test_no_crash_with_invalid_trust_bundle() -> None:
- invalid_cert = base64.b64encode(b"invalid-cert")
- cert_bundle = (
- b"-----BEGIN CERTIFICATE-----\n" + invalid_cert + b"\n-----END CERTIFICATE-----"
- )
-
- with contextlib.closing(socket.socket()) as s:
- ws = WrappedSocket(s)
- with pytest.raises(ssl.SSLError):
- ws._custom_validate(True, cert_bundle)
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -12,9 +12,7 @@
LONG_TIMEOUT,
SHORT_TIMEOUT,
TARPIT_HOST,
- notSecureTransport,
requires_network,
- requires_ssl_context_keyfile_password,
resolvesLocalhostFQDN,
)
from test.conftest import ServerConfig
@@ -189,7 +187,6 @@ def test_client_no_intermediate(self) -> None:
with pytest.raises((SSLError, ProtocolError)):
https_pool.request("GET", "/certificate", retries=False)
- @requires_ssl_context_keyfile_password()
def test_client_key_password(self) -> None:
with HTTPSConnectionPool(
self.host,
@@ -204,7 +201,6 @@ def test_client_key_password(self) -> None:
subject = r.json()
assert subject["organizationalUnitName"].startswith("Testing cert")
- @requires_ssl_context_keyfile_password()
def test_client_encrypted_key_requires_password(self) -> None:
with HTTPSConnectionPool(
self.host,
@@ -265,7 +261,6 @@ def test_context_combines_with_ca_certs(self) -> None:
assert r.status == 200
assert not warn.called, warn.call_args_list
- @notSecureTransport() # SecureTransport does not support cert directories
def test_ca_dir_verified(self, tmp_path: Path) -> None:
# OpenSSL looks up certificates by the hash for their name, see c_rehash
# TODO infer the bytes using `cryptography.x509.Name.public_bytes`.
@@ -552,12 +547,7 @@ def test_verify_none_and_good_fingerprint(self) -> None:
) as https_pool:
https_pool.request("GET", "/")
- @notSecureTransport()
def test_good_fingerprint_and_hostname_mismatch(self) -> None:
- # This test doesn't run with SecureTransport because we don't turn off
- # hostname validation without turning off all validation, which this
- # test doesn't do (deliberately). We should revisit this if we make
- # new decisions.
with HTTPSConnectionPool(
"127.0.0.1",
self.port,
@@ -975,13 +965,12 @@ def test_default_ssl_context_ssl_min_max_versions(self) -> None:
ctx = urllib3.util.ssl_.create_urllib3_context()
assert ctx.minimum_version == ssl.TLSVersion.TLSv1_2
# urllib3 sets a default maximum version only when it is
- # injected with PyOpenSSL- or SecureTransport-backed
- # SSL-support.
+ # injected with PyOpenSSL SSL-support.
# Otherwise, the default maximum version is set by Python's
# `ssl.SSLContext`. The value respects OpenSSL configuration and
# can be different from `ssl.TLSVersion.MAXIMUM_SUPPORTED`.
# https://github.com/urllib3/urllib3/issues/2477#issuecomment-1151452150
- if util.IS_PYOPENSSL or util.IS_SECURETRANSPORT:
+ if util.IS_PYOPENSSL:
expected_maximum_version = ssl.TLSVersion.MAXIMUM_SUPPORTED
else:
expected_maximum_version = ssl.SSLContext(
diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -9,7 +9,7 @@
import socket
import ssl
import tempfile
-from test import LONG_TIMEOUT, SHORT_TIMEOUT, onlySecureTransport, withPyOpenSSL
+from test import LONG_TIMEOUT, SHORT_TIMEOUT, withPyOpenSSL
from test.conftest import ServerConfig
import pytest
@@ -103,17 +103,6 @@ def test_https_proxy_pyopenssl_not_supported(self) -> None:
):
https.request("GET", f"{self.https_url}/")
- @onlySecureTransport()
- def test_https_proxy_securetransport_not_supported(self) -> None:
- with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https:
- r = https.request("GET", f"{self.http_url}/")
- assert r.status == 200
-
- with pytest.raises(
- ProxySchemeUnsupported, match="isn't available on non-native SSLContext"
- ):
- https.request("GET", f"{self.https_url}/")
-
def test_https_proxy_forwarding_for_https(self) -> None:
with proxy_from_url(
self.https_proxy_url,
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -18,14 +18,7 @@
import zlib
from collections import OrderedDict
from pathlib import Path
-from test import (
- LONG_TIMEOUT,
- SHORT_TIMEOUT,
- notSecureTransport,
- notWindows,
- requires_ssl_context_keyfile_password,
- resolvesLocalhostFQDN,
-)
+from test import LONG_TIMEOUT, SHORT_TIMEOUT, notWindows, resolvesLocalhostFQDN
from threading import Event
from unittest import mock
@@ -329,11 +322,9 @@ def socket_handler(listener: socket.socket) -> None:
done_receiving.set()
done_receiving.set()
- @requires_ssl_context_keyfile_password()
def test_client_cert_with_string_password(self) -> None:
self.run_client_cert_with_password_test("letmein")
- @requires_ssl_context_keyfile_password()
def test_client_cert_with_bytes_password(self) -> None:
self.run_client_cert_with_password_test(b"letmein")
@@ -385,7 +376,6 @@ def socket_handler(listener: socket.socket) -> None:
assert len(client_certs) == 1
- @requires_ssl_context_keyfile_password()
def test_load_keyfile_with_invalid_password(self) -> None:
assert ssl_.SSLContext is not None
context = ssl_.SSLContext(ssl_.PROTOCOL_SSLv23)
@@ -396,9 +386,6 @@ def test_load_keyfile_with_invalid_password(self) -> None:
password=b"letmei",
)
- # For SecureTransport, the validation that would raise an error in
- # this case is deferred.
- @notSecureTransport()
def test_load_invalid_cert_file(self) -> None:
assert ssl_.SSLContext is not None
context = ssl_.SSLContext(ssl_.PROTOCOL_SSLv23)
@@ -993,9 +980,7 @@ def consume_ssl_socket(listener: socket.socket) -> None:
) as f:
ssl_sock.close()
f.close()
- # SecureTransport is supposed to raise OSError but raises
- # ssl.SSLError when closed because ssl_sock.context is None
- with pytest.raises((OSError, ssl.SSLError)):
+ with pytest.raises(OSError):
ssl_sock.sendall(b"hello")
assert ssl_sock.fileno() == -1
@@ -1316,7 +1301,6 @@ def socket_handler(listener: socket.socket) -> None:
):
pool.request("GET", "/", retries=False)
- @notSecureTransport()
def test_ssl_read_timeout(self) -> None:
timed_out = Event()
@@ -1600,9 +1584,6 @@ def socket_handler(listener: socket.socket) -> None:
pool.request("GET", "/", retries=False, timeout=LONG_TIMEOUT)
assert server_closed.wait(LONG_TIMEOUT), "The socket was not terminated"
- # SecureTransport can read only small pieces of data at the moment.
- # https://github.com/urllib3/urllib3/pull/2674
- @notSecureTransport()
@pytest.mark.skipif(
os.environ.get("CI") == "true" and sys.implementation.name == "pypy",
reason="too slow to run in CI",
| Deprecate the SecureTransport TLS implementation
The SecureTransport implementation was added to support OpenSSL versions that didn't support TLS 1.2 and later on macOS. Python on macOS has improved so much over time that we're now in a place to force OpenSSL 1.1.1+ in Python and urllib3.
Now that there's little upside to using SecureTransport, it's hard to justify the maintenance burden.
My proposal for now is:
- Create documentation for SecureTransport's deprecation in the docs. We'll be targeting a "future 2.x release" with the actual removal.
- Begin unconditionally emitting a `DeprecationWarning` when `urllib3.contrib.securetransport.inject_into_urllib3` is called and point to the documentation.
- Find projects using `urllib3.contrib.securetransport` and help them remove support.
In a future 2.x version:
- Make the `inject_into_urllib3` function raise an exception with a URL to the documentation (a rough sketch follows below).
In a version after that:
- Remove the `inject_into_urllib3` function and `urllib3.contrib.securetransport` contrib module.
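For that exception-raising step, the final form might look roughly like this (a sketch only; the exact wording and exception type aren't settled):

```python
def inject_into_urllib3() -> None:
    # Hypothetical future behavior: refuse to inject instead of warning.
    raise ImportError(
        "urllib3.contrib.securetransport is no longer supported. Read more at "
        "https://urllib3.readthedocs.io/en/stable/reference/contrib/securetransport.html"
    )
```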
| Hi @sethmlarson, I've added the deprecation warning to the inject_into_urllib3() method in the securetransport module.
Just a quick question:
1. Hope it's fine for the warning to reference the https://urllib3.readthedocs.io/en/stable/reference/contrib/securetransport.html section of the docs, as the deprecation notice will also live in that section.
```
def inject_into_urllib3() -> None:
warnings.warn(
"'urllib3.contrib.securetransport.inject_into_urllib3()' is deprecated and will be removed "
"in a future release of urllib3 2.x. Read more in the doc: "
"https://urllib3.readthedocs.io/en/stable/reference/contrib/securetransport.html",
category=DeprecationWarning,
stacklevel=2,
)
.......
.......
.......
```
Yes, links to the docs are one honking great idea -- let's do more of those!
OK nice @pquentin I'll try to work on the remaining task. | 2023-10-06T04:34:59Z | [] | [] |
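For downstream projects tracking the removal, a quick way to assert the warning fires (a pytest sketch, not part of the PR; it assumes a macOS environment where SecureTransport imports):

```python
import pytest

from urllib3.contrib.securetransport import inject_into_urllib3


def test_inject_into_urllib3_warns() -> None:
    # The deprecation message names the module path, so match on that.
    with pytest.warns(DeprecationWarning, match="securetransport"):
        inject_into_urllib3()
```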
urllib3/urllib3 | 3,150 | urllib3__urllib3-3150 | [
"3065"
] | 7d0648b53cf4159e7f3ebb8353cb642942ec93e6 | diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py
--- a/src/urllib3/poolmanager.py
+++ b/src/urllib3/poolmanager.py
@@ -38,6 +38,7 @@
"cert_file",
"cert_reqs",
"ca_certs",
+ "ca_cert_data",
"ssl_version",
"ssl_minimum_version",
"ssl_maximum_version",
@@ -73,6 +74,7 @@ class PoolKey(typing.NamedTuple):
key_cert_file: str | None
key_cert_reqs: str | None
key_ca_certs: str | None
+ key_ca_cert_data: str | bytes | None
key_ssl_version: int | str | None
key_ssl_minimum_version: ssl.TLSVersion | None
key_ssl_maximum_version: ssl.TLSVersion | None
| diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -78,6 +78,16 @@ def test_https_proxy(self) -> None:
r = https.request("GET", f"{self.http_url}/")
assert r.status == 200
+ def test_http_and_https_kwarg_ca_cert_data_proxy(self) -> None:
+ with open(DEFAULT_CA) as pem_file:
+ pem_file_data = pem_file.read()
+ with proxy_from_url(self.https_proxy_url, ca_cert_data=pem_file_data) as https:
+ r = https.request("GET", f"{self.https_url}/")
+ assert r.status == 200
+
+ r = https.request("GET", f"{self.http_url}/")
+ assert r.status == 200
+
def test_https_proxy_with_proxy_ssl_context(self) -> None:
proxy_ssl_context = create_urllib3_context()
proxy_ssl_context.load_verify_locations(DEFAULT_CA)
| Cannot pass ssl kwarg cert_data in proxy context
| Discovered this one while writing TLS-in-TLS HTTP/2 tests in #3030
While we can do
```python
with proxy_from_url("...", ca_certs=DEFAULT_CA) as http:
...
```
we cannot
```python
with proxy_from_url("...", cert_data=open(DEFAULT_CA, "rb").read()) as http:
...
```
This fails with the output `unexpected kw key_cert_data for pool conn`.
I suspect we missed propagating the required keyword.
It is an easy fix: we should patch `SSL_KEYWORDS = (....)` and `class PoolKey(typing.NamedTuple):`.
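For reference, a simplified sketch of the shape of the fix in `src/urllib3/poolmanager.py` (only a few fields shown; the merged patch above is authoritative):

```python
from __future__ import annotations

import typing

# Simplified: the real tuple lists every SSL-related pool keyword.
SSL_KEYWORDS = (
    "key_file",
    "cert_file",
    "cert_reqs",
    "ca_certs",
    "ca_cert_data",  # the keyword that was missing
    "ssl_version",
)


class PoolKey(typing.NamedTuple):
    # Simplified: the real NamedTuple has one key_* field per pool kwarg.
    key_scheme: str
    key_host: str
    key_port: int | None
    key_ca_cert_data: str | bytes | None  # mirrors the new SSL keyword
```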
| will try to add a PR for this. | 2023-10-08T15:16:25Z | [] | [] |
urllib3/urllib3 | 3,176 | urllib3__urllib3-3176 | [
"3174"
] | ff764a01499203a7c6fbe2e6c0a5a670cf26745c | diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -208,7 +208,9 @@ def _get_decoder(mode: str) -> ContentDecoder:
if "," in mode:
return MultiDecoder(mode)
- if mode == "gzip":
+ # According to RFC 9110 section 8.4.1.3, recipients should
+ # consider x-gzip equivalent to gzip
+ if mode in ("gzip", "x-gzip"):
return GzipDecoder()
if brotli is not None and mode == "br":
@@ -280,7 +282,7 @@ def get(self, n: int) -> bytes:
class BaseHTTPResponse(io.IOBase):
- CONTENT_DECODERS = ["gzip", "deflate"]
+ CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"]
if brotli is not None:
CONTENT_DECODERS += ["br"]
if zstd is not None:
| diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -229,14 +229,15 @@ def test_chunked_decoding_deflate2(self) -> None:
assert r.read() == b""
assert r.read() == b""
- def test_chunked_decoding_gzip(self) -> None:
+ @pytest.mark.parametrize("content_encoding", ["gzip", "x-gzip"])
+ def test_chunked_decoding_gzip(self, content_encoding: str) -> None:
compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
data = compress.compress(b"foo")
data += compress.flush()
fp = BytesIO(data)
r = HTTPResponse(
- fp, headers={"content-encoding": "gzip"}, preload_content=False
+ fp, headers={"content-encoding": content_encoding}, preload_content=False
)
assert r.read(1) == b"f"
| Support `Content-Encoding: x-gzip`
### Context
I'm using an API that returns a gzipped response, but the reported `Content-Encoding` is `x-gzip` rather than `gzip`, so urllib3 fails to decompress it.
According to [RFC 9110](https://www.rfc-editor.org/rfc/rfc9110.html#section-8.4.1.3):
> A recipient SHOULD consider "x-gzip" to be equivalent to "gzip".
Therefore, I would expect e.g. `Response.read(decode_content=True)` to automatically decompress such responses.
Unfortunately, I can't find a public server that would produce such responses, so I can't provide a simple reproducer.
### Alternatives
I can set `decode_content=False` and gunzip the content manually.
### Duplicate
> Has the feature been requested before?
Not that I could find.
### Contribution
> Would you be willing to submit a PR?
Probably not.
https://github.com/urllib3/urllib3/blob/7d0648b53cf4159e7f3ebb8353cb642942ec93e6/src/urllib3/response.py#L207-L220 could be trivially updated so that, after we get past the MultiDecoder return, we do `if mode.startswith("x-"): mode = mode[2:]` and then we don't need to worry about `x-gzip` or `x-compress`. And if any other encodings are eXperimental first, we can handle those backwards-compatibly as well.
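Concretely, that suggestion amounts to a normalization like this sketch (not what was merged; the final patch above special-cases only `x-gzip`):

```python
def _strip_x_prefix(mode: str) -> str:
    # "x-gzip" -> "gzip", "x-compress" -> "compress"; others unchanged.
    return mode[2:] if mode.startswith("x-") else mode


assert _strip_x_prefix("x-gzip") == "gzip"
assert _strip_x_prefix("br") == "br"
```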
FWIW, I don't think it's a good idea to blindly assume that `x-foo` is the same as `foo`, since there is no guarantee that will be the case. Plus, the only registered `x-` encodings are `x-gzip` and `x-compress` (and urllib3 doesn't even support `compress`).
| 2023-11-03T20:46:17Z | [] | [] |
urllib3/urllib3 | 3,195 | urllib3__urllib3-3195 | [
"2951"
] | d32e1fcee8d93612985af5094c8b8615522a80ba | diff --git a/dummyserver/handlers.py b/dummyserver/handlers.py
--- a/dummyserver/handlers.py
+++ b/dummyserver/handlers.py
@@ -247,6 +247,7 @@ def echo(self, request: httputil.HTTPServerRequest) -> Response:
def echo_json(self, request: httputil.HTTPServerRequest) -> Response:
"Echo back the JSON"
+ print("ECHO JSON:", request.body)
return Response(json=request.body, headers=list(request.headers.items()))
def echo_uri(self, request: httputil.HTTPServerRequest) -> Response:
diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -3,6 +3,8 @@
import os
import shutil
import sys
+import typing
+from pathlib import Path
import nox
@@ -14,6 +16,7 @@ def tests_impl(
# https://github.com/python-hyper/h2/issues/1236
byte_string_comparisons: bool = False,
integration: bool = False,
+ pytest_extra_args: list[str] = [],
) -> None:
# Install deps and the package itself.
session.install("-r", "dev-requirements.txt")
@@ -53,6 +56,7 @@ def tests_impl(
"--durations=10",
"--strict-config",
"--strict-markers",
+ *pytest_extra_args,
*(session.posargs or ("test/",)),
env={"PYTHONWARNINGS": "always::DeprecationWarning"},
)
@@ -152,6 +156,108 @@ def lint(session: nox.Session) -> None:
mypy(session)
+# TODO: node support is not tested yet - it should work if you require('xmlhttprequest') before
+# loading pyodide, but there is currently no nice way to do this with pytest-pyodide
+# because you can't override the test runner properties easily - see
+# https://github.com/pyodide/pytest-pyodide/issues/118 for more
[email protected](python="3.11")
[email protected]("runner", ["firefox", "chrome"])
+def emscripten(session: nox.Session, runner: str) -> None:
+ """Test on Emscripten with Pyodide & Chrome / Firefox"""
+ session.install("-r", "emscripten-requirements.txt")
+ # build wheel into dist folder
+ session.run("python", "-m", "build")
+ # make sure we have a dist dir for pyodide
+ dist_dir = None
+ if "PYODIDE_ROOT" in os.environ:
+ # we have a pyodide build tree checked out
+ # use the dist directory from that
+ dist_dir = Path(os.environ["PYODIDE_ROOT"]) / "dist"
+ else:
+ # we don't have a build tree, get one
+ # that matches the version of pyodide build
+ pyodide_version = typing.cast(
+ str,
+ session.run(
+ "python",
+ "-c",
+ "import pyodide_build;print(pyodide_build.__version__)",
+ silent=True,
+ ),
+ ).strip()
+
+ pyodide_artifacts_path = Path(session.cache_dir) / f"pyodide-{pyodide_version}"
+ if not pyodide_artifacts_path.exists():
+ print("Fetching pyodide build artifacts")
+ session.run(
+ "wget",
+ f"https://github.com/pyodide/pyodide/releases/download/{pyodide_version}/pyodide-{pyodide_version}.tar.bz2",
+ "-O",
+ f"{pyodide_artifacts_path}.tar.bz2",
+ )
+ pyodide_artifacts_path.mkdir(parents=True)
+ session.run(
+ "tar",
+ "-xjf",
+ f"{pyodide_artifacts_path}.tar.bz2",
+ "-C",
+ str(pyodide_artifacts_path),
+ "--strip-components",
+ "1",
+ )
+
+ dist_dir = pyodide_artifacts_path
+ assert dist_dir is not None
+ assert dist_dir.exists()
+ if runner == "chrome":
+ # install chrome webdriver and add it to path
+ driver = typing.cast(
+ str,
+ session.run(
+ "python",
+ "-c",
+ "from webdriver_manager.chrome import ChromeDriverManager;print(ChromeDriverManager().install())",
+ silent=True,
+ ),
+ ).strip()
+ session.env["PATH"] = f"{Path(driver).parent}:{session.env['PATH']}"
+
+ tests_impl(
+ session,
+ pytest_extra_args=[
+ "--rt",
+ "chrome-no-host",
+ "--dist-dir",
+ str(dist_dir),
+ "test",
+ ],
+ )
+ elif runner == "firefox":
+ driver = typing.cast(
+ str,
+ session.run(
+ "python",
+ "-c",
+ "from webdriver_manager.firefox import GeckoDriverManager;print(GeckoDriverManager().install())",
+ silent=True,
+ ),
+ ).strip()
+ session.env["PATH"] = f"{Path(driver).parent}:{session.env['PATH']}"
+
+ tests_impl(
+ session,
+ pytest_extra_args=[
+ "--rt",
+ "firefox-no-host",
+ "--dist-dir",
+ str(dist_dir),
+ "test",
+ ],
+ )
+ else:
+        raise ValueError(f"Unknown runner: {runner}")
+
+
@nox.session(python="3.12")
def mypy(session: nox.Session) -> None:
"""Run mypy."""
diff --git a/src/urllib3/__init__.py b/src/urllib3/__init__.py
--- a/src/urllib3/__init__.py
+++ b/src/urllib3/__init__.py
@@ -6,6 +6,7 @@
# Set default logging handler to avoid "No handler found" warnings.
import logging
+import sys
import typing
import warnings
from logging import NullHandler
@@ -202,3 +203,9 @@ def request(
timeout=timeout,
json=json,
)
+
+
+if sys.platform == "emscripten":
+ from .contrib.emscripten import inject_into_urllib3 # noqa: 401
+
+ inject_into_urllib3()
diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -543,6 +543,8 @@ def _make_request(
response._connection = response_conn # type: ignore[attr-defined]
response._pool = self # type: ignore[attr-defined]
+ # emscripten connection doesn't have _http_vsn_str
+ http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
log.debug(
'%s://%s:%s "%s %s %s" %s %s',
self.scheme,
@@ -551,7 +553,7 @@ def _make_request(
method,
url,
# HTTP version
- conn._http_vsn_str, # type: ignore[attr-defined]
+ http_version,
response.status,
response.length_remaining, # type: ignore[attr-defined]
)
diff --git a/src/urllib3/contrib/emscripten/__init__.py b/src/urllib3/contrib/emscripten/__init__.py
new file mode 100644
--- /dev/null
+++ b/src/urllib3/contrib/emscripten/__init__.py
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+import urllib3.connection
+
+from ...connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from .connection import EmscriptenHTTPConnection, EmscriptenHTTPSConnection
+
+
+def inject_into_urllib3() -> None:
+ # override connection classes to use emscripten specific classes
+ # n.b. mypy complains about the overriding of classes below
+ # if it isn't ignored
+ HTTPConnectionPool.ConnectionCls = EmscriptenHTTPConnection
+ HTTPSConnectionPool.ConnectionCls = EmscriptenHTTPSConnection
+ urllib3.connection.HTTPConnection = EmscriptenHTTPConnection # type: ignore[misc,assignment]
+ urllib3.connection.HTTPSConnection = EmscriptenHTTPSConnection # type: ignore[misc,assignment]
diff --git a/src/urllib3/contrib/emscripten/connection.py b/src/urllib3/contrib/emscripten/connection.py
new file mode 100644
--- /dev/null
+++ b/src/urllib3/contrib/emscripten/connection.py
@@ -0,0 +1,249 @@
+from __future__ import annotations
+
+import os
+import typing
+
+# use http.client.HTTPException for consistency with non-emscripten
+from http.client import HTTPException as HTTPException # noqa: F401
+from http.client import ResponseNotReady
+
+from ..._base_connection import _TYPE_BODY
+from ...connection import HTTPConnection, ProxyConfig, port_by_scheme
+from ...exceptions import TimeoutError
+from ...response import BaseHTTPResponse
+from ...util.connection import _TYPE_SOCKET_OPTIONS
+from ...util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
+from ...util.url import Url
+from .fetch import _RequestError, _TimeoutError, send_request, send_streaming_request
+from .request import EmscriptenRequest
+from .response import EmscriptenHttpResponseWrapper, EmscriptenResponse
+
+if typing.TYPE_CHECKING:
+ from ..._base_connection import BaseHTTPConnection, BaseHTTPSConnection
+
+
+class EmscriptenHTTPConnection:
+ default_port: typing.ClassVar[int] = port_by_scheme["http"]
+ default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS]
+
+ timeout: None | (float)
+
+ host: str
+ port: int
+ blocksize: int
+ source_address: tuple[str, int] | None
+ socket_options: _TYPE_SOCKET_OPTIONS | None
+
+ proxy: Url | None
+ proxy_config: ProxyConfig | None
+
+ is_verified: bool = False
+ proxy_is_verified: bool | None = None
+
+ _response: EmscriptenResponse | None
+
+ def __init__(
+ self,
+ host: str,
+ port: int = 0,
+ *,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
+ source_address: tuple[str, int] | None = None,
+ blocksize: int = 8192,
+ socket_options: _TYPE_SOCKET_OPTIONS | None = None,
+ proxy: Url | None = None,
+ proxy_config: ProxyConfig | None = None,
+ ) -> None:
+ self.host = host
+ self.port = port
+ self.timeout = timeout if isinstance(timeout, float) else 0.0
+ self.scheme = "http"
+ self._closed = True
+ self._response = None
+ # ignore these things because we don't
+ # have control over that stuff
+ self.proxy = None
+ self.proxy_config = None
+ self.blocksize = blocksize
+ self.source_address = None
+ self.socket_options = None
+
+ def set_tunnel(
+ self,
+ host: str,
+ port: int | None = 0,
+ headers: typing.Mapping[str, str] | None = None,
+ scheme: str = "http",
+ ) -> None:
+ pass
+
+ def connect(self) -> None:
+ pass
+
+ def request(
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ # We know *at least* botocore is depending on the order of the
+ # first 3 parameters so to be safe we only mark the later ones
+ # as keyword-only to ensure we have space to extend.
+ *,
+ chunked: bool = False,
+ preload_content: bool = True,
+ decode_content: bool = True,
+ enforce_content_length: bool = True,
+ ) -> None:
+ self._closed = False
+ if url.startswith("/"):
+ # no scheme / host / port included, make a full url
+ url = f"{self.scheme}://{self.host}:{self.port}" + url
+ request = EmscriptenRequest(
+ url=url,
+ method=method,
+ timeout=self.timeout if self.timeout else 0,
+ decode_content=decode_content,
+ )
+ request.set_body(body)
+ if headers:
+ for k, v in headers.items():
+ request.set_header(k, v)
+ self._response = None
+ try:
+ if not preload_content:
+ self._response = send_streaming_request(request)
+ if self._response is None:
+ self._response = send_request(request)
+ except _TimeoutError as e:
+ raise TimeoutError(e.message)
+ except _RequestError as e:
+ raise HTTPException(e.message)
+
+ def getresponse(self) -> BaseHTTPResponse:
+ if self._response is not None:
+ return EmscriptenHttpResponseWrapper(
+ internal_response=self._response,
+ url=self._response.request.url,
+ connection=self,
+ )
+ else:
+ raise ResponseNotReady()
+
+ def close(self) -> None:
+ self._closed = True
+ self._response = None
+
+ @property
+ def is_closed(self) -> bool:
+ """Whether the connection either is brand new or has been previously closed.
+ If this property is True then both ``is_connected`` and ``has_connected_to_proxy``
+ properties must be False.
+ """
+ return self._closed
+
+ @property
+ def is_connected(self) -> bool:
+ """Whether the connection is actively connected to any origin (proxy or target)"""
+ return True
+
+ @property
+ def has_connected_to_proxy(self) -> bool:
+ """Whether the connection has successfully connected to its proxy.
+ This returns False if no proxy is in use. Used to determine whether
+ errors are coming from the proxy layer or from tunnelling to the target origin.
+ """
+ return False
+
+
+class EmscriptenHTTPSConnection(EmscriptenHTTPConnection):
+ default_port = port_by_scheme["https"]
+ # all this is basically ignored, as browser handles https
+ cert_reqs: int | str | None = None
+ ca_certs: str | None = None
+ ca_cert_dir: str | None = None
+ ca_cert_data: None | str | bytes = None
+ cert_file: str | None
+ key_file: str | None
+ key_password: str | None
+ ssl_context: typing.Any | None
+ ssl_version: int | str | None = None
+ ssl_minimum_version: int | None = None
+ ssl_maximum_version: int | None = None
+ assert_hostname: None | str | typing.Literal[False]
+ assert_fingerprint: str | None = None
+
+ def __init__(
+ self,
+ host: str,
+ port: int = 0,
+ *,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
+ source_address: tuple[str, int] | None = None,
+ blocksize: int = 16384,
+ socket_options: None
+ | _TYPE_SOCKET_OPTIONS = HTTPConnection.default_socket_options,
+ proxy: Url | None = None,
+ proxy_config: ProxyConfig | None = None,
+ cert_reqs: int | str | None = None,
+ assert_hostname: None | str | typing.Literal[False] = None,
+ assert_fingerprint: str | None = None,
+ server_hostname: str | None = None,
+ ssl_context: typing.Any | None = None,
+ ca_certs: str | None = None,
+ ca_cert_dir: str | None = None,
+ ca_cert_data: None | str | bytes = None,
+ ssl_minimum_version: int | None = None,
+ ssl_maximum_version: int | None = None,
+ ssl_version: int | str | None = None, # Deprecated
+ cert_file: str | None = None,
+ key_file: str | None = None,
+ key_password: str | None = None,
+ ) -> None:
+ super().__init__(
+ host,
+ port=port,
+ timeout=timeout,
+ source_address=source_address,
+ blocksize=blocksize,
+ socket_options=socket_options,
+ proxy=proxy,
+ proxy_config=proxy_config,
+ )
+ self.scheme = "https"
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.key_password = key_password
+ self.ssl_context = ssl_context
+ self.server_hostname = server_hostname
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+ self.ssl_version = ssl_version
+ self.ssl_minimum_version = ssl_minimum_version
+ self.ssl_maximum_version = ssl_maximum_version
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+ self.ca_cert_data = ca_cert_data
+
+ self.cert_reqs = None
+
+ def set_cert(
+ self,
+ key_file: str | None = None,
+ cert_file: str | None = None,
+ cert_reqs: int | str | None = None,
+ key_password: str | None = None,
+ ca_certs: str | None = None,
+ assert_hostname: None | str | typing.Literal[False] = None,
+ assert_fingerprint: str | None = None,
+ ca_cert_dir: str | None = None,
+ ca_cert_data: None | str | bytes = None,
+ ) -> None:
+ pass
+
+
+# verify that this class implements BaseHTTP(s) connection correctly
+if typing.TYPE_CHECKING:
+ _supports_http_protocol: BaseHTTPConnection = EmscriptenHTTPConnection("", 0)
+ _supports_https_protocol: BaseHTTPSConnection = EmscriptenHTTPSConnection("", 0)
diff --git a/src/urllib3/contrib/emscripten/fetch.py b/src/urllib3/contrib/emscripten/fetch.py
new file mode 100644
--- /dev/null
+++ b/src/urllib3/contrib/emscripten/fetch.py
@@ -0,0 +1,413 @@
+"""
+Support for streaming http requests in emscripten.
+
+A few caveats -
+
+Firstly, you can't do streaming http in the main UI thread, because atomics.wait isn't allowed.
+Streaming only works if you're running pyodide in a web worker.
+
+Secondly, this uses an extra web worker and SharedArrayBuffer to do the asynchronous fetch
+operation, so it requires that you have crossOriginIsolation enabled, by serving over https
+(or from localhost) with the two headers below set:
+
+ Cross-Origin-Opener-Policy: same-origin
+ Cross-Origin-Embedder-Policy: require-corp
+
+You can tell if cross origin isolation is successfully enabled by looking at the global crossOriginIsolated variable in
+the javascript console. If it isn't, streaming requests will fall back to XMLHttpRequest, i.e. getting the whole
+request into a buffer and then returning it. It shows a warning in the javascript console in this case.
+
+Finally, the webworker which does the streaming fetch is created on initial import, but will only be started once
+control is returned to javascript. Call `await wait_for_streaming_ready()` to wait for streaming fetch.
+
+NB: in this code, there are a lot of javascript objects. They are named js_*
+to make it clear what type of object they are.
+"""
+from __future__ import annotations
+
+import io
+import json
+from email.parser import Parser
+from importlib.resources import files
+from typing import TYPE_CHECKING, Any
+
+import js # type: ignore[import]
+from pyodide.ffi import JsArray, JsException, JsProxy, to_js # type: ignore[import]
+
+if TYPE_CHECKING:
+ from typing_extensions import Buffer
+
+from .request import EmscriptenRequest
+from .response import EmscriptenResponse
+
+"""
+There are some headers that trigger unintended CORS preflight requests.
+See also https://github.com/koenvo/pyodide-http/issues/22
+"""
+HEADERS_TO_IGNORE = ("user-agent",)
+
+SUCCESS_HEADER = -1
+SUCCESS_EOF = -2
+ERROR_TIMEOUT = -3
+ERROR_EXCEPTION = -4
+
+_STREAMING_WORKER_CODE = (
+ files(__package__)
+ .joinpath("emscripten_fetch_worker.js")
+ .read_text(encoding="utf-8")
+)
+
+
+class _RequestError(Exception):
+ def __init__(
+ self,
+ message: str | None = None,
+ *,
+ request: EmscriptenRequest | None = None,
+ response: EmscriptenResponse | None = None,
+ ):
+ self.request = request
+ self.response = response
+ self.message = message
+ super().__init__(self.message)
+
+
+class _StreamingError(_RequestError):
+ pass
+
+
+class _TimeoutError(_RequestError):
+ pass
+
+
+def _obj_from_dict(dict_val: dict[str, Any]) -> JsProxy:
+ return to_js(dict_val, dict_converter=js.Object.fromEntries)
+
+
+class _ReadStream(io.RawIOBase):
+ def __init__(
+ self,
+ int_buffer: JsArray,
+ byte_buffer: JsArray,
+ timeout: float,
+ worker: JsProxy,
+ connection_id: int,
+ request: EmscriptenRequest,
+ ):
+ self.int_buffer = int_buffer
+ self.byte_buffer = byte_buffer
+ self.read_pos = 0
+ self.read_len = 0
+ self.connection_id = connection_id
+ self.worker = worker
+ self.timeout = int(1000 * timeout) if timeout > 0 else None
+ self.is_live = True
+ self._is_closed = False
+ self.request: EmscriptenRequest | None = request
+
+ def __del__(self) -> None:
+ self.close()
+
+ # this is compatible with _base_connection
+ def is_closed(self) -> bool:
+ return self._is_closed
+
+ # for compatibility with RawIOBase
+ @property
+ def closed(self) -> bool:
+ return self.is_closed()
+
+ def close(self) -> None:
+ if not self.is_closed():
+ self.read_len = 0
+ self.read_pos = 0
+ self.int_buffer = None
+ self.byte_buffer = None
+ self._is_closed = True
+ self.request = None
+ if self.is_live:
+ self.worker.postMessage(_obj_from_dict({"close": self.connection_id}))
+ self.is_live = False
+ super().close()
+
+ def readable(self) -> bool:
+ return True
+
+ def writable(self) -> bool:
+ return False
+
+ def seekable(self) -> bool:
+ return False
+
+ def readinto(self, byte_obj: Buffer) -> int:
+ if not self.int_buffer:
+ raise _StreamingError(
+ "No buffer for stream in _ReadStream.readinto",
+ request=self.request,
+ response=None,
+ )
+ if self.read_len == 0:
+ # wait for the worker to send something
+ js.Atomics.store(self.int_buffer, 0, ERROR_TIMEOUT)
+ self.worker.postMessage(_obj_from_dict({"getMore": self.connection_id}))
+ if (
+ js.Atomics.wait(self.int_buffer, 0, ERROR_TIMEOUT, self.timeout)
+ == "timed-out"
+ ):
+ raise _TimeoutError
+ data_len = self.int_buffer[0]
+ if data_len > 0:
+ self.read_len = data_len
+ self.read_pos = 0
+ elif data_len == ERROR_EXCEPTION:
+ string_len = self.int_buffer[1]
+ # decode the error string
+ js_decoder = js.TextDecoder.new()
+ json_str = js_decoder.decode(self.byte_buffer.slice(0, string_len))
+ raise _StreamingError(
+ f"Exception thrown in fetch: {json_str}",
+ request=self.request,
+ response=None,
+ )
+ else:
+ # EOF, free the buffers and return zero
+ # and free the request
+ self.is_live = False
+ self.close()
+ return 0
+ # copy from int32array to python bytes
+ ret_length = min(self.read_len, len(memoryview(byte_obj)))
+ subarray = self.byte_buffer.subarray(
+ self.read_pos, self.read_pos + ret_length
+ ).to_py()
+ memoryview(byte_obj)[0:ret_length] = subarray
+ self.read_len -= ret_length
+ self.read_pos += ret_length
+ return ret_length
+
+
+class _StreamingFetcher:
+ def __init__(self) -> None:
+ # make web-worker and data buffer on startup
+ self.streaming_ready = False
+
+ js_data_blob = js.Blob.new(
+ [_STREAMING_WORKER_CODE], _obj_from_dict({"type": "application/javascript"})
+ )
+
+ def promise_resolver(js_resolve_fn: JsProxy, js_reject_fn: JsProxy) -> None:
+ def onMsg(e: JsProxy) -> None:
+ self.streaming_ready = True
+ js_resolve_fn(e)
+
+ def onErr(e: JsProxy) -> None:
+ js_reject_fn(e) # Defensive: never happens in ci
+
+ self.js_worker.onmessage = onMsg
+ self.js_worker.onerror = onErr
+
+ js_data_url = js.URL.createObjectURL(js_data_blob)
+ self.js_worker = js.globalThis.Worker.new(js_data_url)
+ self.js_worker_ready_promise = js.globalThis.Promise.new(promise_resolver)
+
+ def send(self, request: EmscriptenRequest) -> EmscriptenResponse:
+ headers = {
+ k: v for k, v in request.headers.items() if k not in HEADERS_TO_IGNORE
+ }
+
+ body = request.body
+ fetch_data = {"headers": headers, "body": to_js(body), "method": request.method}
+ # start the request off in the worker
+ timeout = int(1000 * request.timeout) if request.timeout > 0 else None
+ js_shared_buffer = js.SharedArrayBuffer.new(1048576)
+ js_int_buffer = js.Int32Array.new(js_shared_buffer)
+ js_byte_buffer = js.Uint8Array.new(js_shared_buffer, 8)
+
+ js.Atomics.store(js_int_buffer, 0, ERROR_TIMEOUT)
+ js.Atomics.notify(js_int_buffer, 0)
+ js_absolute_url = js.URL.new(request.url, js.location).href
+ self.js_worker.postMessage(
+ _obj_from_dict(
+ {
+ "buffer": js_shared_buffer,
+ "url": js_absolute_url,
+ "fetchParams": fetch_data,
+ }
+ )
+ )
+ # wait for the worker to send something
+ js.Atomics.wait(js_int_buffer, 0, ERROR_TIMEOUT, timeout)
+ if js_int_buffer[0] == ERROR_TIMEOUT:
+ raise _TimeoutError(
+ "Timeout connecting to streaming request",
+ request=request,
+ response=None,
+ )
+ elif js_int_buffer[0] == SUCCESS_HEADER:
+ # got response
+ # header length is in second int of intBuffer
+ string_len = js_int_buffer[1]
+ # decode the rest to a JSON string
+ js_decoder = js.TextDecoder.new()
+ # this does a copy (the slice) because decode can't work on shared array
+ # for some silly reason
+ json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len))
+ # get it as an object
+ response_obj = json.loads(json_str)
+ return EmscriptenResponse(
+ request=request,
+ status_code=response_obj["status"],
+ headers=response_obj["headers"],
+ body=_ReadStream(
+ js_int_buffer,
+ js_byte_buffer,
+ request.timeout,
+ self.js_worker,
+ response_obj["connectionID"],
+ request,
+ ),
+ )
+ elif js_int_buffer[0] == ERROR_EXCEPTION:
+ string_len = js_int_buffer[1]
+ # decode the error string
+ js_decoder = js.TextDecoder.new()
+ json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len))
+ raise _StreamingError(
+ f"Exception thrown in fetch: {json_str}", request=request, response=None
+ )
+ else:
+ raise _StreamingError(
+ f"Unknown status from worker in fetch: {js_int_buffer[0]}",
+ request=request,
+ response=None,
+ )
+
+
+# check if we are in a worker or not
+def is_in_browser_main_thread() -> bool:
+ return hasattr(js, "window") and hasattr(js, "self") and js.self == js.window
+
+
+def is_cross_origin_isolated() -> bool:
+ return hasattr(js, "crossOriginIsolated") and js.crossOriginIsolated
+
+
+def is_in_node() -> bool:
+ return (
+ hasattr(js, "process")
+ and hasattr(js.process, "release")
+ and hasattr(js.process.release, "name")
+ and js.process.release.name == "node"
+ )
+
+
+def is_worker_available() -> bool:
+ return hasattr(js, "Worker") and hasattr(js, "Blob")
+
+
+_fetcher: _StreamingFetcher | None = None
+
+if is_worker_available() and (
+ (is_cross_origin_isolated() and not is_in_browser_main_thread())
+ and (not is_in_node())
+):
+ _fetcher = _StreamingFetcher()
+else:
+ _fetcher = None
+
+
+def send_streaming_request(request: EmscriptenRequest) -> EmscriptenResponse | None:
+ if _fetcher and streaming_ready():
+ return _fetcher.send(request)
+ else:
+ _show_streaming_warning()
+ return None
+
+
+_SHOWN_TIMEOUT_WARNING = False
+
+
+def _show_timeout_warning() -> None:
+ global _SHOWN_TIMEOUT_WARNING
+ if not _SHOWN_TIMEOUT_WARNING:
+ _SHOWN_TIMEOUT_WARNING = True
+ message = "Warning: Timeout is not available on main browser thread"
+ js.console.warn(message)
+
+
+_SHOWN_STREAMING_WARNING = False
+
+
+def _show_streaming_warning() -> None:
+ global _SHOWN_STREAMING_WARNING
+ if not _SHOWN_STREAMING_WARNING:
+ _SHOWN_STREAMING_WARNING = True
+ message = "Can't stream HTTP requests because: \n"
+ if not is_cross_origin_isolated():
+ message += " Page is not cross-origin isolated\n"
+ if is_in_browser_main_thread():
+ message += " Python is running in main browser thread\n"
+ if not is_worker_available():
+ message += " Worker or Blob classes are not available in this environment." # Defensive: this is always False in browsers that we test in
+ if streaming_ready() is False:
+ message += """ Streaming fetch worker isn't ready. If you want to be sure that streaming fetch
+is working, you need to call: 'await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()`"""
+ from js import console
+
+ console.warn(message)
+
+
+def send_request(request: EmscriptenRequest) -> EmscriptenResponse:
+ try:
+ js_xhr = js.XMLHttpRequest.new()
+
+ if not is_in_browser_main_thread():
+ js_xhr.responseType = "arraybuffer"
+ if request.timeout:
+ js_xhr.timeout = int(request.timeout * 1000)
+ else:
+ js_xhr.overrideMimeType("text/plain; charset=ISO-8859-15")
+ if request.timeout:
+ # timeout isn't available on the main thread - show a warning in console
+ # if it is set
+ _show_timeout_warning()
+
+ js_xhr.open(request.method, request.url, False)
+ for name, value in request.headers.items():
+ if name.lower() not in HEADERS_TO_IGNORE:
+ js_xhr.setRequestHeader(name, value)
+
+ js_xhr.send(to_js(request.body))
+
+ headers = dict(Parser().parsestr(js_xhr.getAllResponseHeaders()))
+
+ if not is_in_browser_main_thread():
+ body = js_xhr.response.to_py().tobytes()
+ else:
+ body = js_xhr.response.encode("ISO-8859-15")
+ return EmscriptenResponse(
+ status_code=js_xhr.status, headers=headers, body=body, request=request
+ )
+ except JsException as err:
+ if err.name == "TimeoutError":
+ raise _TimeoutError(err.message, request=request)
+ elif err.name == "NetworkError":
+ raise _RequestError(err.message, request=request)
+ else:
+ # general http error
+ raise _RequestError(err.message, request=request)
+
+
+def streaming_ready() -> bool | None:
+ if _fetcher:
+ return _fetcher.streaming_ready
+ else:
+ return None # no fetcher, return None to signify that
+
+
+async def wait_for_streaming_ready() -> bool:
+ if _fetcher:
+ await _fetcher.js_worker_ready_promise
+ return True
+ else:
+ return False
diff --git a/src/urllib3/contrib/emscripten/request.py b/src/urllib3/contrib/emscripten/request.py
new file mode 100644
--- /dev/null
+++ b/src/urllib3/contrib/emscripten/request.py
@@ -0,0 +1,22 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+
+from ..._base_connection import _TYPE_BODY
+
+
+@dataclass
+class EmscriptenRequest:
+ method: str
+ url: str
+ params: dict[str, str] | None = None
+ body: _TYPE_BODY | None = None
+ headers: dict[str, str] = field(default_factory=dict)
+ timeout: float = 0
+ decode_content: bool = True
+
+ def set_header(self, name: str, value: str) -> None:
+ self.headers[name.capitalize()] = value
+
+ def set_body(self, body: _TYPE_BODY | None) -> None:
+ self.body = body
diff --git a/src/urllib3/contrib/emscripten/response.py b/src/urllib3/contrib/emscripten/response.py
new file mode 100644
--- /dev/null
+++ b/src/urllib3/contrib/emscripten/response.py
@@ -0,0 +1,276 @@
+from __future__ import annotations
+
+import json as _json
+import logging
+import typing
+from contextlib import contextmanager
+from dataclasses import dataclass
+from http.client import HTTPException as HTTPException
+from io import BytesIO, IOBase
+
+from ...exceptions import InvalidHeader, TimeoutError
+from ...response import BaseHTTPResponse
+from ...util.retry import Retry
+from .request import EmscriptenRequest
+
+if typing.TYPE_CHECKING:
+ from ..._base_connection import BaseHTTPConnection, BaseHTTPSConnection
+
+log = logging.getLogger(__name__)
+
+
+@dataclass
+class EmscriptenResponse:
+ status_code: int
+ headers: dict[str, str]
+ body: IOBase | bytes
+ request: EmscriptenRequest
+
+
+class EmscriptenHttpResponseWrapper(BaseHTTPResponse):
+ def __init__(
+ self,
+ internal_response: EmscriptenResponse,
+ url: str | None = None,
+ connection: BaseHTTPConnection | BaseHTTPSConnection | None = None,
+ ):
+ self._pool = None # set by pool class
+ self._body = None
+ self._response = internal_response
+ self._url = url
+ self._connection = connection
+ self._closed = False
+ super().__init__(
+ headers=internal_response.headers,
+ status=internal_response.status_code,
+ request_url=url,
+ version=0,
+ reason="",
+ decode_content=True,
+ )
+ self.length_remaining = self._init_length(self._response.request.method)
+ self.length_is_certain = False
+
+ @property
+ def url(self) -> str | None:
+ return self._url
+
+ @url.setter
+ def url(self, url: str | None) -> None:
+ self._url = url
+
+ @property
+ def connection(self) -> BaseHTTPConnection | BaseHTTPSConnection | None:
+ return self._connection
+
+ @property
+ def retries(self) -> Retry | None:
+ return self._retries
+
+ @retries.setter
+ def retries(self, retries: Retry | None) -> None:
+ # Override the request_url if retries has a redirect location.
+ self._retries = retries
+
+ def stream(
+ self, amt: int | None = 2**16, decode_content: bool | None = None
+ ) -> typing.Generator[bytes, None, None]:
+ """
+ A generator wrapper for the read() method. A call will block until
+ ``amt`` bytes have been read from the connection or until the
+ connection is closed.
+
+ :param amt:
+ How much of the content to read. The generator will return up to
+            this much data per iteration, but may return less. This is particularly
+ likely when using compressed data. However, the empty string will
+ never be returned.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ while True:
+ data = self.read(amt=amt, decode_content=decode_content)
+
+ if data:
+ yield data
+ else:
+ break
+
+ def _init_length(self, request_method: str | None) -> int | None:
+ length: int | None
+ content_length: str | None = self.headers.get("content-length")
+
+ if content_length is not None:
+ try:
+ # RFC 7230 section 3.3.2 specifies multiple content lengths can
+ # be sent in a single Content-Length header
+ # (e.g. Content-Length: 42, 42). This line ensures the values
+ # are all valid ints and that as long as the `set` length is 1,
+ # all values are the same. Otherwise, the header is invalid.
+ lengths = {int(val) for val in content_length.split(",")}
+ if len(lengths) > 1:
+ raise InvalidHeader(
+ "Content-Length contained multiple "
+ "unmatching values (%s)" % content_length
+ )
+ length = lengths.pop()
+ except ValueError:
+ length = None
+ else:
+ if length < 0:
+ length = None
+
+ else: # if content_length is None
+ length = None
+
+ # Check for responses that shouldn't include a body
+ if (
+ self.status in (204, 304)
+ or 100 <= self.status < 200
+ or request_method == "HEAD"
+ ):
+ length = 0
+
+ return length
+
+ def read(
+ self,
+ amt: int | None = None,
+ decode_content: bool | None = None, # ignored because browser decodes always
+ cache_content: bool = False,
+ ) -> bytes:
+ if (
+ self._closed
+ or self._response is None
+ or (isinstance(self._response.body, IOBase) and self._response.body.closed)
+ ):
+ return b""
+
+ with self._error_catcher():
+ # body has been preloaded as a string by XmlHttpRequest
+ if not isinstance(self._response.body, IOBase):
+ self.length_remaining = len(self._response.body)
+ self.length_is_certain = True
+ # wrap body in IOStream
+ self._response.body = BytesIO(self._response.body)
+ if amt is not None:
+ # don't cache partial content
+ cache_content = False
+ data = self._response.body.read(amt)
+ if self.length_remaining is not None:
+ self.length_remaining = max(self.length_remaining - len(data), 0)
+ if (self.length_is_certain and self.length_remaining == 0) or len(
+ data
+ ) < amt:
+ # definitely finished reading, close response stream
+ self._response.body.close()
+ return typing.cast(bytes, data)
+ else: # read all we can (and cache it)
+ data = self._response.body.read()
+ if cache_content:
+ self._body = data
+ if self.length_remaining is not None:
+ self.length_remaining = max(self.length_remaining - len(data), 0)
+ if len(data) == 0 or (
+ self.length_is_certain and self.length_remaining == 0
+ ):
+ # definitely finished reading, close response stream
+ self._response.body.close()
+ return typing.cast(bytes, data)
+
+ def read_chunked(
+ self,
+ amt: int | None = None,
+ decode_content: bool | None = None,
+ ) -> typing.Generator[bytes, None, None]:
+ # chunked is handled by browser
+ while True:
+ bytes = self.read(amt, decode_content)
+ if not bytes:
+ break
+ yield bytes
+
+ def release_conn(self) -> None:
+ if not self._pool or not self._connection:
+ return None
+
+ self._pool._put_conn(self._connection)
+ self._connection = None
+
+ def drain_conn(self) -> None:
+ self.close()
+
+ @property
+ def data(self) -> bytes:
+ if self._body:
+ return self._body
+ else:
+ return self.read(cache_content=True)
+
+ def json(self) -> typing.Any:
+ """
+ Parses the body of the HTTP response as JSON.
+
+ To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder.
+
+ This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`.
+
+ Read more :ref:`here <json>`.
+ """
+ data = self.data.decode("utf-8")
+ return _json.loads(data)
+
+ def close(self) -> None:
+ if not self._closed:
+ if isinstance(self._response.body, IOBase):
+ self._response.body.close()
+ if self._connection:
+ self._connection.close()
+ self._connection = None
+ self._closed = True
+
+ @contextmanager
+ def _error_catcher(self) -> typing.Generator[None, None, None]:
+ """
+ Catch Emscripten specific exceptions thrown by fetch.py,
+ instead re-raising urllib3 variants, so that low-level exceptions
+ are not leaked in the high-level api.
+
+ On exit, release the connection back to the pool.
+ """
+ from .fetch import _RequestError, _TimeoutError # avoid circular import
+
+ clean_exit = False
+
+ try:
+ yield
+ # If no exception is thrown, we should avoid cleaning up
+ # unnecessarily.
+ clean_exit = True
+ except _TimeoutError as e:
+ raise TimeoutError(str(e))
+ except _RequestError as e:
+ raise HTTPException(str(e))
+ finally:
+ # If we didn't terminate cleanly, we need to throw away our
+ # connection.
+ if not clean_exit:
+ # The response may not be closed but we're not going to use it
+ # anymore so close it now
+ if (
+ isinstance(self._response.body, IOBase)
+ and not self._response.body.closed
+ ):
+ self._response.body.close()
+ # release the connection back to the pool
+ self.release_conn()
+ else:
+ # If we have read everything from the response stream,
+ # return the connection back to the pool.
+ if (
+ isinstance(self._response.body, IOBase)
+ and self._response.body.closed
+ ):
+ self.release_conn()
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -14,6 +14,9 @@
from http.client import HTTPResponse as _HttplibHTTPResponse
from socket import timeout as SocketTimeout
+if typing.TYPE_CHECKING:
+ from ._base_connection import BaseHTTPConnection
+
try:
try:
import brotlicffi as brotli # type: ignore[import]
@@ -379,7 +382,7 @@ def url(self, url: str | None) -> None:
raise NotImplementedError()
@property
- def connection(self) -> HTTPConnection | None:
+ def connection(self) -> BaseHTTPConnection | None:
raise NotImplementedError()
@property
| diff --git a/test/contrib/emscripten/__init__.py b/test/contrib/emscripten/__init__.py
new file mode 100644
diff --git a/test/contrib/emscripten/conftest.py b/test/contrib/emscripten/conftest.py
new file mode 100644
--- /dev/null
+++ b/test/contrib/emscripten/conftest.py
@@ -0,0 +1,269 @@
+from __future__ import annotations
+
+import asyncio
+import contextlib
+import mimetypes
+import os
+import random
+import textwrap
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any, Generator
+from urllib.parse import urlsplit
+
+import pytest
+from tornado import web
+from tornado.httputil import HTTPServerRequest
+
+from dummyserver.handlers import Response, TestingApp
+from dummyserver.testcase import HTTPDummyProxyTestCase
+from dummyserver.tornadoserver import run_tornado_app, run_tornado_loop_in_thread
+
+_coverage_count = 0
+
+
+def _get_coverage_filename(prefix: str) -> str:
+ global _coverage_count
+ _coverage_count += 1
+ rand_part = "".join([random.choice("1234567890") for x in range(20)])
+ return prefix + rand_part + f".{_coverage_count}"
+
+
+@pytest.fixture(scope="module")
+def testserver_http(
+ request: pytest.FixtureRequest,
+) -> Generator[PyodideServerInfo, None, None]:
+ dist_dir = Path(os.getcwd(), request.config.getoption("--dist-dir"))
+ server = PyodideDummyServerTestCase
+ server.setup_class(str(dist_dir))
+ print(
+ f"Server:{server.http_host}:{server.http_port},https({server.https_port}) [{dist_dir}]"
+ )
+ yield PyodideServerInfo(
+ http_host=server.http_host,
+ http_port=server.http_port,
+ https_port=server.https_port,
+ )
+ print("Server teardown")
+ server.teardown_class()
+
+
+@pytest.fixture()
+def selenium_coverage(selenium: Any) -> Generator[Any, None, None]:
+ def _install_coverage(self: Any) -> None:
+ self.run_js(
+ """
+ await pyodide.loadPackage("coverage")
+ await pyodide.runPythonAsync(`import coverage
+_coverage= coverage.Coverage(source_pkgs=['urllib3'])
+_coverage.start()
+ `
+ )"""
+ )
+
+ setattr(
+ selenium,
+ "_install_coverage",
+ _install_coverage.__get__(selenium, selenium.__class__),
+ )
+ selenium._install_coverage()
+ yield selenium
+ # on teardown, save _coverage output
+ coverage_out_binary = bytes(
+ selenium.run_js(
+ """
+return await pyodide.runPythonAsync(`
+_coverage.stop()
+_coverage.save()
+_coverage_datafile = open(".coverage","rb")
+_coverage_outdata = _coverage_datafile.read()
+# avoid polluting main namespace too much
+import js as _coverage_js
+# convert to js Array (as default conversion is TypedArray which does
+# bad things in firefox)
+_coverage_js.Array.from_(_coverage_outdata)
+`)
+ """
+ )
+ )
+ with open(f"{_get_coverage_filename('.coverage.emscripten.')}", "wb") as outfile:
+ outfile.write(coverage_out_binary)
+
+
+class ServerRunnerInfo:
+ def __init__(self, host: str, port: int, selenium: Any) -> None:
+ self.host = host
+ self.port = port
+ self.selenium = selenium
+
+ def run_webworker(self, code: str) -> Any:
+ if isinstance(code, str) and code.startswith("\n"):
+ # we have a multiline string, fix indentation
+ code = textwrap.dedent(code)
+ # add coverage collection to this code
+ code = (
+ textwrap.dedent(
+ """
+ import coverage
+ _coverage= coverage.Coverage(source_pkgs=['urllib3'])
+ _coverage.start()
+ """
+ )
+ + code
+ )
+ code += textwrap.dedent(
+ """
+ _coverage.stop()
+ _coverage.save()
+ _coverage_datafile = open(".coverage","rb")
+ _coverage_outdata = _coverage_datafile.read()
+ # avoid polluting main namespace too much
+ import js as _coverage_js
+ # convert to js Array (as default conversion is TypedArray which does
+ # bad things in firefox)
+ _coverage_js.Array.from_(_coverage_outdata)
+ """
+ )
+
+ coverage_out_binary = bytes(
+ self.selenium.run_js(
+ f"""
+ let worker = new Worker('https://{self.host}:{self.port}/pyodide/webworker_dev.js');
+ let p = new Promise((res, rej) => {{
+ worker.onmessageerror = e => rej(e);
+ worker.onerror = e => rej(e);
+ worker.onmessage = e => {{
+ if (e.data.results) {{
+ res(e.data.results);
+ }} else {{
+ rej(e.data.error);
+ }}
+ }};
+ worker.postMessage({{ python: {repr(code)} }});
+ }});
+ return await p;
+ """,
+ pyodide_checks=False,
+ )
+ )
+ with open(
+ f"{_get_coverage_filename('.coverage.emscripten.worker.')}", "wb"
+ ) as outfile:
+ outfile.write(coverage_out_binary)
+
+
+# run pyodide on our test server instead of on the default
+# pytest-pyodide one - this makes it so that
+# we are at the same origin as web requests to server_host
+@pytest.fixture()
+def run_from_server(
+ selenium_coverage: Any, testserver_http: PyodideServerInfo
+) -> Generator[ServerRunnerInfo, None, None]:
+ addr = f"https://{testserver_http.http_host}:{testserver_http.https_port}/pyodide/test.html"
+ selenium_coverage.goto(addr)
+ selenium_coverage.javascript_setup()
+ selenium_coverage.load_pyodide()
+ selenium_coverage.initialize_pyodide()
+ selenium_coverage.save_state()
+ selenium_coverage.restore_state()
+ # install the wheel, which is served at /wheel/*
+ selenium_coverage.run_js(
+ """
+await pyodide.loadPackage('/wheel/dist.whl')
+"""
+ )
+ selenium_coverage._install_coverage()
+ yield ServerRunnerInfo(
+ testserver_http.http_host, testserver_http.https_port, selenium_coverage
+ )
+
+
+class PyodideTestingApp(TestingApp):
+ pyodide_dist_dir: str = ""
+
+ def set_default_headers(self) -> None:
+ """Allow cross-origin requests for emscripten"""
+ self.set_header("Access-Control-Allow-Origin", "*")
+ self.set_header("Cross-Origin-Opener-Policy", "same-origin")
+ self.set_header("Cross-Origin-Embedder-Policy", "require-corp")
+ self.add_header("Feature-Policy", "sync-xhr *;")
+ self.add_header("Access-Control-Allow-Headers", "*")
+
+ def slow(self, _req: HTTPServerRequest) -> Response:
+ import time
+
+ time.sleep(10)
+ return Response("TEN SECONDS LATER")
+
+ def bigfile(self, req: HTTPServerRequest) -> Response:
+ # great big text file, should force streaming
+ # if supported
+ bigdata = 1048576 * b"WOOO YAY BOOYAKAH"
+ return Response(bigdata)
+
+ def mediumfile(self, req: HTTPServerRequest) -> Response:
+ # quite big file
+ bigdata = 1024 * b"WOOO YAY BOOYAKAH"
+ return Response(bigdata)
+
+ def pyodide(self, req: HTTPServerRequest) -> Response:
+ path = req.path[:]
+ if not path.startswith("/"):
+ path = urlsplit(path).path
+ path_split = path.split("/")
+ file_path = Path(PyodideTestingApp.pyodide_dist_dir, *path_split[2:])
+ if file_path.exists():
+ mime_type, encoding = mimetypes.guess_type(file_path)
+ if not mime_type:
+ mime_type = "text/plain"
+ self.set_header("Content-Type", mime_type)
+ return Response(
+ body=file_path.read_bytes(),
+ headers=[("Access-Control-Allow-Origin", "*")],
+ )
+ else:
+ return Response(status="404 NOT FOUND")
+
+ def wheel(self, _req: HTTPServerRequest) -> Response:
+ # serve our wheel
+ wheel_folder = Path(__file__).parent.parent.parent.parent / "dist"
+ wheels = list(wheel_folder.glob("*.whl"))
+ if len(wheels) > 0:
+ resp = Response(
+ body=wheels[0].read_bytes(),
+ headers=[
+ ("Content-Disposition", f"inline; filename='{wheels[0].name}'")
+ ],
+ )
+ return resp
+ else:
+ return Response(status="404 NOT FOUND")
+
+
+class PyodideDummyServerTestCase(HTTPDummyProxyTestCase):
+ @classmethod
+ def setup_class(cls, pyodide_dist_dir: str) -> None: # type:ignore[override]
+ PyodideTestingApp.pyodide_dist_dir = pyodide_dist_dir
+ with contextlib.ExitStack() as stack:
+ io_loop = stack.enter_context(run_tornado_loop_in_thread())
+
+ async def run_app() -> None:
+ app = web.Application([(r".*", PyodideTestingApp)])
+ cls.http_server, cls.http_port = run_tornado_app(
+ app, None, "http", cls.http_host
+ )
+
+ app = web.Application([(r".*", PyodideTestingApp)])
+ cls.https_server, cls.https_port = run_tornado_app(
+ app, cls.https_certs, "https", cls.http_host
+ )
+
+ asyncio.run_coroutine_threadsafe(run_app(), io_loop.asyncio_loop).result() # type: ignore[attr-defined]
+ cls._stack = stack.pop_all()
+
+
+@dataclass
+class PyodideServerInfo:
+ http_port: int
+ https_port: int
+ http_host: str
diff --git a/test/contrib/emscripten/test_emscripten.py b/test/contrib/emscripten/test_emscripten.py
new file mode 100644
--- /dev/null
+++ b/test/contrib/emscripten/test_emscripten.py
@@ -0,0 +1,948 @@
+from __future__ import annotations
+
+import sys
+import typing
+
+import pytest
+
+from urllib3.fields import _TYPE_FIELD_VALUE_TUPLE
+
+from ...port_helpers import find_unused_port
+
+if sys.version_info < (3, 11):
+ # pyodide only works on 3.11+
+ pytest.skip(allow_module_level=True)
+
+# only run these tests if pytest_pyodide is installed
+# so we don't break non-emscripten pytest running
+pytest_pyodide = pytest.importorskip("pytest_pyodide")
+
+from pytest_pyodide import run_in_pyodide # type: ignore[import] # noqa: E402
+from pytest_pyodide.decorator import ( # type: ignore[import] # noqa: E402
+ copy_files_to_pyodide,
+)
+
+from .conftest import PyodideServerInfo, ServerRunnerInfo # noqa: E402
+
+# make our ssl certificates work in chrome
+pytest_pyodide.runner.CHROME_FLAGS.append("ignore-certificate-errors")
+
+
+# copy our wheel file to pyodide and install it
+def install_urllib3_wheel() -> (
+ typing.Callable[
+ [typing.Callable[..., typing.Any]], typing.Callable[..., typing.Any]
+ ]
+):
+ return copy_files_to_pyodide( # type: ignore[no-any-return]
+ file_list=[("dist/*.whl", "/tmp")], install_wheels=True
+ )
+
+
+@install_urllib3_wheel()
+def test_index(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ from urllib3.connection import HTTPConnection
+ from urllib3.response import BaseHTTPResponse
+
+ conn = HTTPConnection(host, port)
+ url = f"http://{host}:{port}/"
+ conn.request("GET", url)
+ response = conn.getresponse()
+ # check methods of response
+ assert isinstance(response, BaseHTTPResponse)
+ assert response.url == url
+ response.url = "http://woo"
+ assert response.url == "http://woo"
+ assert response.connection == conn
+ assert response.retries is None
+ data1 = response.data
+ decoded1 = data1.decode("utf-8")
+ data2 = response.data # check that getting data twice works
+ decoded2 = data2.decode("utf-8")
+ assert decoded1 == decoded2 == "Dummy server!"
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.http_port
+ )
+
+
+@install_urllib3_wheel()
+def test_pool_requests(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int, https_port: int) -> None: # type: ignore[no-untyped-def]
+ # first with PoolManager
+ import urllib3
+
+ http = urllib3.PoolManager()
+ resp = http.request("GET", f"http://{host}:{port}/")
+ assert resp.data.decode("utf-8") == "Dummy server!"
+
+ resp2 = http.request("GET", f"http://{host}:{port}/index")
+ assert resp2.data.decode("utf-8") == "Dummy server!"
+
+ # should all have come from one pool
+ assert len(http.pools) == 1
+
+ resp3 = http.request("GET", f"https://{host}:{https_port}/")
+        assert resp3.data.decode("utf-8") == "Dummy server!"
+
+ # one http pool + one https pool
+ assert len(http.pools) == 2
+
+ # now with ConnectionPool
+ # because block == True, this will fail if the connection isn't
+ # returned to the pool correctly after the first request
+ pool = urllib3.HTTPConnectionPool(host, port, maxsize=1, block=True)
+ resp3 = pool.urlopen("GET", "/index")
+ assert resp3.data.decode("utf-8") == "Dummy server!"
+
+ resp4 = pool.urlopen("GET", "/")
+ assert resp4.data.decode("utf-8") == "Dummy server!"
+
+ # now with manual release of connection
+ # first - connection should be released once all
+ # data is read
+ pool2 = urllib3.HTTPConnectionPool(host, port, maxsize=1, block=True)
+
+ resp5 = pool2.urlopen("GET", "/index", preload_content=False)
+ assert pool2.pool is not None
+ # at this point, the connection should not be in the pool
+ assert pool2.pool.qsize() == 0
+ assert resp5.data.decode("utf-8") == "Dummy server!"
+ # now we've read all the data, connection should be back to the pool
+ assert pool2.pool.qsize() == 1
+ resp6 = pool2.urlopen("GET", "/index", preload_content=False)
+ assert pool2.pool.qsize() == 0
+ # force it back to the pool
+ resp6.release_conn()
+ assert pool2.pool.qsize() == 1
+ read_str = resp6.read()
+ # for consistency with urllib3, this still returns the correct data even though
+ # we are in theory not using the connection any more
+ assert read_str.decode("utf-8") == "Dummy server!"
+
+ pyodide_test(
+ selenium_coverage,
+ testserver_http.http_host,
+ testserver_http.http_port,
+ testserver_http.https_port,
+ )
+
+
+# wrong protocol / protocol error etc. should raise an exception of http.client.HTTPException
+@install_urllib3_wheel()
+def test_wrong_protocol(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ import http.client
+
+ import pytest
+
+ from urllib3.connection import HTTPConnection
+
+ conn = HTTPConnection(host, port)
+ with pytest.raises(http.client.HTTPException):
+ conn.request("GET", f"http://{host}:{port}/")
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.https_port
+ )
+
+
+# wrong protocol / protocol error etc. should raise an exception of http.client.HTTPException
+@install_urllib3_wheel()
+def test_bad_method(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide(packages=("pytest",)) # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ import http.client
+
+ import pytest
+
+ from urllib3.connection import HTTPConnection
+
+ conn = HTTPConnection(host, port)
+ with pytest.raises(http.client.HTTPException):
+ conn.request("TRACE", f"http://{host}:{port}/")
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.https_port
+ )
+
+
+# no connection - should raise
+@install_urllib3_wheel()
+def test_no_response(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide(packages=("pytest",)) # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ import http.client
+
+ import pytest
+
+ from urllib3.connection import HTTPConnection
+
+ conn = HTTPConnection(host, port)
+ with pytest.raises(http.client.HTTPException):
+ conn.request("GET", f"http://{host}:{port}/")
+ _ = conn.getresponse()
+
+ pyodide_test(selenium_coverage, testserver_http.http_host, find_unused_port())
+
+
+@install_urllib3_wheel()
+def test_404(selenium_coverage: typing.Any, testserver_http: PyodideServerInfo) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ from urllib3.connection import HTTPConnection
+ from urllib3.response import BaseHTTPResponse
+
+ conn = HTTPConnection(host, port)
+ conn.request("GET", f"http://{host}:{port}/status?status=404 NOT FOUND")
+ response = conn.getresponse()
+ assert isinstance(response, BaseHTTPResponse)
+ assert response.status == 404
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.http_port
+ )
+
+
+# setting timeout should show a warning to js console
+# if we're on the ui thread, because XMLHttpRequest doesn't
+# support timeout for synchronous requests when globalThis == Window
+@install_urllib3_wheel()
+def test_timeout_warning(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide() # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ import js # type: ignore[import]
+
+ import urllib3.contrib.emscripten.fetch
+ from urllib3.connection import HTTPConnection
+
+ old_log = js.console.warn
+ log_msgs = []
+
+ def capture_log(*args): # type: ignore[no-untyped-def]
+ log_msgs.append(str(args))
+ old_log(*args)
+
+ js.console.warn = capture_log
+
+ conn = HTTPConnection(host, port, timeout=1.0)
+ conn.request("GET", f"http://{host}:{port}/")
+ conn.getresponse()
+ js.console.warn = old_log
+ # should have shown timeout warning exactly once by now
+ assert len([x for x in log_msgs if x.find("Warning: Timeout") != -1]) == 1
+ assert urllib3.contrib.emscripten.fetch._SHOWN_TIMEOUT_WARNING
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.http_port
+ )
+
+
+@install_urllib3_wheel()
+def test_timeout_in_worker_non_streaming(
+ selenium_coverage: typing.Any,
+ testserver_http: PyodideServerInfo,
+ run_from_server: ServerRunnerInfo,
+) -> None:
+ worker_code = f"""
+ import pyodide_js as pjs
+ await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False)
+ from urllib3.exceptions import TimeoutError
+ from urllib3.connection import HTTPConnection
+ conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port},timeout=1.0)
+ result=-1
+ try:
+ conn.request("GET","/slow")
+ _response = conn.getresponse()
+ result=-3
+ except TimeoutError as e:
+ result=1 # we've got the correct exception
+ except BaseException as e:
+ result=-2
+ assert result == 1
+"""
+ run_from_server.run_webworker(worker_code)
+
+
+@install_urllib3_wheel()
+def test_timeout_in_worker_streaming(
+ selenium_coverage: typing.Any,
+ testserver_http: PyodideServerInfo,
+ run_from_server: ServerRunnerInfo,
+) -> None:
+ worker_code = f"""
+ import pyodide_js as pjs
+ await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False)
+ import urllib3.contrib.emscripten.fetch
+ await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()
+ from urllib3.exceptions import TimeoutError
+ from urllib3.connection import HTTPConnection
+ conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port},timeout=1.0)
+ result=-1
+ try:
+ conn.request("GET","/slow",preload_content=False)
+ _response = conn.getresponse()
+ result=-3
+ except TimeoutError as e:
+ result=1 # we've got the correct exception
+ except BaseException as e:
+ result=-2
+ assert result == 1
+"""
+ run_from_server.run_webworker(worker_code)
+
+
+@install_urllib3_wheel()
+def test_index_https(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ from urllib3.connection import HTTPSConnection
+ from urllib3.response import BaseHTTPResponse
+
+ conn = HTTPSConnection(host, port)
+ conn.request("GET", f"https://{host}:{port}/")
+ response = conn.getresponse()
+ assert isinstance(response, BaseHTTPResponse)
+ data = response.data
+ assert data.decode("utf-8") == "Dummy server!"
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.https_port
+ )
+
+
+@install_urllib3_wheel()
+def test_non_streaming_no_fallback_warning(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ import js
+
+ import urllib3.contrib.emscripten.fetch
+ from urllib3.connection import HTTPSConnection
+ from urllib3.response import BaseHTTPResponse
+
+ log_msgs = []
+ old_log = js.console.warn
+
+ def capture_log(*args): # type: ignore[no-untyped-def]
+ log_msgs.append(str(args))
+ old_log(*args)
+
+ js.console.warn = capture_log
+ conn = HTTPSConnection(host, port)
+ conn.request("GET", f"https://{host}:{port}/", preload_content=True)
+ response = conn.getresponse()
+ js.console.warn = old_log
+ assert isinstance(response, BaseHTTPResponse)
+ data = response.data
+ assert data.decode("utf-8") == "Dummy server!"
+ # no console warnings because we didn't ask it to stream the response
+ # check no log messages
+ assert (
+ len([x for x in log_msgs if x.find("Can't stream HTTP requests") != -1])
+ == 0
+ )
+ assert not urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.https_port
+ )
+
+
+@install_urllib3_wheel()
+def test_streaming_fallback_warning(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ import js
+
+ import urllib3.contrib.emscripten.fetch
+ from urllib3.connection import HTTPSConnection
+ from urllib3.response import BaseHTTPResponse
+
+ # monkeypatch is_cross_origin_isolated so that it warns about that
+ # even if we're serving it so it is fine
+ urllib3.contrib.emscripten.fetch.is_cross_origin_isolated = lambda: False
+
+ log_msgs = []
+ old_log = js.console.warn
+
+ def capture_log(*args): # type: ignore[no-untyped-def]
+ log_msgs.append(str(args))
+ old_log(*args)
+
+ js.console.warn = capture_log
+
+ conn = HTTPSConnection(host, port)
+ conn.request("GET", f"https://{host}:{port}/", preload_content=False)
+ response = conn.getresponse()
+ js.console.warn = old_log
+ assert isinstance(response, BaseHTTPResponse)
+ data = response.data
+ assert data.decode("utf-8") == "Dummy server!"
+ # check that it has warned about falling back to non-streaming fetch exactly once
+ assert (
+ len([x for x in log_msgs if x.find("Can't stream HTTP requests") != -1])
+ == 1
+ )
+ assert urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.https_port
+ )
+
+
+@install_urllib3_wheel()
+def test_specific_method(
+ selenium_coverage: typing.Any,
+ testserver_http: PyodideServerInfo,
+ run_from_server: ServerRunnerInfo,
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ from urllib3 import HTTPSConnectionPool
+
+ with HTTPSConnectionPool(host, port) as pool:
+ path = "/specific_method?method=POST"
+ response = pool.request("POST", path)
+ assert response.status == 200
+
+ response = pool.request("PUT", path)
+ assert response.status == 400
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.https_port
+ )
+
+
+@install_urllib3_wheel()
+def test_streaming_download(
+ selenium_coverage: typing.Any,
+ testserver_http: PyodideServerInfo,
+ run_from_server: ServerRunnerInfo,
+) -> None:
+ # test streaming download, which must be in a webworker
+ # as you can't do it on main thread
+
+ # this should return the 17mb big file, and
+ # should not log any warning about falling back
+ bigfile_url = (
+ f"http://{testserver_http.http_host}:{testserver_http.http_port}/bigfile"
+ )
+ worker_code = f"""
+ import pyodide_js as pjs
+ await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False)
+
+ import urllib3.contrib.emscripten.fetch
+ await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()
+ from urllib3.response import BaseHTTPResponse
+ from urllib3.connection import HTTPConnection
+
+ conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port})
+ conn.request("GET", "{bigfile_url}",preload_content=False)
+ response = conn.getresponse()
+ assert isinstance(response, BaseHTTPResponse)
+ assert urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING==False
+ data=response.data.decode('utf-8')
+ assert len(data) == 17825792
+"""
+ run_from_server.run_webworker(worker_code)
+
+
+@install_urllib3_wheel()
+def test_streaming_close(
+ selenium_coverage: typing.Any,
+ testserver_http: PyodideServerInfo,
+ run_from_server: ServerRunnerInfo,
+) -> None:
+ # test streaming download, which must be in a webworker
+ # as you can't do it on main thread
+
+    # this fetches the index page as a stream and closes it early,
+    # checking that the body behaves like a non-seekable RawIOBase
+ url = f"http://{testserver_http.http_host}:{testserver_http.http_port}/"
+ worker_code = f"""
+ import pyodide_js as pjs
+ await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False)
+
+ import urllib3.contrib.emscripten.fetch
+ await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()
+ from urllib3.response import BaseHTTPResponse
+ from urllib3.connection import HTTPConnection
+ from io import RawIOBase
+
+ conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port})
+ conn.request("GET", "{url}",preload_content=False)
+ response = conn.getresponse()
+ # check body is a RawIOBase stream and isn't seekable, writeable
+ body_internal = response._response.body
+ assert(isinstance(body_internal,RawIOBase))
+ assert(body_internal.writable() is False)
+ assert(body_internal.seekable() is False)
+ assert(body_internal.readable() is True)
+ response.drain_conn()
+ x=response.read()
+ assert(not x)
+ response.close()
+ conn.close()
+ # try and make destructor be covered
+ # by killing everything
+ del response
+ del body_internal
+ del conn
+"""
+ run_from_server.run_webworker(worker_code)
+
+
+@install_urllib3_wheel()
+def test_streaming_bad_url(
+ selenium_coverage: typing.Any,
+ testserver_http: PyodideServerInfo,
+ run_from_server: ServerRunnerInfo,
+) -> None:
+ # this should cause an error
+ # because the protocol is bad
+ bad_url = f"hsffsdfttp://{testserver_http.http_host}:{testserver_http.http_port}/"
+ # this must be in a webworker
+ # as you can't do it on main thread
+ worker_code = f"""
+ import pytest
+ import pyodide_js as pjs
+ await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False)
+ import http.client
+ import urllib3.contrib.emscripten.fetch
+ await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()
+ from urllib3.response import BaseHTTPResponse
+ from urllib3.connection import HTTPConnection
+
+ conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port})
+ with pytest.raises(http.client.HTTPException):
+ conn.request("GET", "{bad_url}",preload_content=False)
+"""
+ run_from_server.run_webworker(worker_code)
+
+
+@install_urllib3_wheel()
+def test_streaming_bad_method(
+ selenium_coverage: typing.Any,
+ testserver_http: PyodideServerInfo,
+ run_from_server: ServerRunnerInfo,
+) -> None:
+ # this should cause an error
+    # because the TRACE method is rejected by the browser
+ bad_url = f"http://{testserver_http.http_host}:{testserver_http.http_port}/"
+ # this must be in a webworker
+ # as you can't do it on main thread
+ worker_code = f"""
+ import pytest
+ import http.client
+ import pyodide_js as pjs
+ await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False)
+
+ import urllib3.contrib.emscripten.fetch
+ await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()
+ from urllib3.response import BaseHTTPResponse
+ from urllib3.connection import HTTPConnection
+
+ conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port})
+ with pytest.raises(http.client.HTTPException):
+ # TRACE method should throw SecurityError in Javascript
+ conn.request("TRACE", "{bad_url}",preload_content=False)
+"""
+ run_from_server.run_webworker(worker_code)
+
+
+@install_urllib3_wheel()
+def test_streaming_notready_warning(
+ selenium_coverage: typing.Any,
+ testserver_http: PyodideServerInfo,
+ run_from_server: ServerRunnerInfo,
+) -> None:
+ # test streaming download but don't wait for
+ # worker to be ready - should fallback to non-streaming
+ # and log a warning
+ file_url = f"http://{testserver_http.http_host}:{testserver_http.http_port}/"
+ worker_code = f"""
+ import pyodide_js as pjs
+ await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False)
+ import js
+ import urllib3
+ from urllib3.response import BaseHTTPResponse
+ from urllib3.connection import HTTPConnection
+
+ log_msgs=[]
+ old_log=js.console.warn
+ def capture_log(*args):
+ log_msgs.append(str(args))
+ old_log(*args)
+ js.console.warn=capture_log
+
+ conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port})
+ conn.request("GET", "{file_url}",preload_content=False)
+ js.console.warn=old_log
+ response = conn.getresponse()
+ assert isinstance(response, BaseHTTPResponse)
+ data=response.data.decode('utf-8')
+ assert len([x for x in log_msgs if x.find("Can't stream HTTP requests")!=-1])==1
+ assert urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING==True
+ """
+ run_from_server.run_webworker(worker_code)
+
+
+@install_urllib3_wheel()
+def test_post_receive_json(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ import json
+
+ from urllib3.connection import HTTPConnection
+ from urllib3.response import BaseHTTPResponse
+
+ json_data = {
+ "Bears": "like",
+ "to": {"eat": "buns", "with": ["marmalade", "and custard"]},
+ }
+ conn = HTTPConnection(host, port)
+ conn.request(
+ "POST",
+ f"http://{host}:{port}/echo_json",
+ body=json.dumps(json_data).encode("utf-8"),
+ )
+ response = conn.getresponse()
+ assert isinstance(response, BaseHTTPResponse)
+ data = response.json()
+ assert data == json_data
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.http_port
+ )
+
+
+@install_urllib3_wheel()
+def test_upload(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ from urllib3 import HTTPConnectionPool
+
+ data = "I'm in ur multipart form-data, hazing a cheezburgr"
+ fields: dict[str, _TYPE_FIELD_VALUE_TUPLE] = {
+ "upload_param": "filefield",
+ "upload_filename": "lolcat.txt",
+ "filefield": ("lolcat.txt", data),
+ }
+ fields["upload_size"] = str(len(data))
+ with HTTPConnectionPool(host, port) as pool:
+ r = pool.request("POST", "/upload", fields=fields)
+ assert r.status == 200
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.http_port
+ )
+
+
+@install_urllib3_wheel()
+def test_streaming_not_ready_in_browser(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ # streaming ready should always be false
+ # if we're in the main browser thread
+ selenium_coverage.run_async(
+ """
+ import urllib3.contrib.emscripten.fetch
+ result=await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()
+ assert(result is False)
+ assert(urllib3.contrib.emscripten.fetch.streaming_ready() is None )
+ """
+ )
+
+
+@install_urllib3_wheel()
+def test_requests_with_micropip(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ # this can't be @run_in_pyodide because of the async code
+ selenium_coverage.run_async(
+ f"""
+ import micropip
+ await micropip.install("requests")
+ import requests
+ import json
+ r = requests.get("http://{testserver_http.http_host}:{testserver_http.http_port}/")
+ assert(r.status_code == 200)
+ assert(r.text == "Dummy server!")
+ json_data={{"woo":"yay"}}
+ # try posting some json with requests
+ r = requests.post("http://{testserver_http.http_host}:{testserver_http.http_port}/echo_json",json=json_data)
+ import js
+ assert(r.json() == json_data)
+ """
+ )
+
+
+@install_urllib3_wheel()
+def test_open_close(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ from http.client import ResponseNotReady
+
+ import pytest
+
+ from urllib3.connection import HTTPConnection
+
+ conn = HTTPConnection(host, port)
+ # initially connection should be closed
+ assert conn.is_closed is True
+ # connection should have no response
+ with pytest.raises(ResponseNotReady):
+ response = conn.getresponse()
+ # now make the response
+ conn.request("GET", f"http://{host}:{port}/")
+ # we never connect to proxy (or if we do, browser handles it)
+ assert conn.has_connected_to_proxy is False
+ # now connection should be open
+ assert conn.is_closed is False
+ # and should have a response
+ response = conn.getresponse()
+ assert response is not None
+ conn.close()
+ # now it is closed
+ assert conn.is_closed is True
+ # closed connection shouldn't have any response
+ with pytest.raises(ResponseNotReady):
+ conn.getresponse()
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.http_port
+ )
+
+
+# check that various ways that the worker may be broken
+# throw exceptions nicely, by deliberately breaking things
+# this is for coverage
+@install_urllib3_wheel()
+def test_break_worker_streaming(
+ selenium_coverage: typing.Any,
+ testserver_http: PyodideServerInfo,
+ run_from_server: ServerRunnerInfo,
+) -> None:
+ worker_code = f"""
+ import pyodide_js as pjs
+ await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False)
+ import pytest
+ import urllib3.contrib.emscripten.fetch
+ import js
+ import http.client
+
+ await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()
+ from urllib3.exceptions import TimeoutError
+ from urllib3.connection import HTTPConnection
+ conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port},timeout=1.0)
+ # make the fetch worker return a bad response by:
+ # 1) Clearing the int buffer
+ # in the receive stream
+ with pytest.raises(http.client.HTTPException):
+ conn.request("GET","/",preload_content=False)
+ response = conn.getresponse()
+ body_internal = response._response.body
+ assert(body_internal.int_buffer!=None)
+ body_internal.int_buffer=None
+ data=response.read()
+ # 2) Monkeypatch postMessage so that it just sets an
+ # exception status
+ old_pm= body_internal.worker.postMessage
+ with pytest.raises(http.client.HTTPException):
+ conn.request("GET","/",preload_content=False)
+ response = conn.getresponse()
+ # make posted messages set an exception
+ body_internal = response._response.body
+ def set_exception(*args):
+ body_internal.worker.postMessage = old_pm
+ body_internal.int_buffer[1]=4
+ body_internal.byte_buffer[0]=ord("W")
+ body_internal.byte_buffer[1]=ord("O")
+ body_internal.byte_buffer[2]=ord("O")
+ body_internal.byte_buffer[3]=ord("!")
+ body_internal.byte_buffer[4]=0
+ js.Atomics.store(body_internal.int_buffer, 0, -4)
+ js.Atomics.notify(body_internal.int_buffer,0)
+ body_internal.worker.postMessage = set_exception
+ data=response.read()
+ # monkeypatch so it returns an unknown value for the magic number on initial fetch call
+ with pytest.raises(http.client.HTTPException):
+ # make posted messages set an exception
+ worker=urllib3.contrib.emscripten.fetch._fetcher.js_worker
+ def set_exception(self,*args):
+ array=js.Int32Array.new(args[0].buffer)
+ array[0]=-1234
+ worker.postMessage=set_exception.__get__(worker,worker.__class__)
+ conn.request("GET","/",preload_content=False)
+ response = conn.getresponse()
+ data=response.read()
+ urllib3.contrib.emscripten.fetch._fetcher.js_worker.postMessage=old_pm
+ # 3) Stopping the worker receiving any messages which should cause a timeout error
+ # in the receive stream
+ with pytest.raises(TimeoutError):
+ conn.request("GET","/",preload_content=False)
+ response = conn.getresponse()
+ # make posted messages not be send
+ body_internal = response._response.body
+ def ignore_message(*args):
+ pass
+ old_pm= body_internal.worker.postMessage
+ body_internal.worker.postMessage = ignore_message
+ data=response.read()
+ body_internal.worker.postMessage = old_pm
+
+"""
+ run_from_server.run_webworker(worker_code)
+
+
+@install_urllib3_wheel()
+def test_response_init_length(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ import pytest
+
+ import urllib3.exceptions
+ from urllib3.connection import HTTPConnection
+ from urllib3.response import BaseHTTPResponse
+
+ conn = HTTPConnection(host, port)
+ conn.request("GET", f"http://{host}:{port}/")
+ response = conn.getresponse()
+ assert isinstance(response, BaseHTTPResponse)
+ # head shouldn't have length
+ length = response._init_length("HEAD")
+ assert length == 0
+ # multiple inconsistent lengths - should raise invalid header
+ with pytest.raises(urllib3.exceptions.InvalidHeader):
+ response.headers["Content-Length"] = "4,5,6"
+ length = response._init_length("GET")
+ # non-numeric length - should return None
+ response.headers["Content-Length"] = "anna"
+ length = response._init_length("GET")
+ assert length is None
+ # numeric length - should return it
+ response.headers["Content-Length"] = "54"
+ length = response._init_length("GET")
+ assert length == 54
+ # negative length - should return None
+ response.headers["Content-Length"] = "-12"
+ length = response._init_length("GET")
+ assert length is None
+ # none -> None
+ del response.headers["Content-Length"]
+ length = response._init_length("GET")
+ assert length is None
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.http_port
+ )
+
+
+@install_urllib3_wheel()
+def test_response_close_connection(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ from urllib3.connection import HTTPConnection
+ from urllib3.response import BaseHTTPResponse
+
+ conn = HTTPConnection(host, port)
+ conn.request("GET", f"http://{host}:{port}/")
+ response = conn.getresponse()
+ assert isinstance(response, BaseHTTPResponse)
+ response.close()
+ assert conn.is_closed
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.http_port
+ )
+
+
+@install_urllib3_wheel()
+def test_read_chunked(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ from urllib3.connection import HTTPConnection
+
+ conn = HTTPConnection(host, port)
+ conn.request("GET", f"http://{host}:{port}/mediumfile", preload_content=False)
+ response = conn.getresponse()
+ count = 0
+ for x in response.read_chunked(512):
+ count += 1
+ if count < 10:
+ assert len(x) == 512
+
+ pyodide_test(
+ selenium_coverage, testserver_http.http_host, testserver_http.http_port
+ )
+
+
+@install_urllib3_wheel()
+def test_retries(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def]
+ import pytest
+
+ import urllib3
+
+ pool = urllib3.HTTPConnectionPool(
+ host,
+ port,
+ maxsize=1,
+ block=True,
+ retries=urllib3.util.Retry(connect=5, read=5, redirect=5),
+ )
+
+ # monkeypatch connection class to count calls
+ old_request = urllib3.connection.HTTPConnection.request
+ count = 0
+
+ def count_calls(self, *args, **argv): # type: ignore[no-untyped-def]
+ nonlocal count
+ count += 1
+ return old_request(self, *args, **argv)
+
+ urllib3.connection.HTTPConnection.request = count_calls # type: ignore[method-assign]
+ with pytest.raises(urllib3.exceptions.MaxRetryError):
+ pool.urlopen("GET", "/")
+ # this should fail, but should have tried 6 times total
+ assert count == 6
+
+ pyodide_test(selenium_coverage, testserver_http.http_host, find_unused_port())
| Native support for Pyodide, PyScript (emscripten?)
When PyScript was announced at PyCon US 2022, I immediately tried to add support for synchronous HTTP requests w/ Pyodide/PyScript into urllib3 but was unfortunately unsuccessful at the time. I can see that support for this has grown since then :rocket:
There's a lot to look at and learn on this topic; this issue is currently a stub and needs more filling in.
General thoughts right now from me: add a new module `urllib3.contrib.fetch` which exposes an `HTTP[S]Connection` implementation that uses Pyodide's pyfetch (and the equivalent for emscripten) under the hood. It wouldn't need to support the full range of options available to urllib3 (things like custom certificates, etc.), but I suspect we'd get a lot from just method, URL, headers, body, and some other basic features. A rough sketch of the usage I have in mind is shown below.
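For illustration only, here is a minimal sketch of that idea. Everything in it is hypothetical - `urllib3.contrib.fetch` does not exist yet and the names are placeholders that just mirror the existing `HTTP[S]Connection` shape:

```python
# Hypothetical sketch -- urllib3.contrib.fetch and its names are assumptions,
# not an existing API.
from urllib3.contrib.fetch import HTTPSConnection  # assumed future module

conn = HTTPSConnection("example.com", 443)
# Under the hood this would call the browser's fetch API (pyfetch on Pyodide)
# instead of opening a socket through http.client.
conn.request("GET", "/", headers={"Accept": "application/json"})
response = conn.getresponse()
print(response.status, response.data[:100])
```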
The tougher parts of this would be testing and documentation; perhaps we can learn from the projects below?
## Requirements
- [ ] Figure out how we can test urllib3 consistently in a browser
- [ ] Add functionality which uses the JavaScript fetch API instead of httplib in a third-party module (ie `urllib3.contrib.fetch`)
- [ ] Document the new feature and restrictions
- [ ] Verify that Requests works with urllib3 in the browser
## Related projects / issues
- https://github.com/koenvo/pyodide-http
- https://github.com/emscripten-forge/requests-wasm-polyfill
- https://github.com/koenvo/pyodide-http/issues/28
- https://github.com/koenvo/pyodide-http/issues/6
- https://github.com/pyodide/pyodide/issues/3160
cc @koenvo
| One of the objectives of pyodide-http is to make non-async code work. A lot of code nowadays is (still) synchronous - like every package that uses requests.
I haven't yet found a way to make code in Pyodide block on an async function call without requiring all kinds of extra security headers (see also https://github.com/koenvo/pyodide-http/issues/5). That was the reason I ended up with a sync XMLHttpRequest, roughly as in the sketch below. This approach works fine in background workers, but when running on the main thread the synchronous call blocks the whole UI for the duration of the request.
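For reference, a minimal sketch of that approach under Pyodide, where the `js` module bridges to the browser globals (the URL is a placeholder):

```python
# Minimal sketch of a synchronous request via XMLHttpRequest under Pyodide.
import js

xhr = js.XMLHttpRequest.new()
# The third argument (async=False) makes the call block until the response
# arrives, which is what lets unmodified synchronous Python code keep working.
xhr.open("GET", "https://example.com/data", False)
xhr.send()
print(xhr.status, xhr.responseText)
```

The catch is exactly as described: in a worker this blocking call is harmless, but on the main thread it freezes the page until the server responds.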
When the above challenge is solved, the implementation should be pretty straightforward, and I would love to help there.
@koenvo Hmm, do you see the requirement for security headers changing anytime soon? If not, I think (assuming there's a way to detect when these features are available - see the sketch below) we should do the best thing for users in their current situation by default, and then add an option to force the behavior one way or the other when necessary. I'm not sure what kinds of problems blocking the main JS thread can cause if we're offering a sync HTTP API. Documentation will help a lot here, too.
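Detection does look feasible - the browser exposes enough globals to decide at import time which transport is possible. A sketch, mirroring the helper checks that appear in the patch above:

```python
# Sketch: decide at import time whether the blocking-fetch-in-a-worker path is
# available; if not, fall back to the synchronous XMLHttpRequest transport.
import js

def can_stream_requests() -> bool:
    in_main_thread = (
        hasattr(js, "window") and hasattr(js, "self") and js.self == js.window
    )
    # SharedArrayBuffer (needed for the blocking handshake) requires the page
    # to be served with cross-origin isolation headers.
    isolated = hasattr(js, "crossOriginIsolated") and bool(js.crossOriginIsolated)
    has_workers = hasattr(js, "Worker") and hasattr(js, "Blob")
    return has_workers and isolated and not in_main_thread
```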
Created this project on pyscript.com which shows the current state of affairs:
https://b15ad9a9-0744-48ba-8e37-eedddba9292c.pyscriptapps.com/7b5d346e-e490-43f0-b43f-cdeae7f24665/latest/ | 2023-11-16T19:01:10Z | [] | [] |
urllib3/urllib3 | 3,222 | urllib3__urllib3-3222 | [
"3197"
] | 661072c4f805fc448bfe52c04fc6e26ef454cdaa | diff --git a/dummyserver/asgi_proxy.py b/dummyserver/asgi_proxy.py
new file mode 100755
--- /dev/null
+++ b/dummyserver/asgi_proxy.py
@@ -0,0 +1,108 @@
+from __future__ import annotations
+
+import typing
+
+import httpx
+import trio
+from hypercorn.typing import (
+ ASGIReceiveCallable,
+ ASGISendCallable,
+ HTTPResponseBodyEvent,
+ HTTPResponseStartEvent,
+ HTTPScope,
+ Scope,
+)
+
+
+async def _read_body(receive: ASGIReceiveCallable) -> bytes:
+ body = bytearray()
+ body_consumed = False
+ while not body_consumed:
+ event = await receive()
+ if event["type"] == "http.request":
+ body.extend(event["body"])
+ body_consumed = not event["more_body"]
+ else:
+ raise ValueError(event["type"])
+ return bytes(body)
+
+
+async def absolute_uri(
+ scope: HTTPScope,
+ receive: ASGIReceiveCallable,
+ send: ASGISendCallable,
+) -> None:
+ async with httpx.AsyncClient() as client:
+ client_response = await client.request(
+ method=scope["method"],
+ url=scope["path"],
+ headers=list(scope["headers"]),
+ content=await _read_body(receive),
+ )
+
+ headers = []
+ for header in (
+ "Date",
+ "Cache-Control",
+ "Server",
+ "Content-Type",
+ "Location",
+ ):
+ v = client_response.headers.get(header)
+ if v:
+ headers.append((header.encode(), v.encode()))
+ headers.append((b"Content-Length", str(len(client_response.content)).encode()))
+
+ await send(
+ HTTPResponseStartEvent(
+ type="http.response.start",
+ status=client_response.status_code,
+ headers=headers,
+ )
+ )
+ await send(
+ HTTPResponseBodyEvent(
+ type="http.response.body",
+ body=client_response.content,
+ more_body=False,
+ )
+ )
+
+
+async def connect(scope: HTTPScope, send: ASGISendCallable) -> None:
+ async def start_forward(
+ reader: trio.SocketStream, writer: trio.SocketStream
+ ) -> None:
+ while True:
+ try:
+ data = await reader.receive_some(4096)
+ except trio.ClosedResourceError:
+ break
+ if not data:
+ break
+ await writer.send_all(data)
+ await writer.aclose()
+
+ host, port = scope["path"].split(":")
+ upstream = await trio.open_tcp_stream(host, int(port))
+
+ await send({"type": "http.response.start", "status": 200, "headers": []})
+ await send({"type": "http.response.body", "body": b"", "more_body": True})
+
+ client = typing.cast(trio.SocketStream, scope["extensions"]["_transport"])
+
+ async with trio.open_nursery(strict_exception_groups=True) as nursery:
+ nursery.start_soon(start_forward, client, upstream)
+ nursery.start_soon(start_forward, upstream, client)
+
+
+async def proxy_app(
+ scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable
+) -> None:
+ assert scope["type"] == "http"
+ if scope["method"] in ["GET", "POST"]:
+ await absolute_uri(scope, receive, send)
+ elif scope["method"] == "CONNECT":
+ await connect(scope, send)
+ else:
+ raise ValueError(scope["method"])
diff --git a/dummyserver/hypercornserver.py b/dummyserver/hypercornserver.py
--- a/dummyserver/hypercornserver.py
+++ b/dummyserver/hypercornserver.py
@@ -9,6 +9,7 @@
import hypercorn
import hypercorn.trio
+import hypercorn.typing
import trio
from quart_trio import QuartTrio
@@ -41,7 +42,7 @@ async def _start_server(
@contextlib.contextmanager
def run_hypercorn_in_thread(
- config: hypercorn.Config, app: QuartTrio
+ config: hypercorn.Config, app: hypercorn.typing.ASGIFramework
) -> Generator[None, None, None]:
ready_event = threading.Event()
shutdown_event = threading.Event()
| diff --git a/test/conftest.py b/test/conftest.py
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -1,6 +1,5 @@
from __future__ import annotations
-import asyncio
import contextlib
import socket
import ssl
@@ -10,18 +9,12 @@
import hypercorn
import pytest
import trustme
-from tornado import web
from dummyserver.app import hypercorn_app
-from dummyserver.handlers import TestingApp
+from dummyserver.asgi_proxy import proxy_app
from dummyserver.hypercornserver import run_hypercorn_in_thread
-from dummyserver.proxy import ProxyHandler
from dummyserver.testcase import HTTPSHypercornDummyServerTestCase
-from dummyserver.tornadoserver import (
- HAS_IPV6,
- run_tornado_app,
- run_tornado_loop_in_thread,
-)
+from dummyserver.tornadoserver import HAS_IPV6
from urllib3.util import ssl_
from urllib3.util.url import parse_url
@@ -111,26 +104,25 @@ def run_server_and_proxy_in_thread(
server_certs = _write_cert_to_dir(server_cert, tmpdir)
proxy_certs = _write_cert_to_dir(proxy_cert, tmpdir, "proxy")
- with run_tornado_loop_in_thread() as io_loop:
-
- async def run_app() -> tuple[ServerConfig, ServerConfig]:
- app = web.Application([(r".*", TestingApp)])
- server_app, port = run_tornado_app(app, server_certs, "https", "localhost")
- server_config = ServerConfig("https", "localhost", port, ca_cert_path)
-
- proxy = web.Application([(r".*", ProxyHandler)])
- proxy_app, proxy_port = run_tornado_app(
- proxy, proxy_certs, proxy_scheme, proxy_host
- )
- proxy_config = ServerConfig(
- proxy_scheme, proxy_host, proxy_port, ca_cert_path
- )
- return proxy_config, server_config
-
- proxy_config, server_config = asyncio.run_coroutine_threadsafe(
- run_app(), io_loop.asyncio_loop # type: ignore[attr-defined]
- ).result()
- yield (proxy_config, server_config)
+ with contextlib.ExitStack() as stack:
+ server_config = hypercorn.Config()
+ server_config.certfile = server_certs["certfile"]
+ server_config.keyfile = server_certs["keyfile"]
+ server_config.bind = ["localhost:0"]
+ stack.enter_context(run_hypercorn_in_thread(server_config, hypercorn_app))
+ port = typing.cast(int, parse_url(server_config.bind[0]).port)
+
+ proxy_config = hypercorn.Config()
+ proxy_config.certfile = proxy_certs["certfile"]
+ proxy_config.keyfile = proxy_certs["keyfile"]
+ proxy_config.bind = [f"{proxy_host}:0"]
+ stack.enter_context(run_hypercorn_in_thread(proxy_config, proxy_app))
+ proxy_port = typing.cast(int, parse_url(proxy_config.bind[0]).port)
+
+ yield (
+ ServerConfig(proxy_scheme, proxy_host, proxy_port, ca_cert_path),
+ ServerConfig("https", "localhost", port, ca_cert_path),
+ )
@pytest.fixture(params=["localhost", "127.0.0.1", "::1"])
| Migrate run_server_in_thread and run_server_and_proxy_in_thread to Hypercorn
The first blocker to add HTTP/2 support to urllib3 is to have a test server that supports HTTP/2. Since https://github.com/tornadoweb/tornado/issues/1438, we're migrating to Hypercorn. This issue is about migrating all uses of `run_tornado_loop_in_thread` in test/conftest.py.
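For reference, the Hypercorn-in-a-thread pattern the migration converges on looks roughly like this, mirroring the test/conftest.py changes above (binding to port 0 lets the OS pick a free port):

```python
import contextlib
import typing

import hypercorn

from dummyserver.app import hypercorn_app
from dummyserver.hypercornserver import run_hypercorn_in_thread
from urllib3.util.url import parse_url

with contextlib.ExitStack() as stack:
    config = hypercorn.Config()
    config.bind = ["localhost:0"]  # port 0: let the OS pick a free port
    stack.enter_context(run_hypercorn_in_thread(config, hypercorn_app))
    port = typing.cast(int, parse_url(config.bind[0]).port)
```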
- [x] #3198
- [x] #3222
| From our discussion it seems like proxies might be much harder than a regular server. Let's split the proxy issue off? | 2023-12-05T20:40:38Z | [] | [] |
urllib3/urllib3 | 3,266 | urllib3__urllib3-3266 | [
"3130"
] | 2ac4efacfef5182aaef3e26a4f1e066ab228dfdd | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -248,6 +248,9 @@ def connect(self) -> None:
# not using tunnelling.
self._has_connected_to_proxy = bool(self.proxy)
+ if self._has_connected_to_proxy:
+ self.proxy_is_verified = False
+
@property
def is_closed(self) -> bool:
return self.sock is None
@@ -611,8 +614,11 @@ def connect(self) -> None:
if self._tunnel_host is not None:
# We're tunneling to an HTTPS origin so need to do TLS-in-TLS.
if self._tunnel_scheme == "https":
+ # _connect_tls_proxy will verify and assign proxy_is_verified
self.sock = sock = self._connect_tls_proxy(self.host, sock)
tls_in_tls = True
+ elif self._tunnel_scheme == "http":
+ self.proxy_is_verified = False
# If we're tunneling it means we're connected to our proxy.
self._has_connected_to_proxy = True
@@ -656,6 +662,11 @@ def connect(self) -> None:
assert_fingerprint=self.assert_fingerprint,
)
self.sock = sock_and_verified.socket
+
+ # TODO: Set correct `self.is_verified` in case of HTTPS proxy +
+ # HTTP destination, see
+ # `test_is_verified_https_proxy_to_http_target` and
+ # https://github.com/urllib3/urllib3/issues/3267.
self.is_verified = sock_and_verified.is_verified
# If there's a proxy to be connected to we are fully connected.
@@ -663,6 +674,11 @@ def connect(self) -> None:
# not using tunnelling.
self._has_connected_to_proxy = bool(self.proxy)
+ # Set `self.proxy_is_verified` unless it's already set while
+ # establishing a tunnel.
+ if self._has_connected_to_proxy and self.proxy_is_verified is None:
+ self.proxy_is_verified = sock_and_verified.is_verified
+
def _connect_tls_proxy(self, hostname: str, sock: socket.socket) -> ssl.SSLSocket:
"""
Establish a TLS connection to the proxy using the provided SSL context.
| diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -43,6 +43,16 @@
from .. import TARPIT_HOST, requires_network
+def assert_is_verified(pm: ProxyManager, *, proxy: bool, target: bool) -> None:
+ pool = list(pm.pools._container.values())[-1] # retrieve last pool entry
+ connection = (
+ pool.pool.queue[-1] if pool.pool is not None else None
+ ) # retrieve last connection entry
+
+ assert connection.proxy_is_verified is proxy
+ assert connection.is_verified is target
+
+
class TestHTTPProxyManager(HypercornDummyProxyTestCase):
@classmethod
def setup_class(cls) -> None:
@@ -83,6 +93,31 @@ def test_https_proxy(self) -> None:
r = https.request("GET", f"{self.http_url}/")
assert r.status == 200
+ def test_is_verified_http_proxy_to_http_target(self) -> None:
+ with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http:
+ r = http.request("GET", f"{self.http_url}/")
+ assert r.status == 200
+ assert_is_verified(http, proxy=False, target=False)
+
+ def test_is_verified_http_proxy_to_https_target(self) -> None:
+ with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http:
+ r = http.request("GET", f"{self.https_url}/")
+ assert r.status == 200
+ assert_is_verified(http, proxy=False, target=True)
+
+ @pytest.mark.xfail(reason="see https://github.com/urllib3/urllib3/issues/3267")
+ def test_is_verified_https_proxy_to_http_target(self) -> None:
+ with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https:
+ r = https.request("GET", f"{self.http_url}/")
+ assert r.status == 200
+ assert_is_verified(https, proxy=True, target=False)
+
+ def test_is_verified_https_proxy_to_https_target(self) -> None:
+ with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https:
+ r = https.request("GET", f"{self.https_url}/")
+ assert r.status == 200
+ assert_is_verified(https, proxy=True, target=True)
+
def test_http_and_https_kwarg_ca_cert_data_proxy(self) -> None:
with open(DEFAULT_CA) as pem_file:
pem_file_data = pem_file.read()
| Incorrect behaviour of `proxy_is_verified`
### Subject
`proxy_is_verified` is never set for HTTPS-proxy-to-HTTP-target connections. `proxy_is_verified` is set only at https://github.com/urllib3/urllib3/blob/main/src/urllib3/connection.py#L694, and the function `_connect_tls_proxy` containing this line is called only when we are creating an HTTP tunnel https://github.com/urllib3/urllib3/blob/main/src/urllib3/connection.py#L672. We would have to set the value of `proxy_is_verified` for scenarios where we are not using a TLS-in-TLS tunnel and are instead setting up a TLS connection for the HTTPS-to-HTTP case.
More context - https://github.com/urllib3/urllib3/pull/3070#pullrequestreview-1607114075
### Environment
At least, paste here the output of:
```python
import platform
import ssl
import urllib3
print("OS", platform.platform()) # OS macOS-13.4.1-arm64-arm-64bit
print("Python", platform.python_version()) # Python 3.10.12
print(ssl.OPENSSL_VERSION) # OpenSSL 3.1.1 30 May 2023
print("urllib3", urllib3.__version__) # urllib3 1.26.14
```
### Steps to Reproduce
You can add a `print` statement to print `proxy_is_verified` for an `HTTPS`-to-`HTTP` connection. The value of `proxy_is_verified` is set to `None` and never updated to `False` for the following example.
```python
proxy = urllib3.ProxyManager('https://127.0.0.1:8443', cert_reqs='NONE')
res = proxy.request('GET', 'https://bff.familyid.samagra.io')
```
### Expected Behavior
`proxy_is_verified` set to `False` or `True` appropriately.
### Actual Behavior
According to this comment in the code, if we are connected to a proxy it should not be `None`: https://github.com/urllib3/urllib3/blob/d9f85a749488188c286cd50606d159874db94d5f/src/urllib3/connection.py#L120
But `proxy_is_verified` is set to `None` instead of `False`.
For an HTTPS-proxy connection to an HTTP host, `proxy_is_verified` is never set properly; it is set appropriately for HTTPS-to-HTTPS connections.
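One way to observe the flags is the same pool-internals poking the test patch above uses in `assert_is_verified` (these are private attributes, not public API):

```python
def last_connection_flags(pm):
    pool = list(pm.pools._container.values())[-1]  # most recent pool entry
    conn = pool.pool.queue[-1] if pool.pool is not None else None
    return conn.proxy_is_verified, conn.is_verified
```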
| Hey folks, I would like to solve this. It will help unblock #3070 | 2024-01-06T07:49:51Z | [] | []
urllib3/urllib3 | 3,271 | urllib3__urllib3-3271 | [
"3268"
] | 2ac4efacfef5182aaef3e26a4f1e066ab228dfdd | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -862,6 +862,7 @@ def _wrap_proxy_error(err: Exception, proxy_scheme: str | None) -> ProxyError:
is_likely_http_proxy = (
"wrong version number" in error_normalized
or "unknown protocol" in error_normalized
+ or "record layer failure" in error_normalized
)
http_proxy_warning = (
". Your proxy appears to only use HTTP and not HTTPS, "
| diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -1293,7 +1293,8 @@ def socket_handler(listener: socket.socket) -> None:
self._start_server(socket_handler)
with HTTPSConnectionPool(self.host, self.port, ca_certs=DEFAULT_CA) as pool:
with pytest.raises(
- SSLError, match=r"(wrong version number|record overflow)"
+ SSLError,
+ match=r"(wrong version number|record overflow|record layer failure)",
):
pool.request("GET", "/", retries=False)
| Handle OpenSSL 3.2.0 new error message for unexpected unencrypted records
### Subject
I was expecting all tests to pass on the "main" branch, but there are 3 failing test cases:
* TestProxyManager::test_https_proxymanager_connected_to_http_proxy[http]
* TestProxyManager::test_https_proxymanager_connected_to_http_proxy[https]
* TestSSL::test_ssl_failure_midway_through_conn
```
nox -s test-3.12
...
FAILED test/with_dummyserver/test_socketlevel.py::TestProxyManager::test_https_proxymanager_connected_to_http_proxy[http] - assert 'Your proxy appears to only use HTTP and not HTTPS' in "('Unable to connect to proxy', SSLError(SSLError(1, '[SSL] record layer failure (_ssl.c:1000)')))"
FAILED test/with_dummyserver/test_socketlevel.py::TestProxyManager::test_https_proxymanager_connected_to_http_proxy[https] - assert 'Your proxy appears to only use HTTP and not HTTPS' in "('Unable to connect to proxy', SSLError(SSLError(1, '[SSL] record layer failure (_ssl.c:1000)')))"
FAILED test/with_dummyserver/test_socketlevel.py::TestSSL::test_ssl_failure_midway_through_conn - AssertionError: Regex pattern did not match.
```
### Environment
```
OS macOS-14.2.1-x86_64-i386-64bit
Python 3.12.1
OpenSSL 3.2.0 23 Nov 2023
urllib3 2.1.0
```
### Steps to Reproduce
```
nox -s test-3.12
```
### Expected Behavior
The tests should work, or they should be skipped; or, if it's because they are not supported on macOS, then maybe they should be conditionally skipped.
### Actual Behavior
3 tests fail.
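The failures trace back to urllib3's proxy-error heuristic not recognizing OpenSSL 3.2's new wording. A condensed sketch of that check, including the phrase the patch above adds:

```python
import re


def looks_like_http_proxy(err: Exception) -> bool:
    # Mirrors the phrase matching in urllib3.connection._wrap_proxy_error;
    # "record layer failure" is the new wording introduced by OpenSSL 3.2.0.
    normalized = " ".join(re.split("[^a-z]", str(err).lower()))
    return (
        "wrong version number" in normalized
        or "unknown protocol" in normalized
        or "record layer failure" in normalized
    )
```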
| 2024-01-08T23:52:44Z | [] | [] |
|
urllib3/urllib3 | 3,273 | urllib3__urllib3-3273 | [
"3261"
] | 12f923325a1794bab26c82dbfef2c47d44f054f8 | diff --git a/src/urllib3/exceptions.py b/src/urllib3/exceptions.py
--- a/src/urllib3/exceptions.py
+++ b/src/urllib3/exceptions.py
@@ -252,13 +252,16 @@ class IncompleteRead(HTTPError, httplib_IncompleteRead):
for ``partial`` to avoid creating large objects on streamed reads.
"""
+ partial: int # type: ignore[assignment]
+ expected: int
+
def __init__(self, partial: int, expected: int) -> None:
- self.partial = partial # type: ignore[assignment]
+ self.partial = partial
self.expected = expected
def __repr__(self) -> str:
return "IncompleteRead(%i bytes read, %i more expected)" % (
- self.partial, # type: ignore[str-format]
+ self.partial,
self.expected,
)
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -749,8 +749,18 @@ def _error_catcher(self) -> typing.Generator[None, None, None]:
raise ReadTimeoutError(self._pool, None, "Read timed out.") from e # type: ignore[arg-type]
+ except IncompleteRead as e:
+ if (
+ e.expected is not None
+ and e.partial is not None
+ and e.expected == -e.partial
+ ):
+ arg = "Response may not contain content."
+ else:
+ arg = f"Connection broken: {e!r}"
+ raise ProtocolError(arg, e) from e
+
except (HTTPException, OSError) as e:
- # This includes IncompleteRead.
raise ProtocolError(f"Connection broken: {e!r}", e) from e
# If no exception is thrown, we should avoid cleaning up
| diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -1293,7 +1293,7 @@ def test_buggy_incomplete_read(self) -> None:
orig_ex = ctx.value.args[1]
assert isinstance(orig_ex, IncompleteRead)
- assert orig_ex.partial == 0 # type: ignore[comparison-overlap]
+ assert orig_ex.partial == 0
assert orig_ex.expected == content_length
def test_incomplete_chunk(self) -> None:
@@ -1504,6 +1504,27 @@ def make_bad_mac_fp() -> typing.Generator[BytesIO, None, None]:
resp.read()
assert e.value.args[0] == mac_error
+ def test_unexpected_body(self) -> None:
+ with pytest.raises(ProtocolError) as excinfo:
+ fp = BytesIO(b"12345")
+ headers = {"content-length": "5"}
+ resp = HTTPResponse(fp, status=204, headers=headers)
+ resp.read(16)
+ assert "Response may not contain content" in str(excinfo.value)
+
+ with pytest.raises(ProtocolError):
+ fp = BytesIO(b"12345")
+ headers = {"content-length": "0"}
+ resp = HTTPResponse(fp, status=204, headers=headers)
+ resp.read(16)
+ assert "Response may not contain content" in str(excinfo.value)
+
+ with pytest.raises(ProtocolError):
+ fp = BytesIO(b"12345")
+ resp = HTTPResponse(fp, status=204)
+ resp.read(16)
+ assert "Response may not contain content" in str(excinfo.value)
+
class MockChunkedEncodingResponse:
def __init__(self, content: list[bytes]) -> None:
| Misleading `IncompleteRead` exception if an HTTP response contains unexpected content
### Subject
When interacting with poorly implemented servers, or when paying little attention while mocking HTTP requests, one can be faced with, e.g., a 204 response that contains an unexpected body.
So far so bad. urllib3 only raises an `IncompleteRead` exception, which provides no insight into the reason (e.g. a 204 response with a body).
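A minimal reproduction without requests/responses, mirroring the new test in this PR; with the patch applied, the read raises the clearer `ProtocolError` message instead of the raw `IncompleteRead`:

```python
from io import BytesIO

from urllib3.response import HTTPResponse

fp = BytesIO(b"12345")
resp = HTTPResponse(fp, status=204, headers={"content-length": "5"})
# Raises ProtocolError("Response may not contain content.", IncompleteRead(...))
resp.read(16)
```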
### Environment
* Python 3.10
* urllib3 2.1.0
### Steps to Reproduce
````shell
pip install requests responses
````
````python
# scratch.py
import unittest
import responses
import requests
class MyTest(unittest.TestCase):
url = "http://example.com"
def test_requestsmock(self):
rmock = responses.RequestsMock()
rmock.start()
rmock.post(url=self.url,
status=204,
json={"status": "ok"},
auto_calculate_content_length=True)
response = requests.post(url=self.url, data="Foo Bar!")
@responses.activate
def test_decorator(self):
responses.add(method=responses.POST,
url=self.url,
status=204,
json={"status": "ok"},
auto_calculate_content_length=True)
response = requests.post(url=self.url, data="Foo Bar!")
unittest.main()
````
### Expected Behavior
If an empty response body is expected, the raised exception should complain about the presence of an unexpected body.
Also, the reason for expecting an empty body would be helpful and speed up debugging.
### Actual Behavior
Instead, `IncompleteRead(16 bytes read, -16 more expected)` is raised:
```
python3.10 scratch.py
EE
======================================================================
ERROR: test_decorator (__main__.MyTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 712, in _error_catcher
yield
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 833, in _raw_read
raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
urllib3.exceptions.IncompleteRead: IncompleteRead(16 bytes read, -16 more expected)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/models.py", line 816, in generate
yield from self.raw.stream(chunk_size, decode_content=True)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 934, in stream
data = self.read(amt=amt, decode_content=decode_content)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 905, in read
data = self._raw_read(amt)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 811, in _raw_read
with self._error_catcher():
File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__
self.gen.throw(typ, value, traceback)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 729, in _error_catcher
raise ProtocolError(f"Connection broken: {e!r}", e) from e
urllib3.exceptions.ProtocolError: ('Connection broken: IncompleteRead(16 bytes read, -16 more expected)', IncompleteRead(16 bytes read, -16 more expected))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/responses/__init__.py", line 232, in wrapper
return func(*args, **kwargs)
File "/home/andi/.config/JetBrains/PyCharm2023.2/scratches/scratch.py", line 28, in test_decorator
response = requests.post(url=self.url, data="Foo Bar!")
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/api.py", line 115, in post
return request("post", url, data=data, json=json, **kwargs)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/api.py", line 59, in request
return session.request(method=method, url=url, **kwargs)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/sessions.py", line 589, in request
resp = self.send(prep, **send_kwargs)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/sessions.py", line 747, in send
r.content
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/models.py", line 899, in content
self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/models.py", line 818, in generate
raise ChunkedEncodingError(e)
requests.exceptions.ChunkedEncodingError: ('Connection broken: IncompleteRead(16 bytes read, -16 more expected)', IncompleteRead(16 bytes read, -16 more expected))
======================================================================
ERROR: test_requestsmock (__main__.MyTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 712, in _error_catcher
yield
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 833, in _raw_read
raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
urllib3.exceptions.IncompleteRead: IncompleteRead(16 bytes read, -16 more expected)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/models.py", line 816, in generate
yield from self.raw.stream(chunk_size, decode_content=True)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 934, in stream
data = self.read(amt=amt, decode_content=decode_content)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 905, in read
data = self._raw_read(amt)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 811, in _raw_read
with self._error_catcher():
File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__
self.gen.throw(typ, value, traceback)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/urllib3/response.py", line 729, in _error_catcher
raise ProtocolError(f"Connection broken: {e!r}", e) from e
urllib3.exceptions.ProtocolError: ('Connection broken: IncompleteRead(16 bytes read, -16 more expected)', IncompleteRead(16 bytes read, -16 more expected))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/andi/.config/JetBrains/PyCharm2023.2/scratches/scratch.py", line 18, in test_requestsmock
response = requests.post(url=self.url, data="Foo Bar!")
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/api.py", line 115, in post
return request("post", url, data=data, json=json, **kwargs)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/api.py", line 59, in request
return session.request(method=method, url=url, **kwargs)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/sessions.py", line 589, in request
resp = self.send(prep, **send_kwargs)
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/sessions.py", line 747, in send
r.content
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/models.py", line 899, in content
self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""
File "/home/andi/virtualenvs/checkers/lib/python3.10/site-packages/requests/models.py", line 818, in generate
raise ChunkedEncodingError(e)
requests.exceptions.ChunkedEncodingError: ('Connection broken: IncompleteRead(16 bytes read, -16 more expected)', IncompleteRead(16 bytes read, -16 more expected))
----------------------------------------------------------------------
Ran 2 tests in 0.005s
FAILED (errors=2)
```
| @crazyscientist Thanks for reporting this. I would accept a fix that detects this situation and reraises as a `ProtocolError` with a better error message. | 2024-01-09T12:02:41Z | [] | [] |
urllib3/urllib3 | 3,283 | urllib3__urllib3-3283 | [
"3267"
] | f862bfeb3b7a100f86f4d17eb54f3b82c6921d16 | diff --git a/src/urllib3/connection.py b/src/urllib3/connection.py
--- a/src/urllib3/connection.py
+++ b/src/urllib3/connection.py
@@ -265,6 +265,13 @@ def is_connected(self) -> bool:
def has_connected_to_proxy(self) -> bool:
return self._has_connected_to_proxy
+ @property
+ def proxy_is_forwarding(self) -> bool:
+ """
+ Return True if a forwarding proxy is configured, else return False
+ """
+ return bool(self.proxy) and self._tunnel_host is None
+
def close(self) -> None:
try:
super().close()
@@ -663,11 +670,14 @@ def connect(self) -> None:
)
self.sock = sock_and_verified.socket
- # TODO: Set correct `self.is_verified` in case of HTTPS proxy +
- # HTTP destination, see
- # `test_is_verified_https_proxy_to_http_target` and
- # https://github.com/urllib3/urllib3/issues/3267.
- self.is_verified = sock_and_verified.is_verified
+ # Forwarding proxies can never have a verified target since
+ # the proxy is the one doing the verification. Should instead
+ # use a CONNECT tunnel in order to verify the target.
+ # See: https://github.com/urllib3/urllib3/issues/3267.
+ if self.proxy_is_forwarding:
+ self.is_verified = False
+ else:
+ self.is_verified = sock_and_verified.is_verified
# If there's a proxy to be connected to we are fully connected.
# This is set twice (once above and here) due to forwarding proxies
diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py
--- a/src/urllib3/connectionpool.py
+++ b/src/urllib3/connectionpool.py
@@ -1098,7 +1098,8 @@ def _validate_conn(self, conn: BaseHTTPConnection) -> None:
if conn.is_closed:
conn.connect()
- if not conn.is_verified:
+ # TODO revise this, see https://github.com/urllib3/urllib3/issues/2791
+ if not conn.is_verified and not conn.proxy_is_verified:
warnings.warn(
(
f"Unverified HTTPS request is being made to host '{conn.host}'. "
| diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -105,7 +105,6 @@ def test_is_verified_http_proxy_to_https_target(self) -> None:
assert r.status == 200
assert_is_verified(http, proxy=False, target=True)
- @pytest.mark.xfail(reason="see https://github.com/urllib3/urllib3/issues/3267")
def test_is_verified_https_proxy_to_http_target(self) -> None:
with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https:
r = https.request("GET", f"{self.http_url}/")
| https_proxy to http_host leads to `is_verified==True`
(follow-up to #3130)
### Subject
@illia-v, the test you provided in https://github.com/urllib3/urllib3/pull/3149#pullrequestreview-1683158046
```py
def test_https_proxy(self) -> None:
with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https:
[...]
r = https.request("GET", f"{self.http_url}/")
assert r.status == 200
+ pool = list(https.pools._container.values())[-1]
+ connection = pool.pool.queue[-1]
+ assert connection.is_verified is False
+ assert connection.proxy_is_verified is True
```
Thus, the expected combination is
* `proxy_is_verified is True`, `is_verified is False`
I tend to agree with this logic, but the current code does
* `proxy_is_verified is True`, `is_verified is True`.
This seems like an additional (besides #3130) bug (or at least flaw).
You can review the behavior within
- https://github.com/urllib3/urllib3/pull/3266
The relevant test is `test_is_verified_https_proxy_to_http_target`.
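A sketch of what that test asserts once the flaw is fixed (the proxy URL, target URL, and CA bundle path are placeholders):

```python
import urllib3

with urllib3.proxy_from_url("https://localhost:8443", ca_certs="cacert.pem") as https:
    https.request("GET", "http://localhost:8000/")
    pool = list(https.pools._container.values())[-1]  # pool internals, not public API
    conn = pool.pool.queue[-1]
    assert conn.proxy_is_verified is True
    assert conn.is_verified is False  # a forwarding proxy cannot verify the target
```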
| Since we already have a test case for this issue I'm assigning a $100 bounty for the fix. Thanks for reporting! | 2024-01-17T11:18:00Z | [] | [] |
urllib3/urllib3 | 3,310 | urllib3__urllib3-3310 | [
"3194"
] | 89ed0d6a65138f6d641e92ae4e0da0cdc7d66870 | diff --git a/dummyserver/socketserver.py b/dummyserver/socketserver.py
--- a/dummyserver/socketserver.py
+++ b/dummyserver/socketserver.py
@@ -20,7 +20,7 @@
from cryptography.hazmat.primitives import serialization
from urllib3.exceptions import HTTPWarning
-from urllib3.util import ALPN_PROTOCOLS, resolve_cert_reqs, resolve_ssl_version
+from urllib3.util import resolve_cert_reqs, resolve_ssl_version
if typing.TYPE_CHECKING:
from typing_extensions import ParamSpec
@@ -35,7 +35,7 @@
"keyfile": os.path.join(CERTS_PATH, "server.key"),
"cert_reqs": ssl.CERT_OPTIONAL,
"ca_certs": os.path.join(CERTS_PATH, "cacert.pem"),
- "alpn_protocols": ALPN_PROTOCOLS,
+ "alpn_protocols": ["h2", "http/1.1"],
}
DEFAULT_CA = os.path.join(CERTS_PATH, "cacert.pem")
DEFAULT_CA_KEY = os.path.join(CERTS_PATH, "cacert.key")
| diff --git a/test/conftest.py b/test/conftest.py
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -10,6 +10,7 @@
import pytest
import trustme
+import urllib3.http2
from dummyserver.app import hypercorn_app
from dummyserver.asgi_proxy import ProxyApp
from dummyserver.hypercornserver import run_hypercorn_in_thread
@@ -369,3 +370,14 @@ def requires_tlsv1_3(supported_tls_versions: typing.AbstractSet[str]) -> None:
or "TLSv1.3" not in supported_tls_versions
):
pytest.skip("Test requires TLSv1.3")
+
+
[email protected](params=["h11", "h2"])
+def http_version(request: pytest.FixtureRequest) -> typing.Generator[str, None, None]:
+ if request.param == "h2":
+ urllib3.http2.inject_into_urllib3()
+
+ yield request.param
+
+ if request.param == "h2":
+ urllib3.http2.extract_from_urllib3()
diff --git a/test/with_dummyserver/test_http2.py b/test/with_dummyserver/test_http2.py
deleted file mode 100644
--- a/test/with_dummyserver/test_http2.py
+++ /dev/null
@@ -1,73 +0,0 @@
-from __future__ import annotations
-
-import subprocess
-from test import notWindows
-from test.conftest import ServerConfig
-
-import pytest
-
-import urllib3
-from dummyserver.socketserver import DEFAULT_CERTS
-from dummyserver.testcase import HTTPSHypercornDummyServerTestCase
-
-DEFAULT_CERTS_HTTP2 = DEFAULT_CERTS.copy()
-DEFAULT_CERTS_HTTP2["alpn_protocols"] = ["h2"]
-
-
-def setup_module() -> None:
- try:
- from urllib3.http2 import inject_into_urllib3
-
- inject_into_urllib3()
- except ImportError as e:
- pytest.skip(f"Could not import h2: {e!r}")
-
-
-def teardown_module() -> None:
- try:
- from urllib3.http2 import extract_from_urllib3
-
- extract_from_urllib3()
- except ImportError:
- pass
-
-
-class TestHypercornDummyServerTestCase(HTTPSHypercornDummyServerTestCase):
- certs = DEFAULT_CERTS_HTTP2
-
- @classmethod
- def setup_class(cls) -> None:
- super().setup_class()
- cls.base_url = f"https://{cls.host}:{cls.port}"
-
- @notWindows() # GitHub Actions Windows doesn't have HTTP/2 support.
- def test_hypercorn_server_http2(self) -> None:
- # This is a meta test to make sure our Hypercorn test server is actually using HTTP/2
- # before urllib3 is capable of speaking HTTP/2. Thanks, Daniel! <3
- output = subprocess.check_output(
- [
- "curl",
- "-vvv",
- "--http2",
- "--cacert",
- self.certs["ca_certs"],
- self.base_url,
- ],
- stderr=subprocess.STDOUT,
- )
-
- assert b"< HTTP/2 200" in output
- assert output.endswith(b"Dummy server!")
-
-
-def test_simple_http2(san_server: ServerConfig) -> None:
- with urllib3.PoolManager(ca_certs=san_server.ca_certs) as http:
- resp = http.request("HEAD", san_server.base_url, retries=False)
-
- assert resp.status == 200
- resp.headers.pop("date")
- assert resp.headers == {
- "content-type": "text/html; charset=utf-8",
- "content-length": "13",
- "server": "hypercorn-h2",
- }
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -129,7 +129,7 @@ def teardown_class(cls) -> None:
shutil.rmtree(cls.certs_dir)
- def test_simple(self) -> None:
+ def test_simple(self, http_version: str) -> None:
with HTTPSConnectionPool(
self.host,
self.port,
@@ -138,6 +138,7 @@ def test_simple(self) -> None:
) as https_pool:
r = https_pool.request("GET", "/")
assert r.status == 200, r.data
+ assert r.headers["server"] == f"hypercorn-{http_version}"
@resolvesLocalhostFQDN()
def test_dotted_fqdn(self) -> None:
@@ -1130,7 +1131,7 @@ def test_can_validate_ip_san(self, ipv4_san_server: ServerConfig) -> None:
class TestHTTPS_IPV6SAN:
@pytest.mark.parametrize("host", ["::1", "[::1]"])
def test_can_validate_ipv6_san(
- self, ipv6_san_server: ServerConfig, host: str
+ self, ipv6_san_server: ServerConfig, host: str, http_version: str
) -> None:
"""Ensure that urllib3 can validate SANs with IPv6 addresses in them."""
with HTTPSConnectionPool(
@@ -1141,3 +1142,4 @@ def test_can_validate_ipv6_san(
) as https_pool:
r = https_pool.request("GET", "/")
assert r.status == 200
+ assert r.headers["server"] == f"hypercorn-{http_version}"
| Create a mechanism to test different HTTP protocols (HTTP/1.1 and HTTP/2) with the same test case
Currently our test suite only has to worry about HTTP/1.1, but with HTTP/2 support coming we're going to need to run tests both in HTTP/1.1 and HTTP/2 modes to avoid duplicating the entire test suite for the high-level tests like for `PoolManager`.
My proposal for the mechanism would be to control Hypercorn's ALPN to only offer `http/1.1` or `h2` depending on the test suite, and then either run tests twice using a fixture, or allow external control somehow (an environment variable?) and run a separate job on CI? Open to other ideas too!
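The fixture route is the one the test/conftest.py diff above takes; condensed, it amounts to:

```python
import pytest

import urllib3.http2


@pytest.fixture(params=["h11", "h2"])
def http_version(request):
    if request.param == "h2":
        urllib3.http2.inject_into_urllib3()
    yield request.param
    if request.param == "h2":
        urllib3.http2.extract_from_urllib3()
```

Each test that takes `http_version` then runs once per protocol and can assert on the `server: hypercorn-h11`/`hypercorn-h2` response header to confirm which protocol actually served it.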
| 2024-01-24T08:08:26Z | [] | [] |
|
urllib3/urllib3 | 3,311 | urllib3__urllib3-3311 | [
"3297"
] | 71e7c35662a4b2ba8543194cc132616065dfc56a | diff --git a/src/urllib3/http2.py b/src/urllib3/http2.py
--- a/src/urllib3/http2.py
+++ b/src/urllib3/http2.py
@@ -10,6 +10,7 @@
import urllib3.connection
import urllib3.util.ssl_
+from urllib3.response import BaseHTTPResponse
from ._collections import HTTPHeaderDict
from .connection import HTTPSConnection
@@ -54,6 +55,7 @@ def putrequest(
skip_accept_encoding: bool = False,
) -> None:
with self._lock_h2_conn() as h2_conn:
+ self._request_url = url
self._h2_stream = h2_conn.get_next_available_stream_id()
if ":" in self.host:
@@ -134,7 +136,12 @@ def getresponse( # type: ignore[override]
self.close()
assert status is not None
- return HTTP2Response(status=status, headers=headers, data=bytes(data))
+ return HTTP2Response(
+ status=status,
+ headers=headers,
+ request_url=self._request_url,
+ data=bytes(data),
+ )
def close(self) -> None:
with self._lock_h2_conn() as h2_conn:
@@ -155,20 +162,39 @@ def close(self) -> None:
super().close()
-class HTTP2Response:
+class HTTP2Response(BaseHTTPResponse):
# TODO: This is a woefully incomplete response object, but works for non-streaming.
- def __init__(self, status: int, headers: HTTPHeaderDict, data: bytes) -> None:
- self.status = status
- self.headers = headers
- self.data = data
+ def __init__(
+ self,
+ status: int,
+ headers: HTTPHeaderDict,
+ request_url: str,
+ data: bytes,
+ decode_content: bool = False, # TODO: support decoding
+ ) -> None:
+ super().__init__(
+ status=status,
+ headers=headers,
+ # Following CPython, we map HTTP versions to major * 10 + minor integers
+ version=20,
+ # No reason phrase in HTTP/2
+ reason=None,
+ decode_content=decode_content,
+ request_url=request_url,
+ )
+ self._data = data
self.length_remaining = 0
+ @property
+ def data(self) -> bytes:
+ return self._data
+
def get_redirect_location(self) -> None:
return None
def inject_into_urllib3() -> None:
- HTTPSConnectionPool.ConnectionCls = HTTP2Connection # type: ignore[assignment]
+ HTTPSConnectionPool.ConnectionCls = HTTP2Connection
urllib3.connection.HTTPSConnection = HTTP2Connection # type: ignore[misc]
# TODO: Offer 'http/1.1' as well, but for testing purposes this is handy.
| diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -139,6 +139,7 @@ def test_simple(self, http_version: str) -> None:
r = https_pool.request("GET", "/")
assert r.status == 200, r.data
assert r.headers["server"] == f"hypercorn-{http_version}"
+ assert r.data == b"Dummy server!"
@resolvesLocalhostFQDN()
def test_dotted_fqdn(self) -> None:
| Add http_version property to BaseHTTPResponse
Now that HTTP/2 is coming, we should add a value to HTTPResponse that exposes the protocol version of the response. The normal HTTPResponse will either be HTTP/1.1 or HTTP/1.0. Check `_http_vsn_str` for that value.
| Turns out `BaseHTTPResponse` already has a `version` field, which gets populated from `http.client.HTTPResponse.version`. It's an integer mapped from the [HTTP-version field](https://www.rfc-editor.org/rfc/rfc9112.html#name-http-version) in the start line. For example, `HTTP/1.1` is mapped to 11: https://github.com/python/cpython/blob/b822b85ac11e73bbe4417bf03ee770ab116bb42d/Lib/http/client.py#L342-L348.
While arguably more ugly than using the full string or even the [ALPN name](https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids), according to [RFC 9110](https://www.rfc-editor.org/rfc/rfc9110.html#name-protocol-version) it's OK to assume that all HTTP protocols will be two digits (major and minor), with the second digit being 0 if not specified.
Two decisions are needed here:
1. Do we add a new `http_version` field and fill it with `HTTP/1.1` or `HTTP/2`?
2. Do we keep the current `version` field and use 20 for HTTP/2?
My loosely held opinion is 1. No 2. Yes.
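For illustration, a simplified version of that integer mapping; the helper itself is hypothetical, and 20 for HTTP/2 is option 2 above:

```python
_HTTP_VSN = {"HTTP/1.0": 10, "HTTP/1.1": 11, "HTTP/2": 20}


def version_as_int(http_version: str) -> int:
    # "HTTP/1.1" -> 11, following CPython's http.client convention,
    # extended with 20 for HTTP/2 (whose framing carries no version text).
    return _HTTP_VSN[http_version]
```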
So I'm new to the codebase, but I've been doing a bit of research into this over the past few hours and came to report essentially what @pquentin has already brought up!
It does seem `version` is already set as an integer when `BaseHTTPResponse` is initialized.
https://github.com/programmer-ke/urllib3/blob/03f7b65a47d87036674a7909ca6c57d4b5cf1a81/src/urllib3/response.py#L332
https://github.com/programmer-ke/urllib3/blob/03f7b65a47d87036674a7909ca6c57d4b5cf1a81/src/urllib3/connection.py#L484
https://docs.python.org/3/library/http.client.html#http.client.HTTPResponse.version
This comment suggests that with emscripten, `_http_vsn_str` doesn't exist on the connection object.
https://github.com/urllib3/urllib3/blob/03f7b65a47d87036674a7909ca6c57d4b5cf1a81/src/urllib3/connectionpool.py#L547
I'm not sure what the implications are when it comes to the version set in the `BaseHTTPResponse`. Maybe an edge case, but I'm not sure as I'm not familiar with the emscripten workings.. | 2024-01-24T16:50:00Z | [] | [] |
urllib3/urllib3 | 3,333 | urllib3__urllib3-3333 | [
"3331"
] | 12f923325a1794bab26c82dbfef2c47d44f054f8 | diff --git a/src/urllib3/contrib/emscripten/connection.py b/src/urllib3/contrib/emscripten/connection.py
--- a/src/urllib3/contrib/emscripten/connection.py
+++ b/src/urllib3/contrib/emscripten/connection.py
@@ -67,6 +67,7 @@ def __init__(
self.blocksize = blocksize
self.source_address = None
self.socket_options = None
+ self.is_verified = False
def set_tunnel(
self,
@@ -228,6 +229,10 @@ def __init__(
self.cert_reqs = None
+ # The browser will automatically verify all requests.
+ # We have no control over that setting.
+ self.is_verified = True
+
def set_cert(
self,
key_file: str | None = None,
| diff --git a/test/contrib/emscripten/test_emscripten.py b/test/contrib/emscripten/test_emscripten.py
--- a/test/contrib/emscripten/test_emscripten.py
+++ b/test/contrib/emscripten/test_emscripten.py
@@ -947,3 +947,28 @@ def count_calls(self, *args, **argv): # type: ignore[no-untyped-def]
assert count == 6
pyodide_test(selenium_coverage, testserver_http.http_host, find_unused_port())
+
+
+@install_urllib3_wheel()
+def test_insecure_requests_warning(
+ selenium_coverage: typing.Any, testserver_http: PyodideServerInfo
+) -> None:
+ @run_in_pyodide # type: ignore[misc]
+ def pyodide_test(selenium_coverage, host: str, port: int, https_port: int) -> None: # type: ignore[no-untyped-def]
+ import warnings
+
+ import urllib3
+ import urllib3.exceptions
+
+ http = urllib3.PoolManager()
+
+ with warnings.catch_warnings(record=True) as w:
+ http.request("GET", f"https://{host}:{https_port}")
+ assert len(w) == 0
+
+ pyodide_test(
+ selenium_coverage,
+ testserver_http.http_host,
+ testserver_http.http_port,
+ testserver_http.https_port,
+ )
| Emscripten support emits an InsecureRequestWarning even when using HTTPS
This is a side-effect of using JavaScript APIs instead of Python TLS: `is_verified` is not set correctly on the `EmscriptenHTTPConnection`, so urllib3 is emitting an `InsecureRequestWarning` for every request, even ones that are using HTTPS.
* Set the proper value of `is_verified` depending on whether the request is HTTP or HTTPS.
* Add a test case that asserts that an `InsecureRequestWarning` is emitted for HTTP and isn't emitted for HTTPS (see the sketch below).
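A sketch of such an assertion, mirroring the Pyodide test added in this PR (the URL is a placeholder; `simplefilter` is added here, beyond what the test does, to make capture deterministic):

```python
import warnings

import urllib3

http = urllib3.PoolManager()
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    http.request("GET", "https://localhost:8443/")  # placeholder HTTPS endpoint
assert len(caught) == 0  # no InsecureRequestWarning for verified HTTPS
```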
| 2024-01-31T01:39:35Z | [] | [] |
|
urllib3/urllib3 | 3,338 | urllib3__urllib3-3338 | [
"3330"
] | aa8d3dd2535cc125e123e5c2bca38738d6864b2a | diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -200,6 +200,21 @@ def lint(session: nox.Session) -> None:
mypy(session)
[email protected](python="3.11")
+def pyodideconsole(session: nox.Session) -> None:
+ # build wheel into dist folder
+ session.install("build")
+ session.run("python", "-m", "build")
+ session.run(
+ "cp",
+ "test/contrib/emscripten/templates/pyodide-console.html",
+ "dist/index.html",
+ external=True,
+ )
+ session.cd("dist")
+ session.run("python", "-m", "http.server")
+
+
# TODO: node support is not tested yet - it should work if you require('xmlhttprequest') before
# loading pyodide, but there is currently no nice way to do this with pytest-pyodide
# because you can't override the test runner properties easily - see
| diff --git a/test/contrib/emscripten/templates/pyodide-console.html b/test/contrib/emscripten/templates/pyodide-console.html
new file mode 100644
--- /dev/null
+++ b/test/contrib/emscripten/templates/pyodide-console.html
@@ -0,0 +1,271 @@
+<!-- taken from https://github.com/pyodide/pyodide/blob/main/src/templates/console.html -->
+<!-- Copyright (C) 2019-2022, Pyodide contributors and Mozilla -->
+<!-- SPDX-FileCopyrightText: 2019-2022, Pyodide contributors and Mozilla -->
+<!-- SPDX-License-Identifier: MPL-2.0 -->
+<!doctype html>
+<html>
+ <head>
+ <meta charset="UTF-8" />
+ <script src="https://cdn.jsdelivr.net/npm/jquery"></script>
+ <script src="https://cdn.jsdelivr.net/npm/[email protected]/js/jquery.terminal.min.js"></script>
+ <script src="https://cdn.jsdelivr.net/npm/[email protected]/js/unix_formatting.min.js"></script>
+ <link
+ href="https://cdn.jsdelivr.net/npm/[email protected]/css/jquery.terminal.min.css"
+ rel="stylesheet"
+ />
+ <style>
+ .terminal {
+ --size: 1.5;
+ --color: rgba(255, 255, 255, 0.8);
+ }
+ .noblink {
+ --animation: terminal-none;
+ }
+ body {
+ background-color: black;
+ }
+ #jquery-terminal-logo {
+ color: white;
+ border-color: white;
+ position: absolute;
+ top: 7px;
+ right: 18px;
+ z-index: 2;
+ }
+ #jquery-terminal-logo a {
+ color: gray;
+ text-decoration: none;
+ font-size: 0.7em;
+ }
+ #loading {
+ display: inline-block;
+ width: 50px;
+ height: 50px;
+ position: fixed;
+ top: 50%;
+ left: 50%;
+ border: 3px solid rgba(172, 237, 255, 0.5);
+ border-radius: 50%;
+ border-top-color: #fff;
+ animation: spin 1s ease-in-out infinite;
+ -webkit-animation: spin 1s ease-in-out infinite;
+ }
+
+ @keyframes spin {
+ to {
+ -webkit-transform: rotate(360deg);
+ }
+ }
+ @-webkit-keyframes spin {
+ to {
+ -webkit-transform: rotate(360deg);
+ }
+ }
+ </style>
+ </head>
+ <body>
+ <div id="jquery-terminal-logo">
+ <a href="https://terminal.jcubic.pl/">jQuery Terminal</a>
+ </div>
+ <div id="loading"></div>
+ <script>
+ "use strict";
+
+ function sleep(s) {
+ return new Promise((resolve) => setTimeout(resolve, s));
+ }
+
+ async function main() {
+ let indexURL = "https://cdn.jsdelivr.net/pyodide/v0.25.0/full/";
+ const urlParams = new URLSearchParams(window.location.search);
+ const buildParam = urlParams.get("build");
+ if (buildParam) {
+ if (["full", "debug", "pyc"].includes(buildParam)) {
+ indexURL = indexURL.replace(
+ "/full/",
+ "/" + urlParams.get("build") + "/",
+ );
+ } else {
+ console.warn(
+ 'Invalid URL parameter: build="' +
+ buildParam +
+ '". Using default "full".',
+ );
+ }
+ }
+ const { loadPyodide } = await import(indexURL + "pyodide.mjs");
+ // to facilitate debugging
+ globalThis.loadPyodide = loadPyodide;
+
+ let term;
+ globalThis.pyodide = await loadPyodide({
+ stdin: () => {
+ let result = prompt();
+ echo(result);
+ return result;
+ },
+ });
+ let { repr_shorten, BANNER, PyodideConsole } =
+ pyodide.pyimport("pyodide.console");
+ BANNER =
+ `Welcome to the Pyodide ${pyodide.version} terminal emulator 🐍\n` +
+ BANNER;
+ const pyconsole = PyodideConsole(pyodide.globals);
+
+ const namespace = pyodide.globals.get("dict")();
+ const await_fut = pyodide.runPython(
+ `
+ import builtins
+ from pyodide.ffi import to_js
+
+ async def await_fut(fut):
+ res = await fut
+ if res is not None:
+ builtins._ = res
+ return to_js([res], depth=1)
+
+ await_fut
+ `,
+ { globals: namespace },
+ );
+ namespace.destroy();
+
+ const echo = (msg, ...opts) =>
+ term.echo(
+ msg
+          .replaceAll("]]", "&rsqb;&rsqb;")
+          .replaceAll("[[", "&lsqb;&lsqb;"),
+ ...opts,
+ );
+
+ const ps1 = ">>> ";
+ const ps2 = "... ";
+
+ async function lock() {
+ let resolve;
+ const ready = term.ready;
+ term.ready = new Promise((res) => (resolve = res));
+ await ready;
+ return resolve;
+ }
+
+ async function interpreter(command) {
+ const unlock = await lock();
+ term.pause();
+ // multiline should be split (useful when pasting)
+ for (const c of command.split("\n")) {
+ const escaped = c.replaceAll(/\u00a0/g, " ");
+ const fut = pyconsole.push(escaped);
+ term.set_prompt(fut.syntax_check === "incomplete" ? ps2 : ps1);
+ switch (fut.syntax_check) {
+ case "syntax-error":
+ term.error(fut.formatted_error.trimEnd());
+ continue;
+ case "incomplete":
+ continue;
+ case "complete":
+ break;
+ default:
+ throw new Error(`Unexpected type ${ty}`);
+ }
+ // In JavaScript, await automatically also awaits any results of
+ // awaits, so if an async function returns a future, it will await
+ // the inner future too. This is not what we want so we
+ // temporarily put it into a list to protect it.
+ const wrapped = await_fut(fut);
+ // complete case, get result / error and print it.
+ try {
+ const [value] = await wrapped;
+ if (value !== undefined) {
+ echo(
+ repr_shorten.callKwargs(value, {
+ separator: "\n<long output truncated>\n",
+ }),
+ );
+ }
+ if (value instanceof pyodide.ffi.PyProxy) {
+ value.destroy();
+ }
+ } catch (e) {
+ if (e.constructor.name === "PythonError") {
+ const message = fut.formatted_error || e.message;
+ term.error(message.trimEnd());
+ } else {
+ throw e;
+ }
+ } finally {
+ fut.destroy();
+ wrapped.destroy();
+ }
+ }
+ term.resume();
+ await sleep(10);
+ unlock();
+ }
+
+ term = $("body").terminal(interpreter, {
+ greetings: BANNER,
+ prompt: ps1,
+ completionEscape: false,
+ completion: function (command, callback) {
+ callback(pyconsole.complete(command).toJs()[0]);
+ },
+ keymap: {
+ "CTRL+C": async function (event, original) {
+ pyconsole.buffer.clear();
+ term.enter();
+ echo("KeyboardInterrupt");
+ term.set_command("");
+ term.set_prompt(ps1);
+ },
+ TAB: (event, original) => {
+ const command = term.before_cursor();
+ // Disable completion for whitespaces.
+ if (command.trim() === "") {
+ term.insert("\t");
+ return false;
+ }
+ return original(event);
+ },
+ },
+ });
+ window.term = term;
+ pyconsole.stdout_callback = (s) => echo(s, { newline: false });
+ pyconsole.stderr_callback = (s) => {
+ term.error(s.trimEnd());
+ };
+ term.ready = Promise.resolve();
+ pyodide._api.on_fatal = async (e) => {
+ if (e.name === "Exit") {
+ term.error(e);
+ term.error("Pyodide exited and can no longer be used.");
+ } else {
+ term.error(
+ "Pyodide has suffered a fatal error. Please report this to the Pyodide maintainers.",
+ );
+ term.error("The cause of the fatal error was:");
+ term.error(e);
+ term.error("Look in the browser console for more details.");
+ }
+ await term.ready;
+ term.pause();
+ await sleep(15);
+ term.pause();
+ };
+
+ const searchParams = new URLSearchParams(window.location.search);
+ if (searchParams.has("noblink")) {
+ $(".cmd-cursor").addClass("noblink");
+ }
+ await term.ready;
+ await term.exec("import micropip\n");
+ await term.exec("micropip.list()\n");
+ await term.exec('await micropip.install("http://localhost:8000/urllib3-2.2.0-py3-none-any.whl")')
+ await term.exec("micropip.list()");
+ await term.exec("import urllib3");
+ await term.exec("urllib3.__version__");
+ }
+ window.console_ready = main();
+ </script>
+ </body>
+</html>
| Create a workflow (nox?) for testing Emscripten support locally
Would be great to have an easy-to-use workflow for contributors to:
* Run a single command
* Have all dependencies installed
* Open a Pyodide console in a web browser
* Make the local copy of urllib3 available for installation with micropip/Pyodide
This would help greatly with being able to poke around with and develop Emscripten/Pyodide support.
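With the `pyodideconsole` nox session in the diff above, running `nox -s pyodideconsole` builds the wheel into `dist/`, copies the console page next to it, and serves both at http://localhost:8000. The bundled page then auto-installs the wheel, roughly equivalent to typing this into the Pyodide console (the wheel filename is hard-coded in the template and will drift with the version):

```python
import micropip

# Top-level await works in the Pyodide console.
await micropip.install("http://localhost:8000/urllib3-2.2.0-py3-none-any.whl")
import urllib3

print(urllib3.__version__)
```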
| 2024-02-01T21:58:35Z | [] | [] |
|
urllib3/urllib3 | 3,354 | urllib3__urllib3-3354 | [
"3313"
] | d4ffa29ee1862b3d1afe584efb57d489a7659dac | diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -26,20 +26,21 @@
brotli = None
try:
- import zstandard as zstd # type: ignore[import-not-found]
-
+ import zstandard as zstd
+except (AttributeError, ImportError, ValueError): # Defensive:
+ HAS_ZSTD = False
+else:
# The package 'zstandard' added the 'eof' property starting
# in v0.18.0 which we require to ensure a complete and
# valid zstd stream was fed into the ZstdDecoder.
# See: https://github.com/urllib3/urllib3/pull/2624
- _zstd_version = _zstd_version = tuple(
+ _zstd_version = tuple(
map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr]
)
if _zstd_version < (0, 18): # Defensive:
- zstd = None
-
-except (AttributeError, ImportError, ValueError): # Defensive:
- zstd = None
+ HAS_ZSTD = False
+ else:
+ HAS_ZSTD = True
from . import util
from ._base_connection import _TYPE_BODY
@@ -163,7 +164,7 @@ def flush(self) -> bytes:
return b""
-if zstd is not None:
+if HAS_ZSTD:
class ZstdDecoder(ContentDecoder):
def __init__(self) -> None:
@@ -183,7 +184,7 @@ def flush(self) -> bytes:
ret = self._obj.flush() # note: this is a no-op
if not self._obj.eof:
raise DecodeError("Zstandard data is incomplete")
- return ret # type: ignore[no-any-return]
+ return ret
class MultiDecoder(ContentDecoder):
@@ -219,7 +220,7 @@ def _get_decoder(mode: str) -> ContentDecoder:
if brotli is not None and mode == "br":
return BrotliDecoder()
- if zstd is not None and mode == "zstd":
+ if HAS_ZSTD and mode == "zstd":
return ZstdDecoder()
return DeflateDecoder()
@@ -302,7 +303,7 @@ class BaseHTTPResponse(io.IOBase):
CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"]
if brotli is not None:
CONTENT_DECODERS += ["br"]
- if zstd is not None:
+ if HAS_ZSTD:
CONTENT_DECODERS += ["zstd"]
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
@@ -310,7 +311,7 @@ class BaseHTTPResponse(io.IOBase):
if brotli is not None:
DECODER_ERROR_CLASSES += (brotli.error,)
- if zstd is not None:
+ if HAS_ZSTD:
DECODER_ERROR_CLASSES += (zstd.ZstdError,)
def __init__(
diff --git a/src/urllib3/util/request.py b/src/urllib3/util/request.py
--- a/src/urllib3/util/request.py
+++ b/src/urllib3/util/request.py
@@ -29,7 +29,7 @@
else:
ACCEPT_ENCODING += ",br"
try:
- import zstandard as _unused_module_zstd # type: ignore[import-not-found] # noqa: F401
+ import zstandard as _unused_module_zstd # noqa: F401
except ImportError:
pass
else:
| diff --git a/test/__init__.py b/test/__init__.py
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -26,9 +26,11 @@
brotli = None
try:
- import zstandard as zstd # type: ignore[import-not-found]
+ import zstandard as _unused_module_zstd # noqa: F401
except ImportError:
- zstd = None
+ HAS_ZSTD = False
+else:
+ HAS_ZSTD = True
from urllib3 import util
from urllib3.connectionpool import ConnectionPool
@@ -144,13 +146,13 @@ def notBrotli() -> typing.Callable[[_TestFuncT], _TestFuncT]:
def onlyZstd() -> typing.Callable[[_TestFuncT], _TestFuncT]:
return pytest.mark.skipif(
- zstd is None, reason="only run if a python-zstandard library is installed"
+ not HAS_ZSTD, reason="only run if a python-zstandard library is installed"
)
def notZstd() -> typing.Callable[[_TestFuncT], _TestFuncT]:
return pytest.mark.skipif(
- zstd is not None,
+ HAS_ZSTD,
reason="only run if a python-zstandard library is not installed",
)
diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -30,7 +30,6 @@
BytesQueueBuffer,
HTTPResponse,
brotli,
- zstd,
)
from urllib3.util.response import is_fp_closed
from urllib3.util.retry import RequestHistory, Retry
@@ -389,6 +388,8 @@ def test_decode_brotli_error(self) -> None:
@onlyZstd()
def test_decode_zstd(self) -> None:
+ import zstandard as zstd
+
data = zstd.compress(b"foo")
fp = BytesIO(data)
@@ -397,6 +398,8 @@ def test_decode_zstd(self) -> None:
@onlyZstd()
def test_decode_multiframe_zstd(self) -> None:
+ import zstandard as zstd
+
data = (
# Zstandard frame
zstd.compress(b"foo")
@@ -416,6 +419,8 @@ def test_decode_multiframe_zstd(self) -> None:
@onlyZstd()
def test_chunked_decoding_zstd(self) -> None:
+ import zstandard as zstd
+
data = zstd.compress(b"foobarbaz")
fp = BytesIO(data)
@@ -447,6 +452,8 @@ def test_decode_zstd_error(self, data: bytes) -> None:
@onlyZstd()
@pytest.mark.parametrize("data", decode_param_set)
def test_decode_zstd_incomplete_preload_content(self, data: bytes) -> None:
+ import zstandard as zstd
+
data = zstd.compress(data)
fp = BytesIO(data[:-1])
@@ -456,6 +463,8 @@ def test_decode_zstd_incomplete_preload_content(self, data: bytes) -> None:
@onlyZstd()
@pytest.mark.parametrize("data", decode_param_set)
def test_decode_zstd_incomplete_read(self, data: bytes) -> None:
+ import zstandard as zstd
+
data = zstd.compress(data)
fp = BytesIO(data[:-1]) # shorten the data to trigger DecodeError
@@ -471,6 +480,8 @@ def test_decode_zstd_incomplete_read(self, data: bytes) -> None:
@onlyZstd()
@pytest.mark.parametrize("data", decode_param_set)
def test_decode_zstd_incomplete_read1(self, data: bytes) -> None:
+ import zstandard as zstd
+
data = zstd.compress(data)
fp = BytesIO(data[:-1])
@@ -489,6 +500,8 @@ def test_decode_zstd_incomplete_read1(self, data: bytes) -> None:
@onlyZstd()
@pytest.mark.parametrize("data", decode_param_set)
def test_decode_zstd_read1(self, data: bytes) -> None:
+ import zstandard as zstd
+
encoded_data = zstd.compress(data)
fp = BytesIO(encoded_data)
| Fix type checking when Zstandard is installed
### Subject
When I run type checking locally outside of a nox environment I get multiple errors because I have Zstandard installed.
`nox -s lint` succeeds because Zstandard is not listed in `mypy-requirements.txt`.
### Steps to Reproduce
1. Add `zstandard` to `mypy-requirements.txt`.
2. Run `nox -s lint`.
### Expected Behavior
Success regardless of Zstandard presence.
```sh
$ mypy -p dummyserver -m noxfile -p urllib3 -p test
Success: no issues found in 84 source files
```
### Actual Behavior
```sh
$ mypy -p dummyserver -m noxfile -p urllib3 -p test
src/urllib3/util/request.py:32: error: Unused "type: ignore" comment [unused-ignore]
src/urllib3/response.py:29: error: Unused "type: ignore" comment [unused-ignore]
src/urllib3/response.py:39: error: Incompatible types in assignment (expression has type "None", variable has type Module) [assignment]
src/urllib3/response.py:42: error: Incompatible types in assignment (expression has type "None", variable has type Module) [assignment]
src/urllib3/response.py:186: error: Unused "type: ignore" comment [unused-ignore]
test/__init__.py:29: error: Unused "type: ignore" comment [unused-ignore]
test/__init__.py:31: error: Incompatible types in assignment (expression has type "None", variable has type Module) [assignment]
```
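
A minimal sketch, assuming the same optional-dependency pattern the patch adopts, showing why a boolean flag type-checks cleanly where rebinding the module to `None` does not (the helper name is illustrative):

```python
try:
    import zstandard  # noqa: F401  # imported only to probe availability
except ImportError:
    HAS_ZSTD = False
else:
    HAS_ZSTD = True

def accept_encoding() -> str:
    # Branching on a bool instead of rebinding the module to None means no
    # variable ever changes type, so mypy passes whether or not the package
    # is installed.
    return "gzip,deflate" + (",zstd" if HAS_ZSTD else "")
```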
| 2024-02-24T14:22:04Z | [] | [] |
|
urllib3/urllib3 | 3,356 | urllib3__urllib3-3356 | [
"3122"
] | a7b81f554863f6dfafe2102e38d16688fdd9869a | diff --git a/src/urllib3/contrib/emscripten/response.py b/src/urllib3/contrib/emscripten/response.py
--- a/src/urllib3/contrib/emscripten/response.py
+++ b/src/urllib3/contrib/emscripten/response.py
@@ -155,7 +155,7 @@ def read(
self.length_is_certain = True
# wrap body in IOStream
self._response.body = BytesIO(self._response.body)
- if amt is not None:
+ if amt is not None and amt >= 0:
# don't cache partial content
cache_content = False
data = self._response.body.read(amt)
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -935,7 +935,10 @@ def read(
if decode_content is None:
decode_content = self.decode_content
- if amt is not None:
+ if amt and amt < 0:
+ # Negative numbers and `None` should be treated the same.
+ amt = None
+ elif amt is not None:
cache_content = False
if len(self._decoded_buffer) >= amt:
@@ -995,6 +998,9 @@ def read1(
"""
if decode_content is None:
decode_content = self.decode_content
+ if amt and amt < 0:
+ # Negative numbers and `None` should be treated the same.
+ amt = None
# try and respond without going to the network
if self._has_decoded_content:
if not decode_content:
@@ -1189,6 +1195,11 @@ def read_chunked(
if self._fp.fp is None: # type: ignore[union-attr]
return None
+ if amt and amt < 0:
+ # Negative numbers and `None` should be treated the same,
+ # but httplib handles only `None` correctly.
+ amt = None
+
while True:
self._update_chunk_length()
if self.chunk_left == 0:
| diff --git a/test/test_response.py b/test/test_response.py
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -217,6 +217,12 @@ def test_reference_read(self) -> None:
assert r.read() == b""
assert r.read() == b""
+ @pytest.mark.parametrize("read_args", ((), (None,), (-1,)))
+ def test_reference_read_until_eof(self, read_args: tuple[typing.Any, ...]) -> None:
+ fp = BytesIO(b"foo")
+ r = HTTPResponse(fp, preload_content=False)
+ assert r.read(*read_args) == b"foo"
+
def test_reference_read1(self) -> None:
fp = BytesIO(b"foobar")
r = HTTPResponse(fp, preload_content=False)
@@ -227,6 +233,14 @@ def test_reference_read1(self) -> None:
assert r.read1() == b"bar"
assert r.read1() == b""
+ @pytest.mark.parametrize("read1_args", ((), (None,), (-1,)))
+ def test_reference_read1_without_limit(
+ self, read1_args: tuple[typing.Any, ...]
+ ) -> None:
+ fp = BytesIO(b"foo")
+ r = HTTPResponse(fp, preload_content=False)
+ assert r.read1(*read1_args) == b"foo"
+
def test_reference_read1_nodecode(self) -> None:
fp = BytesIO(b"foobar")
r = HTTPResponse(fp, preload_content=False, decode_content=False)
@@ -1262,7 +1276,10 @@ def test_mock_transfer_encoding_chunked_custom_read(self) -> None:
response = list(resp.read_chunked(2))
assert expected_response == response
- def test_mock_transfer_encoding_chunked_unlmtd_read(self) -> None:
+ @pytest.mark.parametrize("read_chunked_args", ((), (None,), (-1,)))
+ def test_mock_transfer_encoding_chunked_unlmtd_read(
+ self, read_chunked_args: tuple[typing.Any, ...]
+ ) -> None:
stream = [b"foooo", b"bbbbaaaaar"]
fp = MockChunkedEncodingResponse(stream)
r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type]
@@ -1272,7 +1289,7 @@ def test_mock_transfer_encoding_chunked_unlmtd_read(self) -> None:
resp = HTTPResponse(
r, preload_content=False, headers={"transfer-encoding": "chunked"}
)
- assert stream == list(resp.read_chunked())
+ assert stream == list(resp.read_chunked(*read_chunked_args))
def test_read_not_chunked_response_as_chunks(self) -> None:
fp = BytesIO(b"foo")
| request(..., preload_content=False).read(-1) raises an unexpected Exception
### Subject
With `preload_content=False`, calling `read(-1)` on the body raises `RuntimeError("buffer is empty")` while `read()` or `read(None)` return the whole request body as expected.
### Environment
```python
import platform
import ssl
import urllib3
print("OS", platform.platform())
print("Python", platform.python_version())
print(ssl.OPENSSL_VERSION)
print("urllib3", urllib3.__version__)
```
prints:
```
OS macOS-13.5.1-arm64-arm-64bit
Python 3.11.4
OpenSSL 3.1.1 30 May 2023
urllib3 2.0.4
```
### Steps to Reproduce
Bad case:
```
import urllib3
http = urllib3.PoolManager()
http.request("GET", "https://httpbin.org/robots.txt", preload_content=False).read(-1)
```
raises: `RuntimeError: buffer is empty`.
while these work:
```
http.request("GET", "https://httpbin.org/robots.txt", preload_content=False).read()
```
or
```
http.request("GET", "https://httpbin.org/robots.txt", preload_content=False).read(None)
```
### Expected Behavior
The whole buffer should be returned, as `-1` is the expected default argument for file objects [according to the Python stdlib](https://docs.python.org/3/library/io.html?highlight=io#io.RawIOBase).
### Actual Behavior
It raises:
```
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/kosta/Library/Caches/pypoetry/virtualenvs/urllib3-read-IiCHxn1z-py3.11/lib/python3.11/site-packages/urllib3/response.py", line 877, in read
return self._decoded_buffer.get(amt)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/kosta/Library/Caches/pypoetry/virtualenvs/urllib3-read-IiCHxn1z-py3.11/lib/python3.11/site-packages/urllib3/response.py", line 255, in get
raise RuntimeError("buffer is empty")
```
Note: urllib3 1.x raises:
```
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/kosta/Library/Caches/pypoetry/virtualenvs/uai-snaky-path-TevYsdGm-py3.9/lib/python3.9/site-packages/urllib3/response.py", line 567, in read
data = self._fp_read(amt) if not fp_closed else b""
File "/Users/kosta/Library/Caches/pypoetry/virtualenvs/uai-snaky-path-TevYsdGm-py3.9/lib/python3.9/site-packages/urllib3/response.py", line 533, in _fp_read
return self._fp.read(amt) if amt is not None else self._fp.read()
File "/opt/homebrew/Cellar/[email protected]/3.9.17_1/Frameworks/Python.framework/Versions/3.9/lib/python3.9/http/client.py", line 462, in read
b = bytearray(amt)
ValueError: negative count
```
| Our `HTTPResponse` object actually behaves a lot more like an `io.BufferedIOBase` than an `io.RawIOBase` object, due to how `.read()` is implemented to always return either `amt` bytes or between 0 and `amt-1` bytes when EOF is reached. Perhaps our parent class `io.IOBase` isn't correct on `HTTPResponse`?
We actually get our behavior with `-1` from the standard library `http.client.HTTPResponse` which is what our implementation is based on. It doesn't look like `-1` would work there either? | 2024-02-27T17:12:30Z | [] | [] |
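
A minimal sketch of the normalization the fix applies, assuming a simplified body object (the real `HTTPResponse.read()` also handles decoding and buffering):

```python
import io
import typing

class Body:
    def __init__(self, raw: bytes) -> None:
        self._fp = io.BytesIO(raw)

    def read(self, amt: typing.Optional[int] = None) -> bytes:
        if amt and amt < 0:
            # Negative sizes mean "read to EOF", the same as None,
            # matching io.BufferedIOBase semantics.
            amt = None
        return self._fp.read() if amt is None else self._fp.read(amt)

assert Body(b"foo").read(-1) == b"foo"  # no longer an error case
```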
urllib3/urllib3 | 3,358 | urllib3__urllib3-3358 | [
"3335"
] | 733f638a2faa02b4ff8a9f3b5668949d39396b8b | diff --git a/dummyserver/socketserver.py b/dummyserver/socketserver.py
--- a/dummyserver/socketserver.py
+++ b/dummyserver/socketserver.py
@@ -108,6 +108,7 @@ def __init__(
socket_handler: typing.Callable[[socket.socket], None],
host: str = "localhost",
ready_event: threading.Event | None = None,
+ quit_event: threading.Event | None = None,
) -> None:
super().__init__()
self.daemon = True
@@ -115,6 +116,7 @@ def __init__(
self.socket_handler = socket_handler
self.host = host
self.ready_event = ready_event
+ self.quit_event = quit_event
def _start_server(self) -> None:
if self.USE_IPV6:
diff --git a/dummyserver/testcase.py b/dummyserver/testcase.py
--- a/dummyserver/testcase.py
+++ b/dummyserver/testcase.py
@@ -5,6 +5,7 @@
import ssl
import threading
import typing
+from test import LONG_TIMEOUT
import hypercorn
import pytest
@@ -19,11 +20,19 @@
def consume_socket(
- sock: SSLTransport | socket.socket, chunks: int = 65536
+ sock: SSLTransport | socket.socket,
+ chunks: int = 65536,
+ quit_event: threading.Event | None = None,
) -> bytearray:
consumed = bytearray()
+ sock.settimeout(LONG_TIMEOUT)
while True:
- b = sock.recv(chunks)
+ if quit_event and quit_event.is_set():
+ break
+ try:
+ b = sock.recv(chunks)
+ except (TimeoutError, socket.timeout):
+ continue
assert isinstance(b, bytes)
consumed += b
if b.endswith(b"\r\n\r\n"):
@@ -57,11 +66,16 @@ class SocketDummyServerTestCase:
@classmethod
def _start_server(
- cls, socket_handler: typing.Callable[[socket.socket], None]
+ cls,
+ socket_handler: typing.Callable[[socket.socket], None],
+ quit_event: threading.Event | None = None,
) -> None:
ready_event = threading.Event()
cls.server_thread = SocketServerThread(
- socket_handler=socket_handler, ready_event=ready_event, host=cls.host
+ socket_handler=socket_handler,
+ ready_event=ready_event,
+ host=cls.host,
+ quit_event=quit_event,
)
cls.server_thread.start()
ready_event.wait(5)
@@ -71,23 +85,41 @@ def _start_server(
@classmethod
def start_response_handler(
- cls, response: bytes, num: int = 1, block_send: threading.Event | None = None
+ cls,
+ response: bytes,
+ num: int = 1,
+ block_send: threading.Event | None = None,
) -> threading.Event:
ready_event = threading.Event()
+ quit_event = threading.Event()
def socket_handler(listener: socket.socket) -> None:
for _ in range(num):
ready_event.set()
- sock = listener.accept()[0]
- consume_socket(sock)
+ listener.settimeout(LONG_TIMEOUT)
+ while True:
+ if quit_event.is_set():
+ return
+ try:
+ sock = listener.accept()[0]
+ break
+ except (TimeoutError, socket.timeout):
+ continue
+ consume_socket(sock, quit_event=quit_event)
+ if quit_event.is_set():
+ sock.close()
+ return
if block_send:
- block_send.wait()
+ while not block_send.wait(LONG_TIMEOUT):
+ if quit_event.is_set():
+ sock.close()
+ return
block_send.clear()
sock.send(response)
sock.close()
- cls._start_server(socket_handler)
+ cls._start_server(socket_handler, quit_event=quit_event)
return ready_event
@classmethod
@@ -100,10 +132,25 @@ def start_basic_handler(
block_send,
)
+ @staticmethod
+ def quit_server_thread(server_thread: SocketServerThread) -> None:
+ if server_thread.quit_event:
+ server_thread.quit_event.set()
+ # in principle the maximum time that the thread can take to notice
+ # the quit_event is LONG_TIMEOUT and the thread should terminate
+ # shortly after that, we give 5 seconds leeway just in case
+ server_thread.join(LONG_TIMEOUT * 2 + 5.0)
+ if server_thread.is_alive():
+ raise Exception("server_thread did not exit")
+
@classmethod
def teardown_class(cls) -> None:
if hasattr(cls, "server_thread"):
- cls.server_thread.join(0.1)
+ cls.quit_server_thread(cls.server_thread)
+
+ def teardown_method(self) -> None:
+ if hasattr(self, "server_thread"):
+ self.quit_server_thread(self.server_thread)
def assert_header_received(
self,
@@ -128,11 +175,16 @@ def assert_header_received(
class IPV4SocketDummyServerTestCase(SocketDummyServerTestCase):
@classmethod
def _start_server(
- cls, socket_handler: typing.Callable[[socket.socket], None]
+ cls,
+ socket_handler: typing.Callable[[socket.socket], None],
+ quit_event: threading.Event | None = None,
) -> None:
ready_event = threading.Event()
cls.server_thread = SocketServerThread(
- socket_handler=socket_handler, ready_event=ready_event, host=cls.host
+ socket_handler=socket_handler,
+ ready_event=ready_event,
+ host=cls.host,
+ quit_event=quit_event,
)
cls.server_thread.USE_IPV6 = False
cls.server_thread.start()
| diff --git a/test/test_ssltransport.py b/test/test_ssltransport.py
--- a/test/test_ssltransport.py
+++ b/test/test_ssltransport.py
@@ -4,6 +4,7 @@
import select
import socket
import ssl
+import threading
import typing
from unittest import mock
@@ -111,20 +112,29 @@ def setup_class(cls) -> None:
cls.server_context, cls.client_context = server_client_ssl_contexts()
def start_dummy_server(
- self, handler: typing.Callable[[socket.socket], None] | None = None
+ self,
+ handler: typing.Callable[[socket.socket], None] | None = None,
+ validate: bool = True,
) -> None:
+ quit_event = threading.Event()
+
def socket_handler(listener: socket.socket) -> None:
sock = listener.accept()[0]
try:
with self.server_context.wrap_socket(sock, server_side=True) as ssock:
- request = consume_socket(ssock)
+ request = consume_socket(
+ ssock,
+ quit_event=quit_event,
+ )
+ if not validate:
+ return
validate_request(request)
ssock.send(sample_response())
except (ConnectionAbortedError, ConnectionResetError):
return
chosen_handler = handler if handler else socket_handler
- self._start_server(chosen_handler)
+ self._start_server(chosen_handler, quit_event=quit_event)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_start_closed_socket(self) -> None:
@@ -138,7 +148,7 @@ def test_start_closed_socket(self) -> None:
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_close_after_handshake(self) -> None:
"""Socket errors should be bubbled up"""
- self.start_dummy_server()
+ self.start_dummy_server(validate=False)
sock = socket.create_connection((self.host, self.port))
with SSLTransport(
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -12,6 +12,7 @@
import socket
import ssl
import tempfile
+import threading
import typing
import zlib
from collections import OrderedDict
@@ -955,7 +956,11 @@ def socket_handler(listener: socket.socket) -> None:
assert response.connection is None
def test_socket_close_socket_then_file(self) -> None:
- def consume_ssl_socket(listener: socket.socket) -> None:
+ quit_event = threading.Event()
+
+ def consume_ssl_socket(
+ listener: socket.socket,
+ ) -> None:
try:
with listener.accept()[0] as sock, original_ssl_wrap_socket(
sock,
@@ -964,11 +969,11 @@ def consume_ssl_socket(listener: socket.socket) -> None:
certfile=DEFAULT_CERTS["certfile"],
ca_certs=DEFAULT_CA,
) as ssl_sock:
- consume_socket(ssl_sock)
+ consume_socket(ssl_sock, quit_event=quit_event)
except (ConnectionResetError, ConnectionAbortedError, OSError):
pass
- self._start_server(consume_ssl_socket)
+ self._start_server(consume_ssl_socket, quit_event=quit_event)
with socket.create_connection(
(self.host, self.port)
) as sock, contextlib.closing(
@@ -983,6 +988,8 @@ def consume_ssl_socket(listener: socket.socket) -> None:
assert ssl_sock.fileno() == -1
def test_socket_close_stays_open_with_makefile_open(self) -> None:
+ quit_event = threading.Event()
+
def consume_ssl_socket(listener: socket.socket) -> None:
try:
with listener.accept()[0] as sock, original_ssl_wrap_socket(
@@ -992,11 +999,11 @@ def consume_ssl_socket(listener: socket.socket) -> None:
certfile=DEFAULT_CERTS["certfile"],
ca_certs=DEFAULT_CA,
) as ssl_sock:
- consume_socket(ssl_sock)
+ consume_socket(ssl_sock, quit_event=quit_event)
except (ConnectionResetError, ConnectionAbortedError, OSError):
pass
- self._start_server(consume_ssl_socket)
+ self._start_server(consume_ssl_socket, quit_event=quit_event)
with socket.create_connection(
(self.host, self.port)
) as sock, contextlib.closing(
@@ -2232,11 +2239,28 @@ def socket_handler(listener: socket.socket) -> None:
class TestMultipartResponse(SocketDummyServerTestCase):
def test_multipart_assert_header_parsing_no_defects(self) -> None:
+ quit_event = threading.Event()
+
def socket_handler(listener: socket.socket) -> None:
for _ in range(2):
- sock = listener.accept()[0]
- while not sock.recv(65536).endswith(b"\r\n\r\n"):
- pass
+ listener.settimeout(LONG_TIMEOUT)
+
+ while True:
+ if quit_event and quit_event.is_set():
+ return
+ try:
+ sock = listener.accept()[0]
+ break
+ except (TimeoutError, socket.timeout):
+ continue
+
+ sock.settimeout(LONG_TIMEOUT)
+ while True:
+ if quit_event and quit_event.is_set():
+ sock.close()
+ return
+ if sock.recv(65536).endswith(b"\r\n\r\n"):
+ break
sock.sendall(
b"HTTP/1.1 404 Not Found\r\n"
@@ -2252,7 +2276,7 @@ def socket_handler(listener: socket.socket) -> None:
)
sock.close()
- self._start_server(socket_handler)
+ self._start_server(socket_handler, quit_event=quit_event)
from urllib3.connectionpool import log
with mock.patch.object(log, "warning") as log_warning:
@@ -2308,15 +2332,26 @@ def socket_handler(listener: socket.socket) -> None:
def test_chunked_specified(
self, method: str, chunked: bool, body_type: str
) -> None:
+ quit_event = threading.Event()
buffer = bytearray()
expected_bytes = b"\r\n\r\na\r\nxxxxxxxxxx\r\n0\r\n\r\n"
def socket_handler(listener: socket.socket) -> None:
nonlocal buffer
- sock = listener.accept()[0]
- sock.settimeout(0)
+ listener.settimeout(LONG_TIMEOUT)
+ while True:
+ if quit_event.is_set():
+ return
+ try:
+ sock = listener.accept()[0]
+ break
+ except (TimeoutError, socket.timeout):
+ continue
+ sock.settimeout(LONG_TIMEOUT)
while expected_bytes not in buffer:
+ if quit_event.is_set():
+ return
with contextlib.suppress(BlockingIOError):
buffer += sock.recv(65536)
@@ -2327,7 +2362,7 @@ def socket_handler(listener: socket.socket) -> None:
)
sock.close()
- self._start_server(socket_handler)
+ self._start_server(socket_handler, quit_event=quit_event)
body: typing.Any
if body_type == "generator":
| Bump pytest to 8.0.0
Changelog: https://docs.pytest.org/en/8.0.x/changelog.html#pytest-8-0-0-2024-01-27
| Umm,
In current main I tested with pytest 7.4.4 and
```
nox -rs test-3.12 -- test/contrib/test_pyopenssl.py::TestHTTPS_TLSv1_3
```
passed.
With pytest 8.0.0 it fails:
```
FAILED test/contrib/test_pyopenssl.py::TestHTTPS_TLSv1_3::test_ssl_context_ssl_version_uses_ssl_min_max_versions - Failed: DID NOT WARN. No warnings of type (<class 'DeprecationWarning'>,) were emitted.
```
The changelog mentions some changes regarding `pytest.warns()`, but the test should still work... and yet it doesn't.
With pytest 8.0.0
* `test/contrib/test_pyopenssl.py::TestHTTPS_TLSv1_3::test_ssl_context_ssl_version_uses_ssl_min_max_versions` FAILS
* `test/contrib/test_pyopenssl.py::TestHTTPS_TLSv1_2::test_ssl_context_ssl_version_uses_ssl_min_max_versions` WORKS
So I guess it's more related to how pytest collects the tests, since TestHTTPS_TLSv1_3 was meant to be SKIPPED with `ssl.PROTOCOL_TLSv1_3 isn't available`.
I think we can bump the memory limit in `test_get_all_memory_usage_single_chunk` from 10.01 MB to 10.1 MB, or even a bit higher if needed, to fix the consistent [macOS 3.11](https://github.com/urllib3/urllib3/actions/runs/7868259343/job/21465191410?pr=3335#logs) failures; this shouldn't change the meaning of the test
This sounds good to me! But can we report it to pytest-memray first?
Since it's relevant to this discussion
https://github.com/urllib3/urllib3/pull/3337#issuecomment-1924429650
> This appears to break test_get_all_memory_usage_single_chunk on macOS 3.10/11/12 somehow:
...
> The 528B in response.py at line 246 are allocated by the deque. The 64KiB are allocated in SSLSocket.read() but this makes no sense in this context as the test does not call SSL at all:
...
> This sounds like an incompatibility between pytest 8 and pytest-memray on macOS that we should report to pytest-memray.
Copying my comment from https://github.com/urllib3/urllib3/pull/3337#issuecomment-1926391324
> I wonder, **should we increase the memory limit for this test to "10.064 MB"?** After all, we already have "10.01 MB", where I assume the "0.01" was meant to give some room, as indicated in [pytest.mark.limit_memory](https://pytest-memray.readthedocs.io/en/latest/usage.html#pytest.mark.limit_memory)
>
> `test/test_response.py::TestBytesQueueBuffer::test_get_all_memory_usage_single_chunk` fails not only on macOS
>
> * macOS 3.10
> * macOS 3.11
> * macOS 3.12
> * Ubuntu 22.04 3.12
> * Ubuntu 3.x test_brotlipy
>
> I investigated a bit
>
> * if we run the `test/test_response.py::TestBytesQueueBuffer` tests alone, they pass
> * if we run `test/contrib/test_pyopenssl.py::TestSocketClosing:test_socket_close_socket_then_file` or `test/contrib/test_pyopenssl.py::TestSocketClosing::test_socket_close_stays_open_with_makefile_open` before `test/test_response.py::TestBytesQueueBuffer::test_get_all_memory_usage_single_chunk` then it will fail.
>
> * I can reproduce 100% on my mac laptop.
> * the failures seem to be a **consequence of running other tests prior to this one**.
>
...
> This sounds good to me! But can we report it to pytest-memray first?
I wonder if the warning is relevant to our case

My understanding is that objects created during one test may not be deleted immediately: if you allocate two strings of 1MB each and Python does not release the memory of the first string immediately, your test will use 2MB of memory. But objects created by one test should not be leaking into another, I think?
I created https://github.com/bloomberg/pytest-memray/issues/109
> But objects created by one test should not be leaking into another, I think?
Just to clarify, here you refer to the suspicion that the 64.0KiB allocation below, on ssl.py:1107, does not really belong to `test_get_all_memory_usage_single_chunk`.
When we run this test in isolation the test passes, but if we run `test_socket_close_socket_then_file` before `test_get_all_memory_usage_single_chunk` it fails. That suggests that the allocation of 64KiB really happens in `test_socket_close_socket_then_file` but "leaks" into `test_get_all_memory_usage_single_chunk`.
```
_________ TestBytesQueueBuffer.test_get_all_memory_usage_single_chunk __________
Test was limited to 10.0MiB but allocated 10.1MiB
------------------------------ memray-max-memory -------------------------------
List of allocations:
- 64.0KiB allocated here:
read:/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/ssl.py:1107
...
- 528.0B allocated here:
__init__:/Users/runner/work/urllib3/urllib3/.nox/test-3-12/lib/python3.12/site-packages/urllib3/response.py:246
...
- 10.0MiB allocated here:
test_get_all_memory_usage_single_chunk:/Users/runner/work/urllib3/urllib3/test/test_response.py:115
...
```
See https://github.com/bloomberg/pytest-memray/issues/109#issuecomment-1967627764 | 2024-03-04T13:19:19Z | [] | [] |
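
A hedged sketch of the limit bump discussed above, using pytest-memray's documented `limit_memory` marker (test name and payload are illustrative, not the actual urllib3 test):

```python
import pytest

@pytest.mark.limit_memory("10.1 MB")  # was "10.01 MB"; the extra headroom
def test_single_chunk_memory():       # absorbs allocations leaking in from
    chunk = bytes(10 * 1024 * 1024)   # earlier tests; one 10 MiB payload
    assert len(chunk) == 10 * 1024 * 1024
```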
cython/cython | 71 | cython__cython-71 | [
"674"
] | 987e27c9e0291a5f701f58917a2cfa112d77d232 | diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py
--- a/Cython/Compiler/ExprNodes.py
+++ b/Cython/Compiler/ExprNodes.py
@@ -5918,6 +5918,9 @@ class PyCFunctionNode(ExprNode, ModuleNameMixin):
code_object = None
binding = False
def_node = None
+ defaults = None
+ defaults_struct = None
+ defaults_pyobjects = 0
type = py_object_type
is_temp = 1
@@ -5933,10 +5936,48 @@ def analyse_types(self, env):
env.use_utility_code(fused_function_utility_code)
else:
env.use_utility_code(binding_cfunc_utility_code)
+ self.analyse_default_args(env)
#TODO(craig,haoyu) This should be moved to a better place
self.set_mod_name(env)
+ def analyse_default_args(self, env):
+ """
+ Handle non-literal function's default arguments.
+ """
+ nonliteral_objects = []
+ nonliteral_other = []
+ for arg in self.def_node.args:
+ if arg.default and not arg.default.is_literal:
+ arg.is_dynamic = True
+ if arg.type.is_pyobject:
+ nonliteral_objects.append(arg)
+ else:
+ nonliteral_other.append(arg)
+        if nonliteral_objects or nonliteral_other:
+ module_scope = env.global_scope()
+ cname = module_scope.next_id(Naming.defaults_struct_prefix)
+ scope = Symtab.StructOrUnionScope(cname)
+ self.defaults = []
+ for arg in nonliteral_objects:
+ entry = scope.declare_var(arg.name, arg.type, None,
+ Naming.arg_prefix + arg.name,
+ allow_pyobject=True)
+ self.defaults.append((arg, entry))
+ for arg in nonliteral_other:
+ entry = scope.declare_var(arg.name, arg.type, None,
+ Naming.arg_prefix + arg.name,
+ allow_pyobject=False)
+ self.defaults.append((arg, entry))
+ entry = module_scope.declare_struct_or_union(
+ None, 'struct', scope, 1, None, cname=cname)
+ self.defaults_struct = scope
+ self.defaults_pyobjects = len(nonliteral_objects)
+ for arg, entry in self.defaults:
+ arg.default_value = '%s->%s' % (
+ Naming.dynamic_args_cname, entry.cname)
+ self.def_node.defaults_struct = self.defaults_struct.name
+
def may_be_none(self):
return False
@@ -6018,6 +6059,17 @@ def generate_cyfunction_code(self, code):
self.result()))
code.put_giveref(self.py_result())
+ if self.defaults:
+ code.putln(
+ 'if (!__Pyx_CyFunction_InitDefaults(%s, sizeof(%s), %d)) %s' % (
+ self.result(), self.defaults_struct.name,
+ self.defaults_pyobjects, code.error_goto(self.pos)))
+ defaults = '__Pyx_CyFunction_Defaults(%s, %s)' % (
+ self.defaults_struct.name, self.result())
+ for arg, entry in self.defaults:
+ arg.generate_assignment_code(code, target='%s->%s' % (
+ defaults, entry.cname))
+
if self.specialized_cpdefs:
self.generate_fused_cpdef(code, code_object_result, flags)
diff --git a/Cython/Compiler/Naming.py b/Cython/Compiler/Naming.py
--- a/Cython/Compiler/Naming.py
+++ b/Cython/Compiler/Naming.py
@@ -49,6 +49,8 @@
closure_class_prefix = pyrex_prefix + "scope_struct_"
lambda_func_prefix = pyrex_prefix + "lambda_"
module_is_main = pyrex_prefix + "module_is_main_"
+defaults_struct_prefix = pyrex_prefix + "defaults"
+dynamic_args_cname = pyrex_prefix + "dynamic_args"
args_cname = pyrex_prefix + "args"
generator_cname = pyrex_prefix + "generator"
diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py
--- a/Cython/Compiler/Nodes.py
+++ b/Cython/Compiler/Nodes.py
@@ -677,6 +677,7 @@ class CArgDeclNode(Node):
# is_self_arg boolean Is the "self" arg of an extension type method
# is_type_arg boolean Is the "class" arg of an extension type classmethod
# is_kw_only boolean Is a keyword-only argument
+ # is_dynamic boolean Non-literal arg stored inside CyFunction
child_attrs = ["base_type", "declarator", "default"]
@@ -690,6 +691,7 @@ class CArgDeclNode(Node):
name_declarator = None
default_value = None
annotation = None
+ is_dynamic = 0
def analyse(self, env, nonempty = 0, is_self_arg = False):
if is_self_arg:
@@ -738,6 +740,21 @@ def annotate(self, code):
if self.default:
self.default.annotate(code)
+ def generate_assignment_code(self, code, target=None):
+ default = self.default
+ if default is None or default.is_literal:
+ return
+ if target is None:
+ target = self.calculate_default_value_code(code)
+ default.generate_evaluation_code(code)
+ default.make_owned_reference(code)
+ result = default.result_as(self.type)
+ code.putln("%s = %s;" % (target, result))
+ if self.type.is_pyobject:
+ code.put_giveref(default.result())
+ default.generate_post_assignment_code(code)
+ default.free_temps(code)
+
class CBaseTypeNode(Node):
# Abstract base class for C base type nodes.
@@ -1800,20 +1817,8 @@ def generate_wrapper_functions(self, code):
def generate_execution_code(self, code):
# Evaluate and store argument default values
for arg in self.args:
- default = arg.default
- if default:
- if not default.is_literal:
- default.generate_evaluation_code(code)
- default.make_owned_reference(code)
- result = default.result_as(arg.type)
- code.putln(
- "%s = %s;" % (
- arg.calculate_default_value_code(code),
- result))
- if arg.type.is_pyobject:
- code.put_giveref(default.result())
- default.generate_post_assignment_code(code)
- default.free_temps(code)
+ if not arg.is_dynamic:
+ arg.generate_assignment_code(code)
# For Python class methods, create and store function object
if self.assmt:
self.assmt.generate_execution_code(code)
@@ -2645,6 +2650,7 @@ class DefNode(FuncDefNode):
self_in_stararg = 0
py_cfunc_node = None
requires_classobj = False
+ defaults_struct = None # Dynamic kwrds structure name
doc = None
fused_py_func = False
@@ -3512,6 +3518,11 @@ def generate_argument_values_setup_code(self, args, max_positional_args, argtupl
code.putln("PyObject* values[%d] = {%s};" % (
max_args, ','.join('0'*max_args)))
+ if self.defaults_struct:
+ code.putln('%s *%s = __Pyx_CyFunction_Defaults(%s, %s);' % (
+ self.defaults_struct, Naming.dynamic_args_cname,
+ self.defaults_struct, Naming.self_cname))
+
# assign borrowed Python default values to the values array,
# so that they can be overwritten by received arguments below
for i, arg in enumerate(args):
| diff --git a/tests/bugs.txt b/tests/bugs.txt
--- a/tests/bugs.txt
+++ b/tests/bugs.txt
@@ -18,7 +18,6 @@ temp_sideeffects_T654
class_scope_T671
slice2_T636
builtin_subtype_methods_T653
-default_args_T674
# CPython regression tests that don't current work:
pyregr.test_threadsignals
diff --git a/tests/run/dynamic_args.pyx b/tests/run/dynamic_args.pyx
new file mode 100644
--- /dev/null
+++ b/tests/run/dynamic_args.pyx
@@ -0,0 +1,24 @@
+# mode: run
+# ticket: 674
+
+cdef class Foo:
+ cdef str name
+
+ def __init__(self, name):
+ self.name = name
+
+ def __repr__(self):
+ return '<%s>' % self.name
+
+def test_exttype_args(a, b, c):
+ """
+ >>> f1 = test_exttype_args([1, 2, 3], 123, Foo('Foo'))
+ >>> f2 = test_exttype_args([0], 0, Foo('Bar'))
+ >>> f1()
+ ([1, 2, 3], 123, <Foo>)
+ >>> f2()
+ ([0], 0, <Bar>)
+ """
+ def inner(a=a, int b=b, Foo c=c):
+ return a, b, c
+ return inner
| NumPy integration fails on Python 2.4/64-bit due to Py_ssize_t != Py_intptr_t
See thread `numpy, Py_ssize_t, cython and 64 bits python 2.4` on numpy-discuss.
Migrated from http://trac.cython.org/ticket/114
| 2011-11-06T16:20:47Z | [] | [] |
|
cython/cython | 120 | cython__cython-120 | [
"768"
] | a1dc82035d6ef68780dbf8f578e229a4b51e609a | diff --git a/Cython/Compiler/FlowControl.py b/Cython/Compiler/FlowControl.py
--- a/Cython/Compiler/FlowControl.py
+++ b/Cython/Compiler/FlowControl.py
@@ -313,6 +313,12 @@ def __init__(self, lhs, rhs, entry):
def __repr__(self):
return '%s(entry=%r)' % (self.__class__.__name__, self.entry)
+ def infer_type(self, scope):
+ return self.rhs.infer_type(scope)
+
+ def type_dependencies(self, scope):
+ return self.rhs.type_dependencies(scope)
+
class Argument(NameAssignment):
def __init__(self, lhs, rhs, entry):
@@ -325,6 +331,13 @@ def __init__(self, lhs, entry):
NameAssignment.__init__(self, lhs, lhs, entry)
self.is_deletion = True
+ def infer_type(self, scope):
+ inferred_type = self.rhs.infer_type(scope)
+ if (not inferred_type.is_pyobject and
+ inferred_type.can_coerce_to_pyobject(scope)):
+ return py_object_type
+ return inferred_type
+
class Uninitialized(object):
pass
diff --git a/Cython/Compiler/TypeInference.py b/Cython/Compiler/TypeInference.py
--- a/Cython/Compiler/TypeInference.py
+++ b/Cython/Compiler/TypeInference.py
@@ -363,7 +363,7 @@ def infer_types(self, scope):
continue
all = set()
for assmt in entry.cf_assignments:
- all.update(assmt.rhs.type_dependencies(scope))
+ all.update(assmt.type_dependencies(scope))
if all:
dependancies_by_entry[entry] = all
for dep in all:
@@ -401,12 +401,12 @@ def resolve_dependancy(dep):
# Deal with simple circular dependancies...
for entry, deps in dependancies_by_entry.items():
if len(deps) == 1 and deps == set([entry]):
- types = [assmt.rhs.infer_type(scope)
+ types = [assmt.infer_type(scope)
for assmt in entry.cf_assignments
- if assmt.rhs.type_dependencies(scope) == ()]
+ if assmt.type_dependencies(scope) == ()]
if types:
entry.type = spanning_type(types, entry.might_overflow)
- types = [assmt.rhs.infer_type(scope)
+ types = [assmt.infer_type(scope)
for assmt in entry.cf_assignments]
entry.type = spanning_type(types, entry.might_overflow) # might be wider...
resolve_dependancy(entry)
| diff --git a/tests/errors/w_uninitialized_del.pyx b/tests/errors/w_uninitialized_del.pyx
--- a/tests/errors/w_uninitialized_del.pyx
+++ b/tests/errors/w_uninitialized_del.pyx
@@ -9,8 +9,6 @@ def foo(x):
return a, b
_ERRORS = """
-7:9: Deletion of non-Python, non-C++ object
7:12: local variable 'b' referenced before assignment
-7:12: Deletion of non-Python, non-C++ object
9:12: local variable 'a' referenced before assignment
"""
diff --git a/tests/run/type_inference_T768.pyx b/tests/run/type_inference_T768.pyx
new file mode 100644
--- /dev/null
+++ b/tests/run/type_inference_T768.pyx
@@ -0,0 +1,21 @@
+# mode: run
+# ticket: 768
+from cython cimport typeof
+
+def type_inference_del_int():
+ """
+ >>> type_inference_del_int()
+ 'Python object'
+ """
+ x = 1
+ del x
+ return typeof(x)
+
+def type_inference_del_dict():
+ """
+ >>> type_inference_del_dict()
+ 'dict object'
+ """
+ x = {}
+ del x
+ return typeof(x)
diff --git a/tests/run/type_inference_T768_cpp.pyx b/tests/run/type_inference_T768_cpp.pyx
new file mode 100644
--- /dev/null
+++ b/tests/run/type_inference_T768_cpp.pyx
@@ -0,0 +1,21 @@
+# mode: run
+# tag: cpp
+# ticket: 768
+from cython cimport typeof
+
+cdef extern from "shapes.h" namespace "shapes":
+ cdef cppclass Shape:
+ float area()
+
+ cdef cppclass Circle(Shape):
+ int radius
+ Circle(int)
+
+def type_inference_del_cpp():
+ """
+ >>> type_inference_del_cpp()
+ 'Circle *'
+ """
+ x = new Circle(10)
+ del x
+ return typeof(x)
| empty for-int-in-range loop doesn't behave as in Python
When optimised into a C loop, the for-in-range loop over an empty range still changes the value of the loop variable. In Python, this is not the case.
```
def go_py_empty():
i = 20
for i in range(4,0):
print u"Spam!"
return i # == 20
def go_c_empty():
cdef int i = 20
for i in range(4,0):
print u"Spam!"
return i # == 3
```
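
For comparison, plain CPython leaves the loop variable untouched when the range is empty, which is the behaviour the optimised C loop needs to preserve:

```python
def go_empty():
    i = 20
    for i in range(4, 0):  # range(4, 0) is empty, so the body never runs
        print("Spam!")
    return i

assert go_empty() == 20  # an empty loop must not rebind i
```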
At 2009-02-08T11:55:47Z **mlh** added attachment [if_patch.txt](https://gist.github.com/374838b436356df3a17afb7df2827a6f)
Migrated from http://trac.cython.org/ticket/208
| 2012-05-10T17:33:45Z | [] | [] |
|
cython/cython | 200 | cython__cython-200 | [
"113"
] | 16cb9327472b05ec32e52b3567413e481d8646d2 | diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py
--- a/Cython/Compiler/Code.py
+++ b/Cython/Compiler/Code.py
@@ -484,7 +484,7 @@ def __init__(self, owner, names_taken=set()):
self.in_try_finally = 0
self.exc_vars = None
- self.temps_allocated = [] # of (name, type, manage_ref)
+ self.temps_allocated = [] # of (name, type, manage_ref, static)
self.temps_free = {} # (type, manage_ref) -> list of free vars with same type/managed status
self.temps_used_type = {} # name -> (type, manage_ref)
self.temp_counter = 0
@@ -563,7 +563,7 @@ def label_used(self, lbl):
# temp handling
- def allocate_temp(self, type, manage_ref):
+ def allocate_temp(self, type, manage_ref, static=False):
"""
Allocates a temporary (which may create a new one or get a previously
allocated and released one of the same type). Type is simply registered
@@ -578,6 +578,10 @@ def allocate_temp(self, type, manage_ref):
still has to be passed. It is recommended to pass False by convention
if it is known that type will never be a Python object.
+ static=True marks the temporary declaration with "static".
+ This is only used when allocating backing store for a module-level
+ C array literals.
+
A C string referring to the variable is returned.
"""
if type.is_const:
@@ -595,7 +599,7 @@ def allocate_temp(self, type, manage_ref):
self.temp_counter += 1
result = "%s%d" % (Naming.codewriter_temp_prefix, self.temp_counter)
if not result in self.names_taken: break
- self.temps_allocated.append((result, type, manage_ref))
+ self.temps_allocated.append((result, type, manage_ref, static))
self.temps_used_type[result] = (type, manage_ref)
if DebugFlags.debug_temp_code_comments:
self.owner.putln("/* %s allocated */" % result)
@@ -626,7 +630,7 @@ def temps_in_use(self):
that are currently in use.
"""
used = []
- for name, type, manage_ref in self.temps_allocated:
+ for name, type, manage_ref, static in self.temps_allocated:
freelist = self.temps_free.get((type, manage_ref))
if freelist is None or name not in freelist:
used.append((name, type, manage_ref and type.is_pyobject))
@@ -645,7 +649,7 @@ def all_managed_temps(self):
"""Return a list of (cname, type) tuples of refcount-managed Python objects.
"""
return [(cname, type)
- for cname, type, manage_ref in self.temps_allocated
+ for cname, type, manage_ref, static in self.temps_allocated
if manage_ref]
def all_free_managed_temps(self):
@@ -1604,7 +1608,7 @@ def put_var_declaration(self, entry, storage_class="",
self.putln(";")
def put_temp_declarations(self, func_context):
- for name, type, manage_ref in func_context.temps_allocated:
+ for name, type, manage_ref, static in func_context.temps_allocated:
decl = type.declaration_code(name)
if type.is_pyobject:
self.putln("%s = NULL;" % decl)
@@ -1612,7 +1616,7 @@ def put_temp_declarations(self, func_context):
import MemoryView
self.putln("%s = %s;" % (decl, MemoryView.memslice_entry_init))
else:
- self.putln("%s;" % decl)
+ self.putln("%s%s;" % (static and "static " or "", decl))
def put_h_guard(self, guard):
self.putln("#ifndef %s" % guard)
diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py
--- a/Cython/Compiler/ExprNodes.py
+++ b/Cython/Compiler/ExprNodes.py
@@ -1815,7 +1815,7 @@ def generate_assignment_code(self, rhs, code):
return # There was an error earlier
if (self.entry.type.is_ptr and isinstance(rhs, ListNode)
- and not self.lhs_of_first_assignment):
+ and not self.lhs_of_first_assignment and not rhs.in_module_scope):
error(self.pos, "Literal list must be assigned to pointer at time of declaration")
# is_pyglobal seems to be True for module level-globals only.
@@ -5914,6 +5914,7 @@ class ListNode(SequenceNode):
obj_conversion_errors = []
type = list_type
+ in_module_scope = False
gil_message = "Constructing Python list"
@@ -5934,6 +5935,8 @@ def analyse_types(self, env):
node = SequenceNode.analyse_types(self, env)
node.obj_conversion_errors = held_errors()
release_errors(ignore=True)
+ if env.is_module_scope:
+ self.in_module_scope = True
return node
def coerce_to(self, dst_type, env):
@@ -5975,6 +5978,13 @@ def as_tuple(self):
t.constant_result = tuple(self.constant_result)
return t
+ def allocate_temp_result(self, code):
+ if self.type.is_array and self.in_module_scope:
+ self.temp_code = code.funcstate.allocate_temp(
+ self.type, manage_ref=False, static=True)
+ else:
+ SequenceNode.allocate_temp_result(self, code)
+
def release_temp_result(self, env):
if self.type.is_array:
# To be valid C++, we must allocate the memory on the stack
| diff --git a/tests/run/literal_lists.pyx b/tests/run/literal_lists.pyx
--- a/tests/run/literal_lists.pyx
+++ b/tests/run/literal_lists.pyx
@@ -55,6 +55,26 @@ def test_struct(int x, y):
print_struct(aa[0])
print_struct([1, 2, <double**>1])
+cdef int m_int = -1
+cdef int* m_iarray = [4, m_int]
+cdef int** m_piarray = [m_iarray, &m_int]
+cdef char** m_carray = [b"a", b"bc"]
+cdef MyStruct* m_structarray = [[m_int,0,NULL], [1,m_int+1,NULL]]
+
+def test_module_level():
+ """
+ >>> test_module_level()
+ 4 -1
+ 4 -1
+ True True
+ 1 0 True
+ """
+ print m_iarray[0], m_iarray[1]
+ print m_piarray[0][0], m_piarray[1][0]
+ print m_carray[0] == b"a", m_carray[1] == b"bc"
+ print_struct(m_structarray[1])
+
+
# Make sure it's still naturally an object.
[0,1,2,3].append(4)
| Python array support
Adapted from patch at http://trac.cython.org/cython_trac/ticket/314
Changes:
* reverted Py_MEM_Realloc to Py_Mem_Resize, due to warnings
* removed the inline keyword (not portable, and Py_LOCAL_INLINE causes warnings)
* moved a C declaration to the top of a function to avoid a "mixed declaration" warning
Python array objects are useful because there is no dependency like with numpy, and they make sense for small 1D arrays. With this patch you can have C-level access to Python arrays, while still having the convenience of Python taking care of garbage collection. Another advantage is that they can be pickled, so you can pass them around to other processes with multiprocessing.
I didn't really succeed in getting the tests to compile, unfortunately. But if I place these files in the directory of a project it works fine. Another thing is that I'd like to get rid of all warnings; there's still a gcc warning about `__getbuffer__` being declared but not used.
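
As a rough illustration of the kind of usage this enables (based on the `cpython.array` cimport path as it later stabilized; the attribute names reflect that later API rather than this exact patch):

```cython
from cpython cimport array
import array

def double_all(values):
    cdef array.array a = array.array('i', values)  # plain stdlib array
    cdef Py_ssize_t i
    for i in range(len(a)):
        a.data.as_ints[i] *= 2   # direct C-level access, no numpy dependency
    return a
```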
| BTW I didn't succeed in removing ob_type. I tried both casting the result of type() to PyTypeObject (cannot cast temporary object to non-numeric type), and casting to arrayobject (not a type identifier). But maybe I'm missing something obvious.
Thanks, that's better. I'll give it another couple of days to collect other opinions and then merge it.
Any news on this? Let me know if there's anything I can do to help inclusion of this patch.
| 2013-03-28T12:55:41Z | [] | [] |
cython/cython | 222 | cython__cython-222 | [
"775"
] | be0303ad9af4a4d8ad078da1f3bef5280e92423f | diff --git a/Cython/Compiler/ModuleNode.py b/Cython/Compiler/ModuleNode.py
--- a/Cython/Compiler/ModuleNode.py
+++ b/Cython/Compiler/ModuleNode.py
@@ -82,6 +82,7 @@ def extend_if_not_in(L1, L2):
# Ensure that we don't generate import code for these entries!
for entry in scope.c_class_entries:
entry.type.module_name = self.full_module_name
+ entry.type.scope.directives["internal"] = True
self.scope.merge_in(scope)
diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py
--- a/Cython/Compiler/Nodes.py
+++ b/Cython/Compiler/Nodes.py
@@ -7952,10 +7952,14 @@ class CnameDecoratorNode(StatNode):
def analyse_declarations(self, env):
self.node.analyse_declarations(env)
- self.is_function = isinstance(self.node, FuncDefNode)
- is_struct_or_enum = isinstance(self.node, (CStructOrUnionDefNode,
+ node = self.node
+ if isinstance(node, CompilerDirectivesNode):
+ node = node.body.stats[0]
+
+ self.is_function = isinstance(node, FuncDefNode)
+ is_struct_or_enum = isinstance(node, (CStructOrUnionDefNode,
CEnumDefNode))
- e = self.node.entry
+ e = node.entry
if self.is_function:
e.cname = self.cname
@@ -7966,7 +7970,7 @@ def analyse_declarations(self, env):
elif is_struct_or_enum:
e.cname = e.type.cname = self.cname
else:
- scope = self.node.scope
+ scope = node.scope
e.cname = self.cname
e.type.objstruct_cname = self.cname + '_obj'
| diff --git a/tests/run/memoryview_namespace_T775.pyx b/tests/run/memoryview_namespace_T775.pyx
new file mode 100644
--- /dev/null
+++ b/tests/run/memoryview_namespace_T775.pyx
@@ -0,0 +1,13 @@
+
+cdef int data[10]
+cdef int[:] myslice = data
+
+def test_memoryview_namespace():
+ """
+ >>> test_memoryview_namespace()
+ """
+ namespace = dir(__import__(__name__))
+ assert 'array' not in namespace, namespace
+ assert 'memoryview' not in namespace, namespace
+ assert '_memoryviewslice' not in namespace, namespace
+ assert 'Enum' not in namespace, namespace
| Code generated for __setslice__ and __delslice__ fails to build in Py3
In Py 3.0.1, the `PySequenceMethods.sq_ass_slice` slot was typed as `void*` to make users aware that the slot was actually dropped. This makes the C compiler fail to build the generated source.
It might actually be best to just emit a clear and visible warning when users use these slots, and to point them to the `__*item__` slots instead. It's not really Cython's fault that they were not removed from Py3.0.
Migrated from http://trac.cython.org/ticket/215
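
The migration path the warning could point users to fits in a few lines; a minimal pure-Python sketch of handling slices through the `__*item__` slots:

```python
class Seq:
    def __init__(self, data):
        self._data = list(data)

    def __getitem__(self, index):
        # Py3 has no __getslice__; s[a:b] arrives here as a slice object.
        if isinstance(index, slice):
            return Seq(self._data[index])
        return self._data[index]

    def __setitem__(self, index, value):   # covers the old __setslice__
        self._data[index] = value

    def __delitem__(self, index):          # covers the old __delslice__
        del self._data[index]
```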
| 2013-05-04T17:52:43Z | [] | [] |
|
cython/cython | 469 | cython__cython-469 | [
"279"
] | 0c62e665c02b438be331e445bbde2f1c6bd9dff0 | diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py
--- a/Cython/Compiler/ParseTreeTransforms.py
+++ b/Cython/Compiler/ParseTreeTransforms.py
@@ -2185,6 +2185,8 @@ def visit_CClassDefNode(self, node, pxd_def=None):
if pxd_def is None:
pxd_def = self.scope.lookup(node.class_name)
if pxd_def:
+ if not pxd_def.defined_in_pxd:
+ return node
outer_scope = self.scope
self.scope = pxd_def.type.scope
self.visitchildren(node)
| diff --git a/tests/errors/pure_cclass_without_body.pxd b/tests/errors/pure_cclass_without_body.pxd
new file mode 100644
--- /dev/null
+++ b/tests/errors/pure_cclass_without_body.pxd
@@ -0,0 +1,3 @@
+# mode: error
+
+cdef class Test
diff --git a/tests/errors/pure_cclass_without_body.py b/tests/errors/pure_cclass_without_body.py
new file mode 100644
--- /dev/null
+++ b/tests/errors/pure_cclass_without_body.py
@@ -0,0 +1,8 @@
+# mode: error
+
+class Test(object):
+ pass
+
+_ERRORS = u"""
+3:5: C class 'Test' is declared but not defined
+"""
| cython-mode.el: don't highlight builtins when they're part of varname
Current version of cython-mode for Emacs caused weird highlighting when a builtin or a keyword is a part of a variable/function name (e.g. is_**float**). To avoid that one needs to check for symbol boundaries, not word boundaries when matching keywords.
| Thanks. Since I can't really evaluate this, could you please ask on the cython-users mailing list if other Emacs users can test it? I'm a bit surprised that no-one complained about this yet.
Since no-one seemed to care on the mailing list, I'll just merge this and see if users start to complain afterwards. :)
| 2015-12-01T02:27:08Z | [] | [] |
cython/cython | 1,663 | cython__cython-1663 | [
"1662"
] | 52e58debdd9170735129574bfca50d3e27144c1e | diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py
--- a/Cython/Compiler/ExprNodes.py
+++ b/Cython/Compiler/ExprNodes.py
@@ -4583,7 +4583,10 @@ def analyse_types(self, env, getting=True):
self.is_temp = 1
return self
- nogil_check = Node.gil_error
+ def nogil_check(self, env):
+ if not self.base.type.is_array:
+ self.gil_error()
+
gil_message = "Slicing Python object"
get_slice_utility_code = TempitaUtilityCode.load(
| diff --git a/tests/compile/nogil_array.pyx b/tests/compile/nogil_array.pyx
new file mode 100644
--- /dev/null
+++ b/tests/compile/nogil_array.pyx
@@ -0,0 +1,8 @@
+# mode: compile
+
+cdef void foo() nogil:
+ cdef double[16] x
+ cdef double[16] y
+ # both of these operations should be allowed in a nogil function
+ y[:] = x[:]
+ y = x
| Cannot copy C arrays in nogil functions
This code:
```
cdef void foo() nogil:
cdef double[16] x
cdef double[16] y
y[:] = x[:]
y = x
```
throws the compile-time error "Slicing Python object not allowed without gil" for both assignments. However, the generated code in both cases is a simple memcpy and doesn't require the GIL.
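
A hedged sketch of the same copy spelled explicitly, which already compiles under `nogil` and shows why the slice form should be allowed too:

```cython
from libc.string cimport memcpy

cdef void copy16(double* dst, const double* src) nogil:
    # semantically what y[:] = x[:] generates for fixed-size C arrays
    memcpy(dst, src, 16 * sizeof(double))
```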
| 2017-04-06T19:03:12Z | [] | [] |
|
cython/cython | 1,669 | cython__cython-1669 | [
"1668"
] | 26c6ecb1dcafaee77c05632f1dc8561da7f1c1f9 | diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py
--- a/Cython/Compiler/ExprNodes.py
+++ b/Cython/Compiler/ExprNodes.py
@@ -9287,7 +9287,13 @@ def generate_yield_code(self, code):
code.putln('%s->%s = %s;' % (Naming.cur_scope_cname, save_cname, cname))
code.put_xgiveref(Naming.retval_cname)
+ profile = code.globalstate.directives['profile']
+ linetrace = code.globalstate.directives['linetrace']
+ if profile or linetrace:
+ code.put_trace_return(Naming.retval_cname,
+ nogil=not code.funcstate.gil_owned)
code.put_finish_refcount_context()
+
code.putln("/* return from generator, yielding value */")
code.putln("%s->resume_label = %d;" % (
Naming.generator_cname, label_num))
diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py
--- a/Cython/Compiler/Nodes.py
+++ b/Cython/Compiler/Nodes.py
@@ -1805,9 +1805,11 @@ def generate_function_definitions(self, env, code):
code.declare_gilstate()
if profile or linetrace:
- tempvardecl_code.put_trace_declarations()
- code_object = self.code_object.calculate_result_code(code) if self.code_object else None
- code.put_trace_frame_init(code_object)
+ if not self.is_generator:
+ # generators are traced when iterated, not at creation
+ tempvardecl_code.put_trace_declarations()
+ code_object = self.code_object.calculate_result_code(code) if self.code_object else None
+ code.put_trace_frame_init(code_object)
# ----- set up refnanny
if use_refnanny:
@@ -1862,12 +1864,14 @@ def generate_function_definitions(self, env, code):
if profile or linetrace:
# this looks a bit late, but if we don't get here due to a
# fatal error before hand, it's not really worth tracing
- if self.is_wrapper:
- trace_name = self.entry.name + " (wrapper)"
- else:
- trace_name = self.entry.name
- code.put_trace_call(
- trace_name, self.pos, nogil=not code.funcstate.gil_owned)
+ if not self.is_generator:
+ # generators are traced when iterated, not at creation
+ if self.is_wrapper:
+ trace_name = self.entry.name + " (wrapper)"
+ else:
+ trace_name = self.entry.name
+ code.put_trace_call(
+ trace_name, self.pos, nogil=not code.funcstate.gil_owned)
code.funcstate.can_trace = True
# ----- Fetch arguments
self.generate_argument_parsing_code(env, code)
@@ -2064,12 +2068,14 @@ def generate_function_definitions(self, env, code):
if profile or linetrace:
code.funcstate.can_trace = False
- if self.return_type.is_pyobject:
- code.put_trace_return(
- Naming.retval_cname, nogil=not code.funcstate.gil_owned)
- else:
- code.put_trace_return(
- "Py_None", nogil=not code.funcstate.gil_owned)
+ if not self.is_generator:
+ # generators are traced when iterated, not at creation
+ if self.return_type.is_pyobject:
+ code.put_trace_return(
+ Naming.retval_cname, nogil=not code.funcstate.gil_owned)
+ else:
+ code.put_trace_return(
+ "Py_None", nogil=not code.funcstate.gil_owned)
if not lenv.nogil:
# GIL holding function
@@ -4041,6 +4047,10 @@ def generate_function_definitions(self, env, code):
tempvardecl_code = code.insertion_point()
code.put_declare_refcount_context()
code.put_setup_refcount_context(self.entry.name)
+ profile = code.globalstate.directives['profile']
+ linetrace = code.globalstate.directives['linetrace']
+ if profile or linetrace:
+ code.put_trace_declarations()
# ----- Resume switch point.
code.funcstate.init_closure_temps(lenv.scope_class.type.scope)
@@ -4112,6 +4122,9 @@ def generate_function_definitions(self, env, code):
code.putln('%s->resume_label = -1;' % Naming.generator_cname)
# clean up as early as possible to help breaking any reference cycles
code.putln('__Pyx_Coroutine_clear((PyObject*)%s);' % Naming.generator_cname)
+ if profile or linetrace:
+ code.put_trace_return(Naming.retval_cname,
+ nogil=not code.funcstate.gil_owned)
code.put_finish_refcount_context()
code.putln("return %s;" % Naming.retval_cname)
code.putln("}")
@@ -4119,13 +4132,20 @@ def generate_function_definitions(self, env, code):
# ----- Go back and insert temp variable declarations
tempvardecl_code.put_temp_declarations(code.funcstate)
# ----- Generator resume code
+ if profile or linetrace:
+ resume_code.put_trace_call(self.entry.qualified_name, self.pos,
+ nogil=not code.funcstate.gil_owned)
resume_code.putln("switch (%s->resume_label) {" % (
Naming.generator_cname))
+
resume_code.putln("case 0: goto %s;" % first_run_label)
for i, label in code.yield_labels:
resume_code.putln("case %d: goto %s;" % (i, label))
resume_code.putln("default: /* CPython raises the right error here */")
+ if profile or linetrace:
+ resume_code.put_trace_return("Py_None",
+ nogil=not code.funcstate.gil_owned)
resume_code.put_finish_refcount_context()
resume_code.putln("return NULL;")
resume_code.putln("}")
| diff --git a/tests/run/pstats_profile_test.pyx b/tests/run/pstats_profile_test.pyx
--- a/tests/run/pstats_profile_test.pyx
+++ b/tests/run/pstats_profile_test.pyx
@@ -65,6 +65,58 @@ __doc__ = u"""
'f_raise',
'm_cdef', 'm_cpdef', 'm_cpdef (wrapper)', 'm_def',
'withgil_prof']
+
+ >>> profile.runctx("test_generators()", locals(), globals(), statsfile)
+ >>> s = pstats.Stats(statsfile)
+ >>> short_stats = dict([(k[2], v[1]) for k,v in s.stats.items()])
+ >>> short_stats['generator']
+ 3
+
+ >>> short_stats['generator_exception']
+ 2
+
+ >>> short_stats['genexpr']
+ 11
+
+ >>> sorted(callees(s, 'test_generators'))
+ ['call_generator', 'call_generator_exception', 'generator_expr']
+
+ >>> list(callees(s, 'call_generator'))
+ ['generator']
+
+ >>> list(callees(s, 'generator'))
+ []
+
+ >>> list(callees(s, 'generator_exception'))
+ []
+
+ >>> list(callees(s, 'generator_expr'))
+ ['genexpr']
+
+ >>> list(callees(s, 'genexpr'))
+ []
+
+ >>> def python_generator():
+ ... yield 1
+ ... yield 2
+ >>> def call_python_generator():
+ ... list(python_generator())
+
+ >>> profile.runctx("call_python_generator()", locals(), globals(), statsfile)
+ >>> python_stats = pstats.Stats(statsfile)
+ >>> python_stats_dict = dict([(k[2], v[1]) for k,v in python_stats.stats.items()])
+
+ >>> profile.runctx("call_generator()", locals(), globals(), statsfile)
+ >>> cython_stats = pstats.Stats(statsfile)
+ >>> cython_stats_dict = dict([(k[2], v[1]) for k,v in cython_stats.stats.items()])
+
+ >>> python_stats_dict['python_generator'] == cython_stats_dict['generator']
+ True
+
+ >>> try:
+ ... os.unlink(statsfile)
+ ... except:
+ ... pass
"""
cimport cython
@@ -147,3 +199,29 @@ cdef class A(object):
return a
cdef m_cdef(self, long a):
return a
+
+def test_generators():
+ call_generator()
+ call_generator_exception()
+ generator_expr()
+
+def call_generator():
+ list(generator())
+
+def generator():
+ yield 1
+ yield 2
+
+def call_generator_exception():
+ try:
+ list(generator_exception())
+ except ValueError:
+ pass
+
+def generator_exception():
+ yield 1
+ raise ValueError(2)
+
+def generator_expr():
+ e = (x for x in range(10))
+ return sum(e)
| Profiling doesn't work for generators or generator expressions
For generators, the profiler 'call' hook is only called once on creation and the 'return' hook is never called. This creates an invalid call graph in pstats (as the profiler treats the generator as never exited) and skews the reported results.
In CPython, generators call the 'call' and 'return' hooks for each iteration, and once again for the final StopIteration.
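
The per-iteration hook behaviour is easy to observe from pure Python (a small demo, not part of the patch; event sequence as observed on CPython):

```python
import sys

def gen():
    yield 1
    yield 2

events = []
def profiler(frame, event, arg):
    if frame.f_code.co_name == "gen":
        events.append(event)

sys.setprofile(profiler)
for _ in gen():
    pass
sys.setprofile(None)

# Two yields plus the final StopIteration: three call/return pairs.
print(events)  # ['call', 'return', 'call', 'return', 'call', 'return']
```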
| 2017-04-10T23:13:17Z | [] | [] |
|
cython/cython | 1,777 | cython__cython-1777 | [
"1776"
] | a2db27bb85d0daa4cc4b4b7d65c0e6a7cda5ec2a | diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py
--- a/Cython/Compiler/ExprNodes.py
+++ b/Cython/Compiler/ExprNodes.py
@@ -1672,7 +1672,7 @@ def coerce_to(self, dst_type, env):
node = ImagNode(self.pos, value=self.value)
if dst_type.is_pyobject:
node.is_temp = 1
- node.type = PyrexTypes.py_object_type
+ node.type = Builtin.complex_type
# We still need to perform normal coerce_to processing on the
# result, because we might be coercing to an extension type,
# in which case a type test node will be needed.
@@ -11866,22 +11866,22 @@ def find_common_type(self, env, op, operand1, common_type=None):
new_common_type = None
# catch general errors
- if type1 == str_type and (type2.is_string or type2 in (bytes_type, unicode_type)) or \
- type2 == str_type and (type1.is_string or type1 in (bytes_type, unicode_type)):
+ if (type1 == str_type and (type2.is_string or type2 in (bytes_type, unicode_type)) or
+ type2 == str_type and (type1.is_string or type1 in (bytes_type, unicode_type))):
error(self.pos, "Comparisons between bytes/unicode and str are not portable to Python 3")
new_common_type = error_type
# try to use numeric comparisons where possible
elif type1.is_complex or type2.is_complex:
- if op not in ('==', '!=') \
- and (type1.is_complex or type1.is_numeric) \
- and (type2.is_complex or type2.is_numeric):
+ if (op not in ('==', '!=')
+ and (type1.is_complex or type1.is_numeric)
+ and (type2.is_complex or type2.is_numeric)):
error(self.pos, "complex types are unordered")
new_common_type = error_type
elif type1.is_pyobject:
- new_common_type = type1
+ new_common_type = Builtin.complex_type if type1.subtype_of(Builtin.complex_type) else py_object_type
elif type2.is_pyobject:
- new_common_type = type2
+ new_common_type = Builtin.complex_type if type2.subtype_of(Builtin.complex_type) else py_object_type
else:
new_common_type = PyrexTypes.widest_numeric_type(type1, type2)
elif type1.is_numeric and type2.is_numeric:
| diff --git a/tests/run/complex_numbers_T305.pyx b/tests/run/complex_numbers_T305.pyx
--- a/tests/run/complex_numbers_T305.pyx
+++ b/tests/run/complex_numbers_T305.pyx
@@ -1,7 +1,33 @@
# ticket: 305
+from cpython.object cimport Py_EQ, Py_NE
+
cimport cython
+
+cdef class Complex3j:
+ """
+ >>> Complex3j() == 3j
+ True
+ >>> Complex3j() == Complex3j()
+ True
+ >>> Complex3j() != 3j
+ False
+ >>> Complex3j() != 3
+ True
+ >>> Complex3j() != Complex3j()
+ False
+ """
+ def __richcmp__(a, b, int op):
+ if op == Py_EQ or op == Py_NE:
+ if isinstance(a, Complex3j):
+ eq = isinstance(b, Complex3j) or b == 3j
+ else:
+ eq = isinstance(b, Complex3j) and a == 3j
+ return eq if op == Py_EQ else not eq
+ return NotImplemented
+
+
def test_object_conversion(o):
"""
>>> test_object_conversion(2)
@@ -13,6 +39,7 @@ def test_object_conversion(o):
cdef double complex b = o
return (a, b)
+
def test_arithmetic(double complex z, double complex w):
"""
>>> test_arithmetic(2j, 4j)
@@ -24,6 +51,7 @@ def test_arithmetic(double complex z, double complex w):
"""
return +z, -z+0, z+w, z-w, z*w, z/w
+
def test_div(double complex a, double complex b, expected):
"""
>>> big = 2.0**1023
@@ -34,6 +62,7 @@ def test_div(double complex a, double complex b, expected):
if '_c99_' not in __name__:
assert a / b == expected, (a / b, expected)
+
def test_pow(double complex z, double complex w, tol=None):
"""
Various implementations produce slightly different results...
@@ -55,6 +84,7 @@ def test_pow(double complex z, double complex w, tol=None):
else:
return abs(z**w / <object>z ** <object>w - 1) < tol
+
def test_int_pow(double complex z, int n, tol=None):
"""
>>> [test_int_pow(complex(0, 1), k, 1e-15) for k in range(-4, 5)]
@@ -71,6 +101,7 @@ def test_int_pow(double complex z, int n, tol=None):
else:
return abs(z**n / <object>z ** <object>n - 1) < tol
+
@cython.cdivision(False)
def test_div_by_zero(double complex z):
"""
@@ -83,6 +114,7 @@ def test_div_by_zero(double complex z):
"""
return 1/z
+
def test_coercion(int a, float b, double c, float complex d, double complex e):
"""
>>> test_coercion(1, 1.5, 2.5, 4+1j, 10j)
@@ -101,29 +133,34 @@ def test_coercion(int a, float b, double c, float complex d, double complex e):
z = e; print z
return z + a + b + c + d + e
+
def test_compare(double complex a, double complex b):
"""
>>> test_compare(3, 3)
- (True, False)
+ (True, False, False, False, False, True)
>>> test_compare(3j, 3j)
- (True, False)
+ (True, False, True, True, True, False)
>>> test_compare(3j, 4j)
- (False, True)
+ (False, True, True, False, True, True)
>>> test_compare(3, 4)
- (False, True)
+ (False, True, False, False, False, True)
"""
- return a == b, a != b
+ return a == b, a != b, a == 3j, 3j == b, a == Complex3j(), Complex3j() != b
+
def test_compare_coerce(double complex a, int b):
"""
>>> test_compare_coerce(3, 4)
- (False, True)
+ (False, True, False, False, False, True)
>>> test_compare_coerce(4+1j, 4)
- (False, True)
+ (False, True, False, True, False, True)
>>> test_compare_coerce(4, 4)
- (True, False)
+ (True, False, False, False, False, True)
+ >>> test_compare_coerce(3j, 4)
+ (False, True, True, False, True, False)
"""
- return a == b, a != b
+ return a == b, a != b, a == 3j, 4+1j == a, a == Complex3j(), Complex3j() != a
+
def test_literal():
"""
| bug in comparison between complex and extension types
The following

```
cdef class MyZero(object):
    pass

cdef MyZero z = MyZero()
z == 0.0 + 0.0j
```

gives a TypeError at execution:

```
Traceback (most recent call last):
  File "<string>", line 1, in <module>
  File "test.pyx", line 5, in init test (test.c:1123)
    z == 0.0 + 0.0j
TypeError: Cannot convert complex to test.MyZero
```

Cython is wrongly generating a type check before the comparison:

```
/*
 *     cdef MyZero z = MyZero()
 *     z == 0.0 + 0.0j             # <<<<<<<<<<<<<<
 */
__pyx_t_2 = __Pyx_c_sum_double(__pyx_t_double_complex_from_parts(0.0, 0), __pyx_t_double_complex_from_parts(0, 0.0));
__pyx_t_1 = __pyx_PyComplex_FromComplex(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
if (!(likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_4test_MyZero)))) __PYX_ERR(0, 5, __pyx_L1_error)
__pyx_t_3 = PyObject_RichCompare(((PyObject *)__pyx_v_4test_z), __pyx_t_1, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 5, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
```
See also [this thread on cython-users](https://groups.google.com/forum/#!topic/cython-users/XQX1UpFup8c).
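For comparison, plain Python never inserts such a coercion; the comparison falls back through the rich-comparison protocol (pure-Python sketch of the expected semantics):
```python
class MyZero(object):
    pass

z = MyZero()
# Python tries MyZero's __eq__ and then the reflected complex.__eq__;
# both return NotImplemented, so the comparison falls back to
# identity and simply yields False.
print(z == 0.0 + 0.0j)   # False, not a TypeError
```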
| 2017-07-17T16:05:13Z | [] | [] |
|
cython/cython | 1,789 | cython__cython-1789 | [
"1786"
] | 23b6e66f71b2b154168329f1ce0566e415ad6c19 | diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py
--- a/Cython/Compiler/ParseTreeTransforms.py
+++ b/Cython/Compiler/ParseTreeTransforms.py
@@ -1673,22 +1673,22 @@ def %(unpickle_func_name)s(__pyx_type, long __pyx_checksum, __pyx_state):
if __pyx_checksum != %(checksum)s:
from pickle import PickleError
raise PickleError("Incompatible checksums (%%s vs %(checksum)s = (%(members)s))" %% __pyx_checksum)
- result = %(class_name)s.__new__(__pyx_type)
+ __pyx_result = %(class_name)s.__new__(__pyx_type)
if __pyx_state is not None:
- %(unpickle_func_name)s__set_state(<%(class_name)s> result, __pyx_state)
- return result
+ %(unpickle_func_name)s__set_state(<%(class_name)s> __pyx_result, __pyx_state)
+ return __pyx_result
- cdef %(unpickle_func_name)s__set_state(%(class_name)s result, tuple __pyx_state):
+ cdef %(unpickle_func_name)s__set_state(%(class_name)s __pyx_result, tuple __pyx_state):
%(assignments)s
- if hasattr(result, '__dict__'):
- result.__dict__.update(__pyx_state[%(num_members)s])
+ if hasattr(__pyx_result, '__dict__'):
+ __pyx_result.__dict__.update(__pyx_state[%(num_members)s])
""" % {
'unpickle_func_name': unpickle_func_name,
'checksum': checksum,
'members': ', '.join(all_members_names),
'class_name': node.class_name,
'assignments': '; '.join(
- 'result.%s = __pyx_state[%s]' % (v, ix)
+ '__pyx_result.%s = __pyx_state[%s]' % (v, ix)
for ix, v in enumerate(all_members_names)),
'num_members': len(all_members_names),
}, level='module', pipeline=[NormalizeTree(None)]).substitute({})
| diff --git a/tests/run/reduce_pickle.pyx b/tests/run/reduce_pickle.pyx
--- a/tests/run/reduce_pickle.pyx
+++ b/tests/run/reduce_pickle.pyx
@@ -114,6 +114,19 @@ cdef class DefaultReduceSubclass(DefaultReduce):
return "DefaultReduceSubclass(i=%s, s=%r, x=%s)" % (self.i, self.s, self.x)
+cdef class result(DefaultReduceSubclass):
+ """
+ >>> a = result(i=11, s='abc', x=1.5); a
+ result(i=11, s='abc', x=1.5)
+ >>> import pickle
+ >>> pickle.loads(pickle.dumps(a))
+ result(i=11, s='abc', x=1.5)
+ """
+
+ def __repr__(self):
+ return "result(i=%s, s=%r, x=%s)" % (self.i, self.s, self.x)
+
+
class DefaultReducePySubclass(DefaultReduce):
"""
>>> a = DefaultReducePySubclass(i=11, s='abc', x=1.5); a
| "local variable 'result' referenced before assignment" compiler error with auto_pickle in 0.26
gevent [fails to compile](https://travis-ci.org/gevent/gevent/jobs/255646182#L538) with cython 0.26 (observed on OS X, Windows and Linux):
```
cython -o gevent.ares.c src/gevent/ares.pyx
Error compiling Cython file:
------------------------------------------------------------
...
def __pyx_unpickle_result(__pyx_type, long __pyx_checksum, __pyx_state):
if __pyx_checksum != 0xc9b5cdc:
from pickle import PickleError
raise PickleError("Incompatible checksums (%s vs 0xc9b5cdc = (exception, value))" % __pyx_checksum)
result = result.__new__(__pyx_type)
^
------------------------------------------------------------
(tree fragment):5:19: local variable 'result' referenced before assignment
Error compiling Cython file:
------------------------------------------------------------
...
if __pyx_checksum != 0xc9b5cdc:
from pickle import PickleError
raise PickleError("Incompatible checksums (%s vs 0xc9b5cdc = (exception, value))" % __pyx_checksum)
result = result.__new__(__pyx_type)
if __pyx_state is not None:
__pyx_unpickle_result__set_state(<result> result, __pyx_state)
```
Where `result` is a cdef class. It seems like the name `result` itself is special. This produces it:
```
$ cat /tmp/ares.pyx
cdef class result:
pass
$ cython -o /tmp/a.c /tmp/ares.pyx
Error compiling Cython file:
------------------------------------------------------------
...
def __pyx_unpickle_result(__pyx_type, long __pyx_checksum, __pyx_state):
if __pyx_checksum != 0xd41d8cd:
from pickle import PickleError
raise PickleError("Incompatible checksums (%s vs 0xd41d8cd = ())" % __pyx_checksum)
result = result.__new__(__pyx_type)
^
------------------------------------------------------------
(tree fragment):5:19: local variable 'result' referenced before assignment
Error compiling Cython file:
------------------------------------------------------------
...
if __pyx_checksum != 0xd41d8cd:
from pickle import PickleError
raise PickleError("Incompatible checksums (%s vs 0xd41d8cd = ())" % __pyx_checksum)
result = result.__new__(__pyx_type)
if __pyx_state is not None:
__pyx_unpickle_result__set_state(<result> result, __pyx_state)
^
------------------------------------------------------------
(tree fragment):7:42: 'result' is not a type identifier
```
If I set `auto_pickle` to False in the directives, the class compiles fine. Likewise, if I use a different name for the class ("result2"), it compiles fine:
```
$ cat /tmp/ares2.pyx
cdef class result2:
pass
$ cython -o /tmp/a.c /tmp/ares2.pyx
$
```
| This looks like normal Python semantics. If you assign to a variable, it becomes a local variable. The fix is to rename the local variable so that it does not shadow the global type name any more. Don't know what changed in Cython 0.26 that prevented it from showing up before, but the bug is definitely not in Cython.
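The rule can be reproduced in plain Python, without Cython (illustrative sketch):
```python
class result(object):
    pass

def unpickle():
    # Assigning to `result` makes the name local to this function,
    # so the right-hand lookup no longer sees the class above.
    result = result.__new__(result)

try:
    unpickle()
except UnboundLocalError as exc:
    print(exc)  # local variable 'result' referenced before assignment
```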
The code in question is *not* part of the application. It's (incorrectly) auto-generated by Cython. There's a clash in the names generated by Cython.
Cython 0.26 will fail to compile this trivial file:
```python
cdef class result:
pass
```
Adding `#cython: auto-pickle=False` to the top of the file resolves the issue (because the incorrect code is no longer generated). Likewise, changing the name of the trivial class to `result2` also works (because the name no longer clashes).
Having a class named `result` shouldn't break Cython, IMHO. | 2017-07-20T21:44:35Z | [] | [] |
cython/cython | 1,799 | cython__cython-1799 | [
"1796",
"1796"
] | 0b0a2e572765f86c82edef0781ccda5404b9f493 | diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py
--- a/Cython/Compiler/Parsing.py
+++ b/Cython/Compiler/Parsing.py
@@ -2160,9 +2160,10 @@ def p_DEF_statement(s):
name = p_ident(s)
s.expect('=')
expr = p_compile_time_expr(s)
- value = expr.compile_time_value(denv)
- #print "p_DEF_statement: %s = %r" % (name, value) ###
- denv.declare(name, value)
+ if s.compile_time_eval:
+ value = expr.compile_time_value(denv)
+ #print "p_DEF_statement: %s = %r" % (name, value) ###
+ denv.declare(name, value)
s.expect_newline("Expected a newline", ignore_semicolon=True)
return Nodes.PassStatNode(pos)
| diff --git a/tests/run/ct_IF.pyx b/tests/run/ct_IF.pyx
--- a/tests/run/ct_IF.pyx
+++ b/tests/run/ct_IF.pyx
@@ -42,3 +42,33 @@ def h():
ELSE:
i = 3
return i
+
+
+def control_flow_DEF1():
+ """
+ >>> control_flow_DEF1()
+ B should be 2.
+ 2
+ """
+ IF YES:
+ DEF B=2
+ print('B should be 2.')
+ ELSE:
+ DEF B=3
+ print('B should be 3.')
+ return B
+
+
+def control_flow_DEF2():
+ """
+ >>> control_flow_DEF2()
+ B should be 3.
+ 3
+ """
+ IF NO:
+ DEF B=2
+ print('B should be 2.')
+ ELSE:
+ DEF B=3
+ print('B should be 3.')
+ return B
| Compile time DEF expression inside IF/ELIF/ELSE statement
I am trying to use the preprocessor's `DEF` inside `IF`. I have asked this question [here](https://stackoverflow.com/questions/45156510/preprocessor-def-can-not-be-conditioned-inside-if-elif-and-else-statement-in-cy) as well. The equivalent code in C of what I am trying to write is:
```
#define A 1
#if A == 1
#define B 2
#else
#define B 3
#endif
```
In the above, `B` should be `2`, whereas it gives `3` if I write it in Cython. Here is a sample:
```
# in Definitions.pxi
DEF A=1
IF A==1:
DEF B=2
print('B should be 2.')
ELSE:
DEF B=3
print('B should be 3.')
```
and
```
# In test.pyx
include "Definitions.pxi"
print('B is: %d'%B)
```
and
```
# In setup.py
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup(name = "test",ext_modules = cythonize(Extension("*",["test.pyx"])))
```
Now, testing it:
```
>>> ipython
In [1]: %reload_ext Cython
In [2]: import test
B should be 2
B is: 3
```
It seems that the very last `DEF` statement is being used and all `IF` as well as `ELIF` and `ELSE` are ignored. I am wondering if this is a bug or I am not using it correctly.
Thanks.
Compile time DEF expression inside IF/ELIF/ELSE statement
I am trying to use processor's `DEF` inside `IF`. I have asked this question [here](https://stackoverflow.com/questions/45156510/preprocessor-def-can-not-be-conditioned-inside-if-elif-and-else-statement-in-cy) as well. The equivalent code in C of what I am trying to write is:
```
#define A 1
#if A == 1
#define B 2
#else
#define B 3
#endif
```
In above `B` should be `2`, whereas it gives `3` if I write it in Cython. Here is a sample:
```
# in Definitions.pxi
DEF A=1
IF A==1:
DEF B=2
print('B should be 2.')
ELSE:
DEF B=3
print('B should be 3.')
```
and
```
# In test.pyx
include "Definitions.pxi"
print('B is: %d'%B)
```
and
```
# In setup.py
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
setup(name = "test",ext_modules = cythonize(Extension("*",["test.pyx"])))
```
Now, testing it:
```
>>> ipython
In [1]: %reload_ext Cython
In [2]: import test
B should be 2
B is: 3
```
It seems that the very last `DEF` statement is being used and all `IF` as well as `ELIF` and `ELSE` are ignored. I am wondering if this is a bug or I am not using it correctly.
Thanks.
| Interesting case. Semantically, the code you wrote does not mean what you intended. Instead, ``IF`` means: if the condition is true, then include its *code block* in the compilation. By design, ``DEF`` does not belong into the code block, because it is *evaluated* at compile time, not *included* at compile time. ``DEF`` is a compile time thing and not controlled by conditions itself. Meaning, both ``DEF`` assignments are being evaluated, in code order, and the last one survives and goes into the execution of the ``print()``.
Now, the fix could go either way: either implement a control flow for ``DEF`` inside of conditionals, or disallow indented ``DEF``s. I'm slightly leaning towards the latter since it's not really clear to me that the first is a feature worth having. These compile time expressions have a tendency to be generally overused...
Thanks for your response. I see the difference now. However, it seems the documentation [here](http://cython.readthedocs.io/en/latest/src/userguide/language_basics.html#conditional-statements) does not mention this detail. I am quoting:
> An IF statement can appear anywhere that a normal statement or declaration can appear, and it can contain any statements or declarations that would be valid in that context, including DEF statements and other IF statements.
I am actually using this for early binding: the user can choose at compile time which classes are to be compiled.
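Under the pre-fix semantics described above, one possible workaround is to fold the condition into a single compile-time expression instead of wrapping `DEF` in `IF`/`ELSE` (untested sketch):
```cython
DEF A = 1
# both sides are evaluated as one compile-time expression,
# so only the selected value is bound to B:
DEF B = 2 if A == 1 else 3
```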
 | 2017-07-26T10:37:28Z | [] | [] |
cython/cython | 1,810 | cython__cython-1810 | [
"1733"
] | f68f694abe027941f2b9a5822d43765da1eba31d | diff --git a/Cython/Compiler/Optimize.py b/Cython/Compiler/Optimize.py
--- a/Cython/Compiler/Optimize.py
+++ b/Cython/Compiler/Optimize.py
@@ -2429,6 +2429,14 @@ def _handle_simple_function_len(self, node, function, pos_args):
node.pos, "__Pyx_Py_UNICODE_strlen", self.Pyx_Py_UNICODE_strlen_func_type,
args = [arg],
is_temp = node.is_temp)
+ elif arg.type.is_memoryviewslice:
+ func_type = PyrexTypes.CFuncType(
+ PyrexTypes.c_size_t_type, [
+ PyrexTypes.CFuncTypeArg("memoryviewslice", arg.type, None)
+ ], nogil=True)
+ new_node = ExprNodes.PythonCapiCallNode(
+ node.pos, "__Pyx_MemoryView_Len", func_type,
+ args=[arg], is_temp=node.is_temp)
elif arg.type.is_pyobject:
cfunc_name = self._map_to_capi_len_function(arg.type)
if cfunc_name is None:
@@ -2442,8 +2450,7 @@ def _handle_simple_function_len(self, node, function, pos_args):
"object of type 'NoneType' has no len()")
new_node = ExprNodes.PythonCapiCallNode(
node.pos, cfunc_name, self.PyObject_Size_func_type,
- args = [arg],
- is_temp = node.is_temp)
+ args=[arg], is_temp=node.is_temp)
elif arg.type.is_unicode_char:
return ExprNodes.IntNode(node.pos, value='1', constant_result=1,
type=node.type)
| diff --git a/tests/memoryview/memoryviewattrs.pyx b/tests/memoryview/memoryviewattrs.pyx
--- a/tests/memoryview/memoryviewattrs.pyx
+++ b/tests/memoryview/memoryviewattrs.pyx
@@ -279,6 +279,13 @@ def two_dee():
assert len(arr) == 2
+ try:
+ _ = len(mv1)
+ except UnboundLocalError:
+ pass
+ else:
+ assert False, "UnboundLocalError not raised for uninitialised memory view"
+
cdef long *arr_data
arr_data = <long*>arr.data
diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx
--- a/tests/memoryview/memslice.pyx
+++ b/tests/memoryview/memslice.pyx
@@ -1677,7 +1677,7 @@ cdef int cdef_nogil(int[:, :] a) nogil except 0:
for j in range(b.shape[1]):
b[i, j] = -b[i, j]
- return 1
+ return len(a)
@testcase
def test_nogil():
@@ -1690,10 +1690,15 @@ def test_nogil():
released A
"""
_a = IntMockBuffer("A", range(4 * 9), shape=(4, 9))
- cdef_nogil(_a)
+ assert cdef_nogil(_a) == 4
cdef int[:, :] a = _a
print a[2, 7]
+ cdef int length
+ with nogil:
+ length = cdef_nogil(a)
+ assert length == 4
+
@testcase
def test_convert_slicenode_to_indexnode():
"""
| Support __len__ for memoryview slices
Currently we are forced to use `arr.shape[0]` rather than `len(arr)` in code such as:
```cython
cdef int f(double[::1] arr) nogil:
return len(arr)
```
Supporting `__len__` would make it more convenient to convert existing Python code and would make the code more compact.
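For reference, a minimal sketch of the `shape`-based spelling that already compiles today and stays GIL-free (the function name is illustrative):
```cython
cdef Py_ssize_t length(double[::1] arr) nogil:
    return arr.shape[0]
```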
| `len()` generally works here, but it's a Python thing, i.e. it requires the GIL. It could probably be 'optimised' into something that does not require the GIL, though.
One more thing to keep in mind: memory view arguments are allowed to be `None`, which means that `len(arr)` might need to raise a `TypeError`. That also requires the GIL. The transformation into `arr.shape[0]` could at least be done if the `arr` argument is declared as `not None`, but that further restricts the applicability. | 2017-08-08T13:15:43Z | [] | [] |
cython/cython | 1,832 | cython__cython-1832 | [
"1731"
] | 570f187bfbd24842df32b719d77567968d131679 | diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py
--- a/Cython/Compiler/Code.py
+++ b/Cython/Compiler/Code.py
@@ -6,10 +6,11 @@
from __future__ import absolute_import
import cython
-cython.declare(os=object, re=object, operator=object,
- Naming=object, Options=object, StringEncoding=object,
+cython.declare(os=object, re=object, operator=object, textwrap=object,
+ Template=object, Naming=object, Options=object, StringEncoding=object,
Utils=object, SourceDescriptor=object, StringIOTree=object,
- DebugFlags=object, basestring=object)
+ DebugFlags=object, basestring=object, defaultdict=object,
+ closing=object, partial=object)
import os
import re
@@ -602,6 +603,7 @@ def __init__(self, owner, names_taken=set(), scope=None):
self.in_try_finally = 0
self.exc_vars = None
+ self.current_except = None
self.can_trace = False
self.gil_owned = True
diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py
--- a/Cython/Compiler/ExprNodes.py
+++ b/Cython/Compiler/ExprNodes.py
@@ -9469,6 +9469,13 @@ def generate_yield_code(self, code):
nogil=not code.funcstate.gil_owned)
code.put_finish_refcount_context()
+ if code.funcstate.current_except is not None:
+ # inside of an except block => save away currently handled exception
+ code.putln("__Pyx_Coroutine_SwapException(%s);" % Naming.generator_cname)
+ else:
+ # no exceptions being handled => restore exception state of caller
+ code.putln("__Pyx_Coroutine_ResetAndClearException(%s);" % Naming.generator_cname)
+
code.putln("/* return from %sgenerator, %sing value */" % (
'async ' if self.in_async_gen else '',
'await' if self.is_await else 'yield'))
@@ -9532,7 +9539,7 @@ def fetch_iteration_result(self, code):
code.put_gotref(self.result())
def handle_iteration_exception(self, code):
- code.putln("PyObject* exc_type = PyErr_Occurred();")
+ code.putln("PyObject* exc_type = __Pyx_PyErr_Occurred();")
code.putln("if (exc_type) {")
code.putln("if (likely(exc_type == PyExc_StopIteration || (exc_type != PyExc_GeneratorExit &&"
" __Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))) PyErr_Clear();")
@@ -9582,7 +9589,7 @@ class AwaitIterNextExprNode(AwaitExprNode):
def _generate_break(self, code):
code.globalstate.use_utility_code(UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"))
- code.putln("PyObject* exc_type = PyErr_Occurred();")
+ code.putln("PyObject* exc_type = __Pyx_PyErr_Occurred();")
code.putln("if (unlikely(exc_type && (exc_type == __Pyx_PyExc_StopAsyncIteration || ("
" exc_type != PyExc_StopIteration && exc_type != PyExc_GeneratorExit &&"
" __Pyx_PyErr_GivenExceptionMatches(exc_type, __Pyx_PyExc_StopAsyncIteration))))) {")
diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py
--- a/Cython/Compiler/Nodes.py
+++ b/Cython/Compiler/Nodes.py
@@ -4054,9 +4054,10 @@ def analyse_declarations(self, env):
self.declare_generator_body(env)
def generate_function_header(self, code, proto=False):
- header = "static PyObject *%s(__pyx_CoroutineObject *%s, PyObject *%s)" % (
+ header = "static PyObject *%s(__pyx_CoroutineObject *%s, CYTHON_UNUSED PyThreadState *%s, PyObject *%s)" % (
self.entry.func_cname,
Naming.generator_cname,
+ Naming.local_tstate_cname,
Naming.sent_value_cname)
if proto:
code.putln('%s; /* proto */' % header)
@@ -4157,6 +4158,7 @@ def generate_function_definitions(self, env, code):
code.put_xgiveref(Naming.retval_cname)
else:
code.put_xdecref_clear(Naming.retval_cname, py_object_type)
+ code.putln("__Pyx_Coroutine_ResetAndClearException(%s);" % Naming.generator_cname)
code.putln('%s->resume_label = -1;' % Naming.generator_cname)
# clean up as early as possible to help breaking any reference cycles
code.putln('__Pyx_Coroutine_clear((PyObject*)%s);' % Naming.generator_cname)
@@ -6696,6 +6698,7 @@ class TryExceptStatNode(StatNode):
# else_clause StatNode or None
child_attrs = ["body", "except_clauses", "else_clause"]
+ in_generator = False
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
@@ -6755,8 +6758,9 @@ def generate_execution_code(self, code):
if can_raise:
# inject code before the try block to save away the exception state
code.globalstate.use_utility_code(reset_exception_utility_code)
- save_exc.putln("__Pyx_PyThreadState_declare")
- save_exc.putln("__Pyx_PyThreadState_assign")
+ if not self.in_generator:
+ save_exc.putln("__Pyx_PyThreadState_declare")
+ save_exc.putln("__Pyx_PyThreadState_assign")
save_exc.putln("__Pyx_ExceptionSave(%s);" % (
', '.join(['&%s' % var for var in exc_save_vars])))
for var in exc_save_vars:
@@ -6794,11 +6798,16 @@ def restore_saved_exception():
code.put_xdecref_clear(var, py_object_type)
code.put_goto(try_end_label)
code.put_label(our_error_label)
- code.putln("__Pyx_PyThreadState_assign") # re-assign in case a generator yielded
for temp_name, temp_type in temps_to_clean_up:
code.put_xdecref_clear(temp_name, temp_type)
+
+ outer_except = code.funcstate.current_except
+ # Currently points to self, but the ExceptClauseNode would also be ok. Change if needed.
+ code.funcstate.current_except = self
for except_clause in self.except_clauses:
except_clause.generate_handling_code(code, except_end_label)
+ code.funcstate.current_except = outer_except
+
if not self.has_default_clause:
code.put_goto(except_error_label)
@@ -6813,7 +6822,6 @@ def restore_saved_exception():
code.put_label(exit_label)
code.mark_pos(self.pos, trace=False)
if can_raise:
- code.putln("__Pyx_PyThreadState_assign") # re-assign in case a generator yielded
restore_saved_exception()
code.put_goto(old_label)
@@ -6822,7 +6830,6 @@ def restore_saved_exception():
code.put_goto(try_end_label)
code.put_label(except_end_label)
if can_raise:
- code.putln("__Pyx_PyThreadState_assign") # re-assign in case a generator yielded
restore_saved_exception()
if code.label_used(try_end_label):
code.put_label(try_end_label)
@@ -6939,8 +6946,8 @@ def generate_handling_code(self, code, end_label):
exc_args = "&%s, &%s, &%s" % tuple(exc_vars)
code.putln("if (__Pyx_GetException(%s) < 0) %s" % (
exc_args, code.error_goto(self.pos)))
- for x in exc_vars:
- code.put_gotref(x)
+ for var in exc_vars:
+ code.put_gotref(var)
if self.target:
self.exc_value.set_var(exc_vars[1])
self.exc_value.generate_evaluation_code(code)
@@ -6957,6 +6964,7 @@ def generate_handling_code(self, code, end_label):
code.funcstate.exc_vars = exc_vars
self.body.generate_execution_code(code)
code.funcstate.exc_vars = old_exc_vars
+
if not self.body.is_terminator:
for var in exc_vars:
code.put_decref_clear(var, py_object_type)
@@ -7086,7 +7094,8 @@ def fresh_finally_clause(_next=[self.finally_clause]):
if preserve_error:
code.putln('/*exception exit:*/{')
- code.putln("__Pyx_PyThreadState_declare")
+ if not self.in_generator:
+ code.putln("__Pyx_PyThreadState_declare")
if self.is_try_finally_in_nogil:
code.declare_gilstate()
if needs_success_cleanup:
@@ -7148,7 +7157,6 @@ def fresh_finally_clause(_next=[self.finally_clause]):
if old_label == return_label:
# return actually raises an (uncatchable) exception in generators that we must preserve
if self.in_generator:
- code.putln("__Pyx_PyThreadState_declare")
exc_vars = tuple([
code.funcstate.allocate_temp(py_object_type, manage_ref=False)
for _ in range(6)])
@@ -7229,8 +7237,6 @@ def put_error_uncatcher(self, code, exc_vars, exc_lineno_cnames=None, exc_filena
if self.is_try_finally_in_nogil:
code.put_ensure_gil(declare_gilstate=False)
- if self.in_generator:
- code.putln("__Pyx_PyThreadState_assign") # re-assign in case a generator yielded
# not using preprocessor here to avoid warnings about
# unused utility functions and/or temps
@@ -7257,8 +7263,6 @@ def put_error_cleaner(self, code, exc_vars):
code.globalstate.use_utility_code(reset_exception_utility_code)
if self.is_try_finally_in_nogil:
code.put_ensure_gil(declare_gilstate=False)
- if self.in_generator:
- code.putln("__Pyx_PyThreadState_assign") # re-assign in case a generator yielded
# not using preprocessor here to avoid warnings about
# unused utility functions and/or temps
diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py
--- a/Cython/Compiler/ParseTreeTransforms.py
+++ b/Cython/Compiler/ParseTreeTransforms.py
@@ -2475,6 +2475,7 @@ def __init__(self):
self.yields = []
self.returns = []
self.finallys = []
+ self.excepts = []
self.has_return_value = False
self.has_yield = False
self.has_await = False
@@ -2502,6 +2503,10 @@ def visit_TryFinallyStatNode(self, node):
self.visitchildren(node)
self.finallys.append(node)
+ def visit_TryExceptStatNode(self, node):
+ self.visitchildren(node)
+ self.excepts.append(node)
+
def visit_ClassDefNode(self, node):
pass
@@ -2552,7 +2557,7 @@ def visit_FuncDefNode(self, node):
for i, yield_expr in enumerate(collector.yields, 1):
yield_expr.label_num = i
- for retnode in collector.returns + collector.finallys:
+ for retnode in collector.returns + collector.finallys + collector.excepts:
retnode.in_generator = True
gbody = Nodes.GeneratorBodyDefNode(
@@ -2665,6 +2670,9 @@ def create_class_from_scope(self, node, target_module_scope, inner_node=None):
class_scope = entry.type.scope
class_scope.is_internal = True
class_scope.is_closure_class_scope = True
+ if node.is_async_def or node.is_generator:
+ # Generators need their closure intact during cleanup as they resume to handle GeneratorExit
+ class_scope.directives['no_gc_clear'] = True
if Options.closure_freelist_size:
class_scope.directives['freelist'] = Options.closure_freelist_size
diff --git a/Tools/cevaltrace.py b/Tools/cevaltrace.py
new file mode 100644
--- /dev/null
+++ b/Tools/cevaltrace.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python3
+
+"""
+Translate the byte code of a Python function into the corresponding
+sequences of C code in CPython's "ceval.c".
+"""
+
+from __future__ import print_function, absolute_import
+
+import re
+import os.path
+
+from dis import get_instructions # requires Python 3.4+
+
+# collapse some really boring byte codes
+_COLLAPSE = {'NOP', 'LOAD_CONST', 'POP_TOP', 'JUMP_FORWARD'}
+#_COLLAPSE.clear()
+
+_is_start = re.compile(r"\s* switch \s* \( opcode \)", re.VERBOSE).match
+# Py3: TARGET(XX), Py2: case XX
+_match_target = re.compile(r"\s* (?: TARGET \s* \( | case \s* ) \s* (\w+) \s* [:)]", re.VERBOSE).match
+_ignored = re.compile(r"\s* PREDICTED[A-Z_]*\(", re.VERBOSE).match
+_is_end = re.compile(r"\s* } \s* /\* \s* switch \s* \*/", re.VERBOSE).match
+
+_find_pyversion = re.compile(r'\#define \s+ PY_VERSION \s+ "([^"]+)"', re.VERBOSE).findall
+
+class ParseError(Exception):
+ def __init__(self, message="Failed to parse ceval.c"):
+ super(ParseError, self).__init__(message)
+
+
+def parse_ceval(file_path):
+ snippets = {}
+ with open(file_path) as f:
+ lines = iter(f)
+
+ for line in lines:
+ if _is_start(line):
+ break
+ else:
+ raise ParseError()
+
+ targets = []
+ code_lines = []
+ for line in lines:
+ target_match = _match_target(line)
+ if target_match:
+ if code_lines:
+ code = ''.join(code_lines).rstrip()
+ for target in targets:
+ snippets[target] = code
+ del code_lines[:], targets[:]
+ targets.append(target_match.group(1))
+ elif _ignored(line):
+ pass
+ elif _is_end(line):
+ break
+ else:
+ code_lines.append(line)
+ else:
+ if not snippets:
+ raise ParseError()
+ return snippets
+
+
+def translate(func, ceval_snippets):
+ start_offset = 0
+ code_obj = getattr(func, '__code__', None)
+ if code_obj and os.path.exists(code_obj.co_filename):
+ start_offset = code_obj.co_firstlineno
+ with open(code_obj.co_filename) as f:
+ code_line_at = {
+ i: line.strip()
+ for i, line in enumerate(f, 1)
+ if line.strip()
+ }.get
+ else:
+ code_line_at = lambda _: None
+
+ for instr in get_instructions(func):
+ code_line = code_line_at(instr.starts_line)
+ line_no = (instr.starts_line or start_offset) - start_offset
+ yield line_no, code_line, instr, ceval_snippets.get(instr.opname)
+
+
+def main():
+ import sys
+ import importlib.util
+
+ if len(sys.argv) < 3:
+ print("Usage: %s path/to/Python/ceval.c script.py ..." % sys.argv[0], file=sys.stderr)
+ return
+
+ ceval_source_file = sys.argv[1]
+ version_header = os.path.join(os.path.dirname(ceval_source_file), '..', 'Include', 'patchlevel.h')
+ if os.path.exists(version_header):
+ with open(version_header) as f:
+ py_version = _find_pyversion(f.read())
+ if py_version:
+ py_version = py_version[0]
+ if not sys.version.startswith(py_version + ' '):
+ print("Warning: disassembling with Python %s, but ceval.c has version %s" % (
+ sys.version.split(None, 1)[0],
+ py_version,
+ ), file=sys.stderr)
+
+ snippets = parse_ceval(ceval_source_file)
+
+ for code in _COLLAPSE:
+ if code in snippets:
+ snippets[code] = ''
+
+ for file_path in sys.argv[2:]:
+ module_name = os.path.basename(file_path)
+ print("/*######## MODULE %s ########*/" % module_name)
+ print('')
+
+ spec = importlib.util.spec_from_file_location(module_name, file_path)
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module)
+
+ for func_name, item in sorted(vars(module).items()):
+ if not callable(item):
+ continue
+ print("/* FUNCTION %s */" % func_name)
+ print("static void") # assuming that it highlights in editors
+ print("%s() {" % func_name)
+
+ last_line = None
+ for line_no, code_line, instr, snippet in translate(item, snippets):
+ if last_line != line_no:
+ if code_line:
+ print('')
+ print('/*# %3d %s */' % (line_no, code_line))
+ print('')
+ last_line = line_no
+
+ print(" %s:%s {%s" % (
+ instr.opname,
+ ' /* %s */' % instr.argrepr if instr.arg is not None else '',
+ ' /* ??? */' if snippet is None else ' /* ... */ }' if snippet == '' else '',
+ ))
+ print(snippet or '')
+
+ print("} /* FUNCTION %s */" % func_name)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/runtests.py b/runtests.py
--- a/runtests.py
+++ b/runtests.py
@@ -2164,6 +2164,9 @@ def runtests(options, cmd_args, coverage=None):
pyximport.install(pyimport=True, build_dir=os.path.join(WORKDIR, '_pyximport'),
load_py_module_on_import_failure=True, inplace=True)
+ import gc
+ gc.set_debug(gc.DEBUG_UNCOLLECTABLE)
+
result = test_runner.run(test_suite)
if common_utility_dir and options.shard_num < 0 and options.cleanup_workdir:
| diff --git a/tests/run/generator_frame_cycle.py b/tests/run/generator_frame_cycle.py
--- a/tests/run/generator_frame_cycle.py
+++ b/tests/run/generator_frame_cycle.py
@@ -1,13 +1,9 @@
# mode: run
# tag: generator
+import cython
import sys
-def _next(it):
- if sys.version_info[0] >= 3:
- return next(it)
- else:
- return it.next()
def test_generator_frame_cycle():
"""
@@ -23,8 +19,42 @@ def whoo():
finally:
testit.append("I'm done")
g = whoo()
- _next(g)
+ next(g)
+
# Frame object cycle
eval('g.throw(ValueError)', {'g': g})
del g
+
+ return tuple(testit)
+
+
+def test_generator_frame_cycle_with_outer_exc():
+ """
+ >>> test_generator_frame_cycle_with_outer_exc()
+ ("I'm done",)
+ """
+ testit = []
+ def whoo():
+ try:
+ yield
+ except:
+ yield
+ finally:
+ testit.append("I'm done")
+ g = whoo()
+ next(g)
+
+ try:
+ raise ValueError()
+ except ValueError as exc:
+ assert sys.exc_info()[1] is exc, sys.exc_info()
+ # Frame object cycle
+ eval('g.throw(ValueError)', {'g': g})
+ # CPython 3.3 handles this incorrectly itself :)
+ if cython.compiled or sys.version_info[:2] not in [(3, 2), (3, 3)]:
+ assert sys.exc_info()[1] is exc, sys.exc_info()
+ del g
+ if cython.compiled or sys.version_info[:2] not in [(3, 2), (3, 3)]:
+ assert sys.exc_info()[1] is exc, sys.exc_info()
+
return tuple(testit)
diff --git a/tests/run/generators_GH1731.pyx b/tests/run/generators_GH1731.pyx
new file mode 100644
--- /dev/null
+++ b/tests/run/generators_GH1731.pyx
@@ -0,0 +1,70 @@
+# mode: run
+# ticket: gh1731
+
+
+def cygen():
+ yield 1
+
+
+def test_from_cython(g):
+ """
+ >>> def pygen(): yield 1
+ >>> test_from_cython(pygen)
+ Traceback (most recent call last):
+ ZeroDivisionError: integer division or modulo by zero
+
+ >>> test_from_cython(cygen)
+ Traceback (most recent call last):
+ ZeroDivisionError: integer division or modulo by zero
+ """
+ try:
+ 1 / 0
+ except:
+ for _ in g():
+ pass
+ raise
+
+
+def test_from_python():
+ """
+ >>> def test(g):
+ ... try:
+ ... 1 / 0
+ ... except:
+ ... for _ in g():
+ ... pass
+ ... raise
+
+ >>> def pygen():
+ ... yield 1
+ >>> test(pygen) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ ZeroDivisionError: ...division ...by zero
+
+ >>> test(cygen) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ ZeroDivisionError: ...division ...by zero
+ """
+
+
+def test_from_console():
+ """
+ >>> def pygen(): yield 1
+ >>> try: # doctest: +ELLIPSIS
+ ... 1 / 0
+ ... except:
+ ... for _ in pygen():
+ ... pass
+ ... raise
+ Traceback (most recent call last):
+ ZeroDivisionError: ...division ...by zero
+
+ >>> try: # doctest: +ELLIPSIS
+ ... 1 / 0
+ ... except:
+ ... for _ in cygen():
+ ... pass
+ ... raise
+ Traceback (most recent call last):
+ ZeroDivisionError: ...division ...by zero
+ """
diff --git a/tests/run/generators_py.py b/tests/run/generators_py.py
--- a/tests/run/generators_py.py
+++ b/tests/run/generators_py.py
@@ -1,6 +1,7 @@
# mode: run
# tag: generators
+import sys
import cython
@@ -147,25 +148,39 @@ def check_throw():
except ValueError:
pass
+
def check_yield_in_except():
"""
- >>> import sys
- >>> orig_exc = sys.exc_info()[0]
- >>> g = check_yield_in_except()
- >>> next(g)
- >>> next(g)
- >>> orig_exc is sys.exc_info()[0] or sys.exc_info()[0]
+ >>> if sys.version_info[0] == 2: sys.exc_clear()
+ >>> try:
+ ... raise TypeError("RAISED !")
+ ... except TypeError as orig_exc:
+ ... assert isinstance(orig_exc, TypeError), orig_exc
+ ... g = check_yield_in_except()
+ ... print(orig_exc is sys.exc_info()[1] or sys.exc_info())
+ ... next(g)
+ ... print(orig_exc is sys.exc_info()[1] or sys.exc_info())
+ ... next(g)
+ ... print(orig_exc is sys.exc_info()[1] or sys.exc_info())
True
+ True
+ True
+ >>> next(g)
+ Traceback (most recent call last):
+ StopIteration
"""
try:
yield
raise ValueError
- except ValueError:
+ except ValueError as exc:
+ assert sys.exc_info()[1] is exc, sys.exc_info()
yield
+ if cython.compiled or sys.version_info[0] > 2:
+ assert sys.exc_info()[1] is exc, sys.exc_info()
+
def yield_in_except_throw_exc_type():
"""
- >>> import sys
>>> g = yield_in_except_throw_exc_type()
>>> next(g)
>>> g.throw(TypeError)
@@ -177,12 +192,14 @@ def yield_in_except_throw_exc_type():
"""
try:
raise ValueError
- except ValueError:
+ except ValueError as exc:
+ assert sys.exc_info()[1] is exc, sys.exc_info()
yield
+ assert sys.exc_info()[1] is exc, sys.exc_info()
+
def yield_in_except_throw_instance():
"""
- >>> import sys
>>> g = yield_in_except_throw_instance()
>>> next(g)
>>> g.throw(TypeError())
@@ -194,8 +211,11 @@ def yield_in_except_throw_instance():
"""
try:
raise ValueError
- except ValueError:
+ except ValueError as exc:
+ assert sys.exc_info()[1] is exc, sys.exc_info()
yield
+ assert sys.exc_info()[1] is exc, sys.exc_info()
+
def test_swap_assignment():
"""
| Cython generators clear the current exception
Cython version 0.25.2
To reproduce, create the following files:
#### generator_bug.pyx
```
def gen():
yield 1
```
#### setup.py
```python
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
ext_modules = [
Extension('*', sources=['generator_bug.pyx'])
]
setup(
name="generator_bug",
ext_modules=cythonize(ext_modules),
script_args=['build_ext'],
options={'build_ext': {'inplace': True}}
)
```
Run `python setup.py` to build the extension module.
Run the following code from the Python REPL:
```python
from generator_bug import gen
try:
1 / 0
except:
for _ in gen():
pass
raise
```
The above code will not exit with a `ZeroDivisionError`, but a `TypeError` or `RuntimeError`, depending on whether the code is run using Python 2 or Python 3.
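A minimal pure-Python sketch to observe the clobbered exception state directly, reusing the `gen()` module built above (the assertion describes the expected, unbroken behaviour):
```python
import sys
from generator_bug import gen

try:
    1 / 0
except:
    assert sys.exc_info()[0] is ZeroDivisionError
    for _ in gen():
        pass
    # With the bug, sys.exc_info() no longer reports the
    # ZeroDivisionError here, which is why the bare `raise` fails.
    print(sys.exc_info())
```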
| 2017-08-20T21:55:25Z | [] | [] |
|
cython/cython | 1,900 | cython__cython-1900 | [
"1898"
] | ba6f36abffa6817da2f05e32bbc9401484d6b776 | diff --git a/Cython/Coverage.py b/Cython/Coverage.py
--- a/Cython/Coverage.py
+++ b/Cython/Coverage.py
@@ -12,6 +12,7 @@
from collections import defaultdict
from coverage.plugin import CoveragePlugin, FileTracer, FileReporter # requires coverage.py 4.0+
+from coverage.files import canonical_filename
from .Utils import find_root_package_dir, is_package_dir, open_source_file
@@ -41,8 +42,8 @@ def _find_dep_file_path(main_file, file_path):
for sys_path in sys.path:
test_path = os.path.realpath(os.path.join(sys_path, file_path))
if os.path.exists(test_path):
- return test_path
- return abs_path
+ return canonical_filename(test_path)
+ return canonical_filename(abs_path)
class Plugin(CoveragePlugin):
@@ -63,7 +64,7 @@ def file_tracer(self, filename):
if filename.startswith('<') or filename.startswith('memory:'):
return None
c_file = py_file = None
- filename = os.path.abspath(filename)
+ filename = canonical_filename(os.path.abspath(filename))
if self._c_files_map and filename in self._c_files_map:
c_file = self._c_files_map[filename][0]
@@ -91,7 +92,7 @@ def file_reporter(self, filename):
# from coverage.python import PythonFileReporter
# return PythonFileReporter(filename)
- filename = os.path.abspath(filename)
+ filename = canonical_filename(os.path.abspath(filename))
if self._c_files_map and filename in self._c_files_map:
c_file, rel_file_path, code = self._c_files_map[filename]
else:
| diff --git a/tests/run/coverage_api.srctree b/tests/run/coverage_api.srctree
--- a/tests/run/coverage_api.srctree
+++ b/tests/run/coverage_api.srctree
@@ -2,7 +2,7 @@
# tag: coverage,trace
"""
-PYTHON -c 'import shutil; shutil.copy("pkg/coverage_test_pyx.pyx", "pkg/coverage_test_pyx.pxi")'
+PYTHON -c "import shutil; shutil.copy('pkg/coverage_test_pyx.pyx', 'pkg/coverage_test_pyx.pxi')"
PYTHON setup.py build_ext -i
PYTHON coverage_test.py
"""
@@ -14,7 +14,8 @@ from Cython.Build import cythonize
setup(ext_modules = cythonize([
'coverage_test_*.py*',
- 'pkg/coverage_test_*.py*'
+ 'pkg/coverage_test_*.py*',
+ 'Package2/CoverageTest_*.py*'
]))
@@ -68,6 +69,52 @@ def main_func(int x): # 11
return cfunc1(x) + func1(x, 4) + func2(x) # 12
+######## Package2/__init__.py ########
+# Add MixedCase package and filenames to test if the files are found
+
+######## Package2/CoverageTest_py.py ########
+# cython: linetrace=True
+# distutils: define_macros=CYTHON_TRACE=1
+
+def func1(a, b):
+ x = 1 # 5
+ c = func2(a) + b # 6
+ return x + c # 7
+
+
+def func2(a):
+ return a * 2 # 11
+
+
+######## Package2/CoverageTest_pyx.pyx ########
+# cython: linetrace=True
+# distutils: define_macros=CYTHON_TRACE=1
+
+def func1(int a, int b):
+ cdef int x = 1 # 5
+ c = func2(a) + b # 6
+ return x + c # 7
+
+
+def func2(int a):
+ return a * 2 # 11
+
+
+######## coverage_test_include_pyx.pyx ########
+# cython: linetrace=True
+# distutils: define_macros=CYTHON_TRACE=1
+
+cdef int x = 5 # 4
+
+cdef int cfunc1(int x): # 6
+ return x * 3 # 7
+
+include "pkg/coverage_test_pyx.pxi" # 9
+
+def main_func(int x): # 11
+ return cfunc1(x) + func1(x, 4) + func2(x) # 12
+
+
######## coverage_test.py ########
import re
@@ -84,8 +131,12 @@ from pkg import coverage_test_py
from pkg import coverage_test_pyx
import coverage_test_include_pyx
+# test the MixedCase Files and packages
+from Package2 import CoverageTest_py
+from Package2 import CoverageTest_pyx
-for module in [coverage_test_py, coverage_test_pyx, coverage_test_include_pyx]:
+for module in [coverage_test_py, coverage_test_pyx, coverage_test_include_pyx,
+ CoverageTest_py, CoverageTest_pyx]:
assert not any(module.__file__.endswith(ext) for ext in '.py .pyc .pyo .pyw .pyx .pxi'.split()), \
module.__file__
@@ -93,10 +144,18 @@ for module in [coverage_test_py, coverage_test_pyx, coverage_test_include_pyx]:
def source_file_for(module):
module_name = module.__name__
path, ext = os.path.splitext(module.__file__)
- platform_suffix = re.search(r'[.](?:cpython|pypy)-[0-9]+[^.]*$', path, re.I)
- if platform_suffix:
- path = path[:platform_suffix.start()]
- return path + '.' + module_name.rsplit('_', 1)[-1]
+ if ext == '.so':
+ # Linux/Unix/Mac extension module
+ platform_suffix = re.search(r'[.](?:cpython|pypy)-[0-9]+[-_a-z0-9]*$', path, re.I)
+ if platform_suffix:
+ path = path[:platform_suffix.start()]
+ elif ext == '.pyd':
+ # Windows extension module
+ platform_suffix = re.search(r'[.]cp[0-9]+-win[_a-z0-9]*$', path, re.I)
+ if platform_suffix:
+ path = path[:platform_suffix.start()]
+ source_filepath = path + '.' + module_name.rsplit('_', 1)[-1]
+ return source_filepath
def run_coverage(module):
@@ -137,3 +196,5 @@ if __name__ == '__main__':
run_coverage(coverage_test_py)
run_coverage(coverage_test_pyx)
run_coverage(coverage_test_include_pyx)
+ run_coverage(CoverageTest_py)
+ run_coverage(CoverageTest_pyx)
diff --git a/tests/run/coverage_cmd.srctree b/tests/run/coverage_cmd.srctree
--- a/tests/run/coverage_cmd.srctree
+++ b/tests/run/coverage_cmd.srctree
@@ -2,7 +2,7 @@
# tag: coverage,trace
"""
-PYTHON -c 'import shutil; shutil.copy("pkg/coverage_test_pyx.pyx", "pkg/coverage_test_pyx.pxi")'
+PYTHON -c "import shutil; shutil.copy('pkg/coverage_test_pyx.pyx', 'pkg/coverage_test_pyx.pxi')"
PYTHON setup.py build_ext -i
PYTHON -m coverage run coverage_test.py
PYTHON collect_coverage.py
| Cython.Coverage: error with pyx-file coverage with lower-case filenames and wrong line numbers in .coverage
On Windows, with Cython 0.27 and Coverage.py version 4.3.4 (with C extension), I try to report the coverage of a .pyx file with the Cython.Coverage plugin for coverage.py.
`coverage run --source=src -m py.test --pyargs cythonarrays` produces the .coverage file, including the line numbers that were hit for the .pyx files.
However, `coverage report` issues
> src\cythonarrays\array_shapes.pyx TypeError: 'NoneType' object is not iterable
> src\cythonarrays\tests\example_cython.pyx TypeError: 'NoneType' object is not iterable
Unfortunately, the coverage module catches the exception and does not show where the error occurred.
So I had to patch coverage/summary.py to get the traceback, which showed me that `set(self._code)` failed because `self._code` was None, at line 286 in
```
class CythonModuleReporter(FileReporter):
def lines(self):
return set(self._code)
```
Further debugging led me to the point that the None in `self._code` came from
```
class Plugin(CoveragePlugin):
def _parse_lines(self, c_file, sourcefile):
if sourcefile not in self._c_files_map:
return (None,) * 2 # e.g. shared library file
```
and sourcefile was not in self._c_files_map, because sourcefile started with C:\Dev\cythonarrays\..., while the key in the self._c_files_map dictionary was written with the lowercase c:\dev\cythonarrays...
So abspath returns the path in lowercase because it finds the files via the lowercase path entries. I installed the package editable with pip install -e cythonarrays, and this led to entries in easy-install.pth with a lowercase path.
To patch the problem, I converted all filepaths to lowercase:
```
class Plugin(CoveragePlugin):
    def file_reporter(self, filename):
filename = os.path.abspath(filename).lower()
def _parse_lines(self, c_file, sourcefile):
(...)
for filename, code in code_lines.items():
abs_path = _find_dep_file_path(c_file, filename).lower()
```
This adds the *.pyx-files to the report.
However, this might not be the best solution, since it may not always work on both Windows and Linux...
Maybe you have another idea of how to fix this.
The other problem is the line numbers reported by `coverage run` and stored in the .coverage result file for the .pyx file, which do not match the actual line numbers in the .pyx file.
In the .coverage file, 26 line numbers between 1 and 983 are collected for my example_cython.pyx file.
`!coverage.py: This is a private format, don't read it directly!{"lines":{"C:\\Dev\\cythonarrays\\code\\cythonarrays\\src\\cythonarrays\\tests\\example_cython.pxd":[],"C:\\Dev\\cythonarrays\\code\\cythonarrays\\src\\cythonarrays\\tests\\example_cython.pyx":[1,246,10,288,207,983,537,155,284,285,286,31,32,289,290,291,36,37,287,243,244,245,54,55,314,315]},"file_tracers":{"C:\\Dev\\cythonarrays\\code\\cythonarrays\\src\\cythonarrays\\tests\\example_cython.pyx":"Cython.Coverage.Plugin","C:\\Dev\\cythonarrays\\code\\cythonarrays\\src\\cythonarrays\\tests\\example_cython.pxd":"Cython.Coverage.Plugin","C:\\Dev\\cythonarrays\\code\\cythonarrays\\src\\cythonarrays\\array_shapes.pyx":"Cython.Coverage.Plugin"}}`
The example_cython.pyx file has only 86 lines.
The generated C file contains comments for the traced lines in the actual .pyx file, like:
> /* "cythonarrays/tests/example_cython.pyx":31
to C-imported files like:
> /* "../../../../Anaconda/envs/cythonarrays/lib/site-packages/Cython/Includes/numpy/__init__.pxd":220
and to View.MemoryView and to a "Tree fragment"
> /* "View.MemoryView":120
> /* "(tree fragment)":1
So the strange numbers the Cython.Coverage plugin collected in the C file are mostly line numbers referring to the "View.MemoryView"
```
/* "View.MemoryView":983
* return self.from_object
*
* __pyx_getbuffer = capsule(<void *> &__pyx_memoryview_getbuffer, "getbuffer(obj, view, flags)") # <<<<<<<<<<<<<<
*
*
*/
```
or to the "tree fragment"
```
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* cdef bint use_setstate
* state = (self.INF_d, self.INF_f, self.NAN_d, self.NAN_f, self.NINF_d, self.NINF_f, self._invalid_g, self._jobs_j, self._km_ij, self._not_initialized_ij, self._param_g, self._persons_gi, self._trips_ij, self._valid_g, self._zonenumbers_i, self.destinations, self.groups, self.n_threads, self.origins)
*/
```
What could be the reason that these line numbers are collected instead of the numbers of the lines that were actually hit in the .pyx file?
I found an alternative to `.lower()` that helped to get the right line numbers:
Instead of converting the filepath to lowercase, the following function returns the filepath with the casing the file system reports (on Windows; on Linux it remains as it is):
```
import glob
import sys

def get_actual_filename(name):
    if not sys.platform.startswith('win'):
        return name
    dirs = name.split('\\')
    # drive letter: normalise to upper case
    test_name = [dirs[0].upper()]
    for d in dirs[1:]:
        # wrap the last character in [] so glob matches the component
        # case-insensitively but returns the real on-disk casing
        test_name += ["%s[%s]" % (d[:-1], d[-1])]
    res = glob.glob('\\'.join(test_name))
    if not res:
        # file not found
        return None
    return res[0]
```
(thanks to [this link](https://stackoverflow.com/questions/3692261/in-python-how-can-i-get-the-correctly-cased-path-for-a-file))
Then the filename can be converted with:
```
class Plugin(CoveragePlugin):
    def file_reporter(self, filename):
filename = get_actual_filename(os.path.abspath(filename))
def _parse_lines(self, c_file, sourcefile):
(...)
for filename, code in code_lines.items():
abs_path = get_actual_filename(_find_dep_file_path(c_file, filename))
```
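For reference, the merged patch above avoids the custom glob walk entirely by reusing coverage.py's own canonicaliser (sketch; the helper name is illustrative):
```python
import os.path
from coverage.files import canonical_filename  # coverage.py 4.0+

def normalise(filename):
    # same canonicalisation the patch applies inside Plugin
    return canonical_filename(os.path.abspath(filename))
```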
| 2017-09-29T19:20:38Z | [] | [] |
cython/cython | 1,933 | cython__cython-1933 | [
"1484"
] | 31752522d88f03642fb9c17474135cb16d2fe474 | diff --git a/runtests.py b/runtests.py
--- a/runtests.py
+++ b/runtests.py
@@ -71,7 +71,7 @@ def __nonzero__(self):
from distutils.command.build_ext import build_ext as _build_ext
from distutils import sysconfig
-
+from distutils import ccompiler
_to_clean = []
@atexit.register
@@ -240,7 +240,6 @@ def update_openmp_extension(ext):
if flags:
compile_flags, link_flags = flags
-
ext.extra_compile_args.extend(compile_flags.split())
ext.extra_link_args.extend(link_flags.split())
return ext
@@ -249,21 +248,32 @@ def update_openmp_extension(ext):
return EXCLUDE_EXT
-def get_openmp_compiler_flags(language):
+def update_cpp11_extension(ext):
"""
- As of gcc 4.2, it supports OpenMP 2.5. Gcc 4.4 implements 3.0. We don't
- (currently) check for other compilers.
+ update cpp11 extensions that will run on versions of gcc >4.8
+ """
+ gcc_version = get_gcc_version(ext.language)
+ compiler_version = gcc_version.group(1)
+ if gcc_version is not None:
+ compiler_version = gcc_version.group(1)
+ if float(compiler_version) > 4.8:
+ ext.extra_compile_args.extend("-std=c++11")
+ return ext
+ return EXCLUDE_EXT
- returns a two-tuple of (CFLAGS, LDFLAGS) to build the OpenMP extension
+
+def get_gcc_version(language):
+ """
+ finds gcc version using Popen
"""
if language == 'cpp':
cc = sysconfig.get_config_var('CXX')
else:
cc = sysconfig.get_config_var('CC')
+ if not cc:
+ cc = ccompiler.get_default_compiler()
if not cc:
- if sys.platform == 'win32':
- return '/openmp', ''
return None
# For some reason, cc can be e.g. 'gcc -pthread'
@@ -272,7 +282,6 @@ def get_openmp_compiler_flags(language):
# Force english output
env = os.environ.copy()
env['LC_MESSAGES'] = 'C'
-
matcher = re.compile(r"gcc version (\d+\.\d+)").search
try:
p = subprocess.Popen([cc, "-v"], stderr=subprocess.PIPE, env=env)
@@ -282,12 +291,25 @@ def get_openmp_compiler_flags(language):
(language, os.strerror(sys.exc_info()[1].errno), cc))
return None
_, output = p.communicate()
-
output = output.decode(locale.getpreferredencoding() or 'ASCII', 'replace')
-
gcc_version = matcher(output)
+ return gcc_version
+
+
+def get_openmp_compiler_flags(language):
+ """
+ As of gcc 4.2, it supports OpenMP 2.5. Gcc 4.4 implements 3.0. We don't
+ (currently) check for other compilers.
+
+ returns a two-tuple of (CFLAGS, LDFLAGS) to build the OpenMP extension
+ """
+ gcc_version = get_gcc_version(language)
+
if not gcc_version:
- return None # not gcc - FIXME: do something about other compilers
+ if sys.platform == 'win32':
+ return '/openmp', ''
+ else:
+ return None # not gcc - FIXME: do something about other compilers
# gcc defines "__int128_t", assume that at least all 64 bit architectures have it
global COMPILER_HAS_INT128
@@ -313,6 +335,7 @@ def get_openmp_compiler_flags(language):
EXT_EXTRAS = {
'tag:numpy' : update_numpy_extension,
'tag:openmp': update_openmp_extension,
+ 'tag:cpp11': update_cpp11_extension,
'tag:trace' : update_linetrace_extension,
}
| diff --git a/tests/run/cpp_stl_cpp11.pyx b/tests/run/cpp_stl_cpp11.pyx
new file mode 100644
--- /dev/null
+++ b/tests/run/cpp_stl_cpp11.pyx
@@ -0,0 +1,30 @@
+# mode: run
+# tag: cpp, werror, cpp11
+# distutils: extra_compile_args=-std=c++0x
+
+import sys
+from libcpp.unordered_map cimport unordered_map
+from libcpp.pair cimport pair
+
+def test_unordered_map_functionality():
+ """
+ >>> test_unordered_map_functionality()
+ 'pass'
+ """
+ cdef:
+ unordered_map[int, int] int_map = unordered_map[int,int]()
+ pair[int, int] pair_insert = pair[int, int](1, 2)
+ unordered_map[int,int].iterator iterator = int_map.begin()
+ pair[unordered_map[int,int].iterator, bint] pair_iter = int_map.insert(pair_insert)
+ assert int_map[1] == 2
+ assert int_map.size() == 1
+ assert int_map.erase(1) == 1 # returns number of elements erased
+ assert int_map.size() == 0
+ int_map[1] = 2
+ assert int_map.size() == 1
+ assert int_map[1] == 2
+ iterator = int_map.find(1)
+ assert int_map.erase(iterator) == int_map.end()
+ return "pass"
+
+
| Return type of unordered_map erase methods should be iterator not void
The return type of unordered_map erase methods should be `iterator` not `void`, like in `vector`.
https://github.com/cython/cython/blob/master/Cython/Includes/libcpp/unordered_map.pxd#L45
C++ reference: http://en.cppreference.com/w/cpp/container/unordered_map/erase
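A minimal sketch of the usage this change enables, with `erase(iterator)` returning an iterator (assumes a C++11 compiler; compare the test added below):
```cython
# distutils: language = c++
from libcpp.unordered_map cimport unordered_map

def demo():
    cdef unordered_map[int, int] m
    m[1] = 2
    cdef unordered_map[int, int].iterator it = m.find(1)
    it = m.erase(it)   # erase(iterator) now returns an iterator
    return m.size()    # 0
```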
| Thanks. We'd welcome a PR with a test :).
Ok, working on that :)
Created a PR for this https://github.com/cython/cython/pull/1490
I think it would be good to add more tests for the `erase` method on other data structures.
Also, because `unordered_map` is from C++11, I don't know if it will compile on all platforms.
I added `-std=c++11` to `CXXFLAGS`
``` diff
(dev) carlos@macbookair:~/code/cython$ git diff HEAD
diff --git a/runtests.py b/runtests.py
index 0e0abf5..01f44a2 100755
--- a/runtests.py
+++ b/runtests.py
@@ -423,6 +423,7 @@ class build_ext(_build_ext):
compiler_obj = self.compiler
if ext.language == 'c++':
compiler_obj.compiler_so.remove('-Wstrict-prototypes')
+ compiler_obj.compiler_so.append('-std=c++11')
if CCACHE:
compiler_obj.compiler_so = CCACHE + compiler_obj.compiler_so
if getattr(ext, 'openmp', None) and compiler_obj.compiler_type == 'msvc':
(dev) carlos@macbookair:~/code/cython$
```
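Note that the merged test avoids changing the global build flags and instead opts in per module through the distutils header comment, as the test file above already shows:
```cython
# mode: run
# tag: cpp, werror, cpp11
# distutils: extra_compile_args=-std=c++0x
```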
| 2017-10-13T17:44:52Z | [] | [] |
cython/cython | 2,049 | cython__cython-2049 | [
"2047"
] | e1ae63dd990e5d74a500d4051f0a8a1dd5fbae9b | diff --git a/Cython/Compiler/Optimize.py b/Cython/Compiler/Optimize.py
--- a/Cython/Compiler/Optimize.py
+++ b/Cython/Compiler/Optimize.py
@@ -2913,6 +2913,27 @@ def _handle_simple_method_dict_setdefault(self, node, function, args, is_unbound
may_return_none=True,
utility_code=load_c_utility('dict_setdefault'))
+ PyDict_Pop_func_type = PyrexTypes.CFuncType(
+ PyrexTypes.py_object_type, [
+ PyrexTypes.CFuncTypeArg("dict", PyrexTypes.py_object_type, None),
+ PyrexTypes.CFuncTypeArg("key", PyrexTypes.py_object_type, None),
+ PyrexTypes.CFuncTypeArg("default", PyrexTypes.py_object_type, None),
+ ])
+
+ def _handle_simple_method_dict_pop(self, node, function, args, is_unbound_method):
+ """Replace dict.pop() by a call to _PyDict_Pop().
+ """
+ if len(args) == 2:
+ args.append(ExprNodes.NullNode(node.pos))
+ elif len(args) != 3:
+ self._error_wrong_arg_count('dict.pop', node, args, "2 or 3")
+ return node
+
+ return self._substitute_method_call(
+ node, function,
+ "_PyDict_Pop", self.PyDict_Pop_func_type,
+ 'pop', is_unbound_method, args)
+
Pyx_PyInt_BinopInt_func_type = PyrexTypes.CFuncType(
PyrexTypes.py_object_type, [
PyrexTypes.CFuncTypeArg("op1", PyrexTypes.py_object_type, None),
| diff --git a/tests/run/dict_pop.pyx b/tests/run/dict_pop.pyx
new file mode 100644
--- /dev/null
+++ b/tests/run/dict_pop.pyx
@@ -0,0 +1,34 @@
+
+cimport cython
+
[email protected]_assert_path_exists("//PythonCapiCallNode")
[email protected]_fail_if_path_exists("//AttributeNode")
+def dict_pop(dict d, key):
+ """
+ >>> d = { 1: 10, 2: 20 }
+ >>> dict_pop(d, 1)
+ (10, {2: 20})
+ >>> dict_pop(d, 3)
+ Traceback (most recent call last):
+ KeyError: 3
+ >>> dict_pop(d, 2)
+ (20, {})
+ """
+ return d.pop(key), d
+
+
[email protected]_assert_path_exists("//PythonCapiCallNode")
[email protected]_fail_if_path_exists("//AttributeNode")
+def dict_pop_default(dict d, key, default):
+ """
+ >>> d = { 1: 10, 2: 20 }
+ >>> dict_pop_default(d, 1, "default")
+ (10, {2: 20})
+ >>> dict_pop_default(d, 3, None)
+ (None, {2: 20})
+ >>> dict_pop_default(d, 3, "default")
+ ('default', {2: 20})
+ >>> dict_pop_default(d, 2, "default")
+ (20, {})
+ """
+ return d.pop(key, default), d
| dict.pop() calls not optimized
`dict.pop()` could be optimized into a direct call to `_PyDict_Pop()`, but instead it calls the Python-facing method object.
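A minimal sketch of what this means at the Cython level; the substitution itself would happen inside the compiler's optimizer, so the source stays unchanged:
```cython
cdef dict d = {1: 10, 2: 20}
d.pop(1)        # today: attribute lookup plus a Python-level method call
d.pop(2, None)  # desired: both forms lowered to a direct _PyDict_Pop() C call
```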
| 2017-12-20T11:33:00Z | [] | [] |
cython/cython | 2,237 | cython__cython-2237 | [
"1397"
] | 97d42bceeb7172a3c5296cdda8ab4a4ee0aa05ec | diff --git a/Cython/Build/Dependencies.py b/Cython/Build/Dependencies.py
--- a/Cython/Build/Dependencies.py
+++ b/Cython/Build/Dependencies.py
@@ -616,15 +616,28 @@ def extract_timestamp(self, filename):
def newest_dependency(self, filename):
return max([self.extract_timestamp(f) for f in self.all_dependencies(filename)])
- def transitive_fingerprint(self, filename, extra=None):
+ def transitive_fingerprint(self, filename, module, compilation_options):
+ r"""
+ Return a fingerprint of a cython file that is about to be cythonized.
+
+ Fingerprints are looked up in future compilations. If the fingerprint
+ is found, the cythonization can be skipped. The fingerprint must
+ incorporate everything that has an influence on the generated code.
+ """
try:
m = hashlib.md5(__version__.encode('UTF-8'))
m.update(file_hash(filename).encode('UTF-8'))
for x in sorted(self.all_dependencies(filename)):
if os.path.splitext(x)[1] not in ('.c', '.cpp', '.h'):
m.update(file_hash(x).encode('UTF-8'))
- if extra is not None:
- m.update(str(extra).encode('UTF-8'))
+ # Include the module attributes that change the compilation result
+ # in the fingerprint. We do not iterate over module.__dict__ and
+ # include almost everything here as users might extend Extension
+ # with arbitrary (random) attributes that would lead to cache
+ # misses.
+ m.update(str((module.language, module.py_limited_api, module.np_pythran)).encode('UTF-8'))
+
+ m.update(compilation_options.get_fingerprint().encode('UTF-8'))
return m.hexdigest()
except IOError:
return None
@@ -881,8 +894,6 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
if 'include_path' not in options:
options['include_path'] = ['.']
if 'common_utility_include_dir' in options:
- if options.get('cache'):
- raise NotImplementedError("common_utility_include_dir does not yet work with caching")
safe_makedirs(options['common_utility_include_dir'])
pythran_options = None
@@ -973,8 +984,7 @@ def copy_to_build_dir(filepath, root=os.getcwd()):
else:
print("Compiling %s because it depends on %s." % (source, dep))
if not force and options.cache:
- extra = m.language
- fingerprint = deps.transitive_fingerprint(source, extra)
+ fingerprint = deps.transitive_fingerprint(source, m, options)
else:
fingerprint = None
to_compile.append((
diff --git a/Cython/Compiler/Main.py b/Cython/Compiler/Main.py
--- a/Cython/Compiler/Main.py
+++ b/Cython/Compiler/Main.py
@@ -519,29 +519,11 @@ def __init__(self, source_desc, full_module_name, cwd):
class CompilationOptions(object):
+ r"""
+ See default_options at the end of this module for a list of all possible
+ options and CmdLine.usage and CmdLine.parse_command_line() for their
+ meaning.
"""
- Options to the Cython compiler:
-
- show_version boolean Display version number
- use_listing_file boolean Generate a .lis file
- errors_to_stderr boolean Echo errors to stderr when using .lis
- include_path [string] Directories to search for include files
- output_file string Name of generated .c file
- generate_pxi boolean Generate .pxi file for public declarations
- capi_reexport_cincludes
- boolean Add cincluded headers to any auto-generated
- header files.
- timestamps boolean Only compile changed source files.
- verbose boolean Always print source names being compiled
- compiler_directives dict Overrides for pragma options (see Options.py)
- embedded_metadata dict Metadata to embed in the C file as json.
- evaluate_tree_assertions boolean Test support: evaluate parse tree assertions
- language_level integer The Python language level: 2 or 3
- formal_grammar boolean Parse the file with the formal grammar
-
- cplus boolean Compile as c++ code
- """
-
def __init__(self, defaults=None, **kw):
self.include_path = []
if defaults:
@@ -595,6 +577,84 @@ def create_context(self):
return Context(self.include_path, self.compiler_directives,
self.cplus, self.language_level, options=self)
+ def get_fingerprint(self):
+ r"""
+ Return a string that contains all the options that are relevant for cache invalidation.
+ """
+ data = {}
+
+ for key in self.__dict__:
+ if key in ['show_version', 'errors_to_stderr', 'verbose', 'quiet']:
+ # verbosity flags have no influence on the compilation result
+ continue
+ elif key in ['output_file', 'output_dir']:
+ # ignore the exact name of the output file
+ continue
+ elif key in ['timestamps']:
+ # the cache cares about the content of files, not about the timestamps of sources
+ continue
+ elif key in ['cache']:
+ # hopefully caching has no influence on the compilation result
+ continue
+ elif key in ['compiler_directives']:
+ # directives passed on to the C compiler do not influence the generated C code
+ continue
+ elif key in ['include_path']:
+ # this path changes which headers are tracked as dependencies,
+ # it has no influence on the generated C code
+ continue
+ elif key in ['working_path']:
+ # this path changes where modules and pxd files are found;
+ # their content is part of the fingerprint anyway, their
+ # absolute path does not matter
+ continue
+ elif key in ['create_extension']:
+ # create_extension() has already mangled the options, e.g.,
+ # embedded_metadata, when the fingerprint is computed so we
+ # ignore it here.
+ continue
+ elif key in ['build_dir']:
+ # the (temporary) directory where we collect dependencies
+ # has no influence on the C output
+ continue
+ elif key in ['use_listing_file', 'generate_pxi', 'annotate', 'annotate_coverage_xml']:
+ # all output files are contained in the cache so the types of
+ # files generated must be part of the fingerprint
+ pass
+ elif key in ['formal_grammar', 'evaluate_tree_assertions']:
+ # these bits can change whether compilation to C passes/fails
+ pass
+ elif key in ['embedded_metadata', 'emit_linenums', 'c_line_in_traceback', 'gdb_debug', 'relative_path_in_code_position_comments']:
+ # the generated code contains additional bits when these are set
+ pass
+ elif key in ['cplus', 'language_level', 'compile_time_env', 'np_pythran']:
+ # assorted bits that, e.g., influence the parser
+ pass
+ elif key == ['capi_reexport_cincludes']:
+ if self.capi_reexport_cincludes:
+ # our caching implementation does not yet include fingerprints of all the header files
+ raise NotImplementedError('capi_reexport_cincludes is not compatible with Cython caching')
+ elif key == ['common_utility_include_dir']:
+ if self.common_utility_include_dir:
+ raise NotImplementedError('common_utility_include_dir is not compatible with Cython caching yet')
+ else:
+ # any unexpected option should go into the fingerprint; it's better
+ # to recompile than to return incorrect results from the cache.
+ pass
+
+ data[key] = self.__dict__[key]
+
+ def to_fingerprint(item):
+ r"""
+ Recursively turn item into a string, turning dicts into lists with
+ deterministic ordering.
+ """
+ if isinstance(item, dict):
+ item = sorted([(repr(key), to_fingerprint(item[key])) for key in item])
+ return repr(item)
+
+ return to_fingerprint(data)
+
class CompilationResult(object):
"""
| diff --git a/Cython/Build/Tests/TestCyCache.py b/Cython/Build/Tests/TestCyCache.py
--- a/Cython/Build/Tests/TestCyCache.py
+++ b/Cython/Build/Tests/TestCyCache.py
@@ -78,3 +78,23 @@ def test_multi_file_output(self):
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
for output in expected:
self.assertTrue(os.path.exists(output), output)
+
+ def test_options_invalidation(self):
+ hash_pyx = os.path.join(self.src_dir, 'options.pyx')
+ hash_c = hash_pyx[:-len('.pyx')] + '.c'
+
+ open(hash_pyx, 'w').write('pass')
+ self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False)
+ self.assertEqual(1, len(self.cache_files('options.c*')))
+
+ open(hash_pyx, 'w').write('pass')
+ self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=True)
+ self.assertEqual(2, len(self.cache_files('options.c*')))
+
+ open(hash_pyx, 'w').write('pass')
+ self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=False)
+ self.assertEqual(2, len(self.cache_files('options.c*')))
+
+ open(hash_pyx, 'w').write('pass')
+ self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=True)
+ self.assertEqual(2, len(self.cache_files('options.c*')))
| cycache fingerprint does not look at compiler directives and Cython version
The compiler directives used for Cythonizing should be part of the fingerprint for cycache.
Migrated from http://trac.cython.org/ticket/842
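To make the request concrete, a minimal sketch of the kind of fingerprint being asked for; `source_digest` and `directives` are illustrative names, not cycache's actual API:
```python
import hashlib
import Cython

def options_fingerprint(source_digest, directives):
    # fold the Cython version and the directive set into the hash,
    # not just the contents of the source files
    m = hashlib.md5(Cython.__version__.encode('utf-8'))
    m.update(source_digest.encode('utf-8'))
    m.update(repr(sorted(directives.items())).encode('utf-8'))
    return m.hexdigest()
```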
| @jdemeyer commented:
Also the Cython version should be part of the fingerprint.
@jdemeyer changed **summary** from "cycache fingerprint does not look at compiler directives" to "cycache fingerprint does not look at compiler directives and Cython version".
| 2018-05-04T15:46:59Z | [] | [] |
cython/cython | 2,453 | cython__cython-2453 | [
"2422",
"2422"
] | 5e7d47656163017936ea4105226d1c392b0bba54 | diff --git a/Cython/Build/Dependencies.py b/Cython/Build/Dependencies.py
--- a/Cython/Build/Dependencies.py
+++ b/Cython/Build/Dependencies.py
@@ -809,8 +809,7 @@ class Extension_setuptools(Extension): pass
elif name:
module_name = name
- if module_name == 'cython':
- raise ValueError('cython is a special module, cannot be used as a module name')
+ Utils.raise_error_if_module_name_forbidden(module_name)
if module_name not in seen:
try:
diff --git a/Cython/Compiler/Main.py b/Cython/Compiler/Main.py
--- a/Cython/Compiler/Main.py
+++ b/Cython/Compiler/Main.py
@@ -469,6 +469,8 @@ def run_pipeline(source, options, full_module_name=None, context=None):
abs_path = os.path.abspath(source)
full_module_name = full_module_name or context.extract_module_name(source, options)
+ Utils.raise_error_if_module_name_forbidden(full_module_name)
+
if options.relative_path_in_code_position_comments:
rel_path = full_module_name.replace('.', os.sep) + source_ext
if not abs_path.endswith(rel_path):
diff --git a/Cython/Utils.py b/Cython/Utils.py
--- a/Cython/Utils.py
+++ b/Cython/Utils.py
@@ -487,3 +487,9 @@ def wrapper(cls):
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
+
+
+def raise_error_if_module_name_forbidden(full_module_name):
+ #it is bad idea to call the pyx-file cython.pyx, so fail early
+ if full_module_name == 'cython' or full_module_name.endswith('.cython'):
+ raise ValueError('cython is a special module, cannot be used as a module name')
| diff --git a/tests/build/cythonize_cython.srctree b/tests/build/cythonize_cython.srctree
new file mode 100644
--- /dev/null
+++ b/tests/build/cythonize_cython.srctree
@@ -0,0 +1,50 @@
+PYTHON -c "import cythonize_tests"
+PYTHON -c "import cython_tests"
+
+######## cythonize_tests.py ########
+
+from Cython.Build.Cythonize import main as cythonize
+
+for test_case in ["cython.pyx", "src/cython.pyx", "src2/cython.pyx"]:
+ try:
+ cythonize([test_case])
+ except ValueError:
+ pass
+ else:
+ assert False, "ValueError not raised - forbidding cythonize "+test_case+" doesn't work"
+
+try:
+ cythonize(["notcython.pys"])
+except ValueError:
+ assert False, "ValueError raised - forbidding cythonize notcython.pyx should work"
+else:
+ pass
+
+######## cython_tests.py ########
+
+
+from Cython.Compiler.Main import main as cython
+import sys
+
+for test_case in ["cython.pyx", "src/cython.pyx", "src2/cython.pyx"]:
+ sys.argv=["cython", test_case] #cython.py will extract parameters from sys.argv
+ try:
+ cython(command_line=1)
+ except ValueError:
+ pass
+ else:
+ assert False, "ValueError not raised - forbidding cython "+test_case+" doesn't work"
+
+sys.argv=["cython", "notcython.pyx"] #cython.py will extract parameters from sys.argv
+try:
+ cython(["notcython.pys"])
+except ValueError:
+ assert False, "ValueError raised - forbidding cythonize notcython.pyx should work"
+else:
+ pass
+
+######## cython.pyx ########
+######## src/__init__.py ########
+######## src/cython.pyx ########
+######## notcython.pyx ########
+######## src2/cython.pyx ########
| cython-tool might produce an error for cython.pyx, the same way cythonize does
When building a file called `cython.pyx` via
`cythonize cython.pyx`
I get an error message saying "ValueError: cython is a special module, cannot be used as a module name", which is a Good Thing.
However, using `cython cython.pyx` doesn't produce any errors, just a cython.c file, one which, at least on Linux, cannot be compiled.
Given that the `cython` tool is advertised in the documentation (http://cython.readthedocs.io/en/latest/src/reference/compilation.html#compiling-from-the-command-line) and that it is not unusual for somebody new to Cython to name their pyx file "cython.pyx", please consider producing the same error message for the `cython` tool as well.
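For reference, a minimal sketch of a guard that both entry points could share; the helper name mirrors the one the patch above adds, so treat the exact check as illustrative:
```python
def raise_error_if_module_name_forbidden(full_module_name):
    # fail early instead of emitting a cython.c that cannot be compiled
    if full_module_name == 'cython':
        raise ValueError('cython is a special module, cannot be used as a module name')
```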
| PR welcome
| 2018-06-29T21:44:25Z | [] | [] |
cython/cython | 2,497 | cython__cython-2497 | [
"2496"
] | 3f7a0302c0b1e66071840aa4a01f9f67d2a488c4 | diff --git a/Cython/Utils.py b/Cython/Utils.py
--- a/Cython/Utils.py
+++ b/Cython/Utils.py
@@ -491,5 +491,5 @@ def wrapper(cls):
def raise_error_if_module_name_forbidden(full_module_name):
#it is bad idea to call the pyx-file cython.pyx, so fail early
- if full_module_name == 'cython' or full_module_name.endswith('.cython'):
+ if full_module_name == 'cython' or full_module_name.startswith('cython.'):
raise ValueError('cython is a special module, cannot be used as a module name')
| diff --git a/tests/build/cythonize_cython.srctree b/tests/build/cythonize_cython.srctree
--- a/tests/build/cythonize_cython.srctree
+++ b/tests/build/cythonize_cython.srctree
@@ -5,7 +5,7 @@ PYTHON -c "import cython_tests"
from Cython.Build.Cythonize import main as cythonize
-for test_case in ["cython.pyx", "src/cython.pyx", "src2/cython.pyx"]:
+for test_case in ["cython.pyx", "src2/cython.pyx", "src/cython/helper.pyx"]:
try:
cythonize([test_case])
except ValueError:
@@ -13,12 +13,13 @@ for test_case in ["cython.pyx", "src/cython.pyx", "src2/cython.pyx"]:
else:
assert False, "ValueError not raised - forbidding cythonize "+test_case+" doesn't work"
-try:
- cythonize(["notcython.pys"])
-except ValueError:
- assert False, "ValueError raised - forbidding cythonize notcython.pyx should work"
-else:
- pass
+for test_case in ["notcython.pyx", "my_module/cython.pyx", "cythontest/helper.pyx"]:
+ try:
+ cythonize([test_case])
+ except ValueError:
+ assert False, "ValueError raised - cythonize "+test_case+" should work"
+ else:
+ pass
######## cython_tests.py ########
@@ -26,7 +27,7 @@ else:
from Cython.Compiler.Main import main as cython
import sys
-for test_case in ["cython.pyx", "src/cython.pyx", "src2/cython.pyx"]:
+for test_case in ["cython.pyx", "scr2/cython.pyx", "src/cython/helper.pyx"]:
sys.argv=["cython", test_case] #cython.py will extract parameters from sys.argv
try:
cython(command_line=1)
@@ -35,16 +36,23 @@ for test_case in ["cython.pyx", "src/cython.pyx", "src2/cython.pyx"]:
else:
assert False, "ValueError not raised - forbidding cython "+test_case+" doesn't work"
-sys.argv=["cython", "notcython.pyx"] #cython.py will extract parameters from sys.argv
-try:
- cython(["notcython.pys"])
-except ValueError:
- assert False, "ValueError raised - forbidding cythonize notcython.pyx should work"
-else:
- pass
+
+for test_case in ["notcython.pyx", "my_module/cython.pyx", "cythontest/helper.pyx"]:
+ sys.argv=["cython", test_case] #cython.py will extract parameters from sys.argv
+ try:
+ cython([test_case])
+ except ValueError:
+ assert False, "ValueError raised - cython "+test_case+" should work"
+ else:
+ pass
+
######## cython.pyx ########
-######## src/__init__.py ########
-######## src/cython.pyx ########
+######## my_module/__init__.py ########
+######## my_module/cython.pyx ########
######## notcython.pyx ########
######## src2/cython.pyx ########
+######## src/cython/__init__.py ########
+######## src/cython/helper.pyx ########
+######## cythontest/__init__.py ########
+######## cythontest/helper.pyx ########
| submodule with name "cython.pyx" doesn't build
The [fix for #2422](https://github.com/cython/cython/commit/6c91bf8e5bc99b625405919f9318d5626ecfa782#diff-26945d164aa2d5cb24bbe2cb4b8903ed) introduced a regression: submodules called cython.pyx are no longer built, e.g. for a test case such as:
```
######## my_module/__init__.py ########
######## my_module/cython.pyx ########
```
It might be a little bit inconsistent to build cython.pyx in one case (submodule) but not in the other, but it is probably better not to break existing workflows.
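The regression boils down to the suffix test; a short plain-Python illustration of why `endswith('.cython')` is the wrong predicate while `startswith('cython.')` matches the intent:
```python
full_module_name = 'my_module.cython'               # a harmless submodule
assert full_module_name.endswith('.cython')         # True: wrongly rejected before the fix
assert not full_module_name.startswith('cython.')   # the fixed check accepts it
assert 'cython.helper'.startswith('cython.')        # real cython.* submodules stay forbidden
```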
| 2018-07-15T09:06:06Z | [] | [] |