diff --git a/CHANGES.rst b/CHANGES.rst index 4d254f3e69..05ddf29900 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,9 @@ +2.11.903 (2024-10-22) +===================== + +- Fixed (low-level) exception leak when using ``get_response(...)`` after ``urlopen(..., multiplexed=True)``. +- Fixed erroneously calculated maximal wait when starting a connection upgrade to a higher protocol version in rare cases (async+windows only). + 2.11.902 (2024-10-22) ===================== diff --git a/README.md b/README.md index d0796493a6..2ba8bc2d45 100644 --- a/README.md +++ b/README.md @@ -202,6 +202,16 @@ python -m pip install urllib3 The order is (actually) important. +Super! But how can I do that when installing something that depends on urllib3-future somewhere in its dependency tree? + +Let's say you want to install Niquests and keep BOTH urllib3 and urllib3-future, do: + +``` +URLLIB3_NO_OVERRIDE=true pip install niquests --no-binary urllib3-future +``` + +This applies to every package you wish to install that indirectly brings in urllib3-future. + - **Can you guarantee us that everything will go smooth?** Guarantee is a strong word with a lot of (legal) implication. We cannot offer a "guarantee". diff --git a/docs/advanced-usage.rst b/docs/advanced-usage.rst index 7b9376148d..d5f82e62cf 100644 --- a/docs/advanced-usage.rst +++ b/docs/advanced-usage.rst @@ -1032,6 +1032,8 @@ You can pass the following options to the DNS url: - ``doh://dns.google/?disabled_svn=h3`` -> Disable HTTP/3 - proxy _(url)_ - proxy_headers +- keepalive_delay +- keepalive_idle_window .. warning:: DNS over HTTPS support HTTP/1.1, HTTP/2 and HTTP/3. By default it tries to negotiate HTTP/2, then if available negotiate HTTP/3. The server must provide a valid ``Alt-Svc`` in responses. 
diff --git a/mypy-requirements.txt b/mypy-requirements.txt index 5b261cd434..8d93b52332 100644 --- a/mypy-requirements.txt +++ b/mypy-requirements.txt @@ -1,4 +1,4 @@ -mypy==1.11.1; python_version >= '3.8' +mypy==1.12.1; python_version >= '3.8' mypy==1.4.1; python_version < '3.8' idna>=2.0.0 cryptography==42.0.5 diff --git a/noxfile.py b/noxfile.py index 8a364cb91f..7bb29f2cb2 100644 --- a/noxfile.py +++ b/noxfile.py @@ -347,7 +347,7 @@ def downstream_requests(session: nox.Session) -> None: session.install("-r", "requirements-dev.txt", silent=False) session.cd(root) - session.install(".[socks]", silent=False) + session.install(".", silent=False) session.cd(f"{tmp_dir}/requests") session.run("python", "-c", "import urllib3; print(urllib3.__version__)") diff --git a/src/urllib3/_async/connectionpool.py b/src/urllib3/_async/connectionpool.py index 0091b452c3..65c7dda9bd 100644 --- a/src/urllib3/_async/connectionpool.py +++ b/src/urllib3/_async/connectionpool.py @@ -792,17 +792,84 @@ async def get_response( if self.pool is None: raise ClosedPoolError(self, "Pool is closed") + if promise is not None and not isinstance(promise, ResponsePromise): + raise TypeError( + f"get_response only support ResponsePromise but received {type(promise)} instead. " + f"This may occur if you expected the remote peer to support multiplexing but did not." 
+ ) + + clean_exit = True + try: async with self.pool.borrow( promise or ResponsePromise, block=promise is not None, not_idle_only=promise is None, ) as conn: - response = await conn.getresponse( - promise=promise, police_officer=self.pool - ) + try: + response = await conn.getresponse( + promise=promise, police_officer=self.pool + ) + except (BaseSSLError, OSError) as e: + if promise is not None: + url = typing.cast(str, promise.get_parameter("url")) + else: + url = "" + self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) + raise except UnavailableTraffic: return None + except ( + TimeoutError, + OSError, + ProtocolError, + BaseSSLError, + SSLError, + CertificateError, + ProxyError, + RecoverableError, + ) as e: + # Discard the connection for these exceptions. It will be + # replaced during the next _get_conn() call. + clean_exit = False + new_e: Exception = e + if isinstance(e, (BaseSSLError, CertificateError)): + new_e = SSLError(e) + if isinstance( + new_e, + ( + OSError, + NewConnectionError, + TimeoutError, + SSLError, + ), + ) and (conn and conn.proxy and not conn.has_connected_to_proxy): + new_e = _wrap_proxy_error(new_e, conn.proxy.scheme) + elif isinstance(new_e, OSError): + new_e = ProtocolError("Connection aborted.", new_e) + + if promise is not None: + retries = typing.cast(Retry, promise.get_parameter("retries")) + + method = typing.cast(str, promise.get_parameter("method")) + url = typing.cast(str, promise.get_parameter("url")) + + retries = retries.increment( + method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2] + ) + await retries.async_sleep() + else: + raise new_e # we only retry if we were specified a specific promise. we can't blindly assume to retry. + + # Keep track of the error for the retry warning. 
+ err = e + + if not clean_exit: + log.warning( + "Retrying (%r) after connection broken by '%r': %s", retries, err, url + ) + + return await self.get_response(promise=promise) if promise is not None and response is None: raise ValueError( diff --git a/src/urllib3/_version.py b/src/urllib3/_version.py index 03cf36cf32..18deb3bd10 100644 --- a/src/urllib3/_version.py +++ b/src/urllib3/_version.py @@ -1,4 +1,4 @@ # This file is protected via CODEOWNERS from __future__ import annotations -__version__ = "2.11.902" +__version__ = "2.11.903" diff --git a/src/urllib3/backend/_async/hface.py b/src/urllib3/backend/_async/hface.py index 4c0007a24d..569456a245 100644 --- a/src/urllib3/backend/_async/hface.py +++ b/src/urllib3/backend/_async/hface.py @@ -261,6 +261,10 @@ async def _upgrade(self) -> None: # type: ignore[override] self.conn_info.tls_handshake_latency.total_seconds() ) self._max_tolerable_delay_for_upgrade *= 10.0 + # we can, in rare case get self._max_tolerable_delay_for_upgrade <= 0.001 + # we want to avoid this at all cost. + if self._max_tolerable_delay_for_upgrade <= 0.01: + self._max_tolerable_delay_for_upgrade = 3.0 else: # by default (safe/conservative fallback) to 3000ms self._max_tolerable_delay_for_upgrade = 3.0 diff --git a/src/urllib3/backend/hface.py b/src/urllib3/backend/hface.py index 63181b8cea..1b701ea515 100644 --- a/src/urllib3/backend/hface.py +++ b/src/urllib3/backend/hface.py @@ -271,6 +271,10 @@ def _upgrade(self) -> None: self.conn_info.tls_handshake_latency.total_seconds() ) self._max_tolerable_delay_for_upgrade *= 10.0 + # we can, in rare case get self._max_tolerable_delay_for_upgrade == 0.0 + # we want to avoid this at all cost. 
+ if self._max_tolerable_delay_for_upgrade <= 0.01: + self._max_tolerable_delay_for_upgrade = 3.0 else: # by default (safe/conservative fallback) to 3000ms self._max_tolerable_delay_for_upgrade = 3.0 diff --git a/src/urllib3/connectionpool.py b/src/urllib3/connectionpool.py index b73dfef884..5594f0b81b 100644 --- a/src/urllib3/connectionpool.py +++ b/src/urllib3/connectionpool.py @@ -806,15 +806,78 @@ def get_response( f"This may occur if you expected the remote peer to support multiplexing but did not." ) + clean_exit = True + try: with self.pool.borrow( promise or ResponsePromise, block=promise is not None, not_idle_only=promise is None, ) as conn: - response = conn.getresponse(promise=promise, police_officer=self.pool) + try: + response = conn.getresponse( + promise=promise, police_officer=self.pool + ) + except (BaseSSLError, OSError) as e: + if promise is not None: + url = typing.cast(str, promise.get_parameter("url")) + else: + url = "" + self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) + raise except UnavailableTraffic: return None + except ( + TimeoutError, + OSError, + ProtocolError, + BaseSSLError, + SSLError, + CertificateError, + ProxyError, + RecoverableError, + ) as e: + # Discard the connection for these exceptions. It will be + # replaced during the next _get_conn() call. 
+ clean_exit = False + new_e: Exception = e + if isinstance(e, (BaseSSLError, CertificateError)): + new_e = SSLError(e) + if isinstance( + new_e, + ( + OSError, + NewConnectionError, + TimeoutError, + SSLError, + ), + ) and (conn and conn.proxy and not conn.has_connected_to_proxy): + new_e = _wrap_proxy_error(new_e, conn.proxy.scheme) + elif isinstance(new_e, OSError): + new_e = ProtocolError("Connection aborted.", new_e) + + if promise is not None: + retries = typing.cast(Retry, promise.get_parameter("retries")) + + method = typing.cast(str, promise.get_parameter("method")) + url = typing.cast(str, promise.get_parameter("url")) + + retries = retries.increment( + method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2] + ) + retries.sleep() + else: + raise new_e + + # Keep track of the error for the retry warning. + err = e + + if not clean_exit: + log.warning( + "Retrying (%r) after connection broken by '%r': %s", retries, err, url + ) + + return self.get_response(promise=promise) if promise is not None and response is None: raise ValueError( diff --git a/src/urllib3/contrib/resolver/_async/factories.py b/src/urllib3/contrib/resolver/_async/factories.py index 514abfd047..fb231260c3 100644 --- a/src/urllib3/contrib/resolver/_async/factories.py +++ b/src/urllib3/contrib/resolver/_async/factories.py @@ -220,7 +220,7 @@ def from_url(url: str) -> AsyncResolverDescription: value if value_converted is None else value_converted ) - host_patterns = [] + host_patterns: list[str] = [] if "hosts" in kwargs: host_patterns = ( diff --git a/src/urllib3/contrib/resolver/factories.py b/src/urllib3/contrib/resolver/factories.py index 63e7a9f11b..a08e3702a2 100644 --- a/src/urllib3/contrib/resolver/factories.py +++ b/src/urllib3/contrib/resolver/factories.py @@ -242,7 +242,7 @@ def from_url(url: str) -> ResolverDescription: value if value_converted is None else value_converted ) - host_patterns = [] + host_patterns: list[str] = [] if "hosts" in kwargs: host_patterns = ( diff 
--git a/test/test_response.py b/test/test_response.py index 0e286d76e7..00c672a626 100644 --- a/test/test_response.py +++ b/test/test_response.py @@ -122,7 +122,8 @@ def test_cache_content_preload_false(self) -> None: assert not r._body assert r.data == b"foo" - assert r._body == b"foo" + # mypy bug? + assert r._body == b"foo" # type: ignore[comparison-overlap] assert r.data == b"foo" def test_default(self) -> None: @@ -521,7 +522,7 @@ def test_io_not_autoclose_bufferedreader(self) -> None: def test_io_textiowrapper(self) -> None: fp = BytesIO(b"\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f") resp = HTTPResponse(fp, preload_content=False) - br = TextIOWrapper(resp, encoding="utf8") # type: ignore[arg-type] + br = TextIOWrapper(resp, encoding="utf8") # type: ignore[type-var] assert br.read() == "äöüß" @@ -535,14 +536,14 @@ def test_io_textiowrapper(self) -> None: ) resp = HTTPResponse(fp, preload_content=False) with pytest.raises(ValueError, match="I/O operation on closed file.?"): - list(TextIOWrapper(resp)) # type: ignore[arg-type] + list(TextIOWrapper(resp)) # type: ignore[type-var] def test_io_not_autoclose_textiowrapper(self) -> None: fp = BytesIO( b"\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f\n\xce\xb1\xce\xb2\xce\xb3\xce\xb4" ) resp = HTTPResponse(fp, preload_content=False, auto_close=False) - reader = TextIOWrapper(resp, encoding="utf8") # type: ignore[arg-type] + reader = TextIOWrapper(resp, encoding="utf8") # type: ignore[type-var] assert list(reader) == ["äöüß\n", "αβγδ"] assert not reader.closed