diff --git a/CHANGES.rst b/CHANGES.rst index 099f98c469..76642fd78d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,17 @@ +2.8.905 (2024-08-04) +==================== + +- Fixed wrong upgrade attempt to QUIC when using a SOCKS proxy. Any usage of a proxy disables HTTP/3 over QUIC, as documented, + until proper support is implemented in a future minor version. +- Backported upstream urllib3 #3434: util/ssl: make code resilient to missing hash functions. + In certain environments such as in a FIPS enabled system, certain algorithms such as md5 may be unavailable. Due + to the importing of such a module on a system where it is unavailable, urllib3(-future) will crash and is unusable. + https://github.com/urllib3/urllib3/pull/3434 +- Backported upstream urllib3 GHSA-34jh-p97f-mpxf: Strip Proxy-Authorization header on redirects. + Added the ``Proxy-Authorization`` header to the list of headers to strip from requests when redirecting to a different host. + As before, different headers can be set via ``Retry.remove_headers_on_redirect``. +- Fixed state-machine desync on a rare scenario when uploading a body using HTTP/3 over QUIC. + 2.8.904 (2024-07-18) ==================== diff --git a/README.md b/README.md index 4d66d340ca..cfadf8f726 100644 --- a/README.md +++ b/README.md @@ -141,26 +141,29 @@ We agree that this solution isn't perfect and actually put a lot of pressure on Here are some of the reasons (not exhaustive) we choose to work this way: -- A) Some major companies may not be able to touch the production code but can "change/swap" dependencies. -- B) urllib3-future main purpose is to fuel Niquests, which is itself a drop-in replacement of Requests. +> A) Some major companies may not be able to touch the production code but can "change/swap" dependencies. + +> B) urllib3-future main purpose is to fuel Niquests, which is itself a drop-in replacement of Requests. 
And there's more than 100 packages commonly used that plug into Requests, but the code (of the packages) invoke urllib3 So... We cannot fork those 100+ projects to patch urllib3 usage, it is impossible at the moment, given our means. Requests trapped us, and there should be a way to escape the nonsense "migrate" to another http client that reinvent basic things and interactions. -- C) We don't have to reinvent the wheel. -- D) Some of our partners started noticing that HTTP/1 started to be disabled by some webservices in favor of HTTP/2+ + +> C) We don't have to reinvent the wheel. + +> D) Some of our partners started noticing that HTTP/1 started to be disabled by some webservices in favor of HTTP/2+ So, this fork can unblock them at (almost) zero cost. -**OK... then what do I gain from this?** +- **OK... then what do I gain from this?** -- It is faster than its counterpart, we measured gain up to 2X faster in a multithreaded environment using a http2 endpoint. -- It works well with gevent / does not conflict. We do not use the standard queue class from stdlib as it does not fit http2+ constraints. -- Leveraging recent protocols like http2 and http3 transparently. Code and behaviors does not change one bit. -- You do not depend on the standard library to emit http/1 requests, and that is actually a good news. http.client +1. It is faster than its counterpart, we measured gain up to 2X faster in a multithreaded environment using a http2 endpoint. +2. It works well with gevent / does not conflict. We do not use the standard queue class from stdlib as it does not fit http2+ constraints. +3. Leveraging recent protocols like http2 and http3 transparently. Code and behaviors does not change one bit. +4. You do not depend on the standard library to emit http/1 requests, and that is actually a good news. http.client has numerous known flaws but cannot be fixed as we speak. (e.g. 
urllib3 is based on http.client) -- There a ton of other improvement you may leverage, but for that you will need to migrate to Niquests or update your code +5. There is a ton of other improvements you may leverage, but for that you will need to migrate to Niquests or update your code to enable specific capabilities, like but not limited to: "DNS over QUIC, HTTP" / "Happy Eyeballs" / "Native Asyncio" / "Advanced Multiplexing". -- Non-blocking IO with concurrent streams/requests. And yes, transparently. +6. Non-blocking IO with concurrent streams/requests. And yes, transparently. - **Is this funded?** diff --git a/dev-requirements.txt b/dev-requirements.txt index 3a5278e69a..75717b7a97 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,5 +1,6 @@ coverage>=7.2.7,<=7.4.1 tornado>=6.2,<=6.4 +# 2.5 seems to break test_proxy_rejection by hanging forever python-socks==2.4.4 pytest==7.4.4 pytest-timeout==2.3.1 diff --git a/pyproject.toml b/pyproject.toml index 2c5ec0d325..c84e3df407 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -126,6 +126,7 @@ filterwarnings = [ '''ignore:A plugin raised an exception during''', '''ignore:Exception ignored in:pytest.PytestUnraisableExceptionWarning''', '''ignore:Exception in thread:pytest.PytestUnhandledThreadExceptionWarning''', + '''ignore:The `hash` argument is deprecated in favor of `unsafe_hash`:DeprecationWarning''', ] [tool.isort] diff --git a/src/urllib3/_version.py b/src/urllib3/_version.py index fb0d476ce1..257c05d1a8 100644 --- a/src/urllib3/_version.py +++ b/src/urllib3/_version.py @@ -1,4 +1,4 @@ # This file is protected via CODEOWNERS from __future__ import annotations -__version__ = "2.8.904" +__version__ = "2.8.905" diff --git a/src/urllib3/backend/_async/hface.py b/src/urllib3/backend/_async/hface.py index 1b191007dd..9a9e5e9d71 100644 --- a/src/urllib3/backend/_async/hface.py +++ b/src/urllib3/backend/_async/hface.py @@ -825,7 +825,12 @@ def putheader(self, header: str, *values: str) -> None: # We assume 
it is passed as-is (meaning 'keep-alive' lower-cased) # It may(should) break the connection. if not support_te_chunked: - if encoded_header in {b"transfer-encoding", b"connection"}: + if encoded_header in { + b"transfer-encoding", + b"connection", + b"upgrade", + b"keep-alive", + }: return if self.__expected_body_length is None and encoded_header == b"content-length": @@ -1106,6 +1111,14 @@ async def send( # type: ignore[override] raise EarlyResponse(promise=rp) + while True: + data_out = self._protocol.bytes_to_send() + + if not data_out: + break + + await self.sock.sendall(data_out) + if self.__remaining_body_length: self.__remaining_body_length -= len(data) diff --git a/src/urllib3/backend/hface.py b/src/urllib3/backend/hface.py index 80cbca0089..121dad73e5 100644 --- a/src/urllib3/backend/hface.py +++ b/src/urllib3/backend/hface.py @@ -891,7 +891,12 @@ def putheader(self, header: str, *values: str) -> None: # We assume it is passed as-is (meaning 'keep-alive' lower-cased) # It may(should) break the connection. if not support_te_chunked: - if encoded_header in {b"transfer-encoding", b"connection"}: + if encoded_header in { + b"transfer-encoding", + b"connection", + b"upgrade", + b"keep-alive", + }: return if self.__expected_body_length is None and encoded_header == b"content-length": @@ -1178,6 +1183,14 @@ def send( raise EarlyResponse(promise=rp) + while True: + data_out = self._protocol.bytes_to_send() + + if not data_out: + break + + self.sock.sendall(data_out) + if self.__remaining_body_length: self.__remaining_body_length -= len(data) diff --git a/src/urllib3/contrib/socks.py b/src/urllib3/contrib/socks.py index b0cb84644e..d7a6f19bca 100644 --- a/src/urllib3/contrib/socks.py +++ b/src/urllib3/contrib/socks.py @@ -1,7 +1,7 @@ """ This module contains provisional support for SOCKS proxies from within urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and -SOCKS5. 
To enable its functionality, either install PySocks or install this +SOCKS5. To enable its functionality, either install python-socks or install this module with the ``socks`` extra. The SOCKS implementation supports the full range of urllib3 features. It also @@ -40,14 +40,13 @@ from __future__ import annotations -from python_socks import ( # type: ignore[import-untyped] - ProxyConnectionError, - ProxyError, - ProxyTimeoutError, - ProxyType, -) - try: + from python_socks import ( # type: ignore[import-untyped] + ProxyConnectionError, + ProxyError, + ProxyTimeoutError, + ProxyType, + ) from python_socks.sync import Proxy # type: ignore[import-untyped] from ._socks_override import AsyncioProxy @@ -89,6 +88,7 @@ from .._async.connectionpool import AsyncHTTPConnectionPool, AsyncHTTPSConnectionPool from .._async.poolmanager import AsyncPoolManager from .._typing import _TYPE_SOCKS_OPTIONS +from ..backend import HttpVersion # synchronous part from ..connection import HTTPConnection, HTTPSConnection @@ -257,6 +257,11 @@ def __init__( } connection_pool_kw["_socks_options"] = socks_options + if "disabled_svn" not in connection_pool_kw: + connection_pool_kw["disabled_svn"] = set() + + connection_pool_kw["disabled_svn"].add(HttpVersion.h3) + super().__init__(num_pools, headers, **connection_pool_kw) self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme @@ -415,6 +420,11 @@ def __init__( } connection_pool_kw["_socks_options"] = socks_options + if "disabled_svn" not in connection_pool_kw: + connection_pool_kw["disabled_svn"] = set() + + connection_pool_kw["disabled_svn"].add(HttpVersion.h3) + super().__init__(num_pools, headers, **connection_pool_kw) self.pool_classes_by_scheme = AsyncSOCKSProxyManager.pool_classes_by_scheme diff --git a/src/urllib3/poolmanager.py b/src/urllib3/poolmanager.py index 4459326b45..7a6c51c987 100644 --- a/src/urllib3/poolmanager.py +++ b/src/urllib3/poolmanager.py @@ -935,6 +935,11 @@ def __init__( 
connection_pool_kw["_proxy_headers"] = self.proxy_headers connection_pool_kw["_proxy_config"] = self.proxy_config + if "disabled_svn" not in connection_pool_kw: + connection_pool_kw["disabled_svn"] = set() + + connection_pool_kw["disabled_svn"].add(HttpVersion.h3) + super().__init__(num_pools, headers, **connection_pool_kw) def connection_from_host( diff --git a/src/urllib3/util/retry.py b/src/urllib3/util/retry.py index 9931dd005e..182fe3246f 100644 --- a/src/urllib3/util/retry.py +++ b/src/urllib3/util/retry.py @@ -190,7 +190,9 @@ class Retry: RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) #: Default headers to be used for ``remove_headers_on_redirect`` - DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"]) + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset( + ["Cookie", "Authorization", "Proxy-Authorization"] + ) #: Default maximum backoff time. DEFAULT_BACKOFF_MAX = 120 diff --git a/src/urllib3/util/ssl_.py b/src/urllib3/util/ssl_.py index a38c00e04d..3fbc217f1a 100644 --- a/src/urllib3/util/ssl_.py +++ b/src/urllib3/util/ssl_.py @@ -1,5 +1,6 @@ from __future__ import annotations +import hashlib import hmac import io import os @@ -9,7 +10,6 @@ import typing import warnings from binascii import unhexlify -from hashlib import md5, sha1, sha256 from .._constant import MOZ_INTERMEDIATE_CIPHERS from ..contrib.imcc import load_cert_chain as _ctx_load_cert_chain @@ -27,7 +27,14 @@ IS_PYOPENSSL = False # kept for BC reason # Maps the length of a digest to a possible hash function producing this digest -HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256} +HASHFUNC_MAP = { + length: getattr(hashlib, algorithm, None) + for length, algorithm in ( + (32, "md5"), + (40, "sha1"), + (64, "sha256"), + ) +} def _compute_key_ctx_build( @@ -238,10 +245,17 @@ def assert_fingerprint(cert: bytes | None, fingerprint: str) -> None: fingerprint = fingerprint.replace(":", "").lower() digest_length = len(fingerprint) - hashfunc = HASHFUNC_MAP.get(digest_length) - if 
not hashfunc: + if digest_length not in HASHFUNC_MAP: raise SSLError(f"Fingerprint of invalid length: {fingerprint}") + hashfunc = HASHFUNC_MAP[digest_length] + + if hashfunc is None: + raise SSLError( + f"Hash function implementation unavailable for fingerprint length: {digest_length}. " + "Hint: your OpenSSL build may not include it for compliance issues." + ) + # We need encode() here for py32; works on py2 and p33. fingerprint_bytes = unhexlify(fingerprint.encode()) diff --git a/test/contrib/asynchronous/test_resolver.py b/test/contrib/asynchronous/test_resolver.py index ba8cf54250..5b75eff6a5 100644 --- a/test/contrib/asynchronous/test_resolver.py +++ b/test/contrib/asynchronous/test_resolver.py @@ -2,6 +2,7 @@ import asyncio import os +import platform import socket from test import requires_network @@ -64,9 +65,11 @@ async def test_null_resolver(hostname: str, expect_error: bool) -> None: pytest.param( "doq://dns.nextdns.io/?timeout=1", QUICResolver, - marks=pytest.mark.skipif( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + marks=pytest.mark.xfail( + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", + strict=False, ), ), ("dns://dns.nextdns.io", None), @@ -85,9 +88,11 @@ async def test_null_resolver(hostname: str, expect_error: bool) -> None: pytest.param( "doq://dns.nextdns.io/?implementation=qh3&timeout=1", QUICResolver, - marks=pytest.mark.skipif( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + marks=pytest.mark.xfail( + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", + strict=False, ), ), ], @@ -127,9 +132,11 @@ async def test_url_resolver( "dot://one.one.one.one", pytest.param( "doq://dns.nextdns.io/?timeout=1", - marks=pytest.mark.skipif( - os.environ.get("CI", None) is not None, - reason="Github Action CI 
Unpredictable", + marks=pytest.mark.xfail( + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", + strict=False, ), ), "doh+google://", @@ -165,9 +172,11 @@ async def test_1_1_1_1_ipv4_resolution_across_protocols(dns_url: str) -> None: "dot://one.one.one.one", pytest.param( "doq://dns.nextdns.io/?timeout=1", - marks=pytest.mark.skipif( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + marks=pytest.mark.xfail( + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", + strict=False, ), ), ], @@ -303,9 +312,11 @@ async def test_many_resolver_host_constraint_distribution() -> None: "doh+cloudflare://", pytest.param( "doq://dns.nextdns.io/?timeout=1", - marks=pytest.mark.skipif( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + marks=pytest.mark.xfail( + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", + strict=False, ), ), "dot://one.one.one.one", @@ -392,9 +403,11 @@ async def test_doh_rfc8484(dns_url: str) -> None: "doh+cloudflare://", pytest.param( "doq://dns.nextdns.io/?timeout=1", - marks=pytest.mark.skipif( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + marks=pytest.mark.xfail( + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", + strict=False, ), ), "dot://one.one.one.one", @@ -479,9 +492,11 @@ async def test_many_resolver_task_safe() -> None: "doh+cloudflare://", pytest.param( "doq://dns.nextdns.io/?timeout=1", - marks=pytest.mark.skipif( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + marks=pytest.mark.xfail( + os.environ.get("CI", None) is not None + and platform.system() != 
"Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", + strict=False, ), ), "dot://one.one.one.one", @@ -518,9 +533,11 @@ async def test_resolver_recycle(dns_url: str) -> None: "doh+cloudflare://", pytest.param( "doq://dns.nextdns.io/?timeout=1", - marks=pytest.mark.skipif( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + marks=pytest.mark.xfail( + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", + strict=False, ), ), "dot://one.one.one.one", @@ -545,9 +562,11 @@ async def test_resolve_cannot_recycle_when_available(dns_url: str) -> None: "doh+cloudflare://", pytest.param( "doq://dns.nextdns.io/?timeout=1", - marks=pytest.mark.skipif( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + marks=pytest.mark.xfail( + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", + strict=False, ), ), "dot://one.one.one.one", @@ -586,9 +605,11 @@ async def test_ipv6_always_preferred(dns_url: str) -> None: "doh+cloudflare://", pytest.param( "doq://dns.nextdns.io/?timeout=1", - marks=pytest.mark.skipif( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + marks=pytest.mark.xfail( + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", + strict=False, ), ), "dot://one.one.one.one", diff --git a/test/contrib/test_resolver.py b/test/contrib/test_resolver.py index f79f22c408..95267e9a1c 100644 --- a/test/contrib/test_resolver.py +++ b/test/contrib/test_resolver.py @@ -1,6 +1,7 @@ from __future__ import annotations import os +import platform import socket from concurrent.futures import ThreadPoolExecutor from socket import AddressFamily, SocketKind @@ -65,8 +66,9 @@ def test_null_resolver(hostname: str, expect_error: bool) 
-> None: "doq://dns.nextdns.io/?timeout=1", QUICResolver, marks=pytest.mark.xfail( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", strict=False, ), ), @@ -87,8 +89,9 @@ def test_null_resolver(hostname: str, expect_error: bool) -> None: "doq://dns.nextdns.io/?implementation=qh3&timeout=1", QUICResolver, marks=pytest.mark.xfail( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", strict=False, ), ), @@ -129,8 +132,9 @@ def test_url_resolver( pytest.param( "doq://dns.nextdns.io/?timeout=1", marks=pytest.mark.xfail( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", strict=False, ), ), @@ -166,8 +170,9 @@ def test_1_1_1_1_ipv4_resolution_across_protocols(dns_url: str) -> None: pytest.param( "doq://dns.nextdns.io/?timeout=1", marks=pytest.mark.xfail( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", strict=False, ), ), @@ -300,8 +305,9 @@ def test_many_resolver_host_constraint_distribution() -> None: pytest.param( "doq://dns.nextdns.io/?timeout=1", marks=pytest.mark.xfail( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", strict=False, ), ), @@ -388,8 +394,9 @@ def test_doh_rfc8484(dns_url: str) -> None: pytest.param( 
"doq://dns.nextdns.io/?timeout=1", marks=pytest.mark.xfail( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", strict=False, ), ), @@ -494,8 +501,9 @@ def _run( pytest.param( "doq://dns.nextdns.io/?timeout=1", marks=pytest.mark.xfail( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", strict=False, ), ), @@ -533,8 +541,9 @@ def test_resolver_recycle(dns_url: str) -> None: pytest.param( "doq://dns.nextdns.io/?timeout=1", marks=pytest.mark.xfail( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", strict=False, ), ), @@ -560,8 +569,9 @@ def test_resolve_cannot_recycle_when_available(dns_url: str) -> None: pytest.param( "doq://dns.nextdns.io/?timeout=1", marks=pytest.mark.xfail( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", strict=False, ), ), @@ -601,8 +611,9 @@ def test_ipv6_always_preferred(dns_url: str) -> None: pytest.param( "doq://dns.nextdns.io/?timeout=1", marks=pytest.mark.xfail( - os.environ.get("CI", None) is not None, - reason="Github Action CI Unpredictable", + os.environ.get("CI", None) is not None + and platform.system() != "Darwin", + reason="Github Action CI: Network Unreachable UDP/QUIC", strict=False, ), ), diff --git a/test/test_retry.py b/test/test_retry.py index f71e7acc9e..ac3ce4ca73 100644 --- a/test/test_retry.py +++ b/test/test_retry.py @@ -334,7 +334,11 @@ def 
test_retry_method_not_allowed(self) -> None: def test_retry_default_remove_headers_on_redirect(self) -> None: retry = Retry() - assert retry.remove_headers_on_redirect == {"authorization", "cookie"} + assert retry.remove_headers_on_redirect == { + "authorization", + "proxy-authorization", + "cookie", + } def test_retry_set_remove_headers_on_redirect(self) -> None: retry = Retry(remove_headers_on_redirect=["X-API-Secret"]) diff --git a/test/with_dummyserver/asynchronous/test_poolmanager.py b/test/with_dummyserver/asynchronous/test_poolmanager.py index b87cd892cd..90cac1f52d 100644 --- a/test/with_dummyserver/asynchronous/test_poolmanager.py +++ b/test/with_dummyserver/asynchronous/test_poolmanager.py @@ -161,7 +161,11 @@ async def test_redirect_cross_host_remove_headers(self) -> None: "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, - headers={"Authorization": "foo", "Cookie": "foo=bar"}, + headers={ + "Authorization": "foo", + "Proxy-Authorization": "bar", + "Cookie": "foo=bar", + }, ) assert r.status == 200 @@ -169,13 +173,18 @@ async def test_redirect_cross_host_remove_headers(self) -> None: data = await r.json() assert "Authorization" not in data + assert "Proxy-Authorization" not in data assert "Cookie" not in data r = await http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, - headers={"authorization": "foo", "cookie": "foo=bar"}, + headers={ + "authorization": "foo", + "proxy-authorization": "baz", + "cookie": "foo=bar", + }, ) assert r.status == 200 @@ -184,6 +193,8 @@ async def test_redirect_cross_host_remove_headers(self) -> None: assert "authorization" not in data assert "Authorization" not in data + assert "proxy-authorization" not in data + assert "Proxy-Authorization" not in data assert "cookie" not in data assert "Cookie" not in data @@ -193,7 +204,11 @@ async def test_redirect_cross_host_no_remove_headers(self) -> None: "GET", f"{self.base_url}/redirect", 
fields={"target": f"{self.base_url_alt}/headers"}, - headers={"Authorization": "foo", "Cookie": "foo=bar"}, + headers={ + "Authorization": "foo", + "Proxy-Authorization": "bar", + "Cookie": "foo=bar", + }, retries=Retry(remove_headers_on_redirect=[]), ) @@ -202,6 +217,7 @@ async def test_redirect_cross_host_no_remove_headers(self) -> None: data = await r.json() assert data["Authorization"] == "foo" + assert data["Proxy-Authorization"] == "bar" assert data["Cookie"] == "foo=bar" async def test_redirect_cross_host_set_removed_headers(self) -> None: @@ -213,6 +229,7 @@ async def test_redirect_cross_host_set_removed_headers(self) -> None: headers={ "X-API-Secret": "foo", "Authorization": "bar", + "Proxy-Authorization": "baz", "Cookie": "foo=bar", }, retries=Retry(remove_headers_on_redirect=["X-API-Secret"]), @@ -224,11 +241,13 @@ async def test_redirect_cross_host_set_removed_headers(self) -> None: assert "X-API-Secret" not in data assert data["Authorization"] == "bar" + assert data["Proxy-Authorization"] == "baz" assert data["Cookie"] == "foo=bar" headers = { "x-api-secret": "foo", "authorization": "bar", + "proxy-authorization": "baz", "cookie": "foo=bar", } r = await http.request( @@ -246,12 +265,14 @@ async def test_redirect_cross_host_set_removed_headers(self) -> None: assert "x-api-secret" not in data assert "X-API-Secret" not in data assert data["Authorization"] == "bar" + assert data["Proxy-Authorization"] == "baz" assert data["Cookie"] == "foo=bar" # Ensure the header argument itself is not modified in-place. 
assert headers == { "x-api-secret": "foo", "authorization": "bar", + "proxy-authorization": "baz", "cookie": "foo=bar", } diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py index 65fe36d755..70e59c1665 100644 --- a/test/with_dummyserver/test_https.py +++ b/test/with_dummyserver/test_https.py @@ -973,6 +973,26 @@ def test_ssl_context_ssl_version_uses_ssl_min_max_versions(self) -> None: assert ctx.minimum_version == self.tls_version() assert ctx.maximum_version == self.tls_version() + def test_assert_missing_hashfunc(self, monkeypatch: pytest.MonkeyPatch) -> None: + fingerprint = "55:39:BF:70:05:12:43:FA:1F:D1:BF:4E:E8:1B:07:1D" + with HTTPSConnectionPool( + "localhost", + self.port, + cert_reqs="CERT_REQUIRED", + ca_certs=DEFAULT_CA, + assert_fingerprint=(fingerprint), + ssl_minimum_version=self.tls_version(), + ) as https_pool: + digest_length = len(fingerprint.replace(":", "").lower()) + monkeypatch.setitem(urllib3.util.ssl_.HASHFUNC_MAP, digest_length, None) + with pytest.raises(MaxRetryError) as cm: + https_pool.request("GET", "/", retries=0) + assert type(cm.value.reason) is SSLError + assert ( + f"Hash function implementation unavailable for fingerprint length: {digest_length}" + in str(cm.value.reason) + ) + @pytest.mark.usefixtures("requires_tlsv1") class TestHTTPS_TLSv1(TestHTTPS): diff --git a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py index c8619f2ec9..64023c83ff 100644 --- a/test/with_dummyserver/test_poolmanager.py +++ b/test/with_dummyserver/test_poolmanager.py @@ -162,7 +162,11 @@ def test_redirect_cross_host_remove_headers(self) -> None: "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, - headers={"Authorization": "foo", "Cookie": "foo=bar"}, + headers={ + "Authorization": "foo", + "Proxy-Authorization": "bar", + "Cookie": "foo=bar", + }, ) assert r.status == 200 @@ -170,13 +174,18 @@ def test_redirect_cross_host_remove_headers(self) -> 
None: data = r.json() assert "Authorization" not in data + assert "Proxy-Authorization" not in data assert "Cookie" not in data r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, - headers={"authorization": "foo", "cookie": "foo=bar"}, + headers={ + "authorization": "foo", + "proxy-authorization": "baz", + "cookie": "foo=bar", + }, ) assert r.status == 200 @@ -185,6 +194,8 @@ def test_redirect_cross_host_remove_headers(self) -> None: assert "authorization" not in data assert "Authorization" not in data + assert "proxy-authorization" not in data + assert "Proxy-Authorization" not in data assert "cookie" not in data assert "Cookie" not in data @@ -194,7 +205,11 @@ def test_redirect_cross_host_no_remove_headers(self) -> None: "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, - headers={"Authorization": "foo", "Cookie": "foo=bar"}, + headers={ + "Authorization": "foo", + "Proxy-Authorization": "bar", + "Cookie": "foo=bar", + }, retries=Retry(remove_headers_on_redirect=[]), ) @@ -203,6 +218,7 @@ def test_redirect_cross_host_no_remove_headers(self) -> None: data = r.json() assert data["Authorization"] == "foo" + assert data["Proxy-Authorization"] == "bar" assert data["Cookie"] == "foo=bar" def test_redirect_cross_host_set_removed_headers(self) -> None: @@ -214,6 +230,7 @@ def test_redirect_cross_host_set_removed_headers(self) -> None: headers={ "X-API-Secret": "foo", "Authorization": "bar", + "Proxy-Authorization": "baz", "Cookie": "foo=bar", }, retries=Retry(remove_headers_on_redirect=["X-API-Secret"]), @@ -225,11 +242,13 @@ def test_redirect_cross_host_set_removed_headers(self) -> None: assert "X-API-Secret" not in data assert data["Authorization"] == "bar" + assert data["Proxy-Authorization"] == "baz" assert data["Cookie"] == "foo=bar" headers = { "x-api-secret": "foo", "authorization": "bar", + "proxy-authorization": "baz", "cookie": "foo=bar", } r = http.request( @@ -247,12 
+266,14 @@ def test_redirect_cross_host_set_removed_headers(self) -> None: assert "x-api-secret" not in data assert "X-API-Secret" not in data assert data["Authorization"] == "bar" + assert data["Proxy-Authorization"] == "baz" assert data["Cookie"] == "foo=bar" # Ensure the header argument itself is not modified in-place. assert headers == { "x-api-secret": "foo", "authorization": "bar", + "proxy-authorization": "baz", "cookie": "foo=bar", } diff --git a/test/with_traefik/asynchronous/test_send_data.py b/test/with_traefik/asynchronous/test_send_data.py index 5d8e83a1ac..5d82949420 100644 --- a/test/with_traefik/asynchronous/test_send_data.py +++ b/test/with_traefik/asynchronous/test_send_data.py @@ -61,25 +61,39 @@ async def test_h2n3_data(self, method: str, body: bytes | str | BytesIO) -> None for i in range(3): if isinstance(body, BytesIO): body.seek(0, 0) - # traefik bug with http3, should not happen! - if i > 0: - headers = {"content-length": "-1"} - else: - headers = {} - else: - headers = {} - resp = await p.request( - method, f"/{method.lower()}", body=body, headers=headers + # in some cases, urllib3 cannot infer in advance the body full length + # it will trigger a stream upload + # http1.1 => (Transfer-Encoding: chunked) legacy algorithm + # http2+ => send data frames, server aware of the end with the FIN bit. + expect_no_content_length = isinstance(body, BytesIO) or hasattr( + body, "__next__" ) + # traefik bug with http3, should not happen! 
+ # see https://github.com/traefik/traefik/issues/10185 + if i > 0 and expect_no_content_length: + pytest.skip( + "traefik bug with http3 forbid stream upload without content-length" + ) + + resp = await p.request(method, f"/{method.lower()}", body=body) + assert resp.status == 200 assert resp.version == (20 if i == 0 else 30) + echo_data_from_httpbin = (await resp.json())["data"] + need_b64_decode = echo_data_from_httpbin.startswith( + "data:application/octet-stream;base64," + ) + + if need_b64_decode: + echo_data_from_httpbin = b64decode(echo_data_from_httpbin[37:]) + payload_seen_by_server: bytes = ( - b64decode((await resp.json())["data"][37:]) - if not isinstance(body, str) - else (await resp.json())["data"].encode() + echo_data_from_httpbin + if isinstance(echo_data_from_httpbin, bytes) + else echo_data_from_httpbin.encode() ) if isinstance(body, str): diff --git a/test/with_traefik/test_send_data.py b/test/with_traefik/test_send_data.py index cb0fe20aab..4526ad244e 100644 --- a/test/with_traefik/test_send_data.py +++ b/test/with_traefik/test_send_data.py @@ -55,28 +55,44 @@ def test_h2n3_data(self, method: str, body: bytes | str | BytesIO) -> None: ca_certs=self.ca_authority, resolver=self.test_resolver, ) as p: + # first will be done in HTTP/2 + # two others using HTTP/3 for i in range(3): if isinstance(body, BytesIO): body.seek(0, 0) - # traefik bug with http3, should not happen! - if i > 0: - headers = {"content-length": "-1"} - else: - headers = {} - else: - headers = {} - resp = p.request( - method, f"/{method.lower()}", body=body, headers=headers + # in some cases, urllib3 cannot infer in advance the body full length + # it will trigger a stream upload + # http1.1 => (Transfer-Encoding: chunked) legacy algorithm + # http2+ => send data frames, server aware of the end with the FIN bit. + expect_no_content_length = isinstance(body, BytesIO) or hasattr( + body, "__next__" ) + # traefik bug with http3, should not happen! 
+ # see https://github.com/traefik/traefik/issues/10185 + if i > 0 and expect_no_content_length: + pytest.skip( + "traefik bug with http3 forbid stream upload without content-length" + ) + + resp = p.request(method, f"/{method.lower()}", body=body) + assert resp.status == 200 assert resp.version == (20 if i == 0 else 30) + echo_data_from_httpbin = resp.json()["data"] + need_b64_decode = echo_data_from_httpbin.startswith( + "data:application/octet-stream;base64," + ) + + if need_b64_decode: + echo_data_from_httpbin = b64decode(echo_data_from_httpbin[37:]) + payload_seen_by_server: bytes = ( - b64decode(resp.json()["data"][37:]) - if not isinstance(body, str) - else resp.json()["data"].encode() + echo_data_from_httpbin + if isinstance(echo_data_from_httpbin, bytes) + else echo_data_from_httpbin.encode() ) if isinstance(body, str):