diff --git a/.eslintrc.yml b/.eslintrc.yml new file mode 100644 index 0000000000..b1be2badbd --- /dev/null +++ b/.eslintrc.yml @@ -0,0 +1,7 @@ +env: + es2020 : true + worker: true +rules: {} +extends: +- eslint:recommended + diff --git a/.github/PULL_REQUEST_TEMPLATE/release.md b/.github/PULL_REQUEST_TEMPLATE/release.md index 9d295b14dc..c8dfe983c3 100644 --- a/.github/PULL_REQUEST_TEMPLATE/release.md +++ b/.github/PULL_REQUEST_TEMPLATE/release.md @@ -1,4 +1,4 @@ -* [ ] See if all tests, including integration, pass +* [ ] See if all tests, including downstream, pass * [ ] Get the release pull request approved by a [CODEOWNER](https://github.com/urllib3/urllib3/blob/main/.github/CODEOWNERS) * [ ] Squash merge the release pull request with message "`Release `" * [ ] Tag with X.Y.Z, push tag on urllib3/urllib3 (not on your fork, update `` accordingly) diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index a47ab4ffc7..105837dbfa 100644 --- a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -13,7 +13,7 @@ jobs: steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 with: # `towncrier check` runs `git diff --name-only origin/main...`, which # needs a non-shallow clone. diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2fa25a91ab..b1f3723b6d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,6 @@ name: CI -on: [push, pull_request] +on: [push, pull_request, workflow_dispatch] permissions: "read-all" @@ -15,10 +15,10 @@ jobs: steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python" - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: "3.x" cache: "pip" @@ -34,32 +34,49 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] os: - - macos-11 + - macos-12 - windows-latest - - ubuntu-20.04 # OpenSSL 1.1.1 - - ubuntu-22.04 # OpenSSL 3.0 + - ubuntu-22.04 nox-session: [''] include: - experimental: false - - python-version: "pypy-3.7" + # integration + # 3.8 and 3.9 have a known issue with large SSL requests that we work around: + # https://github.com/urllib3/urllib3/pull/3181#issuecomment-1794830698 + - python-version: "3.8" + os: ubuntu-latest + experimental: false + nox-session: test_integration + - python-version: "3.9" os: ubuntu-latest experimental: false - nox-session: test-pypy + nox-session: test_integration + - python-version: "3.12" + os: ubuntu-latest + experimental: false + nox-session: test_integration + # OpenSSL 1.1.1 + - python-version: "3.8" + os: ubuntu-20.04 + experimental: false + nox-session: test-3.8 + # pypy - python-version: "pypy-3.8" os: ubuntu-latest experimental: false - nox-session: test-pypy - - python-version: "pypy-3.9" + nox-session: test-pypy3.8 + - python-version: "pypy-3.9-v7.3.13" os: ubuntu-latest experimental: false - nox-session: test-pypy + nox-session: test-pypy3.9 - python-version: "pypy-3.10" os: ubuntu-latest experimental: false - nox-session: test-pypy + nox-session: test-pypy3.10 - python-version: "3.x" + # brotli os: ubuntu-latest 
experimental: false nox-session: test_brotlipy @@ -68,29 +85,28 @@ jobs: os: ubuntu-20.04 # CPython 3.9.2 is not available for ubuntu-22.04. experimental: false nox-session: test-3.9 - - python-version: "3.12" + - python-version: "3.11" + os: ubuntu-latest + nox-session: emscripten + experimental: true + - python-version: "3.13" experimental: true exclude: # Ubuntu 22.04 comes with OpenSSL 3.0, so only CPython 3.9+ is compatible with it # https://github.com/python/cpython/issues/83001 - - python-version: "3.7" - os: ubuntu-22.04 - python-version: "3.8" os: ubuntu-22.04 - # Testing with non-final CPython on macOS is too slow for CI. - - python-version: "3.12" - os: macos-11 runs-on: ${{ matrix.os }} - name: ${{ fromJson('{"macos-11":"macOS","windows-latest":"Windows","ubuntu-latest":"Ubuntu","ubuntu-20.04":"Ubuntu 20.04 (OpenSSL 1.1.1)","ubuntu-22.04":"Ubuntu 22.04 (OpenSSL 3.0)"}')[matrix.os] }} ${{ matrix.python-version }} ${{ matrix.nox-session}} + name: ${{ fromJson('{"macos-12":"macOS","windows-latest":"Windows","ubuntu-latest":"Ubuntu","ubuntu-20.04":"Ubuntu 20.04 (OpenSSL 1.1.1)","ubuntu-22.04":"Ubuntu 22.04 (OpenSSL 3.0)"}')[matrix.os] }} ${{ matrix.python-version }} ${{ matrix.nox-session}} continue-on-error: ${{ matrix.experimental }} timeout-minutes: 30 steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python ${{ matrix.python-version }}" - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -98,17 +114,23 @@ jobs: - name: "Install dependencies" run: python -m pip install --upgrade pip setuptools nox + - name: "Install Chrome" + uses: browser-actions/setup-chrome@db1b524c26f20a8d1a10f7fc385c92387e2d0477 # v1.7.1 + if: ${{ matrix.nox-session == 'emscripten' }} + - name: "Install Firefox" + uses: browser-actions/setup-firefox@233224b712fc07910ded8c15fb95a555c86da76f # v1.5.0 + if: ${{ matrix.nox-session == 'emscripten' }} - name: "Run tests" # If no explicit NOX_SESSION is set, run the default tests for the chosen Python version - run: nox -s ${NOX_SESSION:-test-$PYTHON_VERSION} --error-on-missing-interpreters + run: nox -s ${NOX_SESSION:-test-$PYTHON_VERSION} env: PYTHON_VERSION: ${{ matrix.python-version }} NOX_SESSION: ${{ matrix.nox-session }} - - name: "Upload artifact" - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + - name: "Upload coverage data" + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: - name: coverage-data + name: coverage-data-${{ matrix.python-version }}-${{ matrix.os }}-${{ matrix.experimental }}-${{ matrix.nox-session }} path: ".coverage.*" if-no-files-found: error @@ -119,20 +141,21 @@ jobs: needs: test steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python" - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: "3.x" - name: "Install coverage" - run: "python -m pip install --upgrade coverage" + run: "python -m pip install -r dev-requirements.txt" - - name: "Download artifact" - uses: 
actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 + - name: "Download coverage data" + uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 with: - name: coverage-data + pattern: coverage-data-* + merge-multiple: true - name: "Combine & check coverage" run: | @@ -142,7 +165,7 @@ jobs: - if: ${{ failure() }} name: "Upload report if check failed" - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: name: coverage-report path: htmlcov diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index a25c01daeb..ad45723b99 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -7,11 +7,13 @@ on: branches: ["main"] schedule: - cron: "0 0 * * 5" + workflow_dispatch: permissions: "read-all" jobs: analyze: + if: github.repository_owner == 'urllib3' name: "Analyze" runs-on: "ubuntu-latest" permissions: @@ -20,16 +22,16 @@ jobs: security-events: write steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Run CodeQL init" - uses: github/codeql-action/init@cdcdbb579706841c47f7063dda365e292e5cad7a # v2.13.4 + uses: github/codeql-action/init@9fdb3e49720b44c48891d036bb502feb25684276 # v3.25.6 with: config-file: "./.github/codeql.yml" languages: "python" - name: "Run CodeQL autobuild" - uses: github/codeql-action/autobuild@cdcdbb579706841c47f7063dda365e292e5cad7a # v2.13.4 + uses: github/codeql-action/autobuild@9fdb3e49720b44c48891d036bb502feb25684276 # v3.25.6 - name: "Run CodeQL analyze" - uses: github/codeql-action/analyze@cdcdbb579706841c47f7063dda365e292e5cad7a # v2.13.4 + uses: github/codeql-action/analyze@9fdb3e49720b44c48891d036bb502feb25684276 # v3.25.6 diff --git a/.github/workflows/integration.yml b/.github/workflows/downstream.yml similarity index 68% rename from .github/workflows/integration.yml rename to .github/workflows/downstream.yml index 374d8efb1c..8f4206bb06 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/downstream.yml @@ -1,11 +1,11 @@ name: Downstream -on: [push, pull_request] +on: [push, pull_request, workflow_dispatch] permissions: "read-all" jobs: - integration: + downstream: strategy: fail-fast: false matrix: @@ -15,10 +15,10 @@ jobs: steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python" - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: "3.x" diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 8cf8c69207..d28ea81816 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,6 +1,6 @@ name: lint -on: [push, pull_request] +on: [push, pull_request, workflow_dispatch] permissions: "read-all" @@ -11,10 +11,10 @@ jobs: steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python" - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: 
python-version: "3.x" cache: pip diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 4a516c281c..d020de0515 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -19,10 +19,10 @@ jobs: steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - name: "Setup Python" - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: "3.x" @@ -37,7 +37,7 @@ jobs: - name: "Generate hashes" id: hash run: | - cd dist && echo "::set-output name=hashes::$(sha256sum * | base64 -w0)" + cd dist && echo "hashes=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT - name: "Upload dists" uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 @@ -53,7 +53,7 @@ jobs: actions: read contents: write id-token: write # Needed to access the workflow's OIDC identity. - uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.9.0 + uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.10.0 with: base64-subjects: "${{ needs.build.outputs.hashes }}" upload-assets: true diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml index fd29d3e25e..2714f1056d 100644 --- a/.github/workflows/scorecards.yml +++ b/.github/workflows/scorecards.yml @@ -10,6 +10,7 @@ permissions: read-all jobs: analysis: + if: github.repository_owner == 'urllib3' name: "Scorecard" runs-on: "ubuntu-latest" permissions: @@ -20,12 +21,12 @@ jobs: steps: - name: "Checkout repository" - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 with: persist-credentials: false - name: "Run Scorecard" - uses: ossf/scorecard-action@08b4669551908b1024bb425080c797723083c031 # v2.2.0 + uses: ossf/scorecard-action@483ef80eb98fb506c348f7d62e28055e49fe2398 # v2.3.0 with: results_file: results.sarif results_format: sarif diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dc05ec6820..7cb61dedcd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,13 +3,13 @@ repos: rev: v3.3.1 hooks: - id: pyupgrade - args: ["--py37-plus"] + args: ["--py38-plus"] - repo: https://github.com/psf/black rev: 23.1.0 hooks: - id: black - args: ["--target-version", "py37"] + args: ["--target-version", "py38"] - repo: https://github.com/PyCQA/isort rev: 5.12.0 @@ -17,7 +17,18 @@ repos: - id: isort - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + rev: 6.1.0 hooks: - id: flake8 additional_dependencies: [flake8-2020] + + - repo: https://github.com/pre-commit/mirrors-prettier + rev: "v3.1.0" + hooks: + - id: prettier + types_or: [javascript] + - repo: https://github.com/pre-commit/mirrors-eslint + rev: v8.53.0 + hooks: + - id: eslint + args: ["--fix"] \ No newline at end of file diff --git a/.readthedocs.yml b/.readthedocs.yml index 78bd2064f6..7c59df5b3a 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -3,7 +3,7 @@ version: 2 build: os: ubuntu-22.04 tools: - python: "3" + python: "3.11" python: install: diff --git a/CHANGES.rst b/CHANGES.rst index 27038fef47..0f4e5cc581 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,8 +1,61 @@ +2.2.2 (2024-06-17) +================== + +- Added the ``Proxy-Authorization`` header to the list of headers to 
strip from requests when redirecting to a different host. As before, different headers can be set via ``Retry.remove_headers_on_redirect``. +- Allowed passing negative integers as ``amt`` to read methods of ``http.client.HTTPResponse`` as an alternative to ``None``. (`#3122 `__) +- Fixed return types representing copying actions to use ``typing.Self``. (`#3363 `__) + +2.2.1 (2024-02-16) +================== + +- Fixed issue where ``InsecureRequestWarning`` was emitted for HTTPS connections when using Emscripten. (`#3331 `__) +- Fixed ``HTTPConnectionPool.urlopen`` to stop automatically casting non-proxy headers to ``HTTPHeaderDict``. This change was premature as it did not apply to proxy headers and ``HTTPHeaderDict`` does not handle byte header values correctly yet. (`#3343 `__) +- Changed ``InvalidChunkLength`` to ``ProtocolError`` when response terminates before the chunk length is sent. (`#2860 `__) +- Changed ``ProtocolError`` to be more verbose on incomplete reads with excess content. (`#3261 `__) + + +2.2.0 (2024-01-30) +================== + +- Added support for `Emscripten and Pyodide `__, including streaming support in cross-origin isolated browser environments where threading is enabled. (`#2951 `__) +- Added support for ``HTTPResponse.read1()`` method. (`#3186 `__) +- Added rudimentary support for HTTP/2. (`#3284 `__) +- Fixed issue where requests against urls with trailing dots were failing due to SSL errors + when using proxy. (`#2244 `__) +- Fixed ``HTTPConnection.proxy_is_verified`` and ``HTTPSConnection.proxy_is_verified`` + to be always set to a boolean after connecting to a proxy. It could be + ``None`` in some cases previously. (`#3130 `__) +- Fixed an issue where ``headers`` passed in a request with ``json=`` would be mutated (`#3203 `__) +- Fixed ``HTTPSConnection.is_verified`` to be set to ``False`` when connecting + from a HTTPS proxy to an HTTP target. It was set to ``True`` previously. (`#3267 `__) +- Fixed handling of new error message from OpenSSL 3.2.0 when configuring an HTTP proxy as HTTPS (`#3268 `__) +- Fixed TLS 1.3 post-handshake auth when the server certificate validation is disabled (`#3325 `__) +- Note for downstream distributors: To run integration tests, you now need to run the tests a second + time with the ``--integration`` pytest flag. (`#3181 `__) + + +2.1.0 (2023-11-13) +================== + +- Removed support for the deprecated urllib3[secure] extra. (`#2680 `__) +- Removed support for the deprecated SecureTransport TLS implementation. (`#2681 `__) +- Removed support for the end-of-life Python 3.7. (`#3143 `__) +- Allowed loading CA certificates from memory for proxies. (`#3065 `__) +- Fixed decoding Gzip-encoded responses which specified ``x-gzip`` content-encoding. (`#3174 `__) + + +2.0.7 (2023-10-17) +================== + +* Made body stripped from HTTP requests changing the request method to GET after HTTP 303 "See Other" redirect responses. + + 2.0.6 (2023-10-02) ================== * Added the ``Cookie`` header to the list of headers to strip from requests when redirecting to a different host. As before, different headers can be set via ``Retry.remove_headers_on_redirect``. + 2.0.5 (2023-09-20) ================== @@ -167,6 +220,11 @@ Fixed * Fixed a socket leak if ``HTTPConnection.connect()`` fails (`#2571 `__). 
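The 2.2.2 entry above notes that ``Proxy-Authorization`` now joins the headers stripped when a redirect crosses to a different host, and that the set is configurable through ``Retry.remove_headers_on_redirect``. A minimal sketch of that knob (the header name and URL are placeholders, and passing a collection replaces the default set rather than extending it)::

    import urllib3
    from urllib3.util import Retry

    # Only the headers listed here are dropped on a cross-host redirect;
    # the library default covers Authorization, Cookie and Proxy-Authorization.
    retries = Retry(remove_headers_on_redirect=["X-Api-Key"])

    http = urllib3.PoolManager()
    resp = http.request(
        "GET",
        "https://example.com/start",           # placeholder URL
        headers={"X-Api-Key": "placeholder"},  # placeholder credential header
        retries=retries,
    )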
* Fixed ``urllib3.contrib.pyopenssl.WrappedSocket`` and ``urllib3.contrib.securetransport.WrappedSocket`` close methods (`#2970 `__) +1.26.18 (2023-10-17) +==================== + +* Made body stripped from HTTP requests changing the request method to GET after HTTP 303 "See Other" redirect responses. + 1.26.17 (2023-10-02) ==================== diff --git a/README.md b/README.md index 27df7a1aa5..1d94fceb71 100644 --- a/README.md +++ b/README.md @@ -87,6 +87,7 @@ Tidelift will coordinate the fix and disclosure with maintainers. - [@sethmlarson](https://github.com/sethmlarson) (Seth M. Larson) - [@pquentin](https://github.com/pquentin) (Quentin Pradet) +- [@illia-v](https://github.com/illia-v) (Illia Volochii) - [@theacodes](https://github.com/theacodes) (Thea Flowers) - [@haikuginger](https://github.com/haikuginger) (Jess Shapiro) - [@lukasa](https://github.com/lukasa) (Cory Benfield) diff --git a/dev-requirements.txt b/dev-requirements.txt index 70f5097338..f712fe2a9e 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,13 +1,26 @@ -coverage==7.0.4 -tornado==6.2 +h2==4.1.0 +coverage==7.4.1 PySocks==1.7.1 -pytest==7.4.0 +pytest==8.0.2 pytest-timeout==2.1.0 -trustme==0.9.0 -# We have to install at most cryptography 39.0.2 for PyPy<7.3.10 -# versions of Python 3.7, 3.8, and 3.9. -cryptography==39.0.2;implementation_name=="pypy" and implementation_version<"7.3.10" -cryptography==41.0.4;implementation_name!="pypy" or implementation_version>="7.3.10" +pyOpenSSL==24.0.0 +idna==3.7 +# As of v1.1.0, child CA certificates generated by trustme fail +# verification by CPython 3.13. +# https://github.com/python-trio/trustme/pull/642 +trustme @ git+https://github.com/python-trio/trustme@b3a767f336e20600f30c9ff78385a58352ff6ee3 +cryptography==42.0.4 backports.zoneinfo==0.2.1;python_version<"3.9" towncrier==23.6.0 -pytest-memray==1.4.0;python_version>="3.8" and python_version<"3.12" and sys_platform!="win32" and implementation_name=="cpython" +pytest-memray==1.5.0;python_version<"3.13" and sys_platform!="win32" and implementation_name=="cpython" +trio==0.25.0 +Quart==0.19.4 +quart-trio==0.11.1 +# https://github.com/pgjones/hypercorn/issues/62 +# https://github.com/pgjones/hypercorn/issues/168 +# https://github.com/pgjones/hypercorn/issues/169 +hypercorn @ git+https://github.com/urllib3/hypercorn@urllib3-changes +httpx==0.25.2 +pytest-socket==0.7.0 +cffi==1.17.0rc1 + diff --git a/docs/conf.py b/docs/conf.py index ba67335880..0ab1b3a245 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -84,8 +84,8 @@ html_theme_options = { "announcement": """ - Support urllib3 on GitHub Sponsors + href=\"https://opencollective.com/urllib3/updates/urllib3-is-fundraising-for-http-2-support\"> + urllib3 is fundraising for HTTP/2 support! """, "sidebar_hide_name": True, @@ -110,7 +110,6 @@ ("py:class", "_HttplibHTTPResponse"), ("py:class", "_HttplibHTTPMessage"), ("py:class", "TracebackType"), - ("py:class", "Literal"), ("py:class", "email.errors.MessageDefect"), ("py:class", "MessageDefect"), ("py:class", "http.client.HTTPMessage"), diff --git a/docs/contributing.rst b/docs/contributing.rst index a75e5f6901..4bc27a7e7f 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -13,8 +13,8 @@ If you wish to add a new feature or fix a bug: to start making your changes. #. Write a test which shows that the bug was fixed or that the feature works as expected. -#. Format your changes with black using command `$ nox -rs format` and lint your - changes using command `nox -rs lint`. +#. 
Format your changes with black using command ``nox -rs format`` and lint your + changes using command ``nox -rs lint``. #. Add a `changelog entry `__. #. Send a pull request and bug the maintainer until it gets merged and published. @@ -36,18 +36,21 @@ We use some external dependencies, multiple interpreters and code coverage analysis while running test suite. Our ``noxfile.py`` handles much of this for you:: - $ nox --reuse-existing-virtualenvs --sessions test-3.7 test-3.9 + $ nox --reuse-existing-virtualenvs --sessions test-3.12 test-pypy3.10 [ Nox will create virtualenv if needed, install the specified dependencies, and run the commands in order.] - nox > Running session test-3.7 - ....... - ....... - nox > Session test-3.7 was successful. - ....... - ....... - nox > Running session test-3.9 - ....... - ....... - nox > Session test-3.9 was successful. + + +Note that for nox to test different interpreters, the interpreters must be on the +``PATH`` first. Check with ``which`` to see if the interpreter is on the ``PATH`` +like so:: + + + $ which python3.12 + ~/.pyenv/versions/3.12.1/bin/python3.12 + + $ which pypy3.10 + ~/.pyenv/versions/pypy3.10-7.3.13/bin/pypy3.10 + There is also a nox command for running all of our tests and multiple python versions.:: @@ -63,10 +66,12 @@ suite:: [ Nox will create virtualenv if needed, install the specified dependencies, and run the commands in order.] ....... ....... - nox > Session test-3.7 was successful. nox > Session test-3.8 was successful. nox > Session test-3.9 was successful. nox > Session test-3.10 was successful. + nox > Session test-3.11 was successful. + nox > Session test-3.12 was successful. + nox > Session test-3.13 was successful. nox > Session test-pypy was successful. Our test suite `runs continuously on GitHub Actions diff --git a/docs/reference/contrib/emscripten.rst b/docs/reference/contrib/emscripten.rst new file mode 100644 index 0000000000..9e85629f69 --- /dev/null +++ b/docs/reference/contrib/emscripten.rst @@ -0,0 +1,87 @@ +Pyodide, Emscripten, and PyScript +================================= + +From the Pyodide documentation, `Pyodide `_ is a Python distribution for the browser and Node.js based on WebAssembly and `Emscripten `_. +This technology also underpins the `PyScript framework `_ and `Jupyterlite `_, so should work in those environments too. + +Starting in version 2.2.0 urllib3 supports being used in a Pyodide runtime utilizing +the `JavaScript fetch API `_ +or falling back on `XMLHttpRequest `_ +if the fetch API isn't available (such as when cross-origin isolation +isn't active). This means you can use Python libraries to make HTTP requests from your browser! + +Because urllib3's Emscripten support is API-compatible, this means that +libraries that depend on urllib3 may now be usable from Emscripten and Pyodide environments, too. + + .. warning:: + + **Support for Emscripten and Pyodide is experimental**. Report all bugs to the `urllib3 issue tracker `_. + Currently only supports browsers, does not yet support running in Node.js. + +It's recommended to `run Pyodide in a Web Worker `_ +in order to take full advantage of features like the fetch API which enables streaming of HTTP response bodies. + +Getting started +--------------- + +Using urllib3 with Pyodide means you need to `get started with Pyodide first `_. +The Pyodide project provides a `useful online REPL `_ to try in your browser without +any setup or installation to test out the code examples below. 
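The "Getting started" paragraph above points readers at the Pyodide REPL; the shape of a first session there is roughly the following (the target URL is a placeholder, and top-level ``await`` is assumed to be available, as it is in the Pyodide console)::

    import micropip
    await micropip.install("urllib3")  # fetch the wheel from PyPI inside the browser

    import urllib3

    # sys.platform == "emscripten" here, so the request is carried out through
    # the browser's fetch API (or XMLHttpRequest as a fallback).
    resp = urllib3.request("GET", "https://example.org")
    print(resp.status)
    print(resp.data[:80])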
+ +urllib3's Emscripten support is automatically enabled if ``sys.platform`` is ``"emscripten"``, so no setup is required beyond installation and importing the module. + +You can install urllib3 in a Pyodide environment using micropip. +Try using the following code in a Pyodide console or `` + + + + + + + +
+ + + diff --git a/test/contrib/emscripten/test_emscripten.py b/test/contrib/emscripten/test_emscripten.py new file mode 100644 index 0000000000..17264d8c50 --- /dev/null +++ b/test/contrib/emscripten/test_emscripten.py @@ -0,0 +1,974 @@ +from __future__ import annotations + +import sys +import typing + +import pytest + +from urllib3.fields import _TYPE_FIELD_VALUE_TUPLE + +from ...port_helpers import find_unused_port + +if sys.version_info < (3, 11): + # pyodide only works on 3.11+ + pytest.skip(allow_module_level=True) + +# only run these tests if pytest_pyodide is installed +# so we don't break non-emscripten pytest running +pytest_pyodide = pytest.importorskip("pytest_pyodide") + +from pytest_pyodide import run_in_pyodide # type: ignore[import-not-found] # noqa: E402 +from pytest_pyodide.decorator import ( # type: ignore[import-not-found] # noqa: E402 + copy_files_to_pyodide, +) + +from .conftest import PyodideServerInfo, ServerRunnerInfo # noqa: E402 + +# make our ssl certificates work in chrome +pytest_pyodide.runner.CHROME_FLAGS.append("ignore-certificate-errors") + + +# copy our wheel file to pyodide and install it +def install_urllib3_wheel() -> ( + typing.Callable[ + [typing.Callable[..., typing.Any]], typing.Callable[..., typing.Any] + ] +): + return copy_files_to_pyodide( # type: ignore[no-any-return] + file_list=[("dist/*.whl", "/tmp")], install_wheels=True + ) + + +@install_urllib3_wheel() +def test_index( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + from urllib3.connection import HTTPConnection + from urllib3.response import BaseHTTPResponse + + conn = HTTPConnection(host, port) + url = f"http://{host}:{port}/" + conn.request("GET", url) + response = conn.getresponse() + # check methods of response + assert isinstance(response, BaseHTTPResponse) + assert response.url == url + response.url = "http://woo" + assert response.url == "http://woo" + assert response.connection == conn + assert response.retries is None + data1 = response.data + decoded1 = data1.decode("utf-8") + data2 = response.data # check that getting data twice works + decoded2 = data2.decode("utf-8") + assert decoded1 == decoded2 == "Dummy server!" + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.http_port + ) + + +@install_urllib3_wheel() +def test_pool_requests( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int, https_port: int) -> None: # type: ignore[no-untyped-def] + # first with PoolManager + import urllib3 + + http = urllib3.PoolManager() + resp = http.request("GET", f"http://{host}:{port}/") + assert resp.data.decode("utf-8") == "Dummy server!" + + resp2 = http.request("GET", f"http://{host}:{port}/index") + assert resp2.data.decode("utf-8") == "Dummy server!" + + # should all have come from one pool + assert len(http.pools) == 1 + + resp3 = http.request("GET", f"https://{host}:{https_port}/") + assert resp2.data.decode("utf-8") == "Dummy server!" 
+ + # one http pool + one https pool + assert len(http.pools) == 2 + + # now with ConnectionPool + # because block == True, this will fail if the connection isn't + # returned to the pool correctly after the first request + pool = urllib3.HTTPConnectionPool(host, port, maxsize=1, block=True) + resp3 = pool.urlopen("GET", "/index") + assert resp3.data.decode("utf-8") == "Dummy server!" + + resp4 = pool.urlopen("GET", "/") + assert resp4.data.decode("utf-8") == "Dummy server!" + + # now with manual release of connection + # first - connection should be released once all + # data is read + pool2 = urllib3.HTTPConnectionPool(host, port, maxsize=1, block=True) + + resp5 = pool2.urlopen("GET", "/index", preload_content=False) + assert pool2.pool is not None + # at this point, the connection should not be in the pool + assert pool2.pool.qsize() == 0 + assert resp5.data.decode("utf-8") == "Dummy server!" + # now we've read all the data, connection should be back to the pool + assert pool2.pool.qsize() == 1 + resp6 = pool2.urlopen("GET", "/index", preload_content=False) + assert pool2.pool.qsize() == 0 + # force it back to the pool + resp6.release_conn() + assert pool2.pool.qsize() == 1 + read_str = resp6.read() + # for consistency with urllib3, this still returns the correct data even though + # we are in theory not using the connection any more + assert read_str.decode("utf-8") == "Dummy server!" + + pyodide_test( + selenium_coverage, + testserver_http.http_host, + testserver_http.http_port, + testserver_http.https_port, + ) + + +# wrong protocol / protocol error etc. should raise an exception of http.client.HTTPException +@install_urllib3_wheel() +def test_wrong_protocol( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + import http.client + + import pytest + + from urllib3.connection import HTTPConnection + + conn = HTTPConnection(host, port) + with pytest.raises(http.client.HTTPException): + conn.request("GET", f"http://{host}:{port}/") + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.https_port + ) + + +# wrong protocol / protocol error etc. 
should raise an exception of http.client.HTTPException +@install_urllib3_wheel() +def test_bad_method( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide(packages=("pytest",)) # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + import http.client + + import pytest + + from urllib3.connection import HTTPConnection + + conn = HTTPConnection(host, port) + with pytest.raises(http.client.HTTPException): + conn.request("TRACE", f"http://{host}:{port}/") + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.https_port + ) + + +# no connection - should raise +@install_urllib3_wheel() +def test_no_response( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide(packages=("pytest",)) # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + import http.client + + import pytest + + from urllib3.connection import HTTPConnection + + conn = HTTPConnection(host, port) + with pytest.raises(http.client.HTTPException): + conn.request("GET", f"http://{host}:{port}/") + _ = conn.getresponse() + + pyodide_test(selenium_coverage, testserver_http.http_host, find_unused_port()) + + +@install_urllib3_wheel() +def test_404(selenium_coverage: typing.Any, testserver_http: PyodideServerInfo) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + from urllib3.connection import HTTPConnection + from urllib3.response import BaseHTTPResponse + + conn = HTTPConnection(host, port) + conn.request("GET", f"http://{host}:{port}/status?status=404 NOT FOUND") + response = conn.getresponse() + assert isinstance(response, BaseHTTPResponse) + assert response.status == 404 + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.http_port + ) + + +# setting timeout should show a warning to js console +# if we're on the ui thread, because XMLHttpRequest doesn't +# support timeout in async mode if globalThis == Window +@install_urllib3_wheel() +def test_timeout_warning( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide() # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + import js # type: ignore[import-not-found] + + import urllib3.contrib.emscripten.fetch + from urllib3.connection import HTTPConnection + + old_log = js.console.warn + log_msgs = [] + + def capture_log(*args): # type: ignore[no-untyped-def] + log_msgs.append(str(args)) + old_log(*args) + + js.console.warn = capture_log + + conn = HTTPConnection(host, port, timeout=1.0) + conn.request("GET", f"http://{host}:{port}/") + conn.getresponse() + js.console.warn = old_log + # should have shown timeout warning exactly once by now + assert len([x for x in log_msgs if x.find("Warning: Timeout") != -1]) == 1 + assert urllib3.contrib.emscripten.fetch._SHOWN_TIMEOUT_WARNING + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.http_port + ) + + +@install_urllib3_wheel() +def test_timeout_in_worker_non_streaming( + selenium_coverage: typing.Any, + testserver_http: PyodideServerInfo, + run_from_server: ServerRunnerInfo, +) -> None: + worker_code = f""" + import pyodide_js as pjs + await 
pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) + from urllib3.exceptions import TimeoutError + from urllib3.connection import HTTPConnection + conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port},timeout=1.0) + result=-1 + try: + conn.request("GET","/slow") + _response = conn.getresponse() + result=-3 + except TimeoutError as e: + result=1 # we've got the correct exception + except BaseException as e: + result=-2 + assert result == 1 +""" + run_from_server.run_webworker(worker_code) + + +@install_urllib3_wheel() +def test_timeout_in_worker_streaming( + selenium_coverage: typing.Any, + testserver_http: PyodideServerInfo, + run_from_server: ServerRunnerInfo, +) -> None: + worker_code = f""" + import pyodide_js as pjs + await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) + import urllib3.contrib.emscripten.fetch + await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() + from urllib3.exceptions import TimeoutError + from urllib3.connection import HTTPConnection + conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port},timeout=1.0) + result=-1 + try: + conn.request("GET","/slow",preload_content=False) + _response = conn.getresponse() + result=-3 + except TimeoutError as e: + result=1 # we've got the correct exception + except BaseException as e: + result=-2 + assert result == 1 +""" + run_from_server.run_webworker(worker_code) + + +@install_urllib3_wheel() +def test_index_https( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + from urllib3.connection import HTTPSConnection + from urllib3.response import BaseHTTPResponse + + conn = HTTPSConnection(host, port) + conn.request("GET", f"https://{host}:{port}/") + response = conn.getresponse() + assert isinstance(response, BaseHTTPResponse) + data = response.data + assert data.decode("utf-8") == "Dummy server!" + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.https_port + ) + + +@install_urllib3_wheel() +def test_non_streaming_no_fallback_warning( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + import js + + import urllib3.contrib.emscripten.fetch + from urllib3.connection import HTTPSConnection + from urllib3.response import BaseHTTPResponse + + log_msgs = [] + old_log = js.console.warn + + def capture_log(*args): # type: ignore[no-untyped-def] + log_msgs.append(str(args)) + old_log(*args) + + js.console.warn = capture_log + conn = HTTPSConnection(host, port) + conn.request("GET", f"https://{host}:{port}/", preload_content=True) + response = conn.getresponse() + js.console.warn = old_log + assert isinstance(response, BaseHTTPResponse) + data = response.data + assert data.decode("utf-8") == "Dummy server!" 
+ # no console warnings because we didn't ask it to stream the response + # check no log messages + assert ( + len([x for x in log_msgs if x.find("Can't stream HTTP requests") != -1]) + == 0 + ) + assert not urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.https_port + ) + + +@install_urllib3_wheel() +def test_streaming_fallback_warning( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + import js + + import urllib3.contrib.emscripten.fetch + from urllib3.connection import HTTPSConnection + from urllib3.response import BaseHTTPResponse + + # monkeypatch is_cross_origin_isolated so that it warns about that + # even if we're serving it so it is fine + urllib3.contrib.emscripten.fetch.is_cross_origin_isolated = lambda: False + + log_msgs = [] + old_log = js.console.warn + + def capture_log(*args): # type: ignore[no-untyped-def] + log_msgs.append(str(args)) + old_log(*args) + + js.console.warn = capture_log + + conn = HTTPSConnection(host, port) + conn.request("GET", f"https://{host}:{port}/", preload_content=False) + response = conn.getresponse() + js.console.warn = old_log + assert isinstance(response, BaseHTTPResponse) + data = response.data + assert data.decode("utf-8") == "Dummy server!" + # check that it has warned about falling back to non-streaming fetch exactly once + assert ( + len([x for x in log_msgs if x.find("Can't stream HTTP requests") != -1]) + == 1 + ) + assert urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.https_port + ) + + +@install_urllib3_wheel() +def test_specific_method( + selenium_coverage: typing.Any, + testserver_http: PyodideServerInfo, + run_from_server: ServerRunnerInfo, +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + from urllib3 import HTTPSConnectionPool + + with HTTPSConnectionPool(host, port) as pool: + path = "/specific_method?method=POST" + response = pool.request("POST", path) + assert response.status == 200 + + response = pool.request("PUT", path) + assert response.status == 400 + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.https_port + ) + + +@install_urllib3_wheel() +def test_streaming_download( + selenium_coverage: typing.Any, + testserver_http: PyodideServerInfo, + run_from_server: ServerRunnerInfo, +) -> None: + # test streaming download, which must be in a webworker + # as you can't do it on main thread + + # this should return the 17mb big file, and + # should not log any warning about falling back + bigfile_url = ( + f"http://{testserver_http.http_host}:{testserver_http.http_port}/bigfile" + ) + worker_code = f""" + import pyodide_js as pjs + await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) + + import urllib3.contrib.emscripten.fetch + await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() + from urllib3.response import BaseHTTPResponse + from urllib3.connection import HTTPConnection + + conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port}) + conn.request("GET", "{bigfile_url}",preload_content=False) + response = conn.getresponse() + assert 
isinstance(response, BaseHTTPResponse) + assert urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING==False + data=response.data.decode('utf-8') + assert len(data) == 17825792 +""" + run_from_server.run_webworker(worker_code) + + +@install_urllib3_wheel() +def test_streaming_close( + selenium_coverage: typing.Any, + testserver_http: PyodideServerInfo, + run_from_server: ServerRunnerInfo, +) -> None: + # test streaming download, which must be in a webworker + # as you can't do it on main thread + + # this should return the 17mb big file, and + # should not log any warning about falling back + url = f"http://{testserver_http.http_host}:{testserver_http.http_port}/" + worker_code = f""" + import pyodide_js as pjs + await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) + + import urllib3.contrib.emscripten.fetch + await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() + from urllib3.response import BaseHTTPResponse + from urllib3.connection import HTTPConnection + from io import RawIOBase + + conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port}) + conn.request("GET", "{url}",preload_content=False) + response = conn.getresponse() + # check body is a RawIOBase stream and isn't seekable, writeable + body_internal = response._response.body + assert(isinstance(body_internal,RawIOBase)) + assert(body_internal.writable() is False) + assert(body_internal.seekable() is False) + assert(body_internal.readable() is True) + response.drain_conn() + x=response.read() + assert(not x) + response.close() + conn.close() + # try and make destructor be covered + # by killing everything + del response + del body_internal + del conn +""" + run_from_server.run_webworker(worker_code) + + +@install_urllib3_wheel() +def test_streaming_bad_url( + selenium_coverage: typing.Any, + testserver_http: PyodideServerInfo, + run_from_server: ServerRunnerInfo, +) -> None: + # this should cause an error + # because the protocol is bad + bad_url = f"hsffsdfttp://{testserver_http.http_host}:{testserver_http.http_port}/" + # this must be in a webworker + # as you can't do it on main thread + worker_code = f""" + import pytest + import pyodide_js as pjs + await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) + import http.client + import urllib3.contrib.emscripten.fetch + await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() + from urllib3.response import BaseHTTPResponse + from urllib3.connection import HTTPConnection + + conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port}) + with pytest.raises(http.client.HTTPException): + conn.request("GET", "{bad_url}",preload_content=False) +""" + run_from_server.run_webworker(worker_code) + + +@install_urllib3_wheel() +def test_streaming_bad_method( + selenium_coverage: typing.Any, + testserver_http: PyodideServerInfo, + run_from_server: ServerRunnerInfo, +) -> None: + # this should cause an error + # because the protocol is bad + bad_url = f"http://{testserver_http.http_host}:{testserver_http.http_port}/" + # this must be in a webworker + # as you can't do it on main thread + worker_code = f""" + import pytest + import http.client + import pyodide_js as pjs + await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) + + import urllib3.contrib.emscripten.fetch + await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() + from 
urllib3.response import BaseHTTPResponse + from urllib3.connection import HTTPConnection + + conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port}) + with pytest.raises(http.client.HTTPException): + # TRACE method should throw SecurityError in Javascript + conn.request("TRACE", "{bad_url}",preload_content=False) +""" + run_from_server.run_webworker(worker_code) + + +@install_urllib3_wheel() +def test_streaming_notready_warning( + selenium_coverage: typing.Any, + testserver_http: PyodideServerInfo, + run_from_server: ServerRunnerInfo, +) -> None: + # test streaming download but don't wait for + # worker to be ready - should fallback to non-streaming + # and log a warning + file_url = f"http://{testserver_http.http_host}:{testserver_http.http_port}/" + worker_code = f""" + import pyodide_js as pjs + await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) + import js + import urllib3 + from urllib3.response import BaseHTTPResponse + from urllib3.connection import HTTPConnection + + log_msgs=[] + old_log=js.console.warn + def capture_log(*args): + log_msgs.append(str(args)) + old_log(*args) + js.console.warn=capture_log + + conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port}) + conn.request("GET", "{file_url}",preload_content=False) + js.console.warn=old_log + response = conn.getresponse() + assert isinstance(response, BaseHTTPResponse) + data=response.data.decode('utf-8') + assert len([x for x in log_msgs if x.find("Can't stream HTTP requests")!=-1])==1 + assert urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING==True + """ + run_from_server.run_webworker(worker_code) + + +@install_urllib3_wheel() +def test_post_receive_json( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + import json + + from urllib3.connection import HTTPConnection + from urllib3.response import BaseHTTPResponse + + json_data = { + "Bears": "like", + "to": {"eat": "buns", "with": ["marmalade", "and custard"]}, + } + conn = HTTPConnection(host, port) + conn.request( + "POST", + f"http://{host}:{port}/echo_json", + body=json.dumps(json_data).encode("utf-8"), + headers={"Content-type": "application/json"}, + ) + response = conn.getresponse() + assert isinstance(response, BaseHTTPResponse) + data = response.json() + assert data == json_data + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.http_port + ) + + +@install_urllib3_wheel() +def test_upload( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + from urllib3 import HTTPConnectionPool + + data = "I'm in ur multipart form-data, hazing a cheezburgr" + fields: dict[str, _TYPE_FIELD_VALUE_TUPLE] = { + "upload_param": "filefield", + "upload_filename": "lolcat.txt", + "filefield": ("lolcat.txt", data), + } + fields["upload_size"] = str(len(data)) + with HTTPConnectionPool(host, port) as pool: + r = pool.request("POST", "/upload", fields=fields) + assert r.status == 200 + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.http_port + ) + + +@install_urllib3_wheel() +def test_streaming_not_ready_in_browser( + selenium_coverage: typing.Any, testserver_http: 
PyodideServerInfo +) -> None: + # streaming ready should always be false + # if we're in the main browser thread + selenium_coverage.run_async( + """ + import urllib3.contrib.emscripten.fetch + result=await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() + assert(result is False) + assert(urllib3.contrib.emscripten.fetch.streaming_ready() is None ) + """ + ) + + +@install_urllib3_wheel() +def test_requests_with_micropip( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + # this can't be @run_in_pyodide because of the async code + selenium_coverage.run_async( + f""" + import micropip + await micropip.install("requests") + import requests + import json + r = requests.get("http://{testserver_http.http_host}:{testserver_http.http_port}/") + assert(r.status_code == 200) + assert(r.text == "Dummy server!") + json_data={{"woo":"yay"}} + # try posting some json with requests + r = requests.post("http://{testserver_http.http_host}:{testserver_http.http_port}/echo_json",json=json_data) + import js + assert(r.json() == json_data) + """ + ) + + +@install_urllib3_wheel() +def test_open_close( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + from http.client import ResponseNotReady + + import pytest + + from urllib3.connection import HTTPConnection + + conn = HTTPConnection(host, port) + # initially connection should be closed + assert conn.is_closed is True + # connection should have no response + with pytest.raises(ResponseNotReady): + response = conn.getresponse() + # now make the response + conn.request("GET", f"http://{host}:{port}/") + # we never connect to proxy (or if we do, browser handles it) + assert conn.has_connected_to_proxy is False + # now connection should be open + assert conn.is_closed is False + # and should have a response + response = conn.getresponse() + assert response is not None + conn.close() + # now it is closed + assert conn.is_closed is True + # closed connection shouldn't have any response + with pytest.raises(ResponseNotReady): + conn.getresponse() + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.http_port + ) + + +# check that various ways that the worker may be broken +# throw exceptions nicely, by deliberately breaking things +# this is for coverage +@install_urllib3_wheel() +def test_break_worker_streaming( + selenium_coverage: typing.Any, + testserver_http: PyodideServerInfo, + run_from_server: ServerRunnerInfo, +) -> None: + worker_code = f""" + import pyodide_js as pjs + await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) + import pytest + import urllib3.contrib.emscripten.fetch + import js + import http.client + + await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() + from urllib3.exceptions import TimeoutError + from urllib3.connection import HTTPConnection + conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port},timeout=1.0) + # make the fetch worker return a bad response by: + # 1) Clearing the int buffer + # in the receive stream + with pytest.raises(http.client.HTTPException): + conn.request("GET","/",preload_content=False) + response = conn.getresponse() + body_internal = response._response.body + assert(body_internal.int_buffer!=None) + body_internal.int_buffer=None + data=response.read() + # 2) Monkeypatch 
postMessage so that it just sets an + # exception status + old_pm= body_internal.worker.postMessage + with pytest.raises(http.client.HTTPException): + conn.request("GET","/",preload_content=False) + response = conn.getresponse() + # make posted messages set an exception + body_internal = response._response.body + def set_exception(*args): + body_internal.worker.postMessage = old_pm + body_internal.int_buffer[1]=4 + body_internal.byte_buffer[0]=ord("W") + body_internal.byte_buffer[1]=ord("O") + body_internal.byte_buffer[2]=ord("O") + body_internal.byte_buffer[3]=ord("!") + body_internal.byte_buffer[4]=0 + js.Atomics.store(body_internal.int_buffer, 0, -4) + js.Atomics.notify(body_internal.int_buffer,0) + body_internal.worker.postMessage = set_exception + data=response.read() + # monkeypatch so it returns an unknown value for the magic number on initial fetch call + with pytest.raises(http.client.HTTPException): + # make posted messages set an exception + worker=urllib3.contrib.emscripten.fetch._fetcher.js_worker + def set_exception(self,*args): + array=js.Int32Array.new(args[0].buffer) + array[0]=-1234 + worker.postMessage=set_exception.__get__(worker,worker.__class__) + conn.request("GET","/",preload_content=False) + response = conn.getresponse() + data=response.read() + urllib3.contrib.emscripten.fetch._fetcher.js_worker.postMessage=old_pm + # 3) Stopping the worker receiving any messages which should cause a timeout error + # in the receive stream + with pytest.raises(TimeoutError): + conn.request("GET","/",preload_content=False) + response = conn.getresponse() + # make posted messages not be send + body_internal = response._response.body + def ignore_message(*args): + pass + old_pm= body_internal.worker.postMessage + body_internal.worker.postMessage = ignore_message + data=response.read() + body_internal.worker.postMessage = old_pm + +""" + run_from_server.run_webworker(worker_code) + + +@install_urllib3_wheel() +def test_response_init_length( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + import pytest + + import urllib3.exceptions + from urllib3.connection import HTTPConnection + from urllib3.response import BaseHTTPResponse + + conn = HTTPConnection(host, port) + conn.request("GET", f"http://{host}:{port}/") + response = conn.getresponse() + assert isinstance(response, BaseHTTPResponse) + # head shouldn't have length + length = response._init_length("HEAD") + assert length == 0 + # multiple inconsistent lengths - should raise invalid header + with pytest.raises(urllib3.exceptions.InvalidHeader): + response.headers["Content-Length"] = "4,5,6" + length = response._init_length("GET") + # non-numeric length - should return None + response.headers["Content-Length"] = "anna" + length = response._init_length("GET") + assert length is None + # numeric length - should return it + response.headers["Content-Length"] = "54" + length = response._init_length("GET") + assert length == 54 + # negative length - should return None + response.headers["Content-Length"] = "-12" + length = response._init_length("GET") + assert length is None + # none -> None + del response.headers["Content-Length"] + length = response._init_length("GET") + assert length is None + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.http_port + ) + + +@install_urllib3_wheel() +def test_response_close_connection( + 
selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + from urllib3.connection import HTTPConnection + from urllib3.response import BaseHTTPResponse + + conn = HTTPConnection(host, port) + conn.request("GET", f"http://{host}:{port}/") + response = conn.getresponse() + assert isinstance(response, BaseHTTPResponse) + response.close() + assert conn.is_closed + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.http_port + ) + + +@install_urllib3_wheel() +def test_read_chunked( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + from urllib3.connection import HTTPConnection + + conn = HTTPConnection(host, port) + conn.request("GET", f"http://{host}:{port}/mediumfile", preload_content=False) + response = conn.getresponse() + count = 0 + for x in response.read_chunked(512): + count += 1 + if count < 10: + assert len(x) == 512 + + pyodide_test( + selenium_coverage, testserver_http.http_host, testserver_http.http_port + ) + + +@install_urllib3_wheel() +def test_retries( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] + import pytest + + import urllib3 + + pool = urllib3.HTTPConnectionPool( + host, + port, + maxsize=1, + block=True, + retries=urllib3.util.Retry(connect=5, read=5, redirect=5), + ) + + # monkeypatch connection class to count calls + old_request = urllib3.connection.HTTPConnection.request + count = 0 + + def count_calls(self, *args, **argv): # type: ignore[no-untyped-def] + nonlocal count + count += 1 + return old_request(self, *args, **argv) + + urllib3.connection.HTTPConnection.request = count_calls # type: ignore[method-assign] + with pytest.raises(urllib3.exceptions.MaxRetryError): + pool.urlopen("GET", "/") + # this should fail, but should have tried 6 times total + assert count == 6 + + pyodide_test(selenium_coverage, testserver_http.http_host, find_unused_port()) + + +@install_urllib3_wheel() +def test_insecure_requests_warning( + selenium_coverage: typing.Any, testserver_http: PyodideServerInfo +) -> None: + @run_in_pyodide # type: ignore[misc] + def pyodide_test(selenium_coverage, host: str, port: int, https_port: int) -> None: # type: ignore[no-untyped-def] + import warnings + + import urllib3 + import urllib3.exceptions + + http = urllib3.PoolManager() + + with warnings.catch_warnings(record=True) as w: + http.request("GET", f"https://{host}:{https_port}") + assert len(w) == 0 + + pyodide_test( + selenium_coverage, + testserver_http.http_host, + testserver_http.http_port, + testserver_http.https_port, + ) diff --git a/test/contrib/test_pyopenssl.py b/test/contrib/test_pyopenssl.py index b0231295a5..eaca77ba6f 100644 --- a/test/contrib/test_pyopenssl.py +++ b/test/contrib/test_pyopenssl.py @@ -7,7 +7,10 @@ try: from cryptography import x509 - from OpenSSL.crypto import FILETYPE_PEM, load_certificate # type: ignore[import] + from OpenSSL.crypto import ( # type: ignore[import-untyped] + FILETYPE_PEM, + load_certificate, + ) from urllib3.contrib.pyopenssl import _dnsname_to_stdlib, get_subj_alt_name except ImportError: @@ -35,7 +38,6 @@ def 
teardown_module() -> None: from ..test_ssl import TestSSL # noqa: E402, F401 from ..test_util import TestUtilSSL # noqa: E402, F401 from ..with_dummyserver.test_https import ( # noqa: E402, F401 - TestHTTPS, TestHTTPS_IPV4SAN, TestHTTPS_IPV6SAN, TestHTTPS_TLSv1, diff --git a/test/contrib/test_securetransport.py b/test/contrib/test_securetransport.py deleted file mode 100644 index ac41fe5caa..0000000000 --- a/test/contrib/test_securetransport.py +++ /dev/null @@ -1,68 +0,0 @@ -from __future__ import annotations - -import base64 -import contextlib -import socket -import ssl - -import pytest - -try: - from urllib3.contrib.securetransport import WrappedSocket -except ImportError: - pass - - -def setup_module() -> None: - try: - from urllib3.contrib.securetransport import inject_into_urllib3 - - inject_into_urllib3() - except ImportError as e: - pytest.skip(f"Could not import SecureTransport: {repr(e)}") - - -def teardown_module() -> None: - try: - from urllib3.contrib.securetransport import extract_from_urllib3 - - extract_from_urllib3() - except ImportError: - pass - - -from ..test_util import TestUtilSSL # noqa: E402, F401 - -# SecureTransport does not support TLSv1.3 -# https://github.com/urllib3/urllib3/issues/1674 -from ..with_dummyserver.test_https import ( # noqa: E402, F401 - TestHTTPS, - TestHTTPS_TLSv1, - TestHTTPS_TLSv1_1, - TestHTTPS_TLSv1_2, -) -from ..with_dummyserver.test_socketlevel import ( # noqa: E402, F401 - TestClientCerts, - TestSNI, - TestSocketClosing, - TestSSL, -) - - -def test_no_crash_with_empty_trust_bundle() -> None: - with contextlib.closing(socket.socket()) as s: - ws = WrappedSocket(s) - with pytest.raises(ssl.SSLError): - ws._custom_validate(True, b"") - - -def test_no_crash_with_invalid_trust_bundle() -> None: - invalid_cert = base64.b64encode(b"invalid-cert") - cert_bundle = ( - b"-----BEGIN CERTIFICATE-----\n" + invalid_cert + b"\n-----END CERTIFICATE-----" - ) - - with contextlib.closing(socket.socket()) as s: - ws = WrappedSocket(s) - with pytest.raises(ssl.SSLError): - ws._custom_validate(True, cert_bundle) diff --git a/test/contrib/test_socks.py b/test/contrib/test_socks.py index 2878cc8d8b..aea90f0321 100644 --- a/test/contrib/test_socks.py +++ b/test/contrib/test_socks.py @@ -9,9 +9,9 @@ from unittest.mock import Mock, patch import pytest -import socks as py_socks # type: ignore[import] +import socks as py_socks # type: ignore[import-not-found] -from dummyserver.server import DEFAULT_CA, DEFAULT_CERTS +from dummyserver.socketserver import DEFAULT_CA, DEFAULT_CERTS from dummyserver.testcase import IPV4SocketDummyServerTestCase from urllib3.contrib import socks from urllib3.exceptions import ConnectTimeoutError, NewConnectionError diff --git a/test/test_collections.py b/test/test_collections.py index 8d0c1ce26f..ae896e2689 100644 --- a/test/test_collections.py +++ b/test/test_collections.py @@ -282,7 +282,7 @@ def test_header_repeat(self, d: HTTPHeaderDict) -> None: ] assert list(d.items()) == expected_results - # make sure the values persist over copys + # make sure the values persist over copies assert list(d.copy().items()) == expected_results other_dict = HTTPHeaderDict() @@ -385,7 +385,7 @@ def test_dict_conversion(self, d: HTTPHeaderDict) -> None: hdict = { "Content-Length": "0", "Content-type": "text/plain", - "Server": "TornadoServer/1.2.3", + "Server": "Hypercorn/1.2.3", } h = dict(HTTPHeaderDict(hdict).items()) assert hdict == h diff --git a/test/test_connection.py b/test/test_connection.py index f49497bafe..a4bd8731fc 100644 --- 
a/test/test_connection.py +++ b/test/test_connection.py @@ -19,6 +19,7 @@ ) from urllib3.exceptions import HTTPError, ProxyError, SSLError from urllib3.util import ssl_ +from urllib3.util.request import SKIP_HEADER from urllib3.util.ssl_match_hostname import ( CertificateError as ImplementationCertificateError, ) @@ -265,3 +266,60 @@ def test_assert_hostname_closes_socket(self) -> None: conn.connect() context.wrap_socket.return_value.close.assert_called_once_with() + + @pytest.mark.parametrize( + "accept_encoding", + [ + "Accept-Encoding", + "accept-encoding", + b"Accept-Encoding", + b"accept-encoding", + None, + ], + ) + @pytest.mark.parametrize("host", ["Host", "host", b"Host", b"host", None]) + @pytest.mark.parametrize( + "user_agent", ["User-Agent", "user-agent", b"User-Agent", b"user-agent", None] + ) + @pytest.mark.parametrize("chunked", [True, False]) + def test_skip_header( + self, + accept_encoding: str | None, + host: str | None, + user_agent: str | None, + chunked: bool, + ) -> None: + headers = {} + if accept_encoding is not None: + headers[accept_encoding] = SKIP_HEADER + if host is not None: + headers[host] = SKIP_HEADER + if user_agent is not None: + headers[user_agent] = SKIP_HEADER + + # When dropping support for Python 3.9, this can be rewritten to parenthesized + # context managers + with mock.patch("urllib3.util.connection.create_connection"): + with mock.patch( + "urllib3.connection._HTTPConnection.putheader" + ) as http_client_putheader: + conn = HTTPConnection("") + conn.request("GET", "/headers", headers=headers, chunked=chunked) + + request_headers = {} + for call in http_client_putheader.call_args_list: + header, value = call.args + request_headers[header] = value + + if accept_encoding is None: + assert "Accept-Encoding" in request_headers + else: + assert accept_encoding not in request_headers + if host is None: + assert "Host" in request_headers + else: + assert host not in request_headers + if user_agent is None: + assert "User-Agent" in request_headers + else: + assert user_agent not in request_headers diff --git a/test/test_connectionpool.py b/test/test_connectionpool.py index d81d33f7bd..176fed4ae4 100644 --- a/test/test_connectionpool.py +++ b/test/test_connectionpool.py @@ -12,7 +12,7 @@ import pytest -from dummyserver.server import DEFAULT_CA +from dummyserver.socketserver import DEFAULT_CA from urllib3 import Retry from urllib3.connection import HTTPConnection from urllib3.connectionpool import ( diff --git a/test/test_poolmanager.py b/test/test_poolmanager.py index 821e218b18..ab5f20309b 100644 --- a/test/test_poolmanager.py +++ b/test/test_poolmanager.py @@ -259,11 +259,15 @@ def test_http_connection_from_context_case_insensitive(self) -> None: assert pool is other_pool assert all(isinstance(key, PoolKey) for key in p.pools.keys()) - def test_deprecated_no_scheme(self) -> None: + @patch("urllib3.poolmanager.PoolManager.connection_from_host") + def test_deprecated_no_scheme(self, connection_from_host: mock.MagicMock) -> None: + # Don't actually make a network connection, just verify the DeprecationWarning + connection_from_host.side_effect = ConnectionError("Not attempting connection") p = PoolManager() with pytest.warns(DeprecationWarning) as records: - p.request(method="GET", url="evil.com://good.com") + with pytest.raises(ConnectionError): + p.request(method="GET", url="evil.com://good.com") msg = ( "URLs without a scheme (ie 'https://') are deprecated and will raise an error " diff --git a/test/test_response.py b/test/test_response.py index 
c6d9d1528a..c0062771ec 100644 --- a/test/test_response.py +++ b/test/test_response.py @@ -4,7 +4,6 @@ import http.client as httplib import socket import ssl -import sys import typing import zlib from base64 import b64decode @@ -31,7 +30,6 @@ BytesQueueBuffer, HTTPResponse, brotli, - zstd, ) from urllib3.util.response import is_fp_closed from urllib3.util.retry import RequestHistory, Retry @@ -74,18 +72,48 @@ def test_multiple_chunks(self) -> None: assert buffer.get(4) == b"rbaz" assert len(buffer) == 0 - @pytest.mark.skipif( - sys.version_info < (3, 8), reason="pytest-memray requires Python 3.8+" + def test_get_all_empty(self) -> None: + q = BytesQueueBuffer() + assert q.get_all() == b"" + assert len(q) == 0 + + def test_get_all_single(self) -> None: + q = BytesQueueBuffer() + q.put(b"a") + assert q.get_all() == b"a" + assert len(q) == 0 + + def test_get_all_many(self) -> None: + q = BytesQueueBuffer() + q.put(b"a") + q.put(b"b") + q.put(b"c") + assert q.get_all() == b"abc" + assert len(q) == 0 + + @pytest.mark.parametrize( + "get_func", + (lambda b: b.get(len(b)), lambda b: b.get_all()), + ids=("get", "get_all"), ) @pytest.mark.limit_memory("12.5 MB") # assert that we're not doubling memory usage - def test_memory_usage(self) -> None: + def test_memory_usage( + self, get_func: typing.Callable[[BytesQueueBuffer], str] + ) -> None: # Allocate 10 1MiB chunks buffer = BytesQueueBuffer() for i in range(10): # This allocates 2MiB, putting the max at around 12MiB. Not sure why. buffer.put(bytes(2**20)) - assert len(buffer.get(10 * 2**20)) == 10 * 2**20 + assert len(get_func(buffer)) == 10 * 2**20 + + @pytest.mark.limit_memory("10.01 MB") + def test_get_all_memory_usage_single_chunk(self) -> None: + buffer = BytesQueueBuffer() + chunk = bytes(10 * 2**20) # 10 MiB + buffer.put(chunk) + assert buffer.get_all() is chunk # A known random (i.e, not-too-compressible) payload generated with: @@ -189,6 +217,53 @@ def test_reference_read(self) -> None: assert r.read() == b"" assert r.read() == b"" + @pytest.mark.parametrize("read_args", ((), (None,), (-1,))) + def test_reference_read_until_eof(self, read_args: tuple[typing.Any, ...]) -> None: + fp = BytesIO(b"foo") + r = HTTPResponse(fp, preload_content=False) + assert r.read(*read_args) == b"foo" + + def test_reference_read1(self) -> None: + fp = BytesIO(b"foobar") + r = HTTPResponse(fp, preload_content=False) + + assert r.read1(0) == b"" + assert r.read1(1) == b"f" + assert r.read1(2) == b"oo" + assert r.read1() == b"bar" + assert r.read1() == b"" + + @pytest.mark.parametrize("read1_args", ((), (None,), (-1,))) + def test_reference_read1_without_limit( + self, read1_args: tuple[typing.Any, ...] 
+ ) -> None: + fp = BytesIO(b"foo") + r = HTTPResponse(fp, preload_content=False) + assert r.read1(*read1_args) == b"foo" + + def test_reference_read1_nodecode(self) -> None: + fp = BytesIO(b"foobar") + r = HTTPResponse(fp, preload_content=False, decode_content=False) + + assert r.read1(0) == b"" + assert r.read1(1) == b"f" + assert r.read1(2) == b"oo" + assert r.read1() == b"bar" + assert r.read1() == b"" + + def test_decoding_read1(self) -> None: + data = zlib.compress(b"foobar") + + fp = BytesIO(data) + r = HTTPResponse( + fp, headers={"content-encoding": "deflate"}, preload_content=False + ) + + assert r.read1(1) == b"f" + assert r.read1(2) == b"oo" + assert r.read1() == b"bar" + assert r.read1() == b"" + def test_decode_deflate(self) -> None: data = zlib.compress(b"foo") @@ -233,14 +308,15 @@ def test_chunked_decoding_deflate2(self) -> None: assert r.read() == b"" assert r.read() == b"" - def test_chunked_decoding_gzip(self) -> None: + @pytest.mark.parametrize("content_encoding", ["gzip", "x-gzip"]) + def test_chunked_decoding_gzip(self, content_encoding: str) -> None: compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) data = compress.compress(b"foo") data += compress.flush() fp = BytesIO(data) r = HTTPResponse( - fp, headers={"content-encoding": "gzip"}, preload_content=False + fp, headers={"content-encoding": content_encoding}, preload_content=False ) assert r.read(1) == b"f" @@ -326,6 +402,8 @@ def test_decode_brotli_error(self) -> None: @onlyZstd() def test_decode_zstd(self) -> None: + import zstandard as zstd + data = zstd.compress(b"foo") fp = BytesIO(data) @@ -334,6 +412,8 @@ def test_decode_zstd(self) -> None: @onlyZstd() def test_decode_multiframe_zstd(self) -> None: + import zstandard as zstd + data = ( # Zstandard frame zstd.compress(b"foo") @@ -353,6 +433,8 @@ def test_decode_multiframe_zstd(self) -> None: @onlyZstd() def test_chunked_decoding_zstd(self) -> None: + import zstandard as zstd + data = zstd.compress(b"foobarbaz") fp = BytesIO(data) @@ -368,8 +450,13 @@ def test_chunked_decoding_zstd(self) -> None: break assert ret == b"foobarbaz" + decode_param_set = [ + b"foo", + b"x" * 100, + ] + @onlyZstd() - @pytest.mark.parametrize("data", [b"foo", b"x" * 100]) + @pytest.mark.parametrize("data", decode_param_set) def test_decode_zstd_error(self, data: bytes) -> None: fp = BytesIO(data) @@ -377,14 +464,74 @@ def test_decode_zstd_error(self, data: bytes) -> None: HTTPResponse(fp, headers={"content-encoding": "zstd"}) @onlyZstd() - @pytest.mark.parametrize("data", [b"foo", b"x" * 100]) - def test_decode_zstd_incomplete(self, data: bytes) -> None: + @pytest.mark.parametrize("data", decode_param_set) + def test_decode_zstd_incomplete_preload_content(self, data: bytes) -> None: + import zstandard as zstd + data = zstd.compress(data) fp = BytesIO(data[:-1]) with pytest.raises(DecodeError): HTTPResponse(fp, headers={"content-encoding": "zstd"}) + @onlyZstd() + @pytest.mark.parametrize("data", decode_param_set) + def test_decode_zstd_incomplete_read(self, data: bytes) -> None: + import zstandard as zstd + + data = zstd.compress(data) + fp = BytesIO(data[:-1]) # shorten the data to trigger DecodeError + + # create response object without(!) 
reading/decoding the content + r = HTTPResponse( + fp, headers={"content-encoding": "zstd"}, preload_content=False + ) + + # read/decode, expecting DecodeError + with pytest.raises(DecodeError): + r.read(decode_content=True) + + @onlyZstd() + @pytest.mark.parametrize("data", decode_param_set) + def test_decode_zstd_incomplete_read1(self, data: bytes) -> None: + import zstandard as zstd + + data = zstd.compress(data) + fp = BytesIO(data[:-1]) + + r = HTTPResponse( + fp, headers={"content-encoding": "zstd"}, preload_content=False + ) + + # read/decode via read1(!), expecting DecodeError + with pytest.raises(DecodeError): + amt_decoded = 0 + # loop, as read1() may return just partial data + while amt_decoded < len(data): + part = r.read1(decode_content=True) + amt_decoded += len(part) + + @onlyZstd() + @pytest.mark.parametrize("data", decode_param_set) + def test_decode_zstd_read1(self, data: bytes) -> None: + import zstandard as zstd + + encoded_data = zstd.compress(data) + fp = BytesIO(encoded_data) + + r = HTTPResponse( + fp, headers={"content-encoding": "zstd"}, preload_content=False + ) + + amt_decoded = 0 + decoded_data = b"" + # loop, as read1() may return just partial data + while amt_decoded < len(data): + part = r.read1(decode_content=True) + amt_decoded += len(part) + decoded_data += part + assert decoded_data == data + def test_multi_decoding_deflate_deflate(self) -> None: data = zlib.compress(zlib.compress(b"foo")) @@ -439,10 +586,12 @@ def test_body_blob(self) -> None: assert resp.data == b"foo" assert resp.closed + @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") def test_base_io(self) -> None: resp = BaseHTTPResponse( status=200, version=11, + version_string="HTTP/1.1", reason=None, decode_content=False, request_url=None, @@ -493,7 +642,7 @@ def test_io(self, sock: socket.socket) -> None: with pytest.raises(IOError): resp3.fileno() - def test_io_closed_consistently(self, sock: socket.socket) -> None: + def test_io_closed_consistently_by_read(self, sock: socket.socket) -> None: try: hlr = httplib.HTTPResponse(sock) hlr.fp = BytesIO(b"foo") # type: ignore[assignment] @@ -513,6 +662,99 @@ def test_io_closed_consistently(self, sock: socket.socket) -> None: finally: hlr.close() + @pytest.mark.parametrize("read_amt", (None, 3)) + @pytest.mark.parametrize("length_known", (True, False)) + def test_io_closed_consistently_by_read1( + self, sock: socket.socket, length_known: bool, read_amt: int | None + ) -> None: + with httplib.HTTPResponse(sock) as hlr: + hlr.fp = BytesIO(b"foo") # type: ignore[assignment] + hlr.chunked = 0 # type: ignore[assignment] + hlr.length = 3 if length_known else None + with HTTPResponse(hlr, preload_content=False) as resp: + if length_known: + resp.length_remaining = 3 + assert not resp.closed + assert resp._fp is not None + assert not resp._fp.isclosed() + assert not is_fp_closed(resp._fp) + assert not resp.isclosed() + resp.read1(read_amt) + # If content length is unknown, IO is not closed until + # the next read returning zero bytes. 
+ if not length_known: + assert not resp.closed + assert resp._fp is not None + assert not resp._fp.isclosed() + assert not is_fp_closed(resp._fp) + assert not resp.isclosed() + resp.read1(read_amt) + assert resp.closed + assert resp._fp.isclosed() + assert is_fp_closed(resp._fp) + assert resp.isclosed() + + @pytest.mark.parametrize("length_known", (True, False)) + def test_io_not_closed_until_all_data_is_read( + self, sock: socket.socket, length_known: bool + ) -> None: + with httplib.HTTPResponse(sock) as hlr: + hlr.fp = BytesIO(b"foo") # type: ignore[assignment] + hlr.chunked = 0 # type: ignore[assignment] + length_remaining = 3 + hlr.length = length_remaining if length_known else None + with HTTPResponse(hlr, preload_content=False) as resp: + if length_known: + resp.length_remaining = length_remaining + while length_remaining: + assert not resp.closed + assert resp._fp is not None + assert not resp._fp.isclosed() + assert not is_fp_closed(resp._fp) + assert not resp.isclosed() + data = resp.read(1) + assert len(data) == 1 + length_remaining -= 1 + # If content length is unknown, IO is not closed until + # the next read returning zero bytes. + if not length_known: + assert not resp.closed + assert resp._fp is not None + assert not resp._fp.isclosed() + assert not is_fp_closed(resp._fp) + assert not resp.isclosed() + data = resp.read(1) + assert len(data) == 0 + assert resp.closed + assert resp._fp.isclosed() # type: ignore[union-attr] + assert is_fp_closed(resp._fp) + assert resp.isclosed() + + @pytest.mark.parametrize("length_known", (True, False)) + def test_io_not_closed_after_requesting_0_bytes( + self, sock: socket.socket, length_known: bool + ) -> None: + with httplib.HTTPResponse(sock) as hlr: + hlr.fp = BytesIO(b"foo") # type: ignore[assignment] + hlr.chunked = 0 # type: ignore[assignment] + length_remaining = 3 + hlr.length = length_remaining if length_known else None + with HTTPResponse(hlr, preload_content=False) as resp: + if length_known: + resp.length_remaining = length_remaining + assert not resp.closed + assert resp._fp is not None + assert not resp._fp.isclosed() + assert not is_fp_closed(resp._fp) + assert not resp.isclosed() + data = resp.read(0) + assert data == b"" + assert not resp.closed + assert resp._fp is not None + assert not resp._fp.isclosed() + assert not is_fp_closed(resp._fp) + assert not resp.isclosed() + def test_io_bufferedreader(self) -> None: fp = BytesIO(b"foo") resp = HTTPResponse(fp, preload_content=False) @@ -627,6 +869,35 @@ def test_read_with_illegal_mix_decode_toggle(self) -> None: ): resp.read(decode_content=False) + def test_read1_with_illegal_mix_decode_toggle(self) -> None: + data = zlib.compress(b"foo") + + fp = BytesIO(data) + + resp = HTTPResponse( + fp, headers={"content-encoding": "deflate"}, preload_content=False + ) + + assert resp.read1(1) == b"f" + + with pytest.raises( + RuntimeError, + match=( + r"Calling read1\(decode_content=False\) is not supported after " + r"read1\(decode_content=True\) was called" + ), + ): + resp.read1(1, decode_content=False) + + with pytest.raises( + RuntimeError, + match=( + r"Calling read1\(decode_content=False\) is not supported after " + r"read1\(decode_content=True\) was called" + ), + ): + resp.read1(decode_content=False) + def test_read_with_mix_decode_toggle(self) -> None: data = zlib.compress(b"foo") @@ -729,6 +1000,9 @@ def read(self, _: int) -> bytes: # type: ignore[override] return self.payloads.pop(0) return b"" + def read1(self, amt: int) -> bytes: # type: ignore[override] + return 
self.read(amt) + uncompressed_data = zlib.decompress(ZLIB_PAYLOAD) payload_part_size = len(ZLIB_PAYLOAD) // NUMBER_OF_READS @@ -751,7 +1025,7 @@ def read(self, _: int) -> bytes: # type: ignore[override] assert uncompressed_data == payload # Check that the positions in the stream are correct - # It is difficult to determine programatically what the positions + # It is difficult to determine programmatically what the positions # returned by `tell` will be because the `HTTPResponse.read` method may # call socket `read` a couple of times if it doesn't have enough data # in the buffer or not call socket `read` at all if it has enough. All @@ -812,12 +1086,18 @@ def test_empty_stream(self) -> None: next(stream) @pytest.mark.parametrize( - "preload_content, amt", - [(True, None), (False, None), (False, 10 * 2**20)], + "preload_content, amt, read_meth", + [ + (True, None, "read"), + (False, None, "read"), + (False, 10 * 2**20, "read"), + (False, None, "read1"), + (False, 10 * 2**20, "read1"), + ], ) @pytest.mark.limit_memory("25 MB") def test_buffer_memory_usage_decode_one_chunk( - self, preload_content: bool, amt: int + self, preload_content: bool, amt: int, read_meth: str ) -> None: content_length = 10 * 2**20 # 10 MiB fp = BytesIO(zlib.compress(bytes(content_length))) @@ -826,21 +1106,27 @@ def test_buffer_memory_usage_decode_one_chunk( preload_content=preload_content, headers={"content-encoding": "deflate"}, ) - data = resp.data if preload_content else resp.read(amt) + data = resp.data if preload_content else getattr(resp, read_meth)(amt) assert len(data) == content_length @pytest.mark.parametrize( - "preload_content, amt", - [(True, None), (False, None), (False, 10 * 2**20)], + "preload_content, amt, read_meth", + [ + (True, None, "read"), + (False, None, "read"), + (False, 10 * 2**20, "read"), + (False, None, "read1"), + (False, 10 * 2**20, "read1"), + ], ) @pytest.mark.limit_memory("10.5 MB") def test_buffer_memory_usage_no_decoding( - self, preload_content: bool, amt: int + self, preload_content: bool, amt: int, read_meth: str ) -> None: content_length = 10 * 2**20 # 10 MiB fp = BytesIO(bytes(content_length)) resp = HTTPResponse(fp, preload_content=preload_content, decode_content=False) - data = resp.data if preload_content else resp.read(amt) + data = resp.data if preload_content else getattr(resp, read_meth)(amt) assert len(data) == content_length def test_length_no_header(self) -> None: @@ -925,6 +1211,9 @@ def read(self, amt: int) -> bytes: return data + def read1(self, amt: int) -> bytes: + return self.read(1) + def close(self) -> None: self.fp = None @@ -988,7 +1277,10 @@ def test_mock_transfer_encoding_chunked_custom_read(self) -> None: response = list(resp.read_chunked(2)) assert expected_response == response - def test_mock_transfer_encoding_chunked_unlmtd_read(self) -> None: + @pytest.mark.parametrize("read_chunked_args", ((), (None,), (-1,))) + def test_mock_transfer_encoding_chunked_unlmtd_read( + self, read_chunked_args: tuple[typing.Any, ...] 
+ ) -> None: stream = [b"foooo", b"bbbbaaaaar"] fp = MockChunkedEncodingResponse(stream) r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] @@ -998,7 +1290,7 @@ def test_mock_transfer_encoding_chunked_unlmtd_read(self) -> None: resp = HTTPResponse( r, preload_content=False, headers={"transfer-encoding": "chunked"} ) - assert stream == list(resp.read_chunked()) + assert stream == list(resp.read_chunked(*read_chunked_args)) def test_read_not_chunked_response_as_chunks(self) -> None: fp = BytesIO(b"foo") @@ -1032,7 +1324,7 @@ def test_buggy_incomplete_read(self) -> None: orig_ex = ctx.value.args[1] assert isinstance(orig_ex, IncompleteRead) - assert orig_ex.partial == 0 # type: ignore[comparison-overlap] + assert orig_ex.partial == 0 assert orig_ex.expected == content_length def test_incomplete_chunk(self) -> None: @@ -1073,6 +1365,21 @@ def test_invalid_chunk_length(self) -> None: assert isinstance(orig_ex, InvalidChunkLength) assert orig_ex.length == fp.BAD_LENGTH_LINE.encode() + def test_truncated_before_chunk(self) -> None: + stream = [b"foooo", b"bbbbaaaaar"] + fp = MockChunkedNoChunks(stream) + r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] + r.fp = fp # type: ignore[assignment] + r.chunked = True + r.chunk_left = None + resp = HTTPResponse( + r, preload_content=False, headers={"transfer-encoding": "chunked"} + ) + with pytest.raises(ProtocolError) as ctx: + next(resp.read_chunked()) + + assert str(ctx.value) == "Response ended prematurely" + def test_chunked_response_without_crlf_on_end(self) -> None: stream = [b"foo", b"bar", b"baz"] fp = MockChunkedEncodingWithoutCRLFOnEnd(stream) @@ -1228,6 +1535,27 @@ def make_bad_mac_fp() -> typing.Generator[BytesIO, None, None]: resp.read() assert e.value.args[0] == mac_error + def test_unexpected_body(self) -> None: + with pytest.raises(ProtocolError) as excinfo: + fp = BytesIO(b"12345") + headers = {"content-length": "5"} + resp = HTTPResponse(fp, status=204, headers=headers) + resp.read(16) + assert "Response may not contain content" in str(excinfo.value) + + with pytest.raises(ProtocolError): + fp = BytesIO(b"12345") + headers = {"content-length": "0"} + resp = HTTPResponse(fp, status=204, headers=headers) + resp.read(16) + assert "Response may not contain content" in str(excinfo.value) + + with pytest.raises(ProtocolError): + fp = BytesIO(b"12345") + resp = HTTPResponse(fp, status=204) + resp.read(16) + assert "Response may not contain content" in str(excinfo.value) + class MockChunkedEncodingResponse: def __init__(self, content: list[bytes]) -> None: @@ -1297,6 +1625,9 @@ def readline(self) -> bytes: def read(self, amt: int = -1) -> bytes: return self.pop_current_chunk(amt) + def read1(self, amt: int = -1) -> bytes: + return self.pop_current_chunk(amt) + def flush(self) -> None: # Python 3 wants this method. 
pass @@ -1331,6 +1662,11 @@ def _encode_chunk(self, chunk: bytes) -> bytes: return f"{len(chunk):X};asd=qwe\r\n{chunk.decode()}\r\n".encode() +class MockChunkedNoChunks(MockChunkedEncodingResponse): + def _encode_chunk(self, chunk: bytes) -> bytes: + return b"" + + class MockSock: @classmethod def makefile(cls, *args: typing.Any, **kwargs: typing.Any) -> None: diff --git a/test/test_retry.py b/test/test_retry.py index f71e7acc9e..ac3ce4ca73 100644 --- a/test/test_retry.py +++ b/test/test_retry.py @@ -334,7 +334,11 @@ def test_retry_method_not_allowed(self) -> None: def test_retry_default_remove_headers_on_redirect(self) -> None: retry = Retry() - assert retry.remove_headers_on_redirect == {"authorization", "cookie"} + assert retry.remove_headers_on_redirect == { + "authorization", + "proxy-authorization", + "cookie", + } def test_retry_set_remove_headers_on_redirect(self) -> None: retry = Retry(remove_headers_on_redirect=["X-API-Secret"]) diff --git a/test/test_ssl.py b/test/test_ssl.py index c886d4e51c..43073cb263 100644 --- a/test/test_ssl.py +++ b/test/test_ssl.py @@ -108,13 +108,28 @@ def test_wrap_socket_no_ssltransport(self) -> None: ssl_.ssl_wrap_socket(sock, tls_in_tls=True) @pytest.mark.parametrize( - ["pha", "expected_pha"], [(None, None), (False, True), (True, True)] + ["pha", "expected_pha", "cert_reqs"], + [ + (None, None, None), + (None, None, ssl.CERT_NONE), + (None, None, ssl.CERT_OPTIONAL), + (None, None, ssl.CERT_REQUIRED), + (False, True, None), + (False, True, ssl.CERT_NONE), + (False, True, ssl.CERT_OPTIONAL), + (False, True, ssl.CERT_REQUIRED), + (True, True, None), + (True, True, ssl.CERT_NONE), + (True, True, ssl.CERT_OPTIONAL), + (True, True, ssl.CERT_REQUIRED), + ], ) def test_create_urllib3_context_pha( self, monkeypatch: pytest.MonkeyPatch, pha: bool | None, expected_pha: bool | None, + cert_reqs: int | None, ) -> None: context = mock.create_autospec(ssl_.SSLContext) context.set_ciphers = mock.Mock() @@ -122,7 +137,7 @@ def test_create_urllib3_context_pha( context.post_handshake_auth = pha monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context) - assert ssl_.create_urllib3_context() is context + assert ssl_.create_urllib3_context(cert_reqs=cert_reqs) is context assert context.post_handshake_auth == expected_pha diff --git a/test/test_ssltransport.py b/test/test_ssltransport.py index cace51db96..b6d1f861eb 100644 --- a/test/test_ssltransport.py +++ b/test/test_ssltransport.py @@ -4,19 +4,17 @@ import select import socket import ssl +import threading import typing from unittest import mock import pytest -from dummyserver.server import DEFAULT_CA, DEFAULT_CERTS +from dummyserver.socketserver import DEFAULT_CA, DEFAULT_CERTS from dummyserver.testcase import SocketDummyServerTestCase, consume_socket from urllib3.util import ssl_ from urllib3.util.ssltransport import SSLTransport -if typing.TYPE_CHECKING: - from typing_extensions import Literal - # consume_socket can iterate forever, we add timeouts to prevent halting. PER_TEST_TIMEOUT = 60 @@ -34,12 +32,12 @@ def server_client_ssl_contexts() -> tuple[ssl.SSLContext, ssl.SSLContext]: @typing.overload -def sample_request(binary: Literal[True] = ...) -> bytes: +def sample_request(binary: typing.Literal[True] = ...) -> bytes: ... @typing.overload -def sample_request(binary: Literal[False]) -> str: +def sample_request(binary: typing.Literal[False]) -> str: ... 
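# A standalone sketch (not from this diff) of the typing.overload / typing.Literal
# pattern that sample_request()/sample_response() use above: on Python 3.8+ the
# Literal type lives in typing itself, which is why the typing_extensions import
# is dropped. The encode() helper below is purely illustrative and does not exist
# in urllib3 or its test suite.
from __future__ import annotations

import typing


@typing.overload
def encode(payload: str, binary: typing.Literal[True] = ...) -> bytes:
    ...


@typing.overload
def encode(payload: str, binary: typing.Literal[False]) -> str:
    ...


def encode(payload: str, binary: bool = True) -> bytes | str:
    # A type checker selects the overload (and thus the return type) from the
    # literal value passed for `binary`; at runtime only this body executes.
    return payload.encode("utf-8") if binary else payload


raw: bytes = encode("sample")         # checked as bytes
text: str = encode("sample", False)   # checked as str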
@@ -54,7 +52,7 @@ def sample_request(binary: bool = True) -> bytes | str: def validate_request( - provided_request: bytearray, binary: Literal[False, True] = True + provided_request: bytearray, binary: typing.Literal[False, True] = True ) -> None: assert provided_request is not None expected_request = sample_request(binary) @@ -62,12 +60,12 @@ def validate_request( @typing.overload -def sample_response(binary: Literal[True] = ...) -> bytes: +def sample_response(binary: typing.Literal[True] = ...) -> bytes: ... @typing.overload -def sample_response(binary: Literal[False]) -> str: +def sample_response(binary: typing.Literal[False]) -> str: ... @@ -91,11 +89,11 @@ def validate_response( def validate_peercert(ssl_socket: SSLTransport) -> None: binary_cert = ssl_socket.getpeercert(binary_form=True) - assert type(binary_cert) == bytes + assert type(binary_cert) is bytes assert len(binary_cert) > 0 cert = ssl_socket.getpeercert() - assert type(cert) == dict + assert type(cert) is dict assert "serialNumber" in cert assert cert["serialNumber"] != "" @@ -111,20 +109,29 @@ def setup_class(cls) -> None: cls.server_context, cls.client_context = server_client_ssl_contexts() def start_dummy_server( - self, handler: typing.Callable[[socket.socket], None] | None = None + self, + handler: typing.Callable[[socket.socket], None] | None = None, + validate: bool = True, ) -> None: + quit_event = threading.Event() + def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] try: with self.server_context.wrap_socket(sock, server_side=True) as ssock: - request = consume_socket(ssock) + request = consume_socket( + ssock, + quit_event=quit_event, + ) + if not validate: + return validate_request(request) ssock.send(sample_response()) except (ConnectionAbortedError, ConnectionResetError): return chosen_handler = handler if handler else socket_handler - self._start_server(chosen_handler) + self._start_server(chosen_handler, quit_event=quit_event) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_start_closed_socket(self) -> None: @@ -138,7 +145,7 @@ def test_start_closed_socket(self) -> None: @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_close_after_handshake(self) -> None: """Socket errors should be bubbled up""" - self.start_dummy_server() + self.start_dummy_server(validate=False) sock = socket.create_connection((self.host, self.port)) with SSLTransport( @@ -184,33 +191,32 @@ def test_unwrap_existing_socket(self) -> None: """ def shutdown_handler(listener: socket.socket) -> None: - sock = listener.accept()[0] - ssl_sock = self.server_context.wrap_socket(sock, server_side=True) - - request = consume_socket(ssl_sock) - validate_request(request) - ssl_sock.sendall(sample_response()) - - unwrapped_sock = ssl_sock.unwrap() - - request = consume_socket(unwrapped_sock) - validate_request(request) - unwrapped_sock.sendall(sample_response()) + with listener.accept()[0] as sock, self.server_context.wrap_socket( + sock, server_side=True + ) as ssl_sock: + request = consume_socket(ssl_sock) + validate_request(request) + ssl_sock.sendall(sample_response()) + + with ssl_sock.unwrap() as unwrapped_sock: + request = consume_socket(unwrapped_sock) + validate_request(request) + unwrapped_sock.sendall(sample_response()) self.start_dummy_server(shutdown_handler) - sock = socket.create_connection((self.host, self.port)) - ssock = SSLTransport(sock, self.client_context, server_hostname="localhost") + with socket.create_connection((self.host, self.port)) as sock: + ssock = SSLTransport(sock, self.client_context, 
server_hostname="localhost") - # request/response over TLS. - ssock.sendall(sample_request()) - response = consume_socket(ssock) - validate_response(response) + # request/response over TLS. + ssock.sendall(sample_request()) + response = consume_socket(ssock) + validate_response(response) - # request/response over plaintext after unwrap. - ssock.unwrap() - sock.sendall(sample_request()) - response = consume_socket(sock) - validate_response(response) + # request/response over plaintext after unwrap. + ssock.unwrap() + sock.sendall(sample_request()) + response = consume_socket(sock) + validate_response(response) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_ssl_object_attributes(self) -> None: @@ -222,7 +228,7 @@ def test_ssl_object_attributes(self) -> None: sock, self.client_context, server_hostname="localhost" ) as ssock: cipher = ssock.cipher() - assert type(cipher) == tuple + assert type(cipher) is tuple # No chosen protocol through ALPN or NPN. assert ssock.selected_alpn_protocol() is None @@ -484,11 +490,11 @@ def test_tls_in_tls_makefile_rw_text(self) -> None: write.flush() response = read.read() - assert isinstance(response, str) + assert type(response) is str if "\r" not in response: # Carriage return will be removed when reading as a file on # some platforms. We add it before the comparison. - assert isinstance(response, str) + assert type(response) is str response = response.replace("\n", "\r\n") validate_response(response, binary=False) diff --git a/test/test_util.py b/test/test_util.py index 0c46aa1dd3..268f79f0dc 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -41,9 +41,6 @@ from . import clear_warnings -if typing.TYPE_CHECKING: - from typing_extensions import Literal - # This number represents a time in seconds, it doesn't mean anything in # isolation. 
Setting to a high-ish value to avoid conflicts with the smaller # numbers used for timeouts @@ -516,7 +513,7 @@ def test_netloc(self, url: str, expected_netloc: str | None) -> None: @pytest.mark.parametrize("url, expected_url", url_vulnerabilities) def test_url_vulnerabilities( - self, url: str, expected_url: Literal[False] | Url + self, url: str, expected_url: typing.Literal[False] | Url ) -> None: if expected_url is False: with pytest.raises(LocationParseError): @@ -748,7 +745,7 @@ def test_timeout_elapsed(self, time_monotonic: MagicMock) -> None: def test_is_fp_closed_object_supports_closed(self) -> None: class ClosedFile: @property - def closed(self) -> Literal[True]: + def closed(self) -> typing.Literal[True]: return True assert is_fp_closed(ClosedFile()) @@ -764,7 +761,7 @@ def fp(self) -> None: def test_is_fp_closed_object_has_fp(self) -> None: class FpFile: @property - def fp(self) -> Literal[True]: + def fp(self) -> typing.Literal[True]: return True assert not is_fp_closed(FpFile()) @@ -1075,8 +1072,7 @@ def test_ssl_wrap_socket_sni_none_no_warn(self) -> None: # Python OK -> reliable ("OpenSSL 1.1.1", 0x10101000, "cpython", (3, 9, 3), None, True), # PyPy: depends on the version - ("OpenSSL 1.1.1", 0x10101000, "pypy", (3, 6, 9), (7, 3, 7), False), - ("OpenSSL 1.1.1", 0x10101000, "pypy", (3, 7, 13), (7, 3, 9), False), + ("OpenSSL 1.1.1", 0x10101000, "pypy", (3, 9, 9), (7, 3, 7), False), ("OpenSSL 1.1.1", 0x101010CF, "pypy", (3, 8, 12), (7, 3, 8), True), # OpenSSL OK -> reliable ("OpenSSL 1.1.1", 0x101010CF, "cpython", (3, 9, 2), None, True), diff --git a/test/tz_stub.py b/test/tz_stub.py index 41b114bb2a..27f119e634 100644 --- a/test/tz_stub.py +++ b/test/tz_stub.py @@ -9,7 +9,7 @@ import pytest try: - import zoneinfo # type: ignore[import] + import zoneinfo except ImportError: # Python < 3.9 from backports import zoneinfo # type: ignore[no-redef] diff --git a/test/with_dummyserver/test_connection.py b/test/with_dummyserver/test_connection.py index 2442c8ad2d..29f3786e27 100644 --- a/test/with_dummyserver/test_connection.py +++ b/test/with_dummyserver/test_connection.py @@ -1,5 +1,6 @@ from __future__ import annotations +import contextlib import sys import typing from http.client import ResponseNotReady @@ -7,7 +8,7 @@ import pytest -from dummyserver.testcase import HTTPDummyServerTestCase as server +from dummyserver.testcase import HypercornDummyServerTestCase as server from urllib3 import HTTPConnectionPool from urllib3.response import HTTPResponse @@ -23,78 +24,59 @@ def pool() -> typing.Generator[HTTPConnectionPool, None, None]: def test_returns_urllib3_HTTPResponse(pool: HTTPConnectionPool) -> None: - conn = pool._get_conn() - - method = "GET" - path = "/" - - conn.request(method, path) - - response = conn.getresponse() - - assert isinstance(response, HTTPResponse) + with contextlib.closing(pool._get_conn()) as conn: + conn.request("GET", "/") + response = conn.getresponse() + assert isinstance(response, HTTPResponse) @pytest.mark.skipif(not hasattr(sys, "audit"), reason="requires python 3.8+") @mock.patch("urllib3.connection.sys.audit") def test_audit_event(audit_mock: mock.Mock, pool: HTTPConnectionPool) -> None: - conn = pool._get_conn() - conn.request("GET", "/") - audit_mock.assert_any_call("http.client.connect", conn, conn.host, conn.port) - # Ensure the event is raised only once. 
- connect_events = [ - call for call in audit_mock.mock_calls if call.args[0] == "http.client.connect" - ] - assert len(connect_events) == 1 + with contextlib.closing(pool._get_conn()) as conn: + conn.request("GET", "/") + audit_mock.assert_any_call("http.client.connect", conn, conn.host, conn.port) + # Ensure the event is raised only once. + connect_events = [ + call + for call in audit_mock.mock_calls + if call.args[0] == "http.client.connect" + ] + assert len(connect_events) == 1 def test_does_not_release_conn(pool: HTTPConnectionPool) -> None: - conn = pool._get_conn() - - method = "GET" - path = "/" + with contextlib.closing(pool._get_conn()) as conn: + conn.request("GET", "/") + response = conn.getresponse() - conn.request(method, path) - - response = conn.getresponse() - - response.release_conn() - assert pool.pool.qsize() == 0 # type: ignore[union-attr] + response.release_conn() + assert pool.pool.qsize() == 0 # type: ignore[union-attr] def test_releases_conn(pool: HTTPConnectionPool) -> None: - conn = pool._get_conn() - assert conn is not None - - method = "GET" - path = "/" - - conn.request(method, path) + with contextlib.closing(pool._get_conn()) as conn: + conn.request("GET", "/") + response = conn.getresponse() - response = conn.getresponse() - # If these variables are set by the pool - # then the response can release the connection - # back into the pool. - response._pool = pool # type: ignore[attr-defined] - response._connection = conn # type: ignore[attr-defined] + # If these variables are set by the pool + # then the response can release the connection + # back into the pool. + response._pool = pool # type: ignore[attr-defined] + response._connection = conn # type: ignore[attr-defined] - response.release_conn() - assert pool.pool.qsize() == 1 # type: ignore[union-attr] + response.release_conn() + assert pool.pool.qsize() == 1 # type: ignore[union-attr] def test_double_getresponse(pool: HTTPConnectionPool) -> None: - conn = pool._get_conn() - - method = "GET" - path = "/" - - conn.request(method, path) - - _ = conn.getresponse() + with contextlib.closing(pool._get_conn()) as conn: + conn.request("GET", "/") + _ = conn.getresponse() - # Calling getrepsonse() twice should cause an error - with pytest.raises(ResponseNotReady): - conn.getresponse() + # Calling getrepsonse() twice should cause an error + with pytest.raises(ResponseNotReady): + conn.getresponse() def test_connection_state_properties(pool: HTTPConnectionPool) -> None: diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py index fdfb2c9aba..4fbe6a4f74 100644 --- a/test/with_dummyserver/test_connectionpool.py +++ b/test/with_dummyserver/test_connectionpool.py @@ -12,8 +12,8 @@ import pytest -from dummyserver.server import HAS_IPV6_AND_DNS, NoIPv6Warning -from dummyserver.testcase import HTTPDummyServerTestCase, SocketDummyServerTestCase +from dummyserver.socketserver import NoIPv6Warning +from dummyserver.testcase import HypercornDummyServerTestCase, SocketDummyServerTestCase from urllib3 import HTTPConnectionPool, encode_multipart_formdata from urllib3._collections import HTTPHeaderDict from urllib3.connection import _get_default_user_agent @@ -104,7 +104,7 @@ def test_timeout(self) -> None: delta = time.time() - now message = "timeout was pool-level SHORT_TIMEOUT rather than request-level LONG_TIMEOUT" - assert delta >= LONG_TIMEOUT, message + assert delta >= (LONG_TIMEOUT - 1e-5), message block_event.set() # Release request # Timeout passed directly to request 
should raise a request timeout @@ -198,7 +198,7 @@ def test_create_connection_timeout(self) -> None: conn.connect() -class TestConnectionPool(HTTPDummyServerTestCase): +class TestConnectionPool(HypercornDummyServerTestCase): def test_get(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/specific_method", fields={"method": "GET"}) @@ -231,7 +231,7 @@ def test_upload(self) -> None: "upload_filename": "lolcat.txt", "filefield": ("lolcat.txt", data), } - fields["upload_size"] = len(data) # type: ignore + fields["upload_size"] = len(data) # type: ignore[assignment] with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("POST", "/upload", fields=fields) @@ -270,7 +270,7 @@ def test_unicode_upload(self) -> None: "upload_filename": filename, fieldname: (filename, data), } - fields["upload_size"] = size # type: ignore + fields["upload_size"] = size # type: ignore[assignment] with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("POST", "/upload", fields=fields) assert r.status == 200, r.data @@ -363,7 +363,7 @@ def test_connection_error_retries(self) -> None: with HTTPConnectionPool(self.host, port) as pool: with pytest.raises(MaxRetryError) as e: pool.request("GET", "/", retries=Retry(connect=3)) - assert type(e.value.reason) == NewConnectionError + assert type(e.value.reason) is NewConnectionError def test_timeout_success(self) -> None: timeout = Timeout(connect=3, read=5, total=None) @@ -480,11 +480,22 @@ def test_redirect(self) -> None: assert r.status == 200 assert r.data == b"Dummy server!" + def test_303_redirect_makes_request_lose_body(self) -> None: + with HTTPConnectionPool(self.host, self.port) as pool: + response = pool.request( + "POST", + "/redirect", + fields={"target": "/headers_and_params", "status": "303 See Other"}, + ) + data = response.json() + assert data["params"] == {} + assert "Content-Type" not in HTTPHeaderDict(data["headers"]) + def test_bad_connect(self) -> None: with HTTPConnectionPool("badhost.invalid", self.port) as pool: with pytest.raises(MaxRetryError) as e: pool.request("GET", "/", retries=5) - assert type(e.value.reason) == NameResolutionError + assert type(e.value.reason) is NameResolutionError def test_keepalive(self) -> None: with HTTPConnectionPool(self.host, self.port, block=True, maxsize=1) as pool: @@ -769,7 +780,9 @@ def test_percent_encode_invalid_target_chars(self) -> None: def test_source_address(self) -> None: for addr, is_ipv6 in VALID_SOURCE_ADDRESSES: - if is_ipv6 and not HAS_IPV6_AND_DNS: + if is_ipv6: + # TODO enable if HAS_IPV6_AND_DNS when this is fixed: + # https://github.com/pgjones/hypercorn/issues/160 warnings.warn("No IPv6 support: skipping.", NoIPv6Warning) continue with HTTPConnectionPool( @@ -879,7 +892,7 @@ def test_preserves_path_dot_segments(self) -> None: """ConnectionPool preserves dot segments in the URI""" with HTTPConnectionPool(self.host, self.port) as pool: response = pool.request("GET", "/echo_uri/seg0/../seg2") - assert response.data == b"/echo_uri/seg0/../seg2" + assert response.data == b"/echo_uri/seg0/../seg2?" 
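# A standalone sketch (not from this diff) of the behaviour that
# test_303_redirect_makes_request_lose_body above exercises: answering a POST
# with "303 See Other" makes the client re-issue the request as a bare GET, so
# the redirected request carries no body, no Content-Type and no form fields.
# The host, port and /redirect endpoint are placeholders standing in for the
# Hypercorn dummy server used by the test suite.
import urllib3

http = urllib3.PoolManager()
resp = http.request(
    "POST",
    "http://localhost:8080/redirect",  # assumed endpoint replying 303 See Other
    fields={"target": "/headers_and_params", "status": "303 See Other"},
)
# The response comes from the redirect target, which only ever saw a GET.
print(resp.status)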
def test_default_user_agent_header(self) -> None: """ConnectionPool has a default user agent""" @@ -963,54 +976,6 @@ def test_no_user_agent_header(self) -> None: assert no_ua_headers["User-Agent"] == SKIP_HEADER assert pool_headers.get("User-Agent") == custom_ua - @pytest.mark.parametrize( - "accept_encoding", - [ - "Accept-Encoding", - "accept-encoding", - b"Accept-Encoding", - b"accept-encoding", - None, - ], - ) - @pytest.mark.parametrize("host", ["Host", "host", b"Host", b"host", None]) - @pytest.mark.parametrize( - "user_agent", ["User-Agent", "user-agent", b"User-Agent", b"user-agent", None] - ) - @pytest.mark.parametrize("chunked", [True, False]) - def test_skip_header( - self, - accept_encoding: str | None, - host: str | None, - user_agent: str | None, - chunked: bool, - ) -> None: - headers = {} - - if accept_encoding is not None: - headers[accept_encoding] = SKIP_HEADER - if host is not None: - headers[host] = SKIP_HEADER - if user_agent is not None: - headers[user_agent] = SKIP_HEADER - - with HTTPConnectionPool(self.host, self.port) as pool: - r = pool.request("GET", "/headers", headers=headers, chunked=chunked) - request_headers = r.json() - - if accept_encoding is None: - assert "Accept-Encoding" in request_headers - else: - assert accept_encoding not in request_headers - if host is None: - assert "Host" in request_headers - else: - assert host not in request_headers - if user_agent is None: - assert "User-Agent" in request_headers - else: - assert user_agent not in request_headers - @pytest.mark.parametrize("header", ["Content-Length", "content-length"]) @pytest.mark.parametrize("chunked", [True, False]) def test_skip_header_non_supported(self, header: str, chunked: bool) -> None: @@ -1050,9 +1015,11 @@ def test_headers_not_modified_by_request( else: conn = pool._get_conn() conn.request("GET", "/headers", chunked=chunked) + conn.getresponse().close() + conn.close() assert pool.headers == {"key": "val"} - assert isinstance(pool.headers, header_type) + assert type(pool.headers) is header_type with HTTPConnectionPool(self.host, self.port) as pool: if pool_request: @@ -1060,6 +1027,8 @@ def test_headers_not_modified_by_request( else: conn = pool._get_conn() conn.request("GET", "/headers", headers=headers, chunked=chunked) + conn.getresponse().close() + conn.close() assert headers == {"key": "val"} @@ -1079,6 +1048,7 @@ def test_request_chunked_is_deprecated( resp = conn.getresponse() assert resp.status == 200 assert resp.json()["Transfer-Encoding"] == "chunked" + conn.close() def test_bytes_header(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: @@ -1103,7 +1073,7 @@ def test_user_agent_non_ascii_user_agent(self, user_agent: str) -> None: assert request_headers["User-Agent"] == "Schönefeld/1.18.0" -class TestRetry(HTTPDummyServerTestCase): +class TestRetry(HypercornDummyServerTestCase): def test_max_retry(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: with pytest.raises(MaxRetryError): @@ -1267,7 +1237,7 @@ def test_multi_redirect_history(self) -> None: assert actual == expected -class TestRetryAfter(HTTPDummyServerTestCase): +class TestRetryAfter(HypercornDummyServerTestCase): def test_retry_after(self) -> None: # Request twice in a second to get a 429 response. 
with HTTPConnectionPool(self.host, self.port) as pool: @@ -1353,10 +1323,10 @@ def test_redirect_after(self) -> None: assert delta < 1 -class TestFileBodiesOnRetryOrRedirect(HTTPDummyServerTestCase): +class TestFileBodiesOnRetryOrRedirect(HypercornDummyServerTestCase): def test_retries_put_filehandle(self) -> None: """HTTP PUT retry with a file-like object should not timeout""" - with HTTPConnectionPool(self.host, self.port, timeout=0.1) as pool: + with HTTPConnectionPool(self.host, self.port, timeout=LONG_TIMEOUT) as pool: retry = Retry(total=3, status_forcelist=[418]) # httplib reads in 8k chunks; use a larger content length content_length = 65535 @@ -1379,7 +1349,7 @@ def test_retries_put_filehandle(self) -> None: def test_redirect_put_file(self) -> None: """PUT with file object should work with a redirection response""" - with HTTPConnectionPool(self.host, self.port, timeout=0.1) as pool: + with HTTPConnectionPool(self.host, self.port, timeout=LONG_TIMEOUT) as pool: retry = Retry(total=3, status_forcelist=[418]) # httplib reads in 8k chunks; use a larger content length content_length = 65535 @@ -1414,14 +1384,14 @@ def tell(self) -> typing.NoReturn: # httplib uses fileno if Content-Length isn't supplied, # which is unsupported by BytesIO. headers = {"Content-Length": "8"} - with HTTPConnectionPool(self.host, self.port, timeout=0.1) as pool: + with HTTPConnectionPool(self.host, self.port, timeout=LONG_TIMEOUT) as pool: with pytest.raises( UnrewindableBodyError, match="Unable to record file position for" ): pool.urlopen("PUT", url, headers=headers, body=body) -class TestRetryPoolSize(HTTPDummyServerTestCase): +class TestRetryPoolSize(HypercornDummyServerTestCase): def test_pool_size_retry(self) -> None: retries = Retry(total=1, raise_on_status=False, status_forcelist=[404]) with HTTPConnectionPool( @@ -1431,7 +1401,7 @@ def test_pool_size_retry(self) -> None: assert pool.num_connections == 1 -class TestRedirectPoolSize(HTTPDummyServerTestCase): +class TestRedirectPoolSize(HypercornDummyServerTestCase): def test_pool_size_redirect(self) -> None: retries = Retry( total=1, raise_on_status=False, status_forcelist=[404], redirect=True diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py index ec37d92b02..b8353d758b 100644 --- a/test/with_dummyserver/test_https.py +++ b/test/with_dummyserver/test_https.py @@ -5,7 +5,6 @@ import os.path import shutil import ssl -import sys import tempfile import warnings from pathlib import Path @@ -13,9 +12,7 @@ LONG_TIMEOUT, SHORT_TIMEOUT, TARPIT_HOST, - notSecureTransport, requires_network, - requires_ssl_context_keyfile_password, resolvesLocalhostFQDN, ) from test.conftest import ServerConfig @@ -26,13 +23,13 @@ import urllib3.util as util import urllib3.util.ssl_ -from dummyserver.server import ( +from dummyserver.socketserver import ( DEFAULT_CA, DEFAULT_CA_KEY, DEFAULT_CERTS, encrypt_key_pem, ) -from dummyserver.testcase import HTTPSDummyServerTestCase +from dummyserver.testcase import HTTPSHypercornDummyServerTestCase from urllib3 import HTTPSConnectionPool from urllib3.connection import RECENT_DATE, HTTPSConnection, VerifiedHTTPSConnection from urllib3.exceptions import ( @@ -68,7 +65,7 @@ CLIENT_CERT = CLIENT_INTERMEDIATE_PEM -class TestHTTPS(HTTPSDummyServerTestCase): +class BaseTestHTTPS(HTTPSHypercornDummyServerTestCase): tls_protocol_name: str | None = None def tls_protocol_not_default(self) -> bool: @@ -86,11 +83,17 @@ def tls_version(self) -> ssl.TLSVersion: def ssl_version(self) -> int: if 
self.tls_protocol_name is None: return pytest.skip("Skipping base test class") - attribute = f"PROTOCOL_{self.tls_protocol_name.replace('.', '_')}" - ssl_version = getattr(ssl, attribute, None) - if ssl_version is None: - return pytest.skip(f"ssl.{attribute} isn't available") - return ssl_version # type: ignore[no-any-return] + + if self.tls_protocol_name == "TLSv1.3" and ssl.HAS_TLSv1_3: + return ssl.PROTOCOL_TLS_CLIENT + if self.tls_protocol_name == "TLSv1.2" and ssl.HAS_TLSv1_2: + return ssl.PROTOCOL_TLSv1_2 + if self.tls_protocol_name == "TLSv1.1" and ssl.HAS_TLSv1_1: + return ssl.PROTOCOL_TLSv1_1 + if self.tls_protocol_name == "TLSv1" and ssl.HAS_TLSv1: + return ssl.PROTOCOL_TLSv1 + else: + return pytest.skip(f"{self.tls_protocol_name} isn't available") @classmethod def setup_class(cls) -> None: @@ -132,7 +135,7 @@ def teardown_class(cls) -> None: shutil.rmtree(cls.certs_dir) - def test_simple(self) -> None: + def test_simple(self, http_version: str) -> None: with HTTPSConnectionPool( self.host, self.port, @@ -141,6 +144,8 @@ def test_simple(self) -> None: ) as https_pool: r = https_pool.request("GET", "/") assert r.status == 200, r.data + assert r.headers["server"] == f"hypercorn-{http_version}" + assert r.data == b"Dummy server!" @resolvesLocalhostFQDN() def test_dotted_fqdn(self) -> None: @@ -190,7 +195,6 @@ def test_client_no_intermediate(self) -> None: with pytest.raises((SSLError, ProtocolError)): https_pool.request("GET", "/certificate", retries=False) - @requires_ssl_context_keyfile_password() def test_client_key_password(self) -> None: with HTTPSConnectionPool( self.host, @@ -205,7 +209,6 @@ def test_client_key_password(self) -> None: subject = r.json() assert subject["organizationalUnitName"].startswith("Testing cert") - @requires_ssl_context_keyfile_password() def test_client_encrypted_key_requires_password(self) -> None: with HTTPSConnectionPool( self.host, @@ -218,7 +221,7 @@ def test_client_encrypted_key_requires_password(self) -> None: with pytest.raises(MaxRetryError, match="password is required") as e: https_pool.request("GET", "/certificate") - assert isinstance(e.value.reason, SSLError) + assert type(e.value.reason) is SSLError def test_verified(self) -> None: with HTTPSConnectionPool( @@ -228,8 +231,8 @@ def test_verified(self) -> None: ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: - conn = https_pool._new_conn() - assert conn.__class__ == VerifiedHTTPSConnection + with contextlib.closing(https_pool._new_conn()) as conn: + assert conn.__class__ == VerifiedHTTPSConnection with warnings.catch_warnings(record=True) as w: r = https_pool.request("GET", "/") @@ -243,8 +246,8 @@ def test_verified_with_context(self) -> None: ) ctx.load_verify_locations(cafile=DEFAULT_CA) with HTTPSConnectionPool(self.host, self.port, ssl_context=ctx) as https_pool: - conn = https_pool._new_conn() - assert conn.__class__ == VerifiedHTTPSConnection + with contextlib.closing(https_pool._new_conn()) as conn: + assert conn.__class__ == VerifiedHTTPSConnection with mock.patch("warnings.warn") as warn: r = https_pool.request("GET", "/") @@ -258,15 +261,14 @@ def test_context_combines_with_ca_certs(self) -> None: with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_context=ctx ) as https_pool: - conn = https_pool._new_conn() - assert conn.__class__ == VerifiedHTTPSConnection + with contextlib.closing(https_pool._new_conn()) as conn: + assert conn.__class__ == VerifiedHTTPSConnection with mock.patch("warnings.warn") as warn: r = 
https_pool.request("GET", "/") assert r.status == 200 assert not warn.called, warn.call_args_list - @notSecureTransport() # SecureTransport does not support cert directories def test_ca_dir_verified(self, tmp_path: Path) -> None: # OpenSSL looks up certificates by the hash for their name, see c_rehash # TODO infer the bytes using `cryptography.x509.Name.public_bytes`. @@ -280,8 +282,8 @@ def test_ca_dir_verified(self, tmp_path: Path) -> None: ca_cert_dir=str(tmp_path), ssl_minimum_version=self.tls_version(), ) as https_pool: - conn = https_pool._new_conn() - assert conn.__class__ == VerifiedHTTPSConnection + with contextlib.closing(https_pool._new_conn()) as conn: + assert conn.__class__ == VerifiedHTTPSConnection with warnings.catch_warnings(record=True) as w: r = https_pool.request("GET", "/") @@ -299,7 +301,7 @@ def test_invalid_common_name(self) -> None: ) as https_pool: with pytest.raises(MaxRetryError) as e: https_pool.request("GET", "/", retries=0) - assert isinstance(e.value.reason, SSLError) + assert type(e.value.reason) is SSLError assert "doesn't match" in str( e.value.reason ) or "certificate verify failed" in str(e.value.reason) @@ -314,7 +316,7 @@ def test_verified_with_bad_ca_certs(self) -> None: ) as https_pool: with pytest.raises(MaxRetryError) as e: https_pool.request("GET", "/") - assert isinstance(e.value.reason, SSLError) + assert type(e.value.reason) is SSLError assert ( "certificate verify failed" in str(e.value.reason) # PyPy is more specific @@ -329,14 +331,11 @@ def test_wrap_socket_failure_resource_leak(self) -> None: ca_certs=self.bad_ca_path, ssl_minimum_version=self.tls_version(), ) as https_pool: - conn = https_pool._get_conn() - try: + with contextlib.closing(https_pool._get_conn()) as conn: with pytest.raises(ssl.SSLError): conn.connect() assert conn.sock is not None # type: ignore[attr-defined] - finally: - conn.close() def test_verified_without_ca_certs(self) -> None: # default is cert_reqs=None which is ssl.CERT_NONE @@ -348,7 +347,7 @@ def test_verified_without_ca_certs(self) -> None: ) as https_pool: with pytest.raises(MaxRetryError) as e: https_pool.request("GET", "/") - assert isinstance(e.value.reason, SSLError) + assert type(e.value.reason) is SSLError # there is a different error message depending on whether or # not pyopenssl is injected assert ( @@ -453,6 +452,8 @@ def test_server_hostname(self) -> None: # the python ssl module). 
if hasattr(conn.sock, "server_hostname"): # type: ignore[attr-defined] assert conn.sock.server_hostname == "localhost" # type: ignore[attr-defined] + conn.getresponse().close() + conn.close() def test_assert_fingerprint_md5(self) -> None: with HTTPSConnectionPool( @@ -496,7 +497,7 @@ def test_assert_invalid_fingerprint(self) -> None: def _test_request(pool: HTTPSConnectionPool) -> SSLError: with pytest.raises(MaxRetryError) as cm: pool.request("GET", "/", retries=0) - assert isinstance(cm.value.reason, SSLError) + assert type(cm.value.reason) is SSLError return cm.value.reason with HTTPSConnectionPool( @@ -539,7 +540,7 @@ def test_verify_none_and_bad_fingerprint(self) -> None: ) as https_pool: with pytest.raises(MaxRetryError) as cm: https_pool.request("GET", "/", retries=0) - assert isinstance(cm.value.reason, SSLError) + assert type(cm.value.reason) is SSLError def test_verify_none_and_good_fingerprint(self) -> None: with HTTPSConnectionPool( @@ -553,12 +554,7 @@ def test_verify_none_and_good_fingerprint(self) -> None: ) as https_pool: https_pool.request("GET", "/") - @notSecureTransport() def test_good_fingerprint_and_hostname_mismatch(self) -> None: - # This test doesn't run with SecureTransport because we don't turn off - # hostname validation without turning off all validation, which this - # test doesn't do (deliberately). We should revisit this if we make - # new decisions. with HTTPSConnectionPool( "127.0.0.1", self.port, @@ -623,8 +619,7 @@ def test_tunnel(self) -> None: cert_reqs="CERT_NONE", ssl_minimum_version=self.tls_version(), ) as https_pool: - conn = https_pool._new_conn() - try: + with contextlib.closing(https_pool._new_conn()) as conn: conn.set_tunnel(self.host, self.port) with mock.patch.object( conn, "_tunnel", create=True, return_value=None @@ -632,8 +627,6 @@ def test_tunnel(self) -> None: with pytest.warns(InsecureRequestWarning): https_pool._make_request(conn, "GET", "/") conn_tunnel.assert_called_once_with() - finally: - conn.close() @requires_network() def test_enhanced_timeout(self) -> None: @@ -644,14 +637,11 @@ def test_enhanced_timeout(self) -> None: retries=False, cert_reqs="CERT_REQUIRED", ) as https_pool: - conn = https_pool._new_conn() - try: + with contextlib.closing(https_pool._new_conn()) as conn: with pytest.raises(ConnectTimeoutError): https_pool.request("GET", "/") with pytest.raises(ConnectTimeoutError): https_pool._make_request(conn, "GET", "/") - finally: - conn.close() with HTTPSConnectionPool( TARPIT_HOST, @@ -670,14 +660,11 @@ def test_enhanced_timeout(self) -> None: retries=False, cert_reqs="CERT_REQUIRED", ) as https_pool: - conn = https_pool._new_conn() - try: + with contextlib.closing(https_pool._new_conn()) as conn: with pytest.raises(ConnectTimeoutError): https_pool.request( "GET", "/", timeout=Timeout(total=None, connect=SHORT_TIMEOUT) ) - finally: - conn.close() def test_enhanced_ssl_connection(self) -> None: fingerprint = "72:8B:55:4C:9A:FC:1E:88:A1:1C:AD:1B:B2:E7:CC:3E:DB:C8:F9:8A" @@ -805,29 +792,28 @@ def test_tls_protocol_name_of_socket(self) -> None: self.port, ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), + ssl_maximum_version=self.tls_version(), ) as https_pool: - conn = https_pool._get_conn() - try: + with contextlib.closing(https_pool._get_conn()) as conn: conn.connect() if not hasattr(conn.sock, "version"): # type: ignore[attr-defined] pytest.skip("SSLSocket.version() not available") assert conn.sock.version() == self.tls_protocol_name # type: ignore[attr-defined] - finally: - conn.close() def 
test_ssl_version_is_deprecated(self) -> None: if self.tls_protocol_name is None: pytest.skip("Skipping base test class") + if self.ssl_version() == ssl.PROTOCOL_TLS_CLIENT: + pytest.skip( + "Skipping because ssl_version=ssl.PROTOCOL_TLS_CLIENT is not deprecated" + ) with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_version=self.ssl_version() ) as https_pool: - conn = https_pool._get_conn() - try: + with contextlib.closing(https_pool._get_conn()) as conn: with pytest.warns(DeprecationWarning) as w: conn.connect() - finally: - conn.close() assert len(w) >= 1 assert any(x.category == DeprecationWarning for x in w) @@ -856,12 +842,9 @@ def test_ssl_version_with_protocol_tls_or_client_not_deprecated( with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_version=ssl_version ) as https_pool: - conn = https_pool._get_conn() - try: + with contextlib.closing(https_pool._get_conn()) as conn: with warnings.catch_warnings(record=True) as w: conn.connect() - finally: - conn.close() assert [str(wm) for wm in w if wm.category != ResourceWarning] == [] @@ -877,12 +860,9 @@ def test_no_tls_version_deprecation_with_ssl_context(self) -> None: ca_certs=DEFAULT_CA, ssl_context=ctx, ) as https_pool: - conn = https_pool._get_conn() - try: + with contextlib.closing(https_pool._get_conn()) as conn: with warnings.catch_warnings(record=True) as w: conn.connect() - finally: - conn.close() assert [str(wm) for wm in w if wm.category != ResourceWarning] == [] @@ -909,11 +889,14 @@ def test_tls_version_maximum_and_minimum(self) -> None: conn = https_pool._get_conn() try: conn.connect() - assert conn.sock.version() == self.tls_protocol_name # type: ignore[attr-defined] + if maximum_version == TLSVersion.MAXIMUM_SUPPORTED: + # A higher protocol than tls_protocol_name could be negotiated + assert conn.sock.version() >= self.tls_protocol_name # type: ignore[attr-defined] + else: + assert conn.sock.version() == self.tls_protocol_name # type: ignore[attr-defined] finally: conn.close() - @pytest.mark.skipif(sys.version_info < (3, 8), reason="requires python 3.8+") def test_sslkeylogfile( self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch ) -> None: @@ -977,13 +960,12 @@ def test_default_ssl_context_ssl_min_max_versions(self) -> None: ctx = urllib3.util.ssl_.create_urllib3_context() assert ctx.minimum_version == ssl.TLSVersion.TLSv1_2 # urllib3 sets a default maximum version only when it is - # injected with PyOpenSSL- or SecureTransport-backed - # SSL-support. + # injected with PyOpenSSL SSL-support. # Otherwise, the default maximum version is set by Python's # `ssl.SSLContext`. The value respects OpenSSL configuration and # can be different from `ssl.TLSVersion.MAXIMUM_SUPPORTED`. 
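As the comment above notes, without PyOpenSSL injection the expected maximum version is simply whatever a fresh stdlib `ssl.SSLContext` reports under the local OpenSSL configuration. A minimal sketch to observe that value (assuming only the stdlib `ssl` module, nothing urllib3-specific):

    import ssl

    # The default maximum version comes from the OpenSSL configuration and is
    # not guaranteed to equal ssl.TLSVersion.MAXIMUM_SUPPORTED.
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    print(ctx.maximum_version)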
# https://github.com/urllib3/urllib3/issues/2477#issuecomment-1151452150 - if util.IS_PYOPENSSL or util.IS_SECURETRANSPORT: + if util.IS_PYOPENSSL: expected_maximum_version = ssl.TLSVersion.MAXIMUM_SUPPORTED else: expected_maximum_version = ssl.SSLContext( @@ -992,6 +974,11 @@ def test_default_ssl_context_ssl_min_max_versions(self) -> None: assert ctx.maximum_version == expected_maximum_version def test_ssl_context_ssl_version_uses_ssl_min_max_versions(self) -> None: + if self.ssl_version() == ssl.PROTOCOL_TLS_CLIENT: + pytest.skip( + "Skipping because ssl_version=ssl.PROTOCOL_TLS_CLIENT is not deprecated" + ) + with pytest.warns( DeprecationWarning, match=r"'ssl_version' option is deprecated and will be removed in " @@ -1005,25 +992,25 @@ def test_ssl_context_ssl_version_uses_ssl_min_max_versions(self) -> None: @pytest.mark.usefixtures("requires_tlsv1") -class TestHTTPS_TLSv1(TestHTTPS): +class TestHTTPS_TLSv1(BaseTestHTTPS): tls_protocol_name = "TLSv1" certs = TLSv1_CERTS @pytest.mark.usefixtures("requires_tlsv1_1") -class TestHTTPS_TLSv1_1(TestHTTPS): +class TestHTTPS_TLSv1_1(BaseTestHTTPS): tls_protocol_name = "TLSv1.1" certs = TLSv1_1_CERTS @pytest.mark.usefixtures("requires_tlsv1_2") -class TestHTTPS_TLSv1_2(TestHTTPS): +class TestHTTPS_TLSv1_2(BaseTestHTTPS): tls_protocol_name = "TLSv1.2" certs = TLSv1_2_CERTS @pytest.mark.usefixtures("requires_tlsv1_3") -class TestHTTPS_TLSv1_3(TestHTTPS): +class TestHTTPS_TLSv1_3(BaseTestHTTPS): tls_protocol_name = "TLSv1.3" certs = TLSv1_3_CERTS @@ -1112,7 +1099,7 @@ def test_hostname_checks_common_name_respected( # IP addresses should fail for commonName. else: assert err is not None - assert type(err.reason) == SSLError + assert type(err.reason) is SSLError assert isinstance( err.reason.args[0], (ssl.SSLCertVerificationError, CertificateError) ) @@ -1160,7 +1147,7 @@ def test_can_validate_ip_san(self, ipv4_san_server: ServerConfig) -> None: class TestHTTPS_IPV6SAN: @pytest.mark.parametrize("host", ["::1", "[::1]"]) def test_can_validate_ipv6_san( - self, ipv6_san_server: ServerConfig, host: str + self, ipv6_san_server: ServerConfig, host: str, http_version: str ) -> None: """Ensure that urllib3 can validate SANs with IPv6 addresses in them.""" with HTTPSConnectionPool( @@ -1171,3 +1158,4 @@ def test_can_validate_ipv6_san( ) as https_pool: r = https_pool.request("GET", "/") assert r.status == 200 + assert r.headers["server"] == f"hypercorn-{http_version}" diff --git a/test/with_dummyserver/test_no_ssl.py b/test/with_dummyserver/test_no_ssl.py index b89f703fac..9a28119abf 100644 --- a/test/with_dummyserver/test_no_ssl.py +++ b/test/with_dummyserver/test_no_ssl.py @@ -8,20 +8,23 @@ import pytest import urllib3 -from dummyserver.testcase import HTTPDummyServerTestCase, HTTPSDummyServerTestCase +from dummyserver.testcase import ( + HTTPSHypercornDummyServerTestCase, + HypercornDummyServerTestCase, +) from urllib3.exceptions import InsecureRequestWarning from ..test_no_ssl import TestWithoutSSL -class TestHTTPWithoutSSL(HTTPDummyServerTestCase, TestWithoutSSL): +class TestHTTPWithoutSSL(HypercornDummyServerTestCase, TestWithoutSSL): def test_simple(self) -> None: with urllib3.HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/") assert r.status == 200, r.data -class TestHTTPSWithoutSSL(HTTPSDummyServerTestCase, TestWithoutSSL): +class TestHTTPSWithoutSSL(HTTPSHypercornDummyServerTestCase, TestWithoutSSL): def test_simple(self) -> None: with urllib3.HTTPSConnectionPool( self.host, self.port, cert_reqs="NONE" diff --git 
a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py index da802a38b3..af77241d6c 100644 --- a/test/with_dummyserver/test_poolmanager.py +++ b/test/with_dummyserver/test_poolmanager.py @@ -7,8 +7,11 @@ import pytest -from dummyserver.server import HAS_IPV6 -from dummyserver.testcase import HTTPDummyServerTestCase, IPv6HTTPDummyServerTestCase +from dummyserver.socketserver import HAS_IPV6 +from dummyserver.testcase import ( + HypercornDummyServerTestCase, + IPv6HypercornDummyServerTestCase, +) from urllib3 import HTTPHeaderDict, HTTPResponse, request from urllib3.connectionpool import port_by_scheme from urllib3.exceptions import MaxRetryError, URLSchemeUnknown @@ -16,7 +19,7 @@ from urllib3.util.retry import Retry -class TestPoolManager(HTTPDummyServerTestCase): +class TestPoolManager(HypercornDummyServerTestCase): @classmethod def setup_class(cls) -> None: super().setup_class() @@ -141,7 +144,11 @@ def test_redirect_cross_host_remove_headers(self) -> None: "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, - headers={"Authorization": "foo", "Cookie": "foo=bar"}, + headers={ + "Authorization": "foo", + "Proxy-Authorization": "bar", + "Cookie": "foo=bar", + }, ) assert r.status == 200 @@ -149,13 +156,18 @@ def test_redirect_cross_host_remove_headers(self) -> None: data = r.json() assert "Authorization" not in data + assert "Proxy-Authorization" not in data assert "Cookie" not in data r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, - headers={"authorization": "foo", "cookie": "foo=bar"}, + headers={ + "authorization": "foo", + "proxy-authorization": "baz", + "cookie": "foo=bar", + }, ) assert r.status == 200 @@ -164,6 +176,8 @@ def test_redirect_cross_host_remove_headers(self) -> None: assert "authorization" not in data assert "Authorization" not in data + assert "proxy-authorization" not in data + assert "Proxy-Authorization" not in data assert "cookie" not in data assert "Cookie" not in data @@ -173,7 +187,11 @@ def test_redirect_cross_host_no_remove_headers(self) -> None: "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, - headers={"Authorization": "foo", "Cookie": "foo=bar"}, + headers={ + "Authorization": "foo", + "Proxy-Authorization": "bar", + "Cookie": "foo=bar", + }, retries=Retry(remove_headers_on_redirect=[]), ) @@ -182,6 +200,7 @@ def test_redirect_cross_host_no_remove_headers(self) -> None: data = r.json() assert data["Authorization"] == "foo" + assert data["Proxy-Authorization"] == "bar" assert data["Cookie"] == "foo=bar" def test_redirect_cross_host_set_removed_headers(self) -> None: @@ -193,6 +212,7 @@ def test_redirect_cross_host_set_removed_headers(self) -> None: headers={ "X-API-Secret": "foo", "Authorization": "bar", + "Proxy-Authorization": "baz", "Cookie": "foo=bar", }, retries=Retry(remove_headers_on_redirect=["X-API-Secret"]), @@ -204,11 +224,13 @@ def test_redirect_cross_host_set_removed_headers(self) -> None: assert "X-API-Secret" not in data assert data["Authorization"] == "bar" + assert data["Proxy-Authorization"] == "baz" assert data["Cookie"] == "foo=bar" headers = { "x-api-secret": "foo", "authorization": "bar", + "proxy-authorization": "baz", "cookie": "foo=bar", } r = http.request( @@ -226,12 +248,14 @@ def test_redirect_cross_host_set_removed_headers(self) -> None: assert "x-api-secret" not in data assert "X-API-Secret" not in data assert data["Authorization"] == "bar" + assert 
data["Proxy-Authorization"] == "baz" assert data["Cookie"] == "foo=bar" # Ensure the header argument itself is not modified in-place. assert headers == { "x-api-secret": "foo", "authorization": "bar", + "proxy-authorization": "baz", "cookie": "foo=bar", } @@ -244,6 +268,20 @@ def test_redirect_without_preload_releases_connection(self) -> None: assert r._pool.num_connections == 1 assert len(http.pools) == 1 + def test_303_redirect_makes_request_lose_body(self) -> None: + with PoolManager() as http: + response = http.request( + "POST", + f"{self.base_url}/redirect", + fields={ + "target": f"{self.base_url}/headers_and_params", + "status": "303 See Other", + }, + ) + data = response.json() + assert data["params"] == {} + assert "Content-Type" not in HTTPHeaderDict(data["headers"]) + def test_unknown_scheme(self) -> None: with PoolManager() as http: unknown_scheme = "unknown" @@ -435,7 +473,7 @@ def test_headers_http_multi_header_multipart(self) -> None: encode_multipart=True, ) returned_headers = r.json()["headers"] - assert returned_headers[4:] == [ + assert returned_headers[5:] == [ ["Multi", "1"], ["Multi", "2"], ["Content-Type", "multipart/form-data; boundary=b"], @@ -453,7 +491,7 @@ def test_headers_http_multi_header_multipart(self) -> None: encode_multipart=True, ) returned_headers = r.json()["headers"] - assert returned_headers[4:] == [ + assert returned_headers[5:] == [ ["Multi", "1"], ["Multi", "2"], # Uses the set value, not the one that would be generated. @@ -483,10 +521,12 @@ def test_http_with_ca_cert_dir(self) -> None: @pytest.mark.parametrize( ["target", "expected_target"], [ + # annoyingly quart.request.full_path adds a stray `?` + ("/echo_uri", b"/echo_uri?"), ("/echo_uri?q=1#fragment", b"/echo_uri?q=1"), ("/echo_uri?#", b"/echo_uri?"), - ("/echo_uri#?", b"/echo_uri"), - ("/echo_uri#?#", b"/echo_uri"), + ("/echo_uri#!", b"/echo_uri?"), + ("/echo_uri#!#", b"/echo_uri?"), ("/echo_uri??#", b"/echo_uri??"), ("/echo_uri?%3f#", b"/echo_uri?%3F"), ("/echo_uri?%3F#", b"/echo_uri?%3F"), @@ -600,18 +640,20 @@ def test_top_level_request_with_timeout(self) -> None: ], ) def test_request_with_json(self, headers: HTTPHeaderDict) -> None: + old_headers = None if headers is None else headers.copy() body = {"attribute": "value"} r = request( method="POST", url=f"{self.base_url}/echo_json", headers=headers, json=body ) assert r.status == 200 assert r.json() == body - if headers is not None and "application/json" not in headers.values(): - assert "text/plain" in r.headers["Content-Type"].replace(" ", "").split(",") - else: - assert "application/json" in r.headers["Content-Type"].replace( - " ", "" - ).split(",") + content_type = HTTPHeaderDict(old_headers).get( + "Content-Type", "application/json" + ) + assert content_type in r.headers["Content-Type"].replace(" ", "").split(",") + + # Ensure the header argument itself is not modified in-place. 
+ assert headers == old_headers def test_top_level_request_with_json_with_httpheaderdict(self) -> None: body = {"attribute": "value"} @@ -648,7 +690,7 @@ def __repr__(self) -> str: @pytest.mark.skipif(not HAS_IPV6, reason="IPv6 is not supported on this system") -class TestIPv6PoolManager(IPv6HTTPDummyServerTestCase): +class TestIPv6PoolManager(IPv6HypercornDummyServerTestCase): @classmethod def setup_class(cls) -> None: super().setup_class() diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py index f4620643f5..397181a9e6 100644 --- a/test/with_dummyserver/test_proxy_poolmanager.py +++ b/test/with_dummyserver/test_proxy_poolmanager.py @@ -1,6 +1,7 @@ from __future__ import annotations import binascii +import contextlib import hashlib import ipaddress import os.path @@ -9,15 +10,18 @@ import socket import ssl import tempfile -from test import LONG_TIMEOUT, SHORT_TIMEOUT, onlySecureTransport, withPyOpenSSL +from test import LONG_TIMEOUT, SHORT_TIMEOUT, resolvesLocalhostFQDN, withPyOpenSSL from test.conftest import ServerConfig import pytest import trustme import urllib3.exceptions -from dummyserver.server import DEFAULT_CA, HAS_IPV6, get_unreachable_address -from dummyserver.testcase import HTTPDummyProxyTestCase, IPv6HTTPDummyProxyTestCase +from dummyserver.socketserver import DEFAULT_CA, HAS_IPV6, get_unreachable_address +from dummyserver.testcase import ( + HypercornDummyProxyTestCase, + IPv6HypercornDummyProxyTestCase, +) from urllib3 import HTTPResponse from urllib3._collections import HTTPHeaderDict from urllib3.connection import VerifiedHTTPSConnection @@ -39,7 +43,17 @@ from .. import TARPIT_HOST, requires_network -class TestHTTPProxyManager(HTTPDummyProxyTestCase): +def assert_is_verified(pm: ProxyManager, *, proxy: bool, target: bool) -> None: + pool = list(pm.pools._container.values())[-1] # retrieve last pool entry + connection = ( + pool.pool.queue[-1] if pool.pool is not None else None + ) # retrieve last connection entry + + assert connection.proxy_is_verified is proxy + assert connection.is_verified is target + + +class TestHTTPProxyManager(HypercornDummyProxyTestCase): @classmethod def setup_class(cls) -> None: super().setup_class() @@ -47,6 +61,7 @@ def setup_class(cls) -> None: cls.http_url_alt = f"http://{cls.http_host_alt}:{int(cls.http_port)}" cls.https_url = f"https://{cls.https_host}:{int(cls.https_port)}" cls.https_url_alt = f"https://{cls.https_host_alt}:{int(cls.https_port)}" + cls.https_url_fqdn = f"https://{cls.https_host}.:{int(cls.https_port)}" cls.proxy_url = f"http://{cls.proxy_host}:{int(cls.proxy_port)}" cls.https_proxy_url = f"https://{cls.proxy_host}:{int(cls.https_proxy_port)}" @@ -78,6 +93,40 @@ def test_https_proxy(self) -> None: r = https.request("GET", f"{self.http_url}/") assert r.status == 200 + def test_is_verified_http_proxy_to_http_target(self) -> None: + with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http: + r = http.request("GET", f"{self.http_url}/") + assert r.status == 200 + assert_is_verified(http, proxy=False, target=False) + + def test_is_verified_http_proxy_to_https_target(self) -> None: + with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http: + r = http.request("GET", f"{self.https_url}/") + assert r.status == 200 + assert_is_verified(http, proxy=False, target=True) + + def test_is_verified_https_proxy_to_http_target(self) -> None: + with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https: + r = https.request("GET", f"{self.http_url}/") + 
assert r.status == 200 + assert_is_verified(https, proxy=True, target=False) + + def test_is_verified_https_proxy_to_https_target(self) -> None: + with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https: + r = https.request("GET", f"{self.https_url}/") + assert r.status == 200 + assert_is_verified(https, proxy=True, target=True) + + def test_http_and_https_kwarg_ca_cert_data_proxy(self) -> None: + with open(DEFAULT_CA) as pem_file: + pem_file_data = pem_file.read() + with proxy_from_url(self.https_proxy_url, ca_cert_data=pem_file_data) as https: + r = https.request("GET", f"{self.https_url}/") + assert r.status == 200 + + r = https.request("GET", f"{self.http_url}/") + assert r.status == 200 + def test_https_proxy_with_proxy_ssl_context(self) -> None: proxy_ssl_context = create_urllib3_context() proxy_ssl_context.load_verify_locations(DEFAULT_CA) @@ -103,17 +152,6 @@ def test_https_proxy_pyopenssl_not_supported(self) -> None: ): https.request("GET", f"{self.https_url}/") - @onlySecureTransport() - def test_https_proxy_securetransport_not_supported(self) -> None: - with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https: - r = https.request("GET", f"{self.http_url}/") - assert r.status == 200 - - with pytest.raises( - ProxySchemeUnsupported, match="isn't available on non-native SSLContext" - ): - https.request("GET", f"{self.https_url}/") - def test_https_proxy_forwarding_for_https(self) -> None: with proxy_from_url( self.https_proxy_url, @@ -132,7 +170,7 @@ def test_nagle_proxy(self) -> None: hc2 = http.connection_from_host(self.http_host, self.http_port) conn = hc2._get_conn() try: - hc2._make_request(conn, "GET", "/") + hc2._make_request(conn, "GET", f"{self.http_url}/") tcp_nodelay_setting = conn.sock.getsockopt( # type: ignore[attr-defined] socket.IPPROTO_TCP, socket.TCP_NODELAY ) @@ -159,10 +197,9 @@ def test_proxy_conn_fail_from_dns( with pytest.raises(MaxRetryError) as e: http.request("GET", f"{target_url}/") - assert type(e.value.reason) == ProxyError - assert ( - type(e.value.reason.original_error) - == urllib3.exceptions.NameResolutionError + assert isinstance(e.value.reason, ProxyError) + assert isinstance( + e.value.reason.original_error, urllib3.exceptions.NameResolutionError ) def test_oldapi(self) -> None: @@ -175,13 +212,21 @@ def test_oldapi(self) -> None: r = http.request("GET", f"{self.https_url}/") assert r.status == 200 + @resolvesLocalhostFQDN() + def test_proxy_https_fqdn(self) -> None: + with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http: + r = http.request("GET", f"{self.https_url_fqdn}/") + assert r.status == 200 + def test_proxy_verified(self) -> None: with proxy_from_url( self.proxy_url, cert_reqs="REQUIRED", ca_certs=self.bad_ca_path ) as http: - https_pool = http._new_pool("https", self.https_host, self.https_port) - with pytest.raises(MaxRetryError) as e: - https_pool.request("GET", "/", retries=0) + with http._new_pool( + "https", self.https_host, self.https_port + ) as https_pool: + with pytest.raises(MaxRetryError) as e: + https_pool.request("GET", "/", retries=0) assert isinstance(e.value.reason, SSLError) assert ( "certificate verify failed" in str(e.value.reason) @@ -192,22 +237,26 @@ def test_proxy_verified(self) -> None: http = proxy_from_url( self.proxy_url, cert_reqs="REQUIRED", ca_certs=DEFAULT_CA ) - https_pool = http._new_pool("https", self.https_host, self.https_port) - - conn = https_pool._new_conn() - assert conn.__class__ == VerifiedHTTPSConnection - https_pool.request("GET", "/") # Should succeed 
without exceptions. + with http._new_pool( + "https", self.https_host, self.https_port + ) as https_pool2: + with contextlib.closing(https_pool._new_conn()) as conn: + assert conn.__class__ == VerifiedHTTPSConnection + https_pool2.request( + "GET", "/" + ) # Should succeed without exceptions. http = proxy_from_url( self.proxy_url, cert_reqs="REQUIRED", ca_certs=DEFAULT_CA ) - https_fail_pool = http._new_pool("https", "127.0.0.1", self.https_port) - - with pytest.raises( - MaxRetryError, match="doesn't match|IP address mismatch" - ) as e: - https_fail_pool.request("GET", "/", retries=0) - assert isinstance(e.value.reason, SSLError) + with http._new_pool( + "https", "127.0.0.1", self.https_port + ) as https_fail_pool: + with pytest.raises( + MaxRetryError, match="doesn't match|IP address mismatch" + ) as e: + https_fail_pool.request("GET", "/", retries=0) + assert isinstance(e.value.reason, SSLError) def test_redirect(self) -> None: with proxy_from_url(self.proxy_url) as http: @@ -478,8 +527,11 @@ def test_forwarding_proxy_request_timeout( # We sent the request to the proxy but didn't get any response # so we're not sure if that's being caused by the proxy or the # target so we put the blame on the target. - assert type(e.value.reason) == ReadTimeoutError + assert isinstance(e.value.reason, ReadTimeoutError) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") @requires_network() @pytest.mark.parametrize( ["proxy_scheme", "target_scheme"], [("http", "https"), ("https", "https")] @@ -498,8 +550,11 @@ def test_tunneling_proxy_request_timeout( timeout = Timeout(connect=LONG_TIMEOUT, read=SHORT_TIMEOUT) proxy.request("GET", target_url, timeout=timeout) - assert type(e.value.reason) == ReadTimeoutError + assert isinstance(e.value.reason, ReadTimeoutError) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") @requires_network() @pytest.mark.parametrize( ["proxy_scheme", "target_scheme", "use_forwarding_for_https"], @@ -525,9 +580,12 @@ def test_forwarding_proxy_connect_timeout( with pytest.raises(MaxRetryError) as e: proxy.request("GET", target_url) - assert type(e.value.reason) == ProxyError - assert type(e.value.reason.original_error) == ConnectTimeoutError + assert isinstance(e.value.reason, ProxyError) + assert isinstance(e.value.reason.original_error, ConnectTimeoutError) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") @requires_network() @pytest.mark.parametrize( ["proxy_scheme", "target_scheme"], [("http", "https"), ("https", "https")] @@ -544,9 +602,12 @@ def test_tunneling_proxy_connect_timeout( with pytest.raises(MaxRetryError) as e: proxy.request("GET", target_url) - assert type(e.value.reason) == ProxyError - assert type(e.value.reason.original_error) == ConnectTimeoutError + assert isinstance(e.value.reason, ProxyError) + assert isinstance(e.value.reason.original_error, ConnectTimeoutError) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") @requires_network() @pytest.mark.parametrize( ["target_scheme", "use_forwarding_for_https"], @@ -568,9 +629,12 @@ 
def test_https_proxy_tls_error( ) as proxy: with pytest.raises(MaxRetryError) as e: proxy.request("GET", target_url) - assert type(e.value.reason) == ProxyError - assert type(e.value.reason.original_error) == SSLError + assert isinstance(e.value.reason, ProxyError) + assert isinstance(e.value.reason.original_error, SSLError) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") @requires_network() @pytest.mark.parametrize( ["proxy_scheme", "use_forwarding_for_https"], @@ -599,8 +663,11 @@ def test_proxy_https_target_tls_error( ) as proxy: with pytest.raises(MaxRetryError) as e: proxy.request("GET", self.https_url) - assert type(e.value.reason) == SSLError + assert isinstance(e.value.reason, SSLError) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") def test_scheme_host_case_insensitive(self) -> None: """Assert that upper-case schemes and hosts are normalized.""" with proxy_from_url(self.proxy_url.upper(), ca_certs=DEFAULT_CA) as http: @@ -627,22 +694,28 @@ def test_scheme_host_case_insensitive(self) -> None: ), ], ) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") def test_invalid_schema(self, url: str, error_msg: str) -> None: with pytest.raises(ProxySchemeUnknown, match=error_msg): proxy_from_url(url) @pytest.mark.skipif(not HAS_IPV6, reason="Only runs on IPv6 systems") -class TestIPv6HTTPProxyManager(IPv6HTTPDummyProxyTestCase): +class TestIPv6HTTPProxyManager(IPv6HypercornDummyProxyTestCase): @classmethod def setup_class(cls) -> None: - HTTPDummyProxyTestCase.setup_class() + super().setup_class() cls.http_url = f"http://{cls.http_host}:{int(cls.http_port)}" cls.http_url_alt = f"http://{cls.http_host_alt}:{int(cls.http_port)}" cls.https_url = f"https://{cls.https_host}:{int(cls.https_port)}" cls.https_url_alt = f"https://{cls.https_host_alt}:{int(cls.https_port)}" cls.proxy_url = f"http://[{cls.proxy_host}]:{int(cls.proxy_port)}" + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") def test_basic_ipv6_proxy(self) -> None: with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http: r = http.request("GET", f"{self.http_url}/") @@ -674,6 +747,9 @@ def _get_certificate_formatted_proxy_host(host: str) -> str: # Transform ipv6 like '::1' to 0:0:0:0:0:0:0:1 via '0000:0000:0000:0000:0000:0000:0000:0001' return addr.exploded.replace("0000", "0").replace("000", "") + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_assert_fingerprint_md5( self, no_san_proxy_with_server: tuple[ServerConfig, ServerConfig] ) -> None: @@ -689,6 +765,9 @@ def test_https_proxy_assert_fingerprint_md5( ) as https: https.request("GET", destination_url) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_assert_fingerprint_md5_non_matching( self, no_san_proxy_with_server: 
tuple[ServerConfig, ServerConfig] ) -> None: @@ -710,6 +789,9 @@ def test_https_proxy_assert_fingerprint_md5_non_matching( assert "Fingerprints did not match" in str(e) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_assert_hostname( self, san_proxy_with_server: tuple[ServerConfig, ServerConfig] ) -> None: @@ -721,6 +803,9 @@ def test_https_proxy_assert_hostname( ) as https: https.request("GET", destination_url) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_assert_hostname_non_matching( self, san_proxy_with_server: tuple[ServerConfig, ServerConfig] ) -> None: @@ -740,6 +825,9 @@ def test_https_proxy_assert_hostname_non_matching( msg = f"hostname \\'{proxy_hostname}\\' doesn\\'t match \\'{proxy_host}\\'" assert msg in str(e) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_hostname_verification( self, no_localhost_san_server: ServerConfig ) -> None: @@ -769,6 +857,9 @@ def test_https_proxy_hostname_verification( ssl_error ) or "Hostname mismatch" in str(ssl_error) + # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning + # see https://github.com/python/cpython/issues/103472 + @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_ipv4_san( self, ipv4_san_proxy_with_server: tuple[ServerConfig, ServerConfig] ) -> None: diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py index cae6b241c7..dceb5ee0ee 100644 --- a/test/with_dummyserver/test_socketlevel.py +++ b/test/with_dummyserver/test_socketlevel.py @@ -11,28 +11,20 @@ import shutil import socket import ssl -import sys import tempfile -import time +import threading import typing import zlib from collections import OrderedDict from pathlib import Path -from test import ( - LONG_TIMEOUT, - SHORT_TIMEOUT, - notSecureTransport, - notWindows, - requires_ssl_context_keyfile_password, - resolvesLocalhostFQDN, -) +from test import LONG_TIMEOUT, SHORT_TIMEOUT, notWindows, resolvesLocalhostFQDN from threading import Event from unittest import mock import pytest import trustme -from dummyserver.server import ( +from dummyserver.socketserver import ( DEFAULT_CA, DEFAULT_CERTS, encrypt_key_pem, @@ -329,11 +321,9 @@ def socket_handler(listener: socket.socket) -> None: done_receiving.set() done_receiving.set() - @requires_ssl_context_keyfile_password() def test_client_cert_with_string_password(self) -> None: self.run_client_cert_with_password_test("letmein") - @requires_ssl_context_keyfile_password() def test_client_cert_with_bytes_password(self) -> None: self.run_client_cert_with_password_test(b"letmein") @@ -385,7 +375,6 @@ def socket_handler(listener: socket.socket) -> None: assert len(client_certs) == 1 - @requires_ssl_context_keyfile_password() def test_load_keyfile_with_invalid_password(self) -> None: assert ssl_.SSLContext is not None context = ssl_.SSLContext(ssl_.PROTOCOL_SSLv23) @@ -396,9 +385,6 @@ def test_load_keyfile_with_invalid_password(self) -> None: password=b"letmei", ) - # For SecureTransport, the validation that would raise an error in - # this case is deferred. 
- @notSecureTransport() def test_load_invalid_cert_file(self) -> None: assert ssl_.SSLContext is not None context = ssl_.SSLContext(ssl_.PROTOCOL_SSLv23) @@ -970,7 +956,11 @@ def socket_handler(listener: socket.socket) -> None: assert response.connection is None def test_socket_close_socket_then_file(self) -> None: - def consume_ssl_socket(listener: socket.socket) -> None: + quit_event = threading.Event() + + def consume_ssl_socket( + listener: socket.socket, + ) -> None: try: with listener.accept()[0] as sock, original_ssl_wrap_socket( sock, @@ -979,11 +969,11 @@ def consume_ssl_socket(listener: socket.socket) -> None: certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) as ssl_sock: - consume_socket(ssl_sock) + consume_socket(ssl_sock, quit_event=quit_event) except (ConnectionResetError, ConnectionAbortedError, OSError): pass - self._start_server(consume_ssl_socket) + self._start_server(consume_ssl_socket, quit_event=quit_event) with socket.create_connection( (self.host, self.port) ) as sock, contextlib.closing( @@ -993,13 +983,13 @@ def consume_ssl_socket(listener: socket.socket) -> None: ) as f: ssl_sock.close() f.close() - # SecureTransport is supposed to raise OSError but raises - # ssl.SSLError when closed because ssl_sock.context is None - with pytest.raises((OSError, ssl.SSLError)): + with pytest.raises(OSError): ssl_sock.sendall(b"hello") assert ssl_sock.fileno() == -1 def test_socket_close_stays_open_with_makefile_open(self) -> None: + quit_event = threading.Event() + def consume_ssl_socket(listener: socket.socket) -> None: try: with listener.accept()[0] as sock, original_ssl_wrap_socket( @@ -1009,11 +999,11 @@ def consume_ssl_socket(listener: socket.socket) -> None: certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) as ssl_sock: - consume_socket(ssl_sock) + consume_socket(ssl_sock, quit_event=quit_event) except (ConnectionResetError, ConnectionAbortedError, OSError): pass - self._start_server(consume_ssl_socket) + self._start_server(consume_ssl_socket, quit_event=quit_event) with socket.create_connection( (self.host, self.port) ) as sock, contextlib.closing( @@ -1275,7 +1265,7 @@ def http_socket_handler(listener: socket.socket) -> None: errored.set() # Avoid a ConnectionAbortedError on Windows. - assert type(e.value.reason) == ProxyError + assert type(e.value.reason) is ProxyError assert "Your proxy appears to only use HTTP and not HTTPS" in str( e.value.reason ) @@ -1284,39 +1274,37 @@ def http_socket_handler(listener: socket.socket) -> None: class TestSSL(SocketDummyServerTestCase): def test_ssl_failure_midway_through_conn(self) -> None: def socket_handler(listener: socket.socket) -> None: - sock = listener.accept()[0] - sock2 = sock.dup() - ssl_sock = original_ssl_wrap_socket( - sock, - server_side=True, - keyfile=DEFAULT_CERTS["keyfile"], - certfile=DEFAULT_CERTS["certfile"], - ca_certs=DEFAULT_CA, - ) + with listener.accept()[0] as sock, sock.dup() as sock2: + ssl_sock = original_ssl_wrap_socket( + sock, + server_side=True, + keyfile=DEFAULT_CERTS["keyfile"], + certfile=DEFAULT_CERTS["certfile"], + ca_certs=DEFAULT_CA, + ) - buf = b"" - while not buf.endswith(b"\r\n\r\n"): - buf += ssl_sock.recv(65536) + buf = b"" + while not buf.endswith(b"\r\n\r\n"): + buf += ssl_sock.recv(65536) - # Deliberately send from the non-SSL socket. - sock2.send( - b"HTTP/1.1 200 OK\r\n" - b"Content-Type: text/plain\r\n" - b"Content-Length: 2\r\n" - b"\r\n" - b"Hi" - ) - sock2.close() - ssl_sock.close() + # Deliberately send from the non-SSL socket. 
+ sock2.send( + b"HTTP/1.1 200 OK\r\n" + b"Content-Type: text/plain\r\n" + b"Content-Length: 2\r\n" + b"\r\n" + b"Hi" + ) + ssl_sock.close() self._start_server(socket_handler) with HTTPSConnectionPool(self.host, self.port, ca_certs=DEFAULT_CA) as pool: with pytest.raises( - SSLError, match=r"(wrong version number|record overflow)" + SSLError, + match=r"(wrong version number|record overflow|record layer failure)", ): pool.request("GET", "/", retries=False) - @notSecureTransport() def test_ssl_read_timeout(self) -> None: timed_out = Event() @@ -1371,19 +1359,21 @@ def socket_handler(listener: socket.socket) -> None: certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) - except (ssl.SSLError, ConnectionResetError): - if i == 1: - raise - return + except (ssl.SSLError, ConnectionResetError, ConnectionAbortedError): + pass - ssl_sock.send( - b"HTTP/1.1 200 OK\r\n" - b"Content-Type: text/plain\r\n" - b"Content-Length: 5\r\n\r\n" - b"Hello" - ) + else: + with ssl_sock: + try: + ssl_sock.send( + b"HTTP/1.1 200 OK\r\n" + b"Content-Type: text/plain\r\n" + b"Content-Length: 5\r\n\r\n" + b"Hello" + ) + except (ssl.SSLEOFError, ConnectionResetError, BrokenPipeError): + pass - ssl_sock.close() sock.close() self._start_server(socket_handler) @@ -1392,7 +1382,10 @@ def socket_handler(listener: socket.socket) -> None: def request() -> None: pool = HTTPSConnectionPool( - self.host, self.port, assert_fingerprint=fingerprint + self.host, + self.port, + assert_fingerprint=fingerprint, + cert_reqs="CERT_NONE", ) try: timeout = Timeout(connect=LONG_TIMEOUT, read=SHORT_TIMEOUT) @@ -1405,10 +1398,21 @@ def request() -> None: with pytest.raises(MaxRetryError) as cm: request() - assert isinstance(cm.value.reason, SSLError) + assert type(cm.value.reason) is SSLError + assert str(cm.value.reason) == ( + "Fingerprints did not match. Expected " + '"a0c4a74600eda72dc0becb9a8cb607ca58ee745e", got ' + '"728b554c9afc1e88a11cad1bb2e7cc3edbc8f98a"' + ) # Should not hang, see https://github.com/urllib3/urllib3/issues/529 - with pytest.raises(MaxRetryError): + with pytest.raises(MaxRetryError) as cm2: request() + assert type(cm2.value.reason) is SSLError + assert str(cm2.value.reason) == ( + "Fingerprints did not match. 
Expected " + '"a0c4a74600eda72dc0becb9a8cb607ca58ee745e", got ' + '"728b554c9afc1e88a11cad1bb2e7cc3edbc8f98a"' + ) def test_retry_ssl_error(self) -> None: def socket_handler(listener: socket.socket) -> None: @@ -1493,6 +1497,17 @@ def socket_handler(listener: socket.socket) -> None: context.load_default_certs = mock.Mock() context.options = 0 + class MockSSLSocket: + def __init__( + self, sock: socket.socket, *args: object, **kwargs: object + ) -> None: + self._sock = sock + + def close(self) -> None: + self._sock.close() + + context.wrap_socket = MockSSLSocket + with mock.patch("urllib3.util.ssl_.SSLContext", lambda *_, **__: context): self._start_server(socket_handler) with HTTPSConnectionPool(self.host, self.port) as pool: @@ -1535,6 +1550,17 @@ def socket_handler(listener: socket.socket) -> None: context.load_default_certs = mock.Mock() context.options = 0 + class MockSSLSocket: + def __init__( + self, sock: socket.socket, *args: object, **kwargs: object + ) -> None: + self._sock = sock + + def close(self) -> None: + self._sock.close() + + context.wrap_socket = MockSSLSocket + with mock.patch("urllib3.util.ssl_.SSLContext", lambda *_, **__: context): for kwargs in [ {"ca_certs": "/a"}, @@ -1600,18 +1626,47 @@ def socket_handler(listener: socket.socket) -> None: pool.request("GET", "/", retries=False, timeout=LONG_TIMEOUT) assert server_closed.wait(LONG_TIMEOUT), "The socket was not terminated" - # SecureTransport can read only small pieces of data at the moment. - # https://github.com/urllib3/urllib3/pull/2674 - @notSecureTransport() - @pytest.mark.skipif( - os.environ.get("CI") == "true" and sys.implementation.name == "pypy", - reason="too slow to run in CI", - ) + def _run_preload(self, pool: HTTPSConnectionPool, content_length: int) -> None: + response = pool.request("GET", "/") + assert len(response.data) == content_length + + def _run_read_None(self, pool: HTTPSConnectionPool, content_length: int) -> None: + response = pool.request("GET", "/", preload_content=False) + assert len(response.read(None)) == content_length + assert response.read(None) == b"" + + def _run_read_amt(self, pool: HTTPSConnectionPool, content_length: int) -> None: + response = pool.request("GET", "/", preload_content=False) + assert len(response.read(content_length)) == content_length + assert response.read(5) == b"" + + def _run_read1_None(self, pool: HTTPSConnectionPool, content_length: int) -> None: + response = pool.request("GET", "/", preload_content=False) + remaining = content_length + while True: + chunk = response.read1(None) + if not chunk: + break + remaining -= len(chunk) + assert remaining == 0 + + def _run_read1_amt(self, pool: HTTPSConnectionPool, content_length: int) -> None: + response = pool.request("GET", "/", preload_content=False) + remaining = content_length + while True: + chunk = response.read1(content_length) + if not chunk: + break + remaining -= len(chunk) + assert remaining == 0 + + @pytest.mark.integration @pytest.mark.parametrize( - "preload_content,read_amt", [(True, None), (False, None), (False, 2**31)] + "method", + [_run_preload, _run_read_None, _run_read_amt, _run_read1_None, _run_read1_amt], ) def test_requesting_large_resources_via_ssl( - self, preload_content: bool, read_amt: int | None + self, method: typing.Callable[[typing.Any, HTTPSConnectionPool, int], None] ) -> None: """ Ensure that it is possible to read 2 GiB or more via an SSL @@ -1653,9 +1708,7 @@ def socket_handler(listener: socket.socket) -> None: with HTTPSConnectionPool( self.host, self.port, 
ca_certs=DEFAULT_CA, retries=False ) as pool: - response = pool.request("GET", "/", preload_content=preload_content) - data = response.data if preload_content else response.read(read_amt) - assert len(data) == content_length + method(self, pool, content_length) class TestErrorWrapping(SocketDummyServerTestCase): @@ -1830,13 +1883,16 @@ def test_headers_sent_with_add( body: None | bytes | io.BytesIO if body_type is None: body = None + expected = b"\r\n\r\n" elif body_type == "bytes": body = b"my-body" + expected = b"\r\n\r\nmy-body" elif body_type == "bytes-io": body = io.BytesIO(b"bytes-io-body") body.seek(0, 0) + expected = b"bytes-io-body\r\n0\r\n\r\n" else: - raise ValueError("Unknonw body type") + raise ValueError("Unknown body type") buffer: bytes = b"" @@ -1845,12 +1901,9 @@ def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] sock.settimeout(0) - start = time.time() - while time.time() - start < (LONG_TIMEOUT / 2): - try: + while expected not in buffer: + with contextlib.suppress(BlockingIOError): buffer += sock.recv(65536) - except OSError: - continue sock.sendall( b"HTTP/1.1 200 OK\r\n" @@ -1902,7 +1955,7 @@ def _test_broken_header_parsing( for record in logs: if ( "Failed to parse headers" in record.msg - and isinstance(record.args, tuple) + and type(record.args) is tuple and _url_from_pool(pool, "/") == record.args[0] ): if ( @@ -2186,11 +2239,28 @@ def socket_handler(listener: socket.socket) -> None: class TestMultipartResponse(SocketDummyServerTestCase): def test_multipart_assert_header_parsing_no_defects(self) -> None: + quit_event = threading.Event() + def socket_handler(listener: socket.socket) -> None: for _ in range(2): - sock = listener.accept()[0] - while not sock.recv(65536).endswith(b"\r\n\r\n"): - pass + listener.settimeout(LONG_TIMEOUT) + + while True: + if quit_event and quit_event.is_set(): + return + try: + sock = listener.accept()[0] + break + except (TimeoutError, socket.timeout): + continue + + sock.settimeout(LONG_TIMEOUT) + while True: + if quit_event and quit_event.is_set(): + sock.close() + return + if sock.recv(65536).endswith(b"\r\n\r\n"): + break sock.sendall( b"HTTP/1.1 404 Not Found\r\n" @@ -2206,7 +2276,7 @@ def socket_handler(listener: socket.socket) -> None: ) sock.close() - self._start_server(socket_handler) + self._start_server(socket_handler, quit_event=quit_event) from urllib3.connectionpool import log with mock.patch.object(log, "warning") as log_warning: @@ -2262,19 +2332,28 @@ def socket_handler(listener: socket.socket) -> None: def test_chunked_specified( self, method: str, chunked: bool, body_type: str ) -> None: + quit_event = threading.Event() buffer = bytearray() + expected_bytes = b"\r\n\r\na\r\nxxxxxxxxxx\r\n0\r\n\r\n" def socket_handler(listener: socket.socket) -> None: nonlocal buffer - sock = listener.accept()[0] - sock.settimeout(0) - - start = time.time() - while time.time() - start < (LONG_TIMEOUT / 2): + listener.settimeout(LONG_TIMEOUT) + while True: + if quit_event.is_set(): + return try: - buffer += sock.recv(65536) - except OSError: + sock = listener.accept()[0] + break + except (TimeoutError, socket.timeout): continue + sock.settimeout(LONG_TIMEOUT) + + while expected_bytes not in buffer: + if quit_event.is_set(): + return + with contextlib.suppress(BlockingIOError): + buffer += sock.recv(65536) sock.sendall( b"HTTP/1.1 200 OK\r\n" @@ -2283,7 +2362,7 @@ def socket_handler(listener: socket.socket) -> None: ) sock.close() - self._start_server(socket_handler) + self._start_server(socket_handler, 
quit_event=quit_event) body: typing.Any if body_type == "generator": @@ -2313,7 +2392,7 @@ def body_generator() -> typing.Generator[bytes, None, None]: assert b"Transfer-Encoding: chunked\r\n" in sent_bytes assert b"User-Agent: python-urllib3/" in sent_bytes assert b"content-length" not in sent_bytes.lower() - assert b"\r\n\r\na\r\nxxxxxxxxxx\r\n0\r\n\r\n" in sent_bytes + assert expected_bytes in sent_bytes @pytest.mark.parametrize("method", ["POST", "PUT", "PATCH"]) @pytest.mark.parametrize( @@ -2321,29 +2400,9 @@ def body_generator() -> typing.Generator[bytes, None, None]: ) def test_chunked_not_specified(self, method: str, body_type: str) -> None: buffer = bytearray() - - def socket_handler(listener: socket.socket) -> None: - nonlocal buffer - sock = listener.accept()[0] - sock.settimeout(0) - - start = time.time() - while time.time() - start < (LONG_TIMEOUT / 2): - try: - buffer += sock.recv(65536) - except OSError: - continue - - sock.sendall( - b"HTTP/1.1 200 OK\r\n" - b"Server: example.com\r\n" - b"Content-Length: 0\r\n\r\n" - ) - sock.close() - - self._start_server(socket_handler) - + expected_bytes: bytes body: typing.Any + if body_type == "generator": def body_generator() -> typing.Generator[bytes, None, None]: @@ -2351,25 +2410,44 @@ def body_generator() -> typing.Generator[bytes, None, None]: body = body_generator() should_be_chunked = True - elif body_type == "file": body = io.BytesIO(b"x" * 10) body.seek(0, 0) should_be_chunked = True - elif body_type == "file_text": body = io.StringIO("x" * 10) body.seek(0, 0) should_be_chunked = True - elif body_type == "bytearray": body = bytearray(b"x" * 10) should_be_chunked = False - else: body = b"x" * 10 should_be_chunked = False + if should_be_chunked: + expected_bytes = b"\r\n\r\na\r\nxxxxxxxxxx\r\n0\r\n\r\n" + else: + expected_bytes = b"\r\n\r\nxxxxxxxxxx" + + def socket_handler(listener: socket.socket) -> None: + nonlocal buffer + sock = listener.accept()[0] + sock.settimeout(0) + + while expected_bytes not in buffer: + with contextlib.suppress(BlockingIOError): + buffer += sock.recv(65536) + + sock.sendall( + b"HTTP/1.1 200 OK\r\n" + b"Server: example.com\r\n" + b"Content-Length: 0\r\n\r\n" + ) + sock.close() + + self._start_server(socket_handler) + with HTTPConnectionPool( self.host, self.port, timeout=LONG_TIMEOUT, retries=False ) as pool: @@ -2385,12 +2463,12 @@ def body_generator() -> typing.Generator[bytes, None, None]: if should_be_chunked: assert b"content-length" not in sent_bytes.lower() assert b"Transfer-Encoding: chunked\r\n" in sent_bytes - assert b"\r\n\r\na\r\nxxxxxxxxxx\r\n0\r\n\r\n" in sent_bytes + assert expected_bytes in sent_bytes else: assert b"Content-Length: 10\r\n" in sent_bytes assert b"transfer-encoding" not in sent_bytes.lower() - assert sent_bytes.endswith(b"\r\n\r\nxxxxxxxxxx") + assert sent_bytes.endswith(expected_bytes) @pytest.mark.parametrize( "header_transform", @@ -2421,12 +2499,9 @@ def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] sock.settimeout(0) - start = time.time() - while time.time() - start < (LONG_TIMEOUT / 2): - try: + while expected not in buffer: + with contextlib.suppress(BlockingIOError): buffer += sock.recv(65536) - except OSError: - continue sock.sendall( b"HTTP/1.1 200 OK\r\n"