diff --git a/.github/workflows/cibuildwheel.yaml b/.github/workflows/cibuildwheel.yaml new file mode 100644 index 000000000..b3ec82cfa --- /dev/null +++ b/.github/workflows/cibuildwheel.yaml @@ -0,0 +1,141 @@ +# Build wheels using cibuildwheel (https://cibuildwheel.pypa.io/) +name: Build wheels + +on: + # Run when a release has been created + release: + types: [created] + + # NOTE(vytas): Also allow releasing to TestPyPI manually. + workflow_dispatch: + +jobs: + build-sdist: + name: sdist + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 2 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Build sdist + run: | + pip install build + python -m build --sdist + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: cibw-sdist + path: dist/*.tar.gz + + build-wheels: + name: ${{ matrix.python }}-${{ matrix.platform.name }} + needs: build-sdist + runs-on: ${{ matrix.platform.os }} + strategy: + fail-fast: false + matrix: + platform: + - name: manylinux_x86_64 + os: ubuntu-latest + - name: musllinux_x86_64 + os: ubuntu-latest + - name: manylinux_aarch64 + os: ubuntu-latest + emulation: true + - name: musllinux_aarch64 + os: ubuntu-latest + emulation: true + - name: manylinux_s390x + os: ubuntu-latest + emulation: true + - name: macosx_x86_64 + os: macos-13 + - name: macosx_arm64 + os: macos-14 + - name: win_amd64 + os: windows-latest + python: + - cp39 + - cp310 + - cp311 + - cp312 + - cp313 + include: + - platform: + name: manylinux_x86_64 + os: ubuntu-latest + python: cp38 + - platform: + name: musllinux_x86_64 + os: ubuntu-latest + python: cp38 + + defaults: + run: + shell: bash + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 2 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + if: ${{ matrix.platform.emulation }} + with: + platforms: all + + - name: Build wheels + uses: pypa/cibuildwheel@v2.20.0 + env: + CIBW_ARCHS_LINUX: all + CIBW_BUILD: ${{ matrix.python }}-${{ matrix.platform.name }} + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: cibw-wheel-${{ matrix.python }}-${{ matrix.platform.name }} + path: wheelhouse/*.whl + + publish-wheels: + name: publish + needs: + - build-sdist + - build-wheels + runs-on: ubuntu-latest + + steps: + - name: Download packages + uses: actions/download-artifact@v4 + with: + pattern: cibw-* + path: dist + merge-multiple: true + + - name: Check collected artifacts + # TODO(vytas): Run a script to perform version sanity checks instead. + run: ls -l dist/ + + - name: Publish artifacts to TestPyPI + uses: pypa/gh-action-pypi-publish@release/v1 + if: github.event_name == 'workflow_dispatch' + with: + password: ${{ secrets.TEST_PYPI_TOKEN }} + repository-url: https://test.pypi.org/legacy/ + + # TODO(vytas): Enable this nuclear option once happy with other tests. 
+ # - name: Publish artifacts to PyPI + # uses: pypa/gh-action-pypi-publish@release/v1 + # if: github.event_name == 'release' + # with: + # password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index 2a469dee4..6caf8c4df 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -24,8 +24,6 @@ jobs: - "windows-latest" - "macos-latest" python-version: - - "3.6" - - "3.7" - "3.8" - "3.9" - "3.10" @@ -38,12 +36,12 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 2 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} architecture: ${{ matrix.architecture }} @@ -95,7 +93,6 @@ jobs: - "ubuntu-latest" python-version: # the versions are - as specified in PEP 425. - - cp37-cp37m - cp38-cp38 - cp39-cp39 - cp310-cp310 @@ -108,7 +105,7 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 2 @@ -123,7 +120,7 @@ jobs: echo "::set-output name=python-version::$version" - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ steps.linux-py-version.outputs.python-version }} architecture: ${{ matrix.architecture }} @@ -181,12 +178,12 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 2 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} architecture: ${{ matrix.architecture }} @@ -227,7 +224,6 @@ jobs: - "ubuntu-latest" python-version: # the versions are - as specified in PEP 425. 
- - cp37-cp37m - cp38-cp38 - cp39-cp39 - cp310-cp310 @@ -241,7 +237,7 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 2 @@ -308,7 +304,7 @@ jobs: files: 'dist/*manylinux*' - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" architecture: "x64" diff --git a/.github/workflows/mintest.yaml b/.github/workflows/mintest.yaml index 752ef2d38..480fc0cd3 100644 --- a/.github/workflows/mintest.yaml +++ b/.github/workflows/mintest.yaml @@ -22,16 +22,16 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 if: ${{ matrix.python-version != '3.13' }} with: python-version: ${{ matrix.python-version }} - name: Set up Python 3.13 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 if: ${{ matrix.python-version == '3.13' }} with: python-version: "3.13.0-rc.1 - 3.13" diff --git a/.github/workflows/tests-emulated.yaml b/.github/workflows/tests-emulated.yaml index 5ed32f76c..5cae65c21 100644 --- a/.github/workflows/tests-emulated.yaml +++ b/.github/workflows/tests-emulated.yaml @@ -8,76 +8,29 @@ on: - master jobs: - run_tox_emulate: - name: tox -e py310_cython (${{ matrix.architecture }}) + test-emulated: + name: "cibuildwheel: ${{ matrix.platform }}" runs-on: ubuntu-latest strategy: fail-fast: false matrix: - architecture: - - s390x - - arm64v8 + platform: + - "cp312-manylinux_aarch64" + - "cp312-manylinux_s390x" steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 2 - - name: Cache PIP - uses: actions/cache@v3 + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 with: - path: | - .pip - key: python-${{ matrix.architecture }}-${{ hashFiles('requirements/tests') }} + platforms: all - - name: Set up emulation - run: | - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - - # TODO(vytas): Revisit second half of 2021 to see if we still need to pin tox - # - # See also: https://github.com/tox-dev/tox/issues/1777 - # - - name: Run tox s390x - if: ${{ matrix.architecture == 's390x' }} - uses: docker://s390x/python:3.10-buster - env: - PIP_CACHE_DIR: /github/workspace/.pip/ - with: - # NOTE: without 'pip install ujson' tox fails to install it with "Requested ujson from has different version in metadata: '0.0.0'" - # NOTE(vytas): installing fixtures/pbr because otherwise pip install - # inside tox fails with an import error on some older CPythons - args: | - /bin/bash -c " - lscpu && - mkdir -p $PIP_CACHE_DIR && - chown -R $(whoami) $PIP_CACHE_DIR && - pip install -U fixtures pip tox && - python --version && - pip --version && - tox --version && - pip install ujson && - tox -e py310_cython" - - - name: Run tox arm64v8 - if: ${{ matrix.architecture == 'arm64v8' }} - uses: docker://arm64v8/python:3.10-buster + - name: Build wheels + uses: pypa/cibuildwheel@v2.20.0 env: - PIP_CACHE_DIR: /github/workspace/.pip/ - with: - args: | - /bin/bash -c " - lscpu && - mkdir -p $PIP_CACHE_DIR && - chown -R $(whoami) $PIP_CACHE_DIR && - pip install -U fixtures pip tox && - python --version && - pip --version && - tox --version && - tox -e py310_cython" - - - name: Fix cache permission - run: | - sudo chmod -R 777 .pip - sudo chmod -R 777 .tox + CIBW_ARCHS_LINUX: all + CIBW_BUILD: ${{ matrix.platform }} diff --git a/.github/workflows/tests-mailman.yaml b/.github/workflows/tests-mailman.yaml index 
60fd7691a..ccb6a00ec 100644 --- a/.github/workflows/tests-mailman.yaml +++ b/.github/workflows/tests-mailman.yaml @@ -3,12 +3,9 @@ name: Run tests (GNU Mailman 3) on: # Trigger the workflow on master but also allow it to run manually. workflow_dispatch: - - # NOTE(vytas): Disabled as it is failing as of 2023-09. - # Maybe @maxking just needs to update the Docker image (?) - # push: - # branches: - # - master + push: + branches: + - master jobs: run_tox: @@ -17,7 +14,7 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 2 diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 8d01c927f..42206d05d 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -52,9 +52,6 @@ jobs: - python-version: pypy3.9 os: ubuntu-latest toxenv: pypy3 - - python-version: "3.7" - os: ubuntu-latest - toxenv: py37 - python-version: "3.8" os: ubuntu-latest toxenv: py38 @@ -111,20 +108,20 @@ jobs: # Some are GitHub actions, others run shell commands. steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 # NOTE(vytas): Work around # https://github.com/codecov/codecov-action/issues/190 with: fetch-depth: 2 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 if: ${{ !matrix.python-dev-version }} with: python-version: ${{ matrix.python-version }} - name: Set up Python (pre-release) - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 if: ${{ matrix.python-dev-version }} with: python-version: ${{ matrix.python-dev-version }} diff --git a/.gitignore b/.gitignore index 20f6ac3c7..e8b6d5f7d 100644 --- a/.gitignore +++ b/.gitignore @@ -22,6 +22,7 @@ parts sdist var pip-wheel-metadata +wheelhouse # Installer logs pip-log.txt diff --git a/AUTHORS b/AUTHORS index fece25507..52ef92933 100644 --- a/AUTHORS +++ b/AUTHORS @@ -132,6 +132,13 @@ listed below by date of first contribution: * Aryan Iyappan (aryaniyaps) * Eujin Ong (euj1n0ng) * Libor Jelínek (liborjelinek) +* Piotr Kopalko (copalco) +* Kent Bull (kentbull) +* Mario Rivera (MRLab12) +* M-Mueller +* Derk Weijers (derkweijers) +* bssyousefi +* Pavel 宝尔米 (e-io) (et al.) diff --git a/README.rst b/README.rst index 82c93d832..cb1105237 100644 --- a/README.rst +++ b/README.rst @@ -23,7 +23,7 @@ clean design that embraces HTTP and the REST architectural style. Falcon apps work with any `WSGI `_ or `ASGI `_ server, and run like a -champ under CPython 3.7+ and PyPy 3.7+. +champ under CPython 3.8+ and PyPy 3.8+. Quick Links ----------- @@ -79,7 +79,7 @@ Falcon tries to do as little as possible while remaining highly effective. - Idiomatic HTTP error responses - Straightforward exception handling - Snappy testing with WSGI/ASGI helpers and mocks -- CPython 3.7+ and PyPy 3.7+ support +- CPython 3.8+ and PyPy 3.8+ support .. Patron list starts here. For Python package, we substitute this section with: Support Falcon Development @@ -210,7 +210,7 @@ PyPy ^^^^ `PyPy `__ is the fastest way to run your Falcon app. -PyPy3.7+ is supported as of PyPy v7.3.4+. +PyPy3.8+ is supported as of PyPy v7.3.7+. .. code:: bash @@ -226,7 +226,7 @@ CPython ^^^^^^^ Falcon also fully supports -`CPython `__ 3.7+. +`CPython `__ 3.8+. 
The latest stable version of Falcon can be installed directly from PyPI: diff --git a/docs/changes/4.0.0.rst b/docs/changes/4.0.0.rst index 9cdbf9913..8c769d230 100644 --- a/docs/changes/4.0.0.rst +++ b/docs/changes/4.0.0.rst @@ -15,13 +15,12 @@ Changes to Supported Platforms - CPython 3.11 is now fully supported. (`#2072 `__) - CPython 3.12 is now fully supported. (`#2196 `__) - CPython 3.13 is now fully supported. (`#2258 `__) -- End-of-life Python 3.5 & 3.6 are no longer supported. (`#2074 `__) -- End-of-life Python 3.7 and (soon end-of-life) 3.8 are no longer actively - supported, but the framework should still continue to install from source and - function. +- End-of-life Python 3.5, 3.6 & 3.7 are no longer supported. (`#2074 `__, `#2273 `__) +- Soon end-of-life Python 3.8 is no longer actively supported, but + the framework should still continue to install from source and function. - The Falcon 4.x series is guaranteed to support CPython 3.10 and PyPy3.10 (v7.3.16). - This means that we may drop the support for Python 3.7-3.9 altogether in a + This means that we may drop the support for Python 3.8 & 3.9 altogether in a later 4.x release, especially if we are faced with incompatible ecosystem changes in typing, Cython, etc. @@ -33,17 +32,24 @@ Contributors to this Release Many thanks to all of our talented and stylish contributors for this release! - `aryaniyaps `__ +- `bssyousefi `__ - `CaselIT `__ - `cclauss `__ +- `copalco `__ +- `derkweijers `__ +- `e-io `__ - `euj1n0ng `__ - `jkapica `__ - `jkklapp `__ - `john-g-g `__ - `kaichan1201 `__ +- `kentbull `__ - `kgriffs `__ +- `M-Mueller `__ - `meetshah133 `__ - `mgorny `__ - `mihaitodor `__ +- `MRLab12 `__ - `nfsec `__ - `RioAtHome `__ - `TigreModerata `__ diff --git a/docs/conf.py b/docs/conf.py index 1b4b4ecb9..4ef269e33 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# # Falcon documentation build configuration file, created by # sphinx-quickstart on Wed Mar 12 14:14:02 2014. # diff --git a/docs/index.rst b/docs/index.rst index 6827981c5..8e3ac1f0d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -94,7 +94,7 @@ Falcon tries to do as little as possible while remaining highly effective. - Idiomatic :ref:`HTTP error ` responses - Straightforward exception handling - Snappy :ref:`testing ` with WSGI/ASGI helpers and mocks -- CPython 3.7+ and PyPy 3.7+ support +- CPython 3.8+ and PyPy 3.8+ support Who's Using Falcon? ------------------- diff --git a/docs/user/install.rst b/docs/user/install.rst index 435649463..ef4986802 100644 --- a/docs/user/install.rst +++ b/docs/user/install.rst @@ -7,7 +7,7 @@ PyPy ---- `PyPy `__ is the fastest way to run your Falcon app. -PyPy3.7+ is supported as of PyPy v7.3.4. +PyPy3.8+ is supported as of PyPy v7.3.7. .. code:: bash @@ -23,7 +23,7 @@ CPython ------- Falcon fully supports -`CPython `__ 3.7+. +`CPython `__ 3.8+. The latest stable version of Falcon can be installed directly from PyPI: diff --git a/docs/user/intro.rst b/docs/user/intro.rst index a86809a8e..c6c22f228 100644 --- a/docs/user/intro.rst +++ b/docs/user/intro.rst @@ -15,7 +15,7 @@ architectural style, and tries to do as little as possible while remaining highly effective. Falcon apps work with any WSGI server, and run like a champ under -CPython 3.7+ and PyPy 3.7+. +CPython 3.8+ and PyPy 3.8+. Features -------- @@ -35,7 +35,7 @@ Falcon tries to do as little as possible while remaining highly effective. 
- Idiomatic :ref:`HTTP error ` responses - Straightforward exception handling - Snappy :ref:`testing ` with WSGI/ASGI helpers and mocks -- CPython 3.7+ and PyPy 3.7+ support +- CPython 3.8+ and PyPy 3.8+ support How is Falcon different? ------------------------ diff --git a/docs/user/recipes/request-id.rst b/docs/user/recipes/request-id.rst index 688d4fbb4..9274a5ad8 100644 --- a/docs/user/recipes/request-id.rst +++ b/docs/user/recipes/request-id.rst @@ -48,4 +48,4 @@ In a pinch, you can also output the request ID directly: .. literalinclude:: ../../../examples/recipes/request_id_log.py :language: python -.. _thread-local: https://docs.python.org/3.7/library/threading.html#thread-local-data +.. _thread-local: https://docs.python.org/3/library/threading.html#thread-local-data diff --git a/docs/user/tutorial-asgi.rst b/docs/user/tutorial-asgi.rst index bbfd1ab2c..a58c96c23 100644 --- a/docs/user/tutorial-asgi.rst +++ b/docs/user/tutorial-asgi.rst @@ -32,7 +32,7 @@ WSGI tutorial:: └── app.py We'll create a *virtualenv* using the ``venv`` module from the standard library -(Falcon requires Python 3.7+):: +(Falcon requires Python 3.8+):: $ mkdir asgilook $ python3 -m venv asgilook/.venv diff --git a/e2e-tests/server/app.py b/e2e-tests/server/app.py index be9558985..46f52a90c 100644 --- a/e2e-tests/server/app.py +++ b/e2e-tests/server/app.py @@ -13,7 +13,8 @@ def create_app() -> falcon.asgi.App: - app = falcon.asgi.App() + # TODO(vytas): Add type annotations to App's constructor. + app = falcon.asgi.App() # type: ignore hub = Hub() app.add_route('/ping', Pong()) diff --git a/e2e-tests/server/hub.py b/e2e-tests/server/hub.py index e4e729b59..80213ecf6 100644 --- a/e2e-tests/server/hub.py +++ b/e2e-tests/server/hub.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio import typing import uuid @@ -11,9 +13,9 @@ class Emitter: POLL_TIMEOUT = 3.0 - def __init__(self): - self._done = False - self._queue = asyncio.Queue() + def __init__(self) -> None: + self._done: bool = False + self._queue: asyncio.Queue[SSEvent] = asyncio.Queue() async def events(self) -> typing.AsyncGenerator[typing.Optional[SSEvent], None]: try: @@ -37,16 +39,16 @@ async def enqueue(self, message: str) -> None: await self._queue.put(event) @property - def done(self): + def done(self) -> bool: return self._done class Hub: - def __init__(self): - self._emitters = set() - self._users = {} + def __init__(self) -> None: + self._emitters: set[Emitter] = set() + self._users: dict[str, WebSocket] = {} - def _update_emitters(self) -> set: + def _update_emitters(self) -> set[Emitter]: done = {emitter for emitter in self._emitters if emitter.done} self._emitters.difference_update(done) return self._emitters.copy() diff --git a/e2e-tests/server/ping.py b/e2e-tests/server/ping.py index 7deb5f077..447db6658 100644 --- a/e2e-tests/server/ping.py +++ b/e2e-tests/server/ping.py @@ -9,4 +9,5 @@ class Pong: async def on_get(self, req: Request, resp: Response) -> None: resp.content_type = falcon.MEDIA_TEXT resp.text = 'PONG\n' - resp.status = HTTPStatus.OK + # TODO(vytas): Properly type Response.status. 
+ resp.status = HTTPStatus.OK # type: ignore diff --git a/falcon/asgi/structures.py b/falcon/asgi/structures.py index 7a64e366f..a1b6828db 100644 --- a/falcon/asgi/structures.py +++ b/falcon/asgi/structures.py @@ -1,5 +1,8 @@ +from typing import Optional + from falcon.constants import MEDIA_JSON from falcon.media.json import _DEFAULT_JSON_HANDLER +from falcon.typing import JSONSerializable __all__ = ('SSEvent',) @@ -71,14 +74,14 @@ class SSEvent: def __init__( self, - data=None, - text=None, - json=None, - event=None, - event_id=None, - retry=None, - comment=None, - ): + data: Optional[bytes] = None, + text: Optional[str] = None, + json: JSONSerializable = None, + event: Optional[str] = None, + event_id: Optional[str] = None, + retry: Optional[int] = None, + comment: Optional[str] = None, + ) -> None: # NOTE(kgriffs): Check up front since this makes it a lot easier # to debug the source of the problem in the app vs. waiting for # an error to be raised from the framework when it calls serialize() @@ -111,7 +114,7 @@ def __init__( self.comment = comment - def serialize(self, handler=None): + def serialize(self, handler=None) -> bytes: """Serialize this event to string. Args: diff --git a/falcon/constants.py b/falcon/constants.py index c29c25a78..0f706af14 100644 --- a/falcon/constants.py +++ b/falcon/constants.py @@ -30,12 +30,12 @@ PYTHON_VERSION = tuple(sys.version_info[:3]) """Python version information triplet: (major, minor, micro).""" -FALCON_SUPPORTED = PYTHON_VERSION >= (3, 7, 0) +FALCON_SUPPORTED = PYTHON_VERSION >= (3, 8, 0) """Whether this version of Falcon supports the current Python version.""" if not FALCON_SUPPORTED: # pragma: nocover raise ImportError( - 'Falcon requires Python 3.7+. ' + 'Falcon requires Python 3.8+. ' '(Recent Pip should automatically pick a suitable Falcon version.)' ) diff --git a/falcon/cyutil/misc.pyx b/falcon/cyutil/misc.pyx index f4e2b1229..b7962aa62 100644 --- a/falcon/cyutil/misc.pyx +++ b/falcon/cyutil/misc.pyx @@ -13,33 +13,6 @@ # limitations under the License. -def isascii(unicode string not None): - """Return ``True`` if all characters in the string are ASCII. - - ASCII characters have code points in the range U+0000-U+007F. - - Note: - On Python 3.7+, this function is just aliased to ``str.isascii``. - - This is a Cython fallback for older CPython versions. For longer strings, - it is slightly less performant than the built-in ``str.isascii``. - - Args: - string (str): A string to test. - - Returns: - ``True`` if all characters are ASCII, ``False`` otherwise. - """ - - cdef Py_UCS4 ch - - for ch in string: - if ch > 0x007F: - return False - - return True - - def encode_items_to_latin1(dict data not None): cdef list result = [] cdef unicode key diff --git a/falcon/request.py b/falcon/request.py index f8fc6f4ab..83b4bad44 100644 --- a/falcon/request.py +++ b/falcon/request.py @@ -32,7 +32,6 @@ from falcon.media.json import _DEFAULT_JSON_HANDLER from falcon.stream import BoundedStream from falcon.util import structures -from falcon.util.misc import isascii from falcon.util.uri import parse_host from falcon.util.uri import parse_query_string from falcon.vendor import mimeparse @@ -489,7 +488,7 @@ def __init__(self, env, options=None): # perf(vytas): Only decode the tunnelled path in case it is not ASCII. # For ASCII-strings, the below decoding chain is a no-op. 
- if not isascii(path): + if not path.isascii(): path = path.encode('iso-8859-1').decode('utf-8', 'replace') if ( @@ -1212,8 +1211,8 @@ def get_header_as_int(self, header, required=False): HttpInvalidHeader: The header contained a malformed/invalid value. """ + http_int = self.get_header(header, required=required) try: - http_int = self.get_header(header, required=required) return int(http_int) except TypeError: # When the header does not exist and isn't required @@ -1246,8 +1245,8 @@ def get_header_as_datetime(self, header, required=False, obs_date=False): HttpInvalidHeader: The header contained a malformed/invalid value. """ + http_date = self.get_header(header, required=required) try: - http_date = self.get_header(header, required=required) return util.http_date_to_dt(http_date, obs_date=obs_date) except TypeError: # When the header does not exist and isn't required diff --git a/falcon/response_helpers.py b/falcon/response_helpers.py index 2570a8ff4..2e59ba78f 100644 --- a/falcon/response_helpers.py +++ b/falcon/response_helpers.py @@ -15,7 +15,6 @@ """Utilities for the Response class.""" from falcon.util import uri -from falcon.util.misc import isascii from falcon.util.misc import secure_filename @@ -91,7 +90,7 @@ def format_content_disposition(value, disposition_type='attachment'): # NOTE(vytas): RFC 6266, Appendix D. # Include a "filename" parameter when US-ASCII ([US-ASCII]) is # sufficiently expressive. - if isascii(value): + if value.isascii(): return '%s; filename="%s"' % (disposition_type, value) # NOTE(vytas): RFC 6266, Appendix D. diff --git a/falcon/status_codes.py b/falcon/status_codes.py index b098fc43a..80c0b5f82 100644 --- a/falcon/status_codes.py +++ b/falcon/status_codes.py @@ -14,135 +14,196 @@ """HTTP status line constants.""" +from typing import Final + # 1xx - Informational -HTTP_100 = HTTP_CONTINUE = '100 Continue' -HTTP_101 = HTTP_SWITCHING_PROTOCOLS = '101 Switching Protocols' -HTTP_102 = HTTP_PROCESSING = '102 Processing' +HTTP_100: Final[str] = '100 Continue' +HTTP_CONTINUE: Final[str] = HTTP_100 +HTTP_101: Final[str] = '101 Switching Protocols' +HTTP_SWITCHING_PROTOCOLS: Final[str] = HTTP_101 +HTTP_102: Final[str] = '102 Processing' +HTTP_PROCESSING: Final[str] = HTTP_102 # 2xx - Success -HTTP_200 = HTTP_OK = '200 OK' -HTTP_201 = HTTP_CREATED = '201 Created' -HTTP_202 = HTTP_ACCEPTED = '202 Accepted' -HTTP_203 = HTTP_NON_AUTHORITATIVE_INFORMATION = '203 Non-Authoritative Information' -HTTP_204 = HTTP_NO_CONTENT = '204 No Content' -HTTP_205 = HTTP_RESET_CONTENT = '205 Reset Content' -HTTP_206 = HTTP_PARTIAL_CONTENT = '206 Partial Content' -HTTP_207 = HTTP_MULTI_STATUS = '207 Multi-Status' -HTTP_208 = HTTP_ALREADY_REPORTED = '208 Already Reported' -HTTP_226 = HTTP_IM_USED = '226 IM Used' +HTTP_200: Final[str] = '200 OK' +HTTP_OK: Final[str] = HTTP_200 +HTTP_201: Final[str] = '201 Created' +HTTP_CREATED: Final[str] = HTTP_201 +HTTP_202: Final[str] = '202 Accepted' +HTTP_ACCEPTED: Final[str] = HTTP_202 +HTTP_203: Final[str] = '203 Non-Authoritative Information' +HTTP_NON_AUTHORITATIVE_INFORMATION: Final[str] = HTTP_203 +HTTP_204: Final[str] = '204 No Content' +HTTP_NO_CONTENT: Final[str] = HTTP_204 +HTTP_205: Final[str] = '205 Reset Content' +HTTP_RESET_CONTENT: Final[str] = HTTP_205 +HTTP_206: Final[str] = '206 Partial Content' +HTTP_PARTIAL_CONTENT: Final[str] = HTTP_206 +HTTP_207: Final[str] = '207 Multi-Status' +HTTP_MULTI_STATUS: Final[str] = HTTP_207 +HTTP_208: Final[str] = '208 Already Reported' +HTTP_ALREADY_REPORTED: Final[str] = HTTP_208 +HTTP_226: 
Final[str] = '226 IM Used' +HTTP_IM_USED: Final[str] = HTTP_226 # 3xx - Redirection -HTTP_300 = HTTP_MULTIPLE_CHOICES = '300 Multiple Choices' -HTTP_301 = HTTP_MOVED_PERMANENTLY = '301 Moved Permanently' -HTTP_302 = HTTP_FOUND = '302 Found' -HTTP_303 = HTTP_SEE_OTHER = '303 See Other' -HTTP_304 = HTTP_NOT_MODIFIED = '304 Not Modified' -HTTP_305 = HTTP_USE_PROXY = '305 Use Proxy' -HTTP_307 = HTTP_TEMPORARY_REDIRECT = '307 Temporary Redirect' -HTTP_308 = HTTP_PERMANENT_REDIRECT = '308 Permanent Redirect' +HTTP_300: Final[str] = '300 Multiple Choices' +HTTP_MULTIPLE_CHOICES: Final[str] = HTTP_300 +HTTP_301: Final[str] = '301 Moved Permanently' +HTTP_MOVED_PERMANENTLY: Final[str] = HTTP_301 +HTTP_302: Final[str] = '302 Found' +HTTP_FOUND: Final[str] = HTTP_302 +HTTP_303: Final[str] = '303 See Other' +HTTP_SEE_OTHER: Final[str] = HTTP_303 +HTTP_304: Final[str] = '304 Not Modified' +HTTP_NOT_MODIFIED: Final[str] = HTTP_304 +HTTP_305: Final[str] = '305 Use Proxy' +HTTP_USE_PROXY: Final[str] = HTTP_305 +HTTP_307: Final[str] = '307 Temporary Redirect' +HTTP_TEMPORARY_REDIRECT: Final[str] = HTTP_307 +HTTP_308: Final[str] = '308 Permanent Redirect' +HTTP_PERMANENT_REDIRECT: Final[str] = HTTP_308 # 4xx - Client Error -HTTP_400 = HTTP_BAD_REQUEST = '400 Bad Request' -HTTP_401 = HTTP_UNAUTHORIZED = '401 Unauthorized' # <-- Really means "unauthenticated" -HTTP_402 = HTTP_PAYMENT_REQUIRED = '402 Payment Required' -HTTP_403 = HTTP_FORBIDDEN = '403 Forbidden' # <-- Really means "unauthorized" -HTTP_404 = HTTP_NOT_FOUND = '404 Not Found' -HTTP_405 = HTTP_METHOD_NOT_ALLOWED = '405 Method Not Allowed' -HTTP_406 = HTTP_NOT_ACCEPTABLE = '406 Not Acceptable' -HTTP_407 = HTTP_PROXY_AUTHENTICATION_REQUIRED = '407 Proxy Authentication Required' -HTTP_408 = HTTP_REQUEST_TIMEOUT = '408 Request Timeout' -HTTP_409 = HTTP_CONFLICT = '409 Conflict' -HTTP_410 = HTTP_GONE = '410 Gone' -HTTP_411 = HTTP_LENGTH_REQUIRED = '411 Length Required' -HTTP_412 = HTTP_PRECONDITION_FAILED = '412 Precondition Failed' -HTTP_413 = HTTP_REQUEST_ENTITY_TOO_LARGE = '413 Payload Too Large' -HTTP_414 = HTTP_REQUEST_URI_TOO_LONG = '414 URI Too Long' -HTTP_415 = HTTP_UNSUPPORTED_MEDIA_TYPE = '415 Unsupported Media Type' -HTTP_416 = HTTP_REQUESTED_RANGE_NOT_SATISFIABLE = '416 Range Not Satisfiable' -HTTP_417 = HTTP_EXPECTATION_FAILED = '417 Expectation Failed' -HTTP_418 = HTTP_IM_A_TEAPOT = "418 I'm a teapot" -HTTP_422 = HTTP_UNPROCESSABLE_ENTITY = '422 Unprocessable Entity' -HTTP_423 = HTTP_LOCKED = '423 Locked' -HTTP_424 = HTTP_FAILED_DEPENDENCY = '424 Failed Dependency' -HTTP_426 = HTTP_UPGRADE_REQUIRED = '426 Upgrade Required' -HTTP_428 = HTTP_PRECONDITION_REQUIRED = '428 Precondition Required' -HTTP_429 = HTTP_TOO_MANY_REQUESTS = '429 Too Many Requests' -HTTP_431 = HTTP_REQUEST_HEADER_FIELDS_TOO_LARGE = '431 Request Header Fields Too Large' -HTTP_451 = HTTP_UNAVAILABLE_FOR_LEGAL_REASONS = '451 Unavailable For Legal Reasons' +HTTP_400: Final[str] = '400 Bad Request' +HTTP_BAD_REQUEST: Final[str] = HTTP_400 +HTTP_401: Final[str] = '401 Unauthorized' # <-- Really means "unauthenticated" +HTTP_UNAUTHORIZED: Final[str] = HTTP_401 +HTTP_402: Final[str] = '402 Payment Required' +HTTP_PAYMENT_REQUIRED: Final[str] = HTTP_402 +HTTP_403: Final[str] = '403 Forbidden' # <-- Really means "unauthorized" +HTTP_FORBIDDEN: Final[str] = HTTP_403 +HTTP_404: Final[str] = '404 Not Found' +HTTP_NOT_FOUND: Final[str] = HTTP_404 +HTTP_405: Final[str] = '405 Method Not Allowed' +HTTP_METHOD_NOT_ALLOWED: Final[str] = HTTP_405 +HTTP_406: Final[str] = '406 Not 
Acceptable' +HTTP_NOT_ACCEPTABLE: Final[str] = HTTP_406 +HTTP_407: Final[str] = '407 Proxy Authentication Required' +HTTP_PROXY_AUTHENTICATION_REQUIRED: Final[str] = HTTP_407 +HTTP_408: Final[str] = '408 Request Timeout' +HTTP_REQUEST_TIMEOUT: Final[str] = HTTP_408 +HTTP_409: Final[str] = '409 Conflict' +HTTP_CONFLICT: Final[str] = HTTP_409 +HTTP_410: Final[str] = '410 Gone' +HTTP_GONE: Final[str] = HTTP_410 +HTTP_411: Final[str] = '411 Length Required' +HTTP_LENGTH_REQUIRED: Final[str] = HTTP_411 +HTTP_412: Final[str] = '412 Precondition Failed' +HTTP_PRECONDITION_FAILED: Final[str] = HTTP_412 +HTTP_413: Final[str] = '413 Payload Too Large' +HTTP_REQUEST_ENTITY_TOO_LARGE: Final[str] = HTTP_413 +HTTP_414: Final[str] = '414 URI Too Long' +HTTP_REQUEST_URI_TOO_LONG: Final[str] = HTTP_414 +HTTP_415: Final[str] = '415 Unsupported Media Type' +HTTP_UNSUPPORTED_MEDIA_TYPE: Final[str] = HTTP_415 +HTTP_416: Final[str] = '416 Range Not Satisfiable' +HTTP_REQUESTED_RANGE_NOT_SATISFIABLE: Final[str] = HTTP_416 +HTTP_417: Final[str] = '417 Expectation Failed' +HTTP_EXPECTATION_FAILED: Final[str] = HTTP_417 +HTTP_418: Final[str] = "418 I'm a teapot" +HTTP_IM_A_TEAPOT: Final[str] = HTTP_418 +HTTP_422: Final[str] = '422 Unprocessable Entity' +HTTP_UNPROCESSABLE_ENTITY: Final[str] = HTTP_422 +HTTP_423: Final[str] = '423 Locked' +HTTP_LOCKED: Final[str] = HTTP_423 +HTTP_424: Final[str] = '424 Failed Dependency' +HTTP_FAILED_DEPENDENCY: Final[str] = HTTP_424 +HTTP_426: Final[str] = '426 Upgrade Required' +HTTP_UPGRADE_REQUIRED: Final[str] = HTTP_426 +HTTP_428: Final[str] = '428 Precondition Required' +HTTP_PRECONDITION_REQUIRED: Final[str] = HTTP_428 +HTTP_429: Final[str] = '429 Too Many Requests' +HTTP_TOO_MANY_REQUESTS: Final[str] = HTTP_429 +HTTP_431: Final[str] = '431 Request Header Fields Too Large' +HTTP_REQUEST_HEADER_FIELDS_TOO_LARGE: Final[str] = HTTP_431 +HTTP_451: Final[str] = '451 Unavailable For Legal Reasons' +HTTP_UNAVAILABLE_FOR_LEGAL_REASONS: Final[str] = HTTP_451 # 5xx - Server Error -HTTP_500 = HTTP_INTERNAL_SERVER_ERROR = '500 Internal Server Error' -HTTP_501 = HTTP_NOT_IMPLEMENTED = '501 Not Implemented' -HTTP_502 = HTTP_BAD_GATEWAY = '502 Bad Gateway' -HTTP_503 = HTTP_SERVICE_UNAVAILABLE = '503 Service Unavailable' -HTTP_504 = HTTP_GATEWAY_TIMEOUT = '504 Gateway Timeout' -HTTP_505 = HTTP_HTTP_VERSION_NOT_SUPPORTED = '505 HTTP Version Not Supported' -HTTP_507 = HTTP_INSUFFICIENT_STORAGE = '507 Insufficient Storage' -HTTP_508 = HTTP_LOOP_DETECTED = '508 Loop Detected' -HTTP_511 = HTTP_NETWORK_AUTHENTICATION_REQUIRED = '511 Network Authentication Required' +HTTP_500: Final[str] = '500 Internal Server Error' +HTTP_INTERNAL_SERVER_ERROR: Final[str] = HTTP_500 +HTTP_501: Final[str] = '501 Not Implemented' +HTTP_NOT_IMPLEMENTED: Final[str] = HTTP_501 +HTTP_502: Final[str] = '502 Bad Gateway' +HTTP_BAD_GATEWAY: Final[str] = HTTP_502 +HTTP_503: Final[str] = '503 Service Unavailable' +HTTP_SERVICE_UNAVAILABLE: Final[str] = HTTP_503 +HTTP_504: Final[str] = '504 Gateway Timeout' +HTTP_GATEWAY_TIMEOUT: Final[str] = HTTP_504 +HTTP_505: Final[str] = '505 HTTP Version Not Supported' +HTTP_HTTP_VERSION_NOT_SUPPORTED: Final[str] = HTTP_505 +HTTP_507: Final[str] = '507 Insufficient Storage' +HTTP_INSUFFICIENT_STORAGE: Final[str] = HTTP_507 +HTTP_508: Final[str] = '508 Loop Detected' +HTTP_LOOP_DETECTED: Final[str] = HTTP_508 +HTTP_511: Final[str] = '511 Network Authentication Required' +HTTP_NETWORK_AUTHENTICATION_REQUIRED: Final[str] = HTTP_511 # 70X - Inexcusable -HTTP_701 = '701 Meh' -HTTP_702 = '702 
Emacs' -HTTP_703 = '703 Explosion' +HTTP_701: Final[str] = '701 Meh' +HTTP_702: Final[str] = '702 Emacs' +HTTP_703: Final[str] = '703 Explosion' # 71X - Novelty Implementations -HTTP_710 = '710 PHP' -HTTP_711 = '711 Convenience Store' -HTTP_712 = '712 NoSQL' -HTTP_719 = '719 I am not a teapot' +HTTP_710: Final[str] = '710 PHP' +HTTP_711: Final[str] = '711 Convenience Store' +HTTP_712: Final[str] = '712 NoSQL' +HTTP_719: Final[str] = '719 I am not a teapot' # 72X - Edge Cases -HTTP_720 = '720 Unpossible' -HTTP_721 = '721 Known Unknowns' -HTTP_722 = '722 Unknown Unknowns' -HTTP_723 = '723 Tricky' -HTTP_724 = '724 This line should be unreachable' -HTTP_725 = '725 It works on my machine' -HTTP_726 = "726 It's a feature, not a bug" -HTTP_727 = '727 32 bits is plenty' +HTTP_720: Final[str] = '720 Unpossible' +HTTP_721: Final[str] = '721 Known Unknowns' +HTTP_722: Final[str] = '722 Unknown Unknowns' +HTTP_723: Final[str] = '723 Tricky' +HTTP_724: Final[str] = '724 This line should be unreachable' +HTTP_725: Final[str] = '725 It works on my machine' +HTTP_726: Final[str] = "726 It's a feature, not a bug" +HTTP_727: Final[str] = '727 32 bits is plenty' # 74X - Meme Driven -HTTP_740 = '740 Computer says no' -HTTP_741 = '741 Compiling' -HTTP_742 = '742 A kitten dies' -HTTP_743 = '743 I thought I knew regular expressions' -HTTP_744 = '744 Y U NO write integration tests?' -HTTP_745 = "745 I don't always test my code, but when I do" 'I do it in production' -HTTP_748 = '748 Confounded by Ponies' -HTTP_749 = '749 Reserved for Chuck Norris' +HTTP_740: Final[str] = '740 Computer says no' +HTTP_741: Final[str] = '741 Compiling' +HTTP_742: Final[str] = '742 A kitten dies' +HTTP_743: Final[str] = '743 I thought I knew regular expressions' +HTTP_744: Final[str] = '744 Y U NO write integration tests?' +HTTP_745: Final[str] = ( + "745 I don't always test my code, but when I do I do it in production" +) +HTTP_748: Final[str] = '748 Confounded by Ponies' +HTTP_749: Final[str] = '749 Reserved for Chuck Norris' # 75X - Syntax Errors -HTTP_750 = "750 Didn't bother to compile it" -HTTP_753 = '753 Syntax Error' -HTTP_754 = '754 Too many semi-colons' -HTTP_755 = '755 Not enough semi-colons' -HTTP_759 = '759 Unexpected T_PAAMAYIM_NEKUDOTAYIM' +HTTP_750: Final[str] = "750 Didn't bother to compile it" +HTTP_753: Final[str] = '753 Syntax Error' +HTTP_754: Final[str] = '754 Too many semi-colons' +HTTP_755: Final[str] = '755 Not enough semi-colons' +HTTP_759: Final[str] = '759 Unexpected T_PAAMAYIM_NEKUDOTAYIM' # 77X - Predictable Problems -HTTP_771 = '771 Cached for too long' -HTTP_772 = '772 Not cached long enough' -HTTP_773 = '773 Not cached at all' -HTTP_774 = '774 Why was this cached?' -HTTP_776 = '776 Error on the Exception' -HTTP_777 = '777 Coincidence' -HTTP_778 = '778 Off By One Error' -HTTP_779 = '779 Off By Too Many To Count Error' +HTTP_771: Final[str] = '771 Cached for too long' +HTTP_772: Final[str] = '772 Not cached long enough' +HTTP_773: Final[str] = '773 Not cached at all' +HTTP_774: Final[str] = '774 Why was this cached?' 
+HTTP_776: Final[str] = '776 Error on the Exception' +HTTP_777: Final[str] = '777 Coincidence' +HTTP_778: Final[str] = '778 Off By One Error' +HTTP_779: Final[str] = '779 Off By Too Many To Count Error' # 78X - Somebody Else's Problem -HTTP_780 = '780 Project owner not responding' -HTTP_781 = '781 Operations' -HTTP_782 = '782 QA' -HTTP_783 = '783 It was a customer request, honestly' -HTTP_784 = '784 Management, obviously' -HTTP_785 = '785 TPS Cover Sheet not attached' -HTTP_786 = '786 Try it now' +HTTP_780: Final[str] = '780 Project owner not responding' +HTTP_781: Final[str] = '781 Operations' +HTTP_782: Final[str] = '782 QA' +HTTP_783: Final[str] = '783 It was a customer request, honestly' +HTTP_784: Final[str] = '784 Management, obviously' +HTTP_785: Final[str] = '785 TPS Cover Sheet not attached' +HTTP_786: Final[str] = '786 Try it now' # 79X - Internet crashed -HTTP_791 = '791 The Internet shut down due to copyright restrictions' -HTTP_792 = '792 Climate change driven catastrophic weather event' -HTTP_797 = '797 This is the last page of the Internet. Go back' -HTTP_799 = '799 End of the world' +HTTP_791: Final[str] = '791 The Internet shut down due to copyright restrictions' +HTTP_792: Final[str] = '792 Climate change driven catastrophic weather event' +HTTP_797: Final[str] = '797 This is the last page of the Internet. Go back' +HTTP_799: Final[str] = '799 End of the world' __all__ = ( 'HTTP_100', diff --git a/falcon/typing.py b/falcon/typing.py index e72c6fcb6..8d2785853 100644 --- a/falcon/typing.py +++ b/falcon/typing.py @@ -44,6 +44,17 @@ # Error serializers ErrorSerializer = Callable[['Request', 'Response', BaseException], Any] +JSONSerializable = Union[ + Dict[str, 'JSONSerializable'], + List['JSONSerializable'], + Tuple['JSONSerializable', ...], + bool, + float, + int, + str, + None, +] + # Sinks SinkPrefix = Union[str, Pattern] diff --git a/falcon/util/misc.py b/falcon/util/misc.py index a6a60a0dc..835dcbd29 100644 --- a/falcon/util/misc.py +++ b/falcon/util/misc.py @@ -44,12 +44,6 @@ except ImportError: _cy_encode_items_to_latin1 = None -try: - from falcon.cyutil.misc import isascii as _cy_isascii -except ImportError: - _cy_isascii = None - - __all__ = ( 'is_python_func', 'deprecated', @@ -506,30 +500,8 @@ def _encode_items_to_latin1(data: Dict[str, str]) -> List[Tuple[bytes, bytes]]: return result -def _isascii(string: str) -> bool: - """Return ``True`` if all characters in the string are ASCII. - - ASCII characters have code points in the range U+0000-U+007F. - - Note: - On Python 3.7+, this function is just aliased to ``str.isascii``. - - This is a pure-Python fallback for older CPython (where Cython is - unavailable) and PyPy versions. - - Args: - string (str): A string to test. - - Returns: - ``True`` if all characters are ASCII, ``False`` otherwise. - """ - - try: - string.encode('ascii') - return True - except ValueError: - return False - - _encode_items_to_latin1 = _cy_encode_items_to_latin1 or _encode_items_to_latin1 -isascii = getattr(str, 'isascii', _cy_isascii or _isascii) + +isascii = deprecated('This will be removed in V5. 
Please use `str.isascii`')( + str.isascii +) diff --git a/pyproject.toml b/pyproject.toml index d086c1811..4a58ef88c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,8 +3,7 @@ requires = [ "setuptools>=47", "wheel>=0.34", - "cython>=0.29.21; python_implementation == 'CPython'", # Skip cython when using pypy - "typing-extensions; python_version<'3.8'", + "cython>=3.0.8; python_implementation == 'CPython'", # Skip cython when using pypy ] [tool.mypy] @@ -92,7 +91,7 @@ [tool.black] # this is kept to avoid reformatting all the code if one were to # inadvertently run black on the project - target-version = ["py37"] + target-version = ["py38"] skip-string-normalization = true line-length = 88 extend-exclude = "falcon/vendor" @@ -101,12 +100,12 @@ # NOTE(vytas): Before switching to Ruff, Falcon used the Blue formatter. # With the below settings, accidentally running blue should yield # only minor cosmetic changes in a handful of files. - target-version = ["py37"] + target-version = ["py38"] line-length = 88 extend-exclude = "falcon/vendor" [tool.ruff] - target-version = "py37" + target-version = "py38" format.quote-style = "single" line-length = 88 extend-exclude = ["falcon/vendor"] @@ -169,6 +168,14 @@ filterwarnings = [ "ignore:path is deprecated\\. Use files\\(\\) instead:DeprecationWarning", "ignore:This process \\(.+\\) is multi-threaded", ] +markers = [ + "slow: mark Falcon tests as slower (potentially taking more than ~500ms).", +] testpaths = [ "tests" ] + +[tool.cibuildwheel] +build-frontend = "build" +test-requires = ["-r requirements/cibwtest"] +test-command = "pytest {project}/tests" diff --git a/requirements/cibwtest b/requirements/cibwtest new file mode 100644 index 000000000..33dd318b6 --- /dev/null +++ b/requirements/cibwtest @@ -0,0 +1,5 @@ +msgpack +pytest +pytest-asyncio<0.22.0 +pyyaml +requests diff --git a/requirements/mintest b/requirements/mintest index 8fce419e3..65e9332a6 100644 --- a/requirements/mintest +++ b/requirements/mintest @@ -1,7 +1,4 @@ coverage>=4.1 -msgpack -mujson pytest -pyyaml +pytest-asyncio<0.22.0 requests -ujson diff --git a/requirements/tests b/requirements/tests index e3623da8d..dd1fa0451 100644 --- a/requirements/tests +++ b/requirements/tests @@ -28,6 +28,3 @@ python-rapidjson; platform_machine != 's390x' and platform_machine != 'aarch64' # wheels are missing some EoL interpreters and non-x86 platforms; build would fail unless rust is available orjson; platform_python_implementation != 'PyPy' and platform_machine != 's390x' and platform_machine != 'aarch64' - -# Images for 3.7 on emulated architectures seem to only have OpenSSL 1.0.2 -urllib3 < 2.0; python_version <= '3.7' diff --git a/setup.cfg b/setup.cfg index 163d28d95..5bfe8c99a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -24,7 +24,6 @@ classifiers = Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: Implementation :: PyPy Programming Language :: Python :: 3 - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 @@ -53,7 +52,7 @@ project_urls = zip_safe = False include_package_data = True packages = find: -python_requires = >=3.7 +python_requires = >=3.8 install_requires = typing-extensions; python_version<"3.8" tests_require = diff --git a/tests/asgi/test_asgi_servers.py b/tests/asgi/test_asgi_servers.py index 6e790e0fd..5fdd9acde 100644 --- a/tests/asgi/test_asgi_servers.py +++ b/tests/asgi/test_asgi_servers.py @@ -9,12 +9,21 @@ import sys import 
time -import httpx import pytest import requests import requests.exceptions -import websockets -import websockets.exceptions + +try: + import httpx +except ImportError: + httpx = None # type: ignore + +try: + import websockets + import websockets.exceptions +except ImportError: + websockets = None # type: ignore + from falcon import testing @@ -166,6 +175,7 @@ def test_sse_client_disconnects_early(self, server_base_url): ) @pytest.mark.asyncio + @pytest.mark.skipif(httpx is None, reason='httpx is required for this test') async def test_stream_chunked_request(self, server_base_url): """Regression test for https://github.com/falconry/falcon/issues/2024""" @@ -183,6 +193,9 @@ async def emitter(): assert resp.json().get('drops') >= 1 +@pytest.mark.skipif( + websockets is None, reason='websockets is required for this test class' +) class TestWebSocket: @pytest.mark.asyncio @pytest.mark.parametrize('explicit_close', [True, False]) diff --git a/tests/asgi/test_boundedstream_asgi.py b/tests/asgi/test_boundedstream_asgi.py index db79b7f86..acb81215d 100644 --- a/tests/asgi/test_boundedstream_asgi.py +++ b/tests/asgi/test_boundedstream_asgi.py @@ -22,6 +22,7 @@ ) @pytest.mark.parametrize('extra_body', [True, False]) @pytest.mark.parametrize('set_content_length', [True, False]) +@pytest.mark.slow def test_read_all(body, extra_body, set_content_length): if extra_body and not set_content_length: pytest.skip( diff --git a/tests/asgi/test_buffered_reader.py b/tests/asgi/test_buffered_reader.py index f97744893..01cab8b1c 100644 --- a/tests/asgi/test_buffered_reader.py +++ b/tests/asgi/test_buffered_reader.py @@ -212,6 +212,7 @@ async def test_read(reader1, sizes, expected): @pytest.mark.parametrize('start_size', [1, 16777216]) +@pytest.mark.slow @falcon.runs_sync async def test_varying_read_size(reader2, start_size): size = start_size @@ -318,6 +319,7 @@ async def test_invalid_delimiter_length(reader1): (13372477, 51637898), ], ) +@pytest.mark.slow @falcon.runs_sync async def test_irregular_large_read_until(reader2, size1, size2): delimiter = b'--boundary1234567890--' @@ -376,6 +378,7 @@ async def test_small_reads(reader3): assert last.endswith(b'4') +@pytest.mark.slow @falcon.runs_sync async def test_small_reads_with_delimiter(reader3): ops = 0 diff --git a/tests/asgi/test_example_asgi.py b/tests/asgi/test_example_asgi.py deleted file mode 100644 index f67ee3af6..000000000 --- a/tests/asgi/test_example_asgi.py +++ /dev/null @@ -1,223 +0,0 @@ -# examples/things_advanced_asgi.py - -import json -import logging -import uuid - -import httpx - -import falcon -import falcon.asgi - - -class StorageEngine: - async def get_things(self, marker, limit): - return [{'id': str(uuid.uuid4()), 'color': 'green'}] - - async def add_thing(self, thing): - thing['id'] = str(uuid.uuid4()) - return thing - - -class StorageError(Exception): - @staticmethod - async def handle(ex, req, resp, params): - # TODO: Log the error, clean up, etc. 
before raising - raise falcon.HTTPInternalServerError() - - -class SinkAdapter: - engines = { - 'ddg': 'https://duckduckgo.com', - 'y': 'https://search.yahoo.com/search', - } - - async def __call__(self, req, resp, engine): - url = self.engines[engine] - params = {'q': req.get_param('q', True)} - - async with httpx.AsyncClient() as client: - result = await client.get(url, params=params) - - resp.status = result.status_code - resp.content_type = result.headers['content-type'] - resp.text = result.text - - -class AuthMiddleware: - async def process_request(self, req, resp): - token = req.get_header('Authorization') - account_id = req.get_header('Account-ID') - - challenges = ['Token type="Fernet"'] - - if token is None: - description = 'Please provide an auth token as part of the request.' - - raise falcon.HTTPUnauthorized( - title='Auth token required', - description=description, - challenges=challenges, - href='http://docs.example.com/auth', - ) - - if not self._token_is_valid(token, account_id): - description = ( - 'The provided auth token is not valid. ' - 'Please request a new token and try again.' - ) - - raise falcon.HTTPUnauthorized( - title='Authentication required', - description=description, - challenges=challenges, - href='http://docs.example.com/auth', - ) - - def _token_is_valid(self, token, account_id): - return True # Suuuuuure it's valid... - - -class RequireJSON: - async def process_request(self, req, resp): - if not req.client_accepts_json: - raise falcon.HTTPNotAcceptable( - description='This API only supports responses encoded as JSON.', - href='http://docs.examples.com/api/json', - ) - - if req.method in ('POST', 'PUT'): - if 'application/json' not in req.content_type: - raise falcon.HTTPUnsupportedMediaType( - description='This API only supports requests encoded as JSON.', - href='http://docs.examples.com/api/json', - ) - - -class JSONTranslator: - # NOTE: Normally you would simply use req.get_media() and resp.media for - # this particular use case; this example serves only to illustrate - # what is possible. - - async def process_request(self, req, resp): - # NOTE: Test explicitly for 0, since this property could be None in - # the case that the Content-Length header is missing (in which case we - # can't know if there is a body without actually attempting to read - # it from the request stream.) - if req.content_length == 0: - # Nothing to do - return - - body = await req.stream.read() - if not body: - raise falcon.HTTPBadRequest( - title='Empty request body', - description='A valid JSON document is required.', - ) - - try: - req.context.doc = json.loads(body.decode('utf-8')) - - except (ValueError, UnicodeDecodeError): - description = ( - 'Could not decode the request body. The ' - 'JSON was incorrect or not encoded as ' - 'UTF-8.' - ) - - raise falcon.HTTPBadRequest(title='Malformed JSON', description=description) - - async def process_response(self, req, resp, resource, req_succeeded): - if not hasattr(resp.context, 'result'): - return - - resp.text = json.dumps(resp.context.result) - - -def max_body(limit): - async def hook(req, resp, resource, params): - length = req.content_length - if length is not None and length > limit: - msg = ( - 'The size of the request is too large. The body must not ' - 'exceed ' + str(limit) + ' bytes in length.' 
- ) - - raise falcon.HTTPPayloadTooLarge( - title='Request body is too large', description=msg - ) - - return hook - - -class ThingsResource: - def __init__(self, db): - self.db = db - self.logger = logging.getLogger('thingsapp.' + __name__) - - async def on_get(self, req, resp, user_id): - marker = req.get_param('marker') or '' - limit = req.get_param_as_int('limit') or 50 - - try: - result = await self.db.get_things(marker, limit) - except Exception as ex: - self.logger.error(ex) - - description = ( - 'Aliens have attacked our base! We will ' - 'be back as soon as we fight them off. ' - 'We appreciate your patience.' - ) - - raise falcon.HTTPServiceUnavailable( - title='Service Outage', description=description, retry_after=30 - ) - - # NOTE: Normally you would use resp.media for this sort of thing; - # this example serves only to demonstrate how the context can be - # used to pass arbitrary values between middleware components, - # hooks, and resources. - resp.context.result = result - - resp.set_header('Powered-By', 'Falcon') - resp.status = falcon.HTTP_200 - - @falcon.before(max_body(64 * 1024)) - async def on_post(self, req, resp, user_id): - try: - doc = req.context.doc - except AttributeError: - raise falcon.HTTPBadRequest( - title='Missing thing', - description='A thing must be submitted in the request body.', - ) - - proper_thing = await self.db.add_thing(doc) - - resp.status = falcon.HTTP_201 - resp.location = '/%s/things/%s' % (user_id, proper_thing['id']) - - -# The app instance is an ASGI callable -app = falcon.asgi.App( - middleware=[ - # AuthMiddleware(), - RequireJSON(), - JSONTranslator(), - ] -) - -db = StorageEngine() -things = ThingsResource(db) -app.add_route('/{user_id}/things', things) - -# If a responder ever raises an instance of StorageError, pass control to -# the given handler. -app.add_error_handler(StorageError, StorageError.handle) - -# Proxy some things to another service; this example shows how you might -# send parts of an API off to a legacy system that hasn't been upgraded -# yet, or perhaps is a single cluster that all data centers have to share. 
-sink = SinkAdapter() -app.add_sink(sink, r'/search/(?Pddg|y)\Z') diff --git a/tests/asgi/test_hello_asgi.py b/tests/asgi/test_hello_asgi.py index cbc5d3dc3..fb19e3c61 100644 --- a/tests/asgi/test_hello_asgi.py +++ b/tests/asgi/test_hello_asgi.py @@ -3,13 +3,17 @@ import tempfile from _util import disable_asgi_non_coroutine_wrapping # NOQA -import aiofiles import pytest import falcon from falcon import testing import falcon.asgi +try: + import aiofiles # type: ignore +except ImportError: + aiofiles = None # type: ignore + SIZE_1_KB = 1024 @@ -308,6 +312,7 @@ def test_filelike_closing(self, client, stream_factory, assert_closed): if assert_closed: assert resource.stream.close_called + @pytest.mark.skipif(aiofiles is None, reason='aiofiles is required for this test') def test_filelike_closing_aiofiles(self, client): resource = AIOFilesHelloResource() try: diff --git a/tests/asgi/test_response_media_asgi.py b/tests/asgi/test_response_media_asgi.py index b911c1486..01236de2e 100644 --- a/tests/asgi/test_response_media_asgi.py +++ b/tests/asgi/test_response_media_asgi.py @@ -9,6 +9,11 @@ import falcon.asgi from falcon.util.deprecation import DeprecatedWarning +try: + import msgpack # type: ignore +except ImportError: + msgpack = None # type: ignore + def create_client(resource, handlers=None): app = falcon.asgi.App() @@ -89,6 +94,7 @@ def test_non_ascii_json_serialization(document): ('application/x-msgpack'), ], ) +@pytest.mark.skipif(msgpack is None, reason='msgpack is required for this test') def test_msgpack(media_type): class TestResource: async def on_get(self, req, resp): diff --git a/tests/asgi/test_scheduled_callbacks.py b/tests/asgi/test_scheduled_callbacks.py index aa47e2ad4..36d4f4e5b 100644 --- a/tests/asgi/test_scheduled_callbacks.py +++ b/tests/asgi/test_scheduled_callbacks.py @@ -9,6 +9,7 @@ from falcon.asgi import App +@pytest.mark.slow def test_multiple(): class SomeResource: def __init__(self): diff --git a/tests/asgi/test_sync.py b/tests/asgi/test_sync.py index 7b40faed3..6baab8c75 100644 --- a/tests/asgi/test_sync.py +++ b/tests/asgi/test_sync.py @@ -8,6 +8,7 @@ import falcon.util +@pytest.mark.slow def test_sync_helpers(): safely_values = [] unsafely_values = [] diff --git a/tests/asgi/test_testing_asgi.py b/tests/asgi/test_testing_asgi.py index f2de736cd..915fec4de 100644 --- a/tests/asgi/test_testing_asgi.py +++ b/tests/asgi/test_testing_asgi.py @@ -9,6 +9,7 @@ @pytest.mark.asyncio +@pytest.mark.slow async def test_asgi_request_event_emitter_hang(): # NOTE(kgriffs): This tests the ASGI server behavior that # ASGIRequestEventEmitter simulates when emit() is called diff --git a/tests/asgi/test_ws.py b/tests/asgi/test_ws.py index 48dae09ab..6fb7b7667 100644 --- a/tests/asgi/test_ws.py +++ b/tests/asgi/test_ws.py @@ -2,7 +2,6 @@ from collections import deque import os -import cbor2 import pytest import falcon @@ -15,9 +14,9 @@ from falcon.testing.helpers import _WebSocketState as ClientWebSocketState try: - import rapidjson # type: ignore + import cbor2 # type: ignore except ImportError: - rapidjson = None # type: ignore + cbor2 = None # type: ignore try: @@ -26,6 +25,12 @@ msgpack = None # type: ignore +try: + import rapidjson # type: ignore +except ImportError: + rapidjson = None # type: ignore + + # NOTE(kgriffs): We do not use codes defined in the framework because we # want to verify that the correct value is being used. 
class CloseCode: @@ -109,6 +114,7 @@ async def on_websocket(self, req, ws, explicit): @pytest.mark.asyncio +@pytest.mark.slow async def test_echo(): # noqa: C901 consumer_sleep = 0.01 producer_loop = 10 @@ -407,6 +413,7 @@ async def on_websocket(self, req, ws): @pytest.mark.asyncio @pytest.mark.parametrize('custom_text', [True, False]) @pytest.mark.parametrize('custom_data', [True, False]) +@pytest.mark.skipif(msgpack is None, reason='msgpack is required for this test') async def test_media(custom_text, custom_data, conductor): # NOQA: C901 # TODO(kgriffs): Refactor to reduce McCabe score @@ -471,6 +478,8 @@ def deserialize(self, payload: str) -> object: ) if custom_data: + if cbor2 is None: + pytest.skip('cbor2 is required for this test') class CBORHandler(media.BinaryBaseHandlerWS): def serialize(self, media: object) -> bytes: @@ -1017,6 +1026,7 @@ def test_ws_base_not_implemented(): @pytest.mark.asyncio +@pytest.mark.slow async def test_ws_context_timeout(conductor): class Resource: async def on_websocket(self, req, ws): @@ -1089,6 +1099,7 @@ class Resource: @pytest.mark.asyncio +@pytest.mark.slow async def test_ws_responder_never_ready(conductor, monkeypatch): async def noop_close(obj, code=None): pass diff --git a/tests/conftest.py b/tests/conftest.py index b021132cd..e26f0cefe 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -87,12 +87,14 @@ def as_params(*values, prefix=None): @staticmethod def load_module(filename, parent_dir=None, suffix=None): - root = FALCON_ROOT - root = root / parent_dir if parent_dir is not None else root - path = root / filename + if parent_dir: + filename = pathlib.Path(parent_dir) / filename + else: + filename = pathlib.Path(filename) + path = FALCON_ROOT / filename if suffix is not None: path = path.with_name(f'{path.stem}_{suffix}.py') - prefix = '.'.join(path.parent.parts) + prefix = '.'.join(filename.parent.parts) module_name = f'{prefix}.{path.stem}' spec = importlib.util.spec_from_file_location(module_name, path) diff --git a/tests/test_buffered_reader.py b/tests/test_buffered_reader.py index b20e5aa07..247676cb8 100644 --- a/tests/test_buffered_reader.py +++ b/tests/test_buffered_reader.py @@ -169,6 +169,7 @@ def test_read_until_with_size(buffered_reader, size): assert stream.read_until(b'--boundary1234567890--', size) == (TEST_DATA[:size]) +@pytest.mark.slow def test_read_until(buffered_reader): stream = buffered_reader() @@ -186,6 +187,7 @@ def test_read_until(buffered_reader): (13372477, 51637898), ], ) +@pytest.mark.slow def test_irregular_large_read_until(buffered_reader, size1, size2): stream = buffered_reader() delimiter = b'--boundary1234567890--' @@ -345,6 +347,7 @@ def test_duck_compatibility_with_io_base(shorter_stream): assert not shorter_stream.writeable() +@pytest.mark.slow def test_fragmented_reads(fragmented_stream): b = io.BytesIO() fragmented_stream.pipe_until(b'--boundary1234567890--', b) diff --git a/tests/test_cmd_inspect_app.py b/tests/test_cmd_inspect_app.py index 7a6866f74..f2b4e895f 100644 --- a/tests/test_cmd_inspect_app.py +++ b/tests/test_cmd_inspect_app.py @@ -11,7 +11,11 @@ from falcon.testing import redirected _WIN32 = sys.platform.startswith('win') -_MODULE = 'tests.test_cmd_inspect_app' + +# NOTE(vytas): This is not the cleanest way to import as we lack __init__.py, +# but it works as pytest (when operating in the default "prepend" import mode) +# inserts the directory of every test file into sys.path. 
+_MODULE = 'test_cmd_inspect_app' class DummyResource: diff --git a/tests/test_httperror.py b/tests/test_httperror.py index 2de1972ee..313fb00e8 100644 --- a/tests/test_httperror.py +++ b/tests/test_httperror.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 - import datetime import http import json @@ -8,12 +6,16 @@ from _util import create_app # NOQA import pytest -import yaml import falcon import falcon.testing as testing from falcon.util.deprecation import DeprecatedWarning +try: + import yaml # type: ignore +except ImportError: + yaml = None # type: ignore + @pytest.fixture def client(asgi): @@ -331,6 +333,7 @@ def test_client_does_not_accept_json_or_xml(self, client): assert response.headers['Vary'] == 'Accept' assert not response.content + @pytest.mark.skipif(yaml is None, reason='PyYAML is required for this test') def test_custom_error_serializer(self, client): headers = { 'X-Error-Title': 'Storage service down', diff --git a/tests/test_httpstatus.py b/tests/test_httpstatus.py index e7ff51c17..3a031ffc6 100644 --- a/tests/test_httpstatus.py +++ b/tests/test_httpstatus.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 - import http from _util import create_app # NOQA diff --git a/tests/test_inspect.py b/tests/test_inspect.py index 5da970ed2..5273fbde4 100644 --- a/tests/test_inspect.py +++ b/tests/test_inspect.py @@ -1,5 +1,6 @@ from functools import partial import os +import pathlib import sys import _inspect_fixture as i_f @@ -10,6 +11,8 @@ from falcon import routing import falcon.asgi +HERE = pathlib.Path(__file__).resolve().parent + def get_app(asgi, cors=True, **kw): if asgi: @@ -33,9 +36,7 @@ def make_app(): app.add_route('/bar', i_f.OtherResponder(), suffix='id') app.add_static_route('/fal', os.path.abspath('falcon')) - app.add_static_route( - '/tes', os.path.abspath('tests'), fallback_filename='conftest.py' - ) + app.add_static_route('/tes', HERE, fallback_filename='conftest.py') return app @@ -54,9 +55,7 @@ def make_app_async(): app.add_route('/bar', i_f.OtherResponderAsync(), suffix='id') app.add_static_route('/fal', os.path.abspath('falcon')) - app.add_static_route( - '/tes', os.path.abspath('tests'), fallback_filename='conftest.py' - ) + app.add_static_route('/tes', HERE, fallback_filename='conftest.py') return app @@ -154,7 +153,7 @@ def test_static_routes(self, asgi): assert routes[-1].directory == os.path.abspath('falcon') assert routes[-1].fallback_filename is None assert routes[-2].prefix == '/tes/' - assert routes[-2].directory == os.path.abspath('tests') + assert routes[-2].directory == str(HERE) assert routes[-2].fallback_filename.endswith('conftest.py') def test_sink(self, asgi): diff --git a/tests/test_media_handlers.py b/tests/test_media_handlers.py index e0442751b..f2dbb96c8 100644 --- a/tests/test_media_handlers.py +++ b/tests/test_media_handlers.py @@ -4,9 +4,7 @@ import platform from _util import create_app # NOQA -import mujson import pytest -import ujson import falcon from falcon import media @@ -14,13 +12,26 @@ from falcon.asgi.stream import BoundedStream from falcon.util.deprecation import DeprecatedWarning +mujson = None orjson = None rapidjson = None +ujson = None + +try: + import mujson # type: ignore +except ImportError: + pass + try: import rapidjson # type: ignore except ImportError: pass +try: + import ujson # type: ignore +except ImportError: + pass + if platform.python_implementation() == 'CPython': try: import orjson # type: ignore @@ -32,8 +43,6 @@ SERIALIZATION_PARAM_LIST = [ # Default json.dumps, with only ascii (None, {'test': 'value'}, b'{"test":"value"}'), 
- (partial(mujson.dumps, ensure_ascii=True), {'test': 'value'}, b'{"test":"value"}'), - (ujson.dumps, {'test': 'value'}, b'{"test":"value"}'), ( partial(lambda media, **kwargs: json.dumps([media, kwargs]), ensure_ascii=True), {'test': 'value'}, @@ -52,15 +61,25 @@ b'{"key": "value"}', {'key': 'VALUE'}, ), - (mujson.loads, b'{"test": "value"}', {'test': 'value'}), - (ujson.loads, b'{"test": "value"}', {'test': 'value'}), -] -ALL_JSON_IMPL = [ - (json.dumps, json.loads), - (partial(mujson.dumps, ensure_ascii=True), mujson.loads), - (ujson.dumps, ujson.loads), ] +ALL_JSON_IMPL = [(json.dumps, json.loads)] +ALL_JSON_IMPL_IDS = ['stdlib'] + + +if mujson: + SERIALIZATION_PARAM_LIST += [ + ( + partial(mujson.dumps, ensure_ascii=True), + {'test': 'value'}, + b'{"test":"value"}', + ), + ] + DESERIALIZATION_PARAM_LIST += [ + (mujson.loads, b'{"test": "value"}', {'test': 'value'}), + ] + ALL_JSON_IMPL += [(partial(mujson.dumps, ensure_ascii=True), mujson.loads)] + ALL_JSON_IMPL_IDS += ['mujson'] if orjson: SERIALIZATION_PARAM_LIST += [ @@ -70,6 +89,7 @@ (orjson.loads, b'{"test": "value"}', {'test': 'value'}), ] ALL_JSON_IMPL += [(orjson.dumps, orjson.loads)] + ALL_JSON_IMPL_IDS += ['orjson'] if rapidjson: SERIALIZATION_PARAM_LIST += [ @@ -79,6 +99,36 @@ (rapidjson.loads, b'{"test": "value"}', {'test': 'value'}), ] ALL_JSON_IMPL += [(rapidjson.dumps, rapidjson.loads)] + ALL_JSON_IMPL_IDS += ['rapidjson'] + +if ujson: + SERIALIZATION_PARAM_LIST += [ + (ujson.dumps, {'test': 'value'}, b'{"test":"value"}'), + ] + DESERIALIZATION_PARAM_LIST += [ + (ujson.loads, b'{"test": "value"}', {'test': 'value'}), + ] + ALL_JSON_IMPL += [(ujson.dumps, ujson.loads)] + ALL_JSON_IMPL_IDS += ['ujson'] + + +@pytest.mark.parametrize( + 'library, name', + [ + (mujson, 'mujson'), + (orjson, 'orjson'), + (rapidjson, 'rapidjson'), + (ujson, 'ujson'), + ], + ids=['mujson', 'orjson', 'rapidjson', 'ujson'], +) +def test_check_json_library(library, name): + # NOTE(vytas): A synthetic test just to visualize which JSON libraries + # are absent and skipped. 
+ if library is None: + pytest.skip(f'{name} is not installed') + assert hasattr(library, 'dumps') + assert hasattr(library, 'loads') @pytest.mark.parametrize('func, body, expected', SERIALIZATION_PARAM_LIST) @@ -115,7 +165,7 @@ def test_deserialization(asgi, func, body, expected): assert result == expected -@pytest.mark.parametrize('dumps, loads', ALL_JSON_IMPL) +@pytest.mark.parametrize('dumps, loads', ALL_JSON_IMPL, ids=ALL_JSON_IMPL_IDS) @pytest.mark.parametrize('subclass', (True, False)) def test_full_app(asgi, dumps, loads, subclass): if subclass: diff --git a/tests/test_media_multipart.py b/tests/test_media_multipart.py index 1f91fe3e2..16b6b27e0 100644 --- a/tests/test_media_multipart.py +++ b/tests/test_media_multipart.py @@ -406,21 +406,19 @@ def _factory(options): multipart_handler = media.MultipartFormHandler() for key, value in options.items(): setattr(multipart_handler.parse_options, key, value) - req_handlers = media.Handlers( - { - falcon.MEDIA_JSON: media.JSONHandler(), - falcon.MEDIA_MULTIPART: multipart_handler, - } - ) + req_handlers = { + falcon.MEDIA_JSON: media.JSONHandler(), + falcon.MEDIA_MULTIPART: multipart_handler, + } + resp_handlers = { + falcon.MEDIA_JSON: media.JSONHandler(), + } + if msgpack: + resp_handlers[falcon.MEDIA_MSGPACK] = media.MessagePackHandler() app = create_app(asgi) - app.req_options.media_handlers = req_handlers - app.resp_options.media_handlers = media.Handlers( - { - falcon.MEDIA_JSON: media.JSONHandler(), - falcon.MEDIA_MSGPACK: media.MessagePackHandler(), - } - ) + app.req_options.media_handlers = media.Handlers(req_handlers) + app.resp_options.media_handlers = media.Handlers(resp_handlers) resource = AsyncMultipartAnalyzer() if asgi else MultipartAnalyzer() app.add_route('/submit', resource) diff --git a/tests/test_request_media.py b/tests/test_request_media.py index 4f3d8febc..79d5ba620 100644 --- a/tests/test_request_media.py +++ b/tests/test_request_media.py @@ -9,6 +9,11 @@ from falcon import testing from falcon import util +try: + import msgpack # type: ignore +except ImportError: + msgpack = None + def create_client(asgi, handlers=None, resource=None): if not resource: @@ -98,6 +103,7 @@ def test_json(client, media_type): ('application/x-msgpack'), ], ) +@pytest.mark.skipif(msgpack is None, reason='msgpack is required for this test') def test_msgpack(asgi, media_type): client = create_client( asgi, @@ -150,6 +156,7 @@ def test_unknown_media_type(asgi, media_type): @pytest.mark.parametrize('media_type', ['application/json', 'application/msgpack']) +@pytest.mark.skipif(msgpack is None, reason='msgpack is required for this test') def test_empty_body(asgi, media_type): client = _create_client_invalid_media( asgi, @@ -190,9 +197,8 @@ def test_invalid_json(asgi): assert str(client.resource.captured_error.value.__cause__) == str(e) +@pytest.mark.skipif(msgpack is None, reason='msgpack is required for this test') def test_invalid_msgpack(asgi): - import msgpack - handlers = {'application/msgpack': media.MessagePackHandler()} client = _create_client_invalid_media( asgi, errors.HTTPBadRequest, handlers=handlers diff --git a/tests/test_response_media.py b/tests/test_response_media.py index 4c72ce374..6bf71ab92 100644 --- a/tests/test_response_media.py +++ b/tests/test_response_media.py @@ -7,6 +7,11 @@ from falcon import media from falcon import testing +try: + import msgpack # type: ignore +except ImportError: + msgpack = None + @pytest.fixture def client(): @@ -94,6 +99,7 @@ def test_non_ascii_json_serialization(document): 
('application/x-msgpack'), ], ) +@pytest.mark.skipif(msgpack is None, reason='msgpack is required for this test') def test_msgpack(media_type): client = create_client( { diff --git a/tests/test_static.py b/tests/test_static.py index 2b9907adb..a6546e2a5 100644 --- a/tests/test_static.py +++ b/tests/test_static.py @@ -2,6 +2,7 @@ import io import os import pathlib +import posixpath import _util # NOQA import pytest @@ -13,17 +14,40 @@ import falcon.testing as testing +def normalize_path(path): + # NOTE(vytas): On CPython 3.13, ntpath.isabs() no longer returns True for + # Unix-like absolute paths that start with a single \. + # We work around this in tests by prepending a fake drive D:\ on Windows. + # See also: https://github.com/python/cpython/issues/117352 + is_pathlib_path = isinstance(path, pathlib.Path) + if not is_pathlib_path and not posixpath.isabs(path): + return path + + path = os.path.normpath(path) + if path.startswith('\\'): + path = 'D:' + path + return pathlib.Path(path) if is_pathlib_path else path + + @pytest.fixture() -def client(asgi): +def client(asgi, monkeypatch): + def add_static_route_normalized(obj, prefix, directory, **kwargs): + add_static_route_orig(obj, prefix, normalize_path(directory), **kwargs) + app = _util.create_app(asgi=asgi) + + app_cls = type(app) + add_static_route_orig = app_cls.add_static_route + monkeypatch.setattr(app_cls, 'add_static_route', add_static_route_normalized) + client = testing.TestClient(app) client.asgi = asgi return client -def create_sr(asgi, *args, **kwargs): +def create_sr(asgi, prefix, directory, **kwargs): sr_type = StaticRouteAsync if asgi else StaticRoute - return sr_type(*args, **kwargs) + return sr_type(prefix, normalize_path(directory), **kwargs) @pytest.fixture @@ -114,8 +138,7 @@ def __init__(self, size): def test_bad_path(asgi, uri, patch_open): patch_open(b'') - sr_type = StaticRouteAsync if asgi else StaticRoute - sr = sr_type('/static', '/var/www/statics') + sr = create_sr(asgi, '/static', '/var/www/statics') req = _util.create_req(asgi, host='test.com', path=uri, root_path='statics') @@ -229,7 +252,7 @@ async def run(): body = resp.stream.read() assert resp.content_type in _MIME_ALTERNATIVE.get(mtype, (mtype,)) - assert body.decode() == os.path.normpath('/var/www/statics' + expected_path) + assert body.decode() == normalize_path('/var/www/statics' + expected_path) assert resp.headers.get('accept-ranges') == 'bytes' @@ -360,7 +383,7 @@ async def run(): sr(req, resp) body = resp.stream.read() - assert body.decode() == os.path.normpath('/var/www/statics/css/test.css') + assert body.decode() == normalize_path('/var/www/statics/css/test.css') def test_lifo(client, patch_open): @@ -371,11 +394,11 @@ def test_lifo(client, patch_open): response = client.simulate_request(path='/downloads/thing.zip') assert response.status == falcon.HTTP_200 - assert response.text == os.path.normpath('/opt/somesite/downloads/thing.zip') + assert response.text == normalize_path('/opt/somesite/downloads/thing.zip') response = client.simulate_request(path='/downloads/archive/thingtoo.zip') assert response.status == falcon.HTTP_200 - assert response.text == os.path.normpath('/opt/somesite/x/thingtoo.zip') + assert response.text == normalize_path('/opt/somesite/x/thingtoo.zip') def test_lifo_negative(client, patch_open): @@ -386,11 +409,11 @@ def test_lifo_negative(client, patch_open): response = client.simulate_request(path='/downloads/thing.zip') assert response.status == falcon.HTTP_200 - assert response.text == 
os.path.normpath('/opt/somesite/downloads/thing.zip') + assert response.text == normalize_path('/opt/somesite/downloads/thing.zip') response = client.simulate_request(path='/downloads/archive/thingtoo.zip') assert response.status == falcon.HTTP_200 - assert response.text == os.path.normpath( + assert response.text == normalize_path( '/opt/somesite/downloads/archive/thingtoo.zip' ) @@ -450,14 +473,12 @@ def test_fallback_filename( asgi, uri, default, expected, content_type, downloadable, patch_open, monkeypatch ): def validate(path): - if os.path.normpath(default) not in path: + if normalize_path(default) not in path: raise IOError() patch_open(validate=validate) - monkeypatch.setattr( - 'os.path.isfile', lambda file: os.path.normpath(default) in file - ) + monkeypatch.setattr('os.path.isfile', lambda file: normalize_path(default) in file) sr = create_sr( asgi, @@ -484,7 +505,7 @@ async def run(): body = resp.stream.read() assert sr.match(req.path) - expected_content = os.path.normpath(os.path.join('/var/www/statics', expected)) + expected_content = normalize_path(os.path.join('/var/www/statics', expected)) assert body.decode() == expected_content assert resp.content_type in _MIME_ALTERNATIVE.get(content_type, (content_type,)) assert resp.headers.get('accept-ranges') == 'bytes' @@ -529,7 +550,7 @@ def test(prefix, directory, expected): assert response.status == falcon.HTTP_404 else: assert response.status == falcon.HTTP_200 - assert response.text == os.path.normpath(directory + expected) + assert response.text == normalize_path(directory + expected) assert int(response.headers['Content-Length']) == len(response.text) test('/static', '/opt/somesite/static/', static_exp) diff --git a/tests/test_status_codes.py b/tests/test_status_codes.py new file mode 100644 index 000000000..b89736883 --- /dev/null +++ b/tests/test_status_codes.py @@ -0,0 +1,28 @@ +import http +import sys +from typing import Tuple + +import pytest + +from falcon import status_codes + + +class TestStatusCodes: + @pytest.mark.skipif( + sys.version_info < (3, 13), reason='Outdated http statuses definitions' + ) + @pytest.mark.parametrize('status', status_codes.__all__) + def test_statuses_are_in_compliance_with_http_from_python313(self, status): + status_code, message = self._status_code_and_message(status) + if status_code >= 700: + pytest.skip('Codes above 700 are not defined in http package') + http_status = http.HTTPStatus(status_code) + if status_code in [413, 418, 422]: + assert http_status.phrase != message + else: + assert http_status.phrase == message + + def _status_code_and_message(self, status: str) -> Tuple[int, str]: + status = getattr(status_codes, status) + value, message = status.split(' ', 1) + return int(value), message diff --git a/tests/test_utils.py b/tests/test_utils.py index 4d1b51ff6..1f267bff6 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - from datetime import datetime from datetime import timezone import functools @@ -27,6 +25,11 @@ from falcon.util import structures from falcon.util import uri +try: + import msgpack # type: ignore +except ImportError: + msgpack = None + @pytest.fixture def app(asgi): @@ -645,25 +648,9 @@ def test_secure_filename_empty_value(self): with pytest.raises(ValueError): misc.secure_filename('') - @pytest.mark.parametrize( - 'string,expected_ascii', - [ - ('', True), - ('/', True), - ('/api', True), - ('/data/items/something?query=apples%20and%20oranges', True), - ('/food?item=ð\x9f\x8d\x94', False), - 
('\x00\x00\x7f\x00\x00\x7f\x00', True), - ('\x00\x00\x7f\x00\x00\x80\x00', False), - ], - ) - @pytest.mark.parametrize('method', ['isascii', '_isascii']) - def test_misc_isascii(self, string, expected_ascii, method): - isascii = getattr(misc, method) - if expected_ascii: - assert isascii(string) - else: - assert not isascii(string) + def test_misc_isascii(self): + with pytest.warns(deprecation.DeprecatedWarning): + assert misc.isascii('foobar') @pytest.mark.parametrize( @@ -1112,6 +1099,7 @@ def on_post(self, req, resp): MEDIA_URLENCODED, ], ) + @pytest.mark.skipif(msgpack is None, reason='msgpack is required for this test') def test_simulate_content_type_extra_handler(self, asgi, content_type): class TestResourceAsync(testing.SimpleTestResourceAsync): def __init__(self): @@ -1427,9 +1415,6 @@ def a_function(a=1, b=2): assert 'a_function(...)' in str(recwarn[0].message) -@pytest.mark.skipif( - falcon.PYTHON_VERSION < (3, 7), reason='module __getattr__ requires python 3.7' -) def test_json_deprecation(): with pytest.warns(deprecation.DeprecatedWarning, match='json'): util.json diff --git a/tests/test_wsgi.py b/tests/test_wsgi.py index 65be90d74..b8f029df5 100644 --- a/tests/test_wsgi.py +++ b/tests/test_wsgi.py @@ -1,5 +1,6 @@ import multiprocessing import os +import os.path import time from wsgiref.simple_server import make_server @@ -9,6 +10,7 @@ import falcon import falcon.testing as testing +_HERE = os.path.abspath(os.path.dirname(__file__)) _SERVER_HOST = 'localhost' _SERVER_PORT = 9800 + os.getpid() % 100 # Facilitates parallel test execution _SERVER_BASE_URL = 'http://{}:{}/'.format(_SERVER_HOST, _SERVER_PORT) @@ -22,6 +24,13 @@ def test_get(self): assert resp.status_code == 200 assert resp.text == '127.0.0.1' + def test_get_file(self): + # NOTE(vytas): There was a breaking change in the behaviour of + # ntpath.isabs() in CPython 3.13, let us verify basic file serving. + resp = requests.get(_SERVER_BASE_URL + 'tests/test_wsgi.py') + assert resp.status_code == 200 + assert 'class TestWSGIServer:' in resp.text + def test_put(self): body = '{}' resp = requests.put(_SERVER_BASE_URL, data=body) @@ -91,6 +100,7 @@ def on_post(self, req, resp): api = application = falcon.App() api.add_route('/', Things()) api.add_route('/bucket', Bucket()) + api.add_static_route('/tests', _HERE) server = make_server(host, port, application) diff --git a/tools/testing/fetch_mailman.sh b/tools/testing/fetch_mailman.sh index 18135e786..6fe642c8c 100755 --- a/tools/testing/fetch_mailman.sh +++ b/tools/testing/fetch_mailman.sh @@ -19,14 +19,11 @@ cd $MAILMAN_PATH # git checkout tags/$MAILMAN_VERSION # NOTE(vytas): Patch tox.ini to introduce a new Falcon environment. -# TODO(vytas): Remove the shim pinning importlib-resources once -# https://gitlab.com/mailman/mailman/-/merge_requests/1130 is merged upstream. 
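Aside (illustrative, not part of the patch): normalize_path() in test_static.py and the new test_get_file check both work around the ntpath.isabs() change in CPython 3.13 noted above (python/cpython#117352). The sketch below shows the behavioural difference; ntpath is pure string handling, so it runs on any platform.

    import ntpath

    # Rooted but driveless: absolute on CPython <= 3.12, no longer on 3.13+.
    print(ntpath.isabs('\\var\\www\\statics'))    # 3.12: True, 3.13: False
    # Prepending a drive (the fake 'D:' used by normalize_path) restores it.
    print(ntpath.isabs('D:\\var\\www\\statics'))  # True on both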
cat <<EOT >> tox.ini [testenv:falcon-nocov] -basepython = python3.8 +basepython = python3.12 commands_pre = - pip install "importlib-resources < 6.0" pip uninstall -y falcon pip install $FALCON_ROOT EOT diff --git a/tox.ini b/tox.ini index 56a13d756..cd5444fb6 100644 --- a/tox.ini +++ b/tox.ini @@ -19,7 +19,6 @@ envlist = cleanup, mypy_tests, mintest, pytest, - pytest_sans_msgpack, coverage, towncrier @@ -83,10 +82,10 @@ commands = python "{toxinidir}/tools/clean.py" "{toxinidir}/falcon" [testenv:mintest] setenv = PIP_CONFIG_FILE={toxinidir}/pip.conf - PYTHONASYNCIODEBUG=1 + PYTHONASYNCIODEBUG=0 FALCON_DISABLE_CYTHON=Y deps = -r{toxinidir}/requirements/mintest -commands = coverage run -m pytest tests --ignore=tests/asgi [] +commands = coverage run -m pytest tests -k 'not slow' [] [testenv:pytest] deps = {[testenv]deps} @@ -182,12 +181,6 @@ setenv = PYTHONASYNCIODEBUG=1 commands = pytest tests [] -[testenv:py37_cython] -basepython = python3.7 -deps = {[with-cython]deps} -setenv = {[with-cython]setenv} -commands = {[with-cython]commands} - [testenv:py38_cython] basepython = python3.8 deps = {[with-cython]deps} @@ -471,7 +464,7 @@ commands = # -------------------------------------------------------------------- [testenv:hug] -basepython = python3.7 +basepython = python3.8 deps = virtualenv commands = {toxinidir}/tools/testing/install_hug.sh
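Aside (illustrative, not part of the patch): the mintest environment now deselects the newly marked tests via -k 'not slow'. If the slow marker is not already registered elsewhere, a conftest.py hook along these lines (a hypothetical sketch) would silence pytest's unknown-mark warning and also allow selection with -m:

    # Hypothetical addition to tests/conftest.py; not part of this patch.
    def pytest_configure(config):
        # Register the custom marker so pytest does not warn about unknown marks
        # and so "-m 'not slow'" works as well as the "-k" expression above.
        config.addinivalue_line(
            'markers', 'slow: long-running tests deselected by the mintest tox env'
        )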