From c04d9a49547f3db7cfeed0bb587c95a410ae37e6 Mon Sep 17 00:00:00 2001
From: Tomas Roun
Date: Sun, 5 Jan 2025 20:44:07 +0100
Subject: [PATCH 1/4] Use a more precise return type for tokenize.untokenize()

---
 stdlib/tokenize.pyi | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/stdlib/tokenize.pyi b/stdlib/tokenize.pyi
index 7b68f791a8c0..24c62de813ed 100644
--- a/stdlib/tokenize.pyi
+++ b/stdlib/tokenize.pyi
@@ -4,7 +4,7 @@ from collections.abc import Callable, Generator, Iterable, Sequence
 from re import Pattern
 from token import *
 from token import EXACT_TOKEN_TYPES as EXACT_TOKEN_TYPES
-from typing import Any, NamedTuple, TextIO, type_check_only
+from typing import NamedTuple, TextIO, type_check_only
 from typing_extensions import TypeAlias
 
 __all__ = [
@@ -132,7 +132,7 @@ class Untokenizer:
 
 # the docstring says "returns bytes" but is incorrect --
 # if the ENCODING token is missing, it skips the encode
-def untokenize(iterable: Iterable[_Token]) -> Any: ...
+def untokenize(iterable: Iterable[_Token]) -> bytes | str: ...
 def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ...
 def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ...
 def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...

From a3017c10f551f4795acb38ffb3780e5b2528dcf6 Mon Sep 17 00:00:00 2001
From: Akuli
Date: Wed, 15 Jan 2025 18:52:02 +0200
Subject: [PATCH 2/4] Use the `Any` trick

---
 stdlib/tokenize.pyi | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/stdlib/tokenize.pyi b/stdlib/tokenize.pyi
index 24c62de813ed..f61a0bd58b47 100644
--- a/stdlib/tokenize.pyi
+++ b/stdlib/tokenize.pyi
@@ -132,7 +132,7 @@ class Untokenizer:
 
 # the docstring says "returns bytes" but is incorrect --
 # if the ENCODING token is missing, it skips the encode
-def untokenize(iterable: Iterable[_Token]) -> bytes | str: ...
+def untokenize(iterable: Iterable[_Token]) -> str | Any: ...  # str in most use cases
 def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ...
 def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ...
 def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...
From fcd9cc5557d76aabcb680663700cc4cd8ce2ad7e Mon Sep 17 00:00:00 2001
From: Akuli
Date: Wed, 15 Jan 2025 18:53:58 +0200
Subject: [PATCH 3/4] Import `Any`

---
 stdlib/tokenize.pyi | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/stdlib/tokenize.pyi b/stdlib/tokenize.pyi
index f61a0bd58b47..5f022844cea8 100644
--- a/stdlib/tokenize.pyi
+++ b/stdlib/tokenize.pyi
@@ -4,7 +4,7 @@ from collections.abc import Callable, Generator, Iterable, Sequence
 from re import Pattern
 from token import *
 from token import EXACT_TOKEN_TYPES as EXACT_TOKEN_TYPES
-from typing import NamedTuple, TextIO, type_check_only
+from typing import Any, NamedTuple, TextIO, type_check_only
 from typing_extensions import TypeAlias
 
 __all__ = [

From 9e5b62be113fd8eaf655ab2b29cdd45946af2aa4 Mon Sep 17 00:00:00 2001
From: Tomas Roun
Date: Thu, 16 Jan 2025 20:17:30 +0100
Subject: [PATCH 4/4] Update comment

---
 stdlib/tokenize.pyi | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/stdlib/tokenize.pyi b/stdlib/tokenize.pyi
index 5f022844cea8..2655f2f0266a 100644
--- a/stdlib/tokenize.pyi
+++ b/stdlib/tokenize.pyi
@@ -130,9 +130,8 @@ class Untokenizer:
     if sys.version_info >= (3, 12):
        def escape_brackets(self, token: str) -> str: ...
 
-# the docstring says "returns bytes" but is incorrect --
-# if the ENCODING token is missing, it skips the encode
-def untokenize(iterable: Iterable[_Token]) -> str | Any: ...  # str in most use cases
+# Returns str, unless the ENCODING token is present, in which case it returns bytes.
+def untokenize(iterable: Iterable[_Token]) -> str | Any: ...
 def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ...
 def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ...
 def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...
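
Note (illustrative, not part of the patch series): the final annotation uses the "Any trick", declaring the result as `str | Any` so type checkers assume str in the common case while still accepting code paths that handle the bytes case. At runtime, untokenize() returns bytes only when an ENCODING token is present in the input. A minimal standard-library sketch of both behaviors:

# Illustrative sketch: why untokenize() cannot be annotated as plain str.
import io
import tokenize

source = "x = 1 + 2\n"

# generate_tokens() reads str and emits no ENCODING token,
# so untokenize() returns str here.
str_tokens = list(tokenize.generate_tokens(io.StringIO(source).readline))
assert isinstance(tokenize.untokenize(str_tokens), str)

# tokenize() reads bytes and emits an ENCODING token first,
# so untokenize() encodes its output and returns bytes here.
byte_tokens = list(tokenize.tokenize(io.BytesIO(source.encode("utf-8")).readline))
assert isinstance(tokenize.untokenize(byte_tokens), bytes)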