From bb142e734eeccb3e980a634c84a383140e8c45c0 Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> Date: Mon, 10 Nov 2025 11:53:37 +0200 Subject: [PATCH 1/4] Drop support for EOL Python 3.9 --- .flake8 | 3 +-- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- .github/workflows/fuzz.yml | 2 +- .github/workflows/pypi_upload.yml | 2 +- .github/workflows/test.yml | 8 +++----- CHANGES.md | 2 ++ README.md | 5 +++-- autoload/black.vim | 4 ++-- docs/contributing/the_basics.md | 2 +- docs/getting_started.md | 5 +++-- docs/integrations/editors.md | 4 ++-- plugin/black.vim | 4 ++-- pyproject.toml | 11 +++++------ scripts/migrate-black.py | 2 +- src/black/__init__.py | 2 +- src/black/handle_ipynb_magics.py | 8 +------- src/black/nodes.py | 8 +------- src/black/strings.py | 4 ++-- tests/test_schema.py | 11 +++-------- tests/test_trans.py | 2 +- tox.ini | 2 +- 21 files changed, 38 insertions(+), 55 deletions(-) diff --git a/.flake8 b/.flake8 index f8dca18e7cf..0e5365a6eed 100644 --- a/.flake8 +++ b/.flake8 @@ -1,6 +1,5 @@ [flake8] -# B905 should be enabled when we drop support for 3.9 -ignore = E203, E266, E501, E701, E704, W503, B905, B907 +ignore = E203, E266, E501, E701, E704, W503, B907 # line length is intentionally set to 80 here because black uses Bugbear # See https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#bugbear for more details max-line-length = 80 diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 48aa9291b05..e45a4b331da 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -41,7 +41,7 @@ this = "code" And run it with these arguments: ```sh -$ black file.py --target-version py39 +$ black file.py --target-version py310 ``` The resulting error is: diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index 6ebcf7d316c..1f817f70e84 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ 
-22,7 +22,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.9", "3.10", "3.11", "3.12.4", "3.13", "3.14"] + python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 3d81c9d31f0..918429e2952 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -75,7 +75,7 @@ jobs: github.event_name == 'pull_request' && !contains(github.event.pull_request.labels.*.name, 'ci: build all wheels') run: | { - CIBW_BUILD="cp39-*" cibuildwheel --print-build-identifiers --platform linux | pyp 'json.dumps({"only": x, "os": "ubuntu-latest"})' + CIBW_BUILD="cp310-*" cibuildwheel --print-build-identifiers --platform linux | pyp 'json.dumps({"only": x, "os": "ubuntu-latest"})' CIBW_BUILD="cp314-*" cibuildwheel --print-build-identifiers --platform windows | pyp 'json.dumps({"only": x, "os": "windows-latest"})' } | pyp 'json.dumps(list(map(json.loads, lines)))' > /tmp/matrix env: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 7bc5ec2f671..d9440b04db5 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -31,16 +31,14 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.9", "3.10", "3.11", "3.12.10", "3.13", "3.14", "pypy-3.9"] + python-version: ["3.10", "3.11", "3.12.10", "3.13", "3.14", "pypy-3.11"] os: [ubuntu-latest, macOS-latest, windows-latest, windows-11-arm] exclude: - # setup-python only supports 3.11+ on arm64 windows - - os: windows-11-arm - python-version: "3.9" + # setup-python only supports CPython 3.11+ on arm64 windows - os: windows-11-arm python-version: "3.10" - os: windows-11-arm - python-version: "pypy-3.9" + python-version: "pypy-3.11" steps: - uses: actions/checkout@v5 diff --git a/CHANGES.md b/CHANGES.md index fd54b5b8eef..a38bfe70b13 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -9,6 +9,8 @@ +- Black no longer supports running with Python 
3.9 (#4842) + ### Stable style diff --git a/README.md b/README.md index 42fcba8e41a..30f5267155b 100644 --- a/README.md +++ b/README.md @@ -39,8 +39,9 @@ Try it out now using the [Black Playground](https://black.vercel.app). Watch the ### Installation -_Black_ can be installed by running `pip install black`. It requires Python 3.9+ to run. -If you want to format Jupyter Notebooks, install with `pip install "black[jupyter]"`. +_Black_ can be installed by running `pip install black`. It requires Python 3.10+ to +run. If you want to format Jupyter Notebooks, install with +`pip install "black[jupyter]"`. ### Usage diff --git a/autoload/black.vim b/autoload/black.vim index 15c5992ee3d..76f2bf30f48 100644 --- a/autoload/black.vim +++ b/autoload/black.vim @@ -84,8 +84,8 @@ def _initialize_black_env(upgrade=False): return True pyver = sys.version_info[:3] - if pyver < (3, 9): - print("Sorry, Black requires Python 3.9+ to run.") + if pyver < (3, 10): + print("Sorry, Black requires Python 3.10+ to run.") return False from pathlib import Path diff --git a/docs/contributing/the_basics.md b/docs/contributing/the_basics.md index a8a9b25fa4f..f69867eb08d 100644 --- a/docs/contributing/the_basics.md +++ b/docs/contributing/the_basics.md @@ -53,7 +53,7 @@ Further examples of invoking the tests (.venv)$ tox --parallel=auto # Run tests on a specific python version -(.venv)$ tox -e py39 +(.venv)$ tox -e py314 # Run an individual test (.venv)$ pytest -k diff --git a/docs/getting_started.md b/docs/getting_started.md index 98cc15739c2..27cbeed10df 100644 --- a/docs/getting_started.md +++ b/docs/getting_started.md @@ -16,8 +16,9 @@ Also, you can try out _Black_ online for minimal fuss on the ## Installation -_Black_ can be installed by running `pip install black`. It requires Python 3.9+ to run. -If you want to format Jupyter Notebooks, install with `pip install "black[jupyter]"`. +_Black_ can be installed by running `pip install black`. It requires Python 3.10+ to +run. 
If you want to format Jupyter Notebooks, install with +`pip install "black[jupyter]"`. If you use pipx, you can install Black with `pipx install black`. diff --git a/docs/integrations/editors.md b/docs/integrations/editors.md index 272cade7398..257112bc174 100644 --- a/docs/integrations/editors.md +++ b/docs/integrations/editors.md @@ -232,8 +232,8 @@ Configuration: #### Installation -This plugin **requires Vim 7.0+ built with Python 3.9+ support**. It needs Python 3.9 to -be able to run _Black_ inside the Vim process which is much faster than calling an +This plugin **requires Vim 7.0+ built with Python 3.10+ support**. It needs Python 3.10 +to be able to run _Black_ inside the Vim process which is much faster than calling an external command. ##### `vim-plug` diff --git a/plugin/black.vim b/plugin/black.vim index 0ecbef4c5ab..7180e80939f 100644 --- a/plugin/black.vim +++ b/plugin/black.vim @@ -27,7 +27,7 @@ if v:version < 700 || !has('python3') call add(messages, "vim7.0+") endif if !has('python3') - call add(messages, "Python 3.9 support") + call add(messages, "Python 3.10 support") endif echo "The black.vim plugin requires" join(messages, " and ") @@ -81,11 +81,11 @@ endif function BlackComplete(ArgLead, CmdLine, CursorPos) return [ -\ 'target_version=py39', \ 'target_version=py310', \ 'target_version=py311', \ 'target_version=py312', \ 'target_version=py313', +\ 'target_version=py314', \ ] endfunction diff --git a/pyproject.toml b/pyproject.toml index f2c329e3905..4f2c6726492 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ [tool.black] line-length = 88 -target-version = ['py39'] +target-version = ['py310'] include = '\.pyi?$' extend-exclude = ''' /( @@ -35,7 +35,7 @@ name = "black" description = "The uncompromising code formatter." 
license = "MIT" license-files = ["LICENSE"] -requires-python = ">=3.9" +requires-python = ">=3.10" authors = [ { name = "Ɓukasz Langa", email = "lukasz@langa.pl" }, ] @@ -55,7 +55,6 @@ classifiers = [ "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -153,10 +152,10 @@ options = { debug_level = "0" } build-verbosity = 1 # So these are the environments we target: -# - Python: CPython 3.9+ only +# - Python: CPython 3.10+ only # - Architecture (64-bit only): amd64 / x86_64, universal2, and arm64 # - OS: Linux (no musl), Windows, and macOS -build = "cp3*" +build = "cp31*" skip = [ "*-manylinux_i686", "*-musllinux_*", @@ -229,7 +228,7 @@ branch = true # Specify the target platform details in config, so your developers are # free to run mypy on Windows, Linux, or macOS and get consistent # results. 
-python_version = "3.9" +python_version = "3.10" mypy_path = "src" strict = true strict_bytes = true diff --git a/scripts/migrate-black.py b/scripts/migrate-black.py index f410c96b0e3..3c1bb71125b 100755 --- a/scripts/migrate-black.py +++ b/scripts/migrate-black.py @@ -40,7 +40,7 @@ def blackify(base_branch: str, black_command: str, logger: logging.Logger) -> in git("checkout", base_branch, f"-b{current_branch}-black") - for last_commit, commit in zip(commits, commits[1:]): + for last_commit, commit in zip(commits, commits[1:], strict=True): allow_empty = ( b"--allow-empty" in run(["git", "apply", "-h"], stdout=PIPE).stdout ) diff --git a/src/black/__init__.py b/src/black/__init__.py index b71c1a6bdbd..fde594d558e 100644 --- a/src/black/__init__.py +++ b/src/black/__init__.py @@ -549,7 +549,7 @@ def main( # noqa: C901 """The uncompromising code formatter.""" ctx.ensure_object(dict) - assert sys.version_info >= (3, 9), "Black requires Python 3.9+" + assert sys.version_info >= (3, 10), "Black requires Python 3.10+" if sys.version_info[:3] == (3, 12, 5): out( "Python 3.12.5 has a memory safety issue that can cause Black's " diff --git a/src/black/handle_ipynb_magics.py b/src/black/handle_ipynb_magics.py index e9ef6ae3d04..ba0b3c24963 100644 --- a/src/black/handle_ipynb_magics.py +++ b/src/black/handle_ipynb_magics.py @@ -5,15 +5,9 @@ import dataclasses import re import secrets -import sys from functools import lru_cache from importlib.util import find_spec -from typing import Optional - -if sys.version_info >= (3, 10): - from typing import TypeGuard -else: - from typing_extensions import TypeGuard +from typing import Optional, TypeGuard from black.mode import Mode from black.output import out diff --git a/src/black/nodes.py b/src/black/nodes.py index 76d565c90dc..a4780c01c34 100644 --- a/src/black/nodes.py +++ b/src/black/nodes.py @@ -2,14 +2,8 @@ blib2to3 Node/Leaf transformation-related utility functions. 
""" -import sys from collections.abc import Iterator -from typing import Final, Generic, Literal, Optional, TypeVar, Union - -if sys.version_info >= (3, 10): - from typing import TypeGuard -else: - from typing_extensions import TypeGuard +from typing import Final, Generic, Literal, Optional, TypeGuard, TypeVar, Union from mypy_extensions import mypyc_attr diff --git a/src/black/strings.py b/src/black/strings.py index 2dac4260c06..78c9f258fe2 100644 --- a/src/black/strings.py +++ b/src/black/strings.py @@ -288,7 +288,7 @@ def normalize_fstring_quotes( # edge case: new_segments[-1] = new_segments[-1][:-1] + '\\"' - for middle, new_segment in zip(middles, new_segments): + for middle, new_segment in zip(middles, new_segments, strict=True): orig_escape_count = middle.value.count("\\") new_escape_count = new_segment.count("\\") @@ -298,7 +298,7 @@ def normalize_fstring_quotes( if new_escape_count == orig_escape_count and quote == '"': return middles, quote # Prefer double quotes - for middle, new_segment in zip(middles, new_segments): + for middle, new_segment in zip(middles, new_segments, strict=True): middle.value = new_segment return middles, new_quote diff --git a/tests/test_schema.py b/tests/test_schema.py index 1ea95130f86..1798b3955c6 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -1,15 +1,10 @@ import importlib.metadata -import sys def test_schema_entrypoint() -> None: - if sys.version_info < (3, 10): - eps = importlib.metadata.entry_points()["validate_pyproject.tool_schema"] - (black_ep,) = [ep for ep in eps if ep.name == "black"] - else: - (black_ep,) = importlib.metadata.entry_points( - group="validate_pyproject.tool_schema", name="black" - ) + (black_ep,) = importlib.metadata.entry_points( + group="validate_pyproject.tool_schema", name="black" + ) black_fn = black_ep.load() schema = black_fn() diff --git a/tests/test_trans.py b/tests/test_trans.py index 224659ec2c5..d04eac74b4d 100644 --- a/tests/test_trans.py +++ b/tests/test_trans.py @@ 
-10,7 +10,7 @@ def check( # Checking slices isn't strictly necessary, but it's easier to verify at # a glance than only spans assert len(spans) == len(expected_slices) - for (i, j), slice in zip(spans, expected_slices): + for (i, j), slice in zip(spans, expected_slices, strict=True): assert 0 <= i <= j <= len(string) assert string[i:j] == slice diff --git a/tox.ini b/tox.ini index 12493c42686..3bb08983c28 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] isolated_build = true -envlist = {,ci-}py{39,310,311,312,313,314,py3},fuzz,run_self,generate_schema +envlist = {,ci-}py{310,311,312,313,314,py3},fuzz,run_self,generate_schema [testenv] setenv = From b6fd98d05d5e0bb54253fa4544b14030fdfcc25d Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> Date: Mon, 10 Nov 2025 12:13:08 +0200 Subject: [PATCH 2/4] pyupgrade --py310 except not profiling/ or tests/data/ --- action/main.py | 3 +- gallery/gallery.py | 20 +++---- scripts/check_pre_commit_rev_in_example.py | 4 +- src/black/__init__.py | 50 ++++++++-------- src/black/brackets.py | 8 +-- src/black/comments.py | 8 +-- src/black/concurrency.py | 10 ++-- src/black/debug.py | 2 +- src/black/files.py | 26 ++++---- src/black/handle_ipynb_magics.py | 6 +- src/black/linegen.py | 22 +++---- src/black/lines.py | 24 ++++---- src/black/nodes.py | 20 +++---- src/black/output.py | 10 ++-- src/black/ranges.py | 3 +- src/black/trans.py | 16 ++--- src/blackd/client.py | 8 +-- src/blib2to3/pgen2/driver.py | 16 ++--- src/blib2to3/pgen2/parse.py | 16 ++--- src/blib2to3/pgen2/pgen.py | 14 ++--- src/blib2to3/pgen2/tokenize.py | 7 +-- src/blib2to3/pytree.py | 70 +++++++++++----------- tests/test_black.py | 24 ++++---- tests/test_docs.py | 3 +- tests/util.py | 10 ++-- 25 files changed, 196 insertions(+), 204 deletions(-) diff --git a/action/main.py b/action/main.py index 2cabc9772ba..ff31eafc449 100644 --- a/action/main.py +++ b/action/main.py @@ -5,7 +5,6 @@ import sys from pathlib import Path from 
subprocess import PIPE, STDOUT, run -from typing import Union ACTION_PATH = Path(os.environ["GITHUB_ACTION_PATH"]) ENV_PATH = ACTION_PATH / ".black-env" @@ -95,7 +94,7 @@ def read_version_specifier_from_pyproject() -> str: return version -def find_black_version_in_array(array: object) -> Union[str, None]: +def find_black_version_in_array(array: object) -> str | None: if not isinstance(array, list): return None try: diff --git a/gallery/gallery.py b/gallery/gallery.py index a90650c4b20..d7fb3f6441f 100755 --- a/gallery/gallery.py +++ b/gallery/gallery.py @@ -11,7 +11,7 @@ from concurrent.futures import ThreadPoolExecutor from functools import lru_cache, partial from pathlib import Path -from typing import NamedTuple, Optional, Union, cast +from typing import NamedTuple, Union, cast from urllib.request import urlopen, urlretrieve PYPI_INSTANCE = "https://pypi.org/pypi" @@ -28,10 +28,10 @@ class BlackVersion(NamedTuple): version: str - config: Optional[str] = None + config: str | None = None -def get_pypi_download_url(package: str, version: Optional[str]) -> str: +def get_pypi_download_url(package: str, version: str | None) -> str: with urlopen(PYPI_INSTANCE + f"/{package}/json") as page: metadata = json.load(page) @@ -62,7 +62,7 @@ def get_top_packages() -> list[str]: return [package["project"] for package in result["rows"]] -def get_package_source(package: str, version: Optional[str]) -> str: +def get_package_source(package: str, version: str | None) -> str: if package == "cpython": if version is None: version = "main" @@ -93,7 +93,7 @@ def get_first_archive_member(archive: ArchiveKind) -> str: return archive.namelist()[0] -def download_and_extract(package: str, version: Optional[str], directory: Path) -> Path: +def download_and_extract(package: str, version: str | None, directory: Path) -> Path: source = get_package_source(package, version) local_file, _ = urlretrieve(source, directory / f"{package}-src") @@ -104,8 +104,8 @@ def download_and_extract(package: str, 
version: Optional[str], directory: Path) def get_package( - package: str, version: Optional[str], directory: Path -) -> Optional[Path]: + package: str, version: str | None, directory: Path +) -> Path | None: try: return download_and_extract(package, version, directory) except Exception: @@ -140,7 +140,7 @@ def git_add_and_commit(msg: str, repo: Path) -> None: def git_switch_branch( - branch: str, repo: Path, new: bool = False, from_branch: Optional[str] = None + branch: str, repo: Path, new: bool = False, from_branch: str | None = None ) -> None: args = ["git", "checkout"] if new: @@ -198,7 +198,7 @@ def black_runner(version: str, black_repo: Path) -> Path: def format_repo_with_version( repo: Path, - from_branch: Optional[str], + from_branch: str | None, black_repo: Path, black_version: BlackVersion, input_directory: Path, @@ -207,7 +207,7 @@ def format_repo_with_version( git_switch_branch(black_version.version, repo=black_repo) git_switch_branch(current_branch, repo=repo, new=True, from_branch=from_branch) - format_cmd: list[Union[Path, str]] = [ + format_cmd: list[Path | str] = [ black_runner(black_version.version, black_repo), (black_repo / "black.py").resolve(), ".", diff --git a/scripts/check_pre_commit_rev_in_example.py b/scripts/check_pre_commit_rev_in_example.py index cc45a31e1ed..0dbf0aca0ef 100644 --- a/scripts/check_pre_commit_rev_in_example.py +++ b/scripts/check_pre_commit_rev_in_example.py @@ -21,9 +21,9 @@ def main(changes: str, source_version_control: str) -> None: changes_html = commonmark.commonmark(changes) changes_soup = BeautifulSoup(changes_html, "html.parser") headers = changes_soup.find_all("h2") - latest_tag, *_ = [ + latest_tag, *_ = ( header.string for header in headers if header.string != "Unreleased" - ] + ) source_version_control_html = commonmark.commonmark(source_version_control) source_version_control_soup = BeautifulSoup( diff --git a/src/black/__init__.py b/src/black/__init__.py index fde594d558e..0cfed9d5282 100644 --- 
a/src/black/__init__.py +++ b/src/black/__init__.py @@ -19,7 +19,7 @@ from json.decoder import JSONDecodeError from pathlib import Path from re import Pattern -from typing import Any, Optional, Union +from typing import Any import click from click.core import ParameterSource @@ -114,8 +114,8 @@ def from_configuration( def read_pyproject_toml( - ctx: click.Context, param: click.Parameter, value: Optional[str] -) -> Optional[str]: + ctx: click.Context, param: click.Parameter, value: str | None +) -> str | None: """Inject Black configuration from "pyproject.toml" into defaults in `ctx`. Returns the path to a successfully found and read configuration file, None @@ -193,7 +193,7 @@ def spellcheck_pyproject_toml_keys( def target_version_option_callback( - c: click.Context, p: Union[click.Option, click.Parameter], v: tuple[str, ...] + c: click.Context, p: click.Option | click.Parameter, v: tuple[str, ...] ) -> list[TargetVersion]: """Compute the target versions from a --target-version flag. @@ -204,7 +204,7 @@ def target_version_option_callback( def enable_unstable_feature_callback( - c: click.Context, p: Union[click.Option, click.Parameter], v: tuple[str, ...] + c: click.Context, p: click.Option | click.Parameter, v: tuple[str, ...] 
) -> list[Preview]: """Compute the features from an --enable-unstable-feature flag.""" return [Preview[val] for val in v] @@ -224,8 +224,8 @@ def re_compile_maybe_verbose(regex: str) -> Pattern[str]: def validate_regex( ctx: click.Context, param: click.Parameter, - value: Optional[str], -) -> Optional[Pattern[str]]: + value: str | None, +) -> Pattern[str] | None: try: return re_compile_maybe_verbose(value) if value is not None else None except re.error as e: @@ -516,7 +516,7 @@ def validate_regex( @click.pass_context def main( # noqa: C901 ctx: click.Context, - code: Optional[str], + code: str | None, line_length: int, target_version: list[TargetVersion], check: bool, @@ -535,15 +535,15 @@ def main( # noqa: C901 enable_unstable_feature: list[Preview], quiet: bool, verbose: bool, - required_version: Optional[str], + required_version: str | None, include: Pattern[str], - exclude: Optional[Pattern[str]], - extend_exclude: Optional[Pattern[str]], - force_exclude: Optional[Pattern[str]], - stdin_filename: Optional[str], - workers: Optional[int], + exclude: Pattern[str] | None, + extend_exclude: Pattern[str] | None, + force_exclude: Pattern[str] | None, + stdin_filename: str | None, + workers: int | None, src: tuple[str, ...], - config: Optional[str], + config: str | None, no_cache: bool, ) -> None: """The uncompromising code formatter.""" @@ -738,11 +738,11 @@ def get_sources( quiet: bool, verbose: bool, include: Pattern[str], - exclude: Optional[Pattern[str]], - extend_exclude: Optional[Pattern[str]], - force_exclude: Optional[Pattern[str]], + exclude: Pattern[str] | None, + extend_exclude: Pattern[str] | None, + force_exclude: Pattern[str] | None, report: "Report", - stdin_filename: Optional[str], + stdin_filename: str | None, ) -> set[Path]: """Compute the set of files to be formatted.""" sources: set[Path] = set() @@ -750,7 +750,7 @@ def get_sources( assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}" using_default_exclude = exclude 
is None exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude - gitignore: Optional[dict[Path, PathSpec]] = None + gitignore: dict[Path, PathSpec] | None = None root_gitignore = get_gitignore(root) for s in src: @@ -994,7 +994,7 @@ def format_file_in_place( def format_stdin_to_stdout( fast: bool, *, - content: Optional[str] = None, + content: str | None = None, write_back: WriteBack = WriteBack.NO, mode: Mode, lines: Collection[tuple[int, int]] = (), @@ -1267,7 +1267,7 @@ def _format_str_once( } if supports_feature(versions, feature) } - block: Optional[LinesBlock] = None + block: LinesBlock | None = None for current_line in line_generator.visit(src_node): block = elt.maybe_empty_lines(current_line) dst_blocks.append(block) @@ -1335,7 +1335,7 @@ def decode_bytes(src: bytes, mode: Mode) -> tuple[FileContent, Encoding, NewLine def get_features_used( # noqa: C901 - node: Node, *, future_imports: Optional[set[str]] = None + node: Node, *, future_imports: set[str] | None = None ) -> set[Feature]: """Return a set of (relatively) new Python features used in this file. 
@@ -1496,7 +1496,7 @@ def get_features_used( # noqa: C901 return features -def _contains_asexpr(node: Union[Node, Leaf]) -> bool: +def _contains_asexpr(node: Node | Leaf) -> bool: """Return True if `node` contains an as-pattern.""" if node.type == syms.asexpr_test: return True @@ -1513,7 +1513,7 @@ def _contains_asexpr(node: Union[Node, Leaf]) -> bool: def detect_target_versions( - node: Node, *, future_imports: Optional[set[str]] = None + node: Node, *, future_imports: set[str] | None = None ) -> set[TargetVersion]: """Detect the version to target based on the nodes used.""" features = get_features_used(node, future_imports=future_imports) diff --git a/src/black/brackets.py b/src/black/brackets.py index c2e8be4348e..44a3c9a2946 100644 --- a/src/black/brackets.py +++ b/src/black/brackets.py @@ -2,7 +2,7 @@ from collections.abc import Iterable, Sequence from dataclasses import dataclass, field -from typing import Final, Optional, Union +from typing import Final, Union from black.nodes import ( BRACKET, @@ -63,7 +63,7 @@ class BracketTracker: depth: int = 0 bracket_match: dict[tuple[Depth, NodeType], Leaf] = field(default_factory=dict) delimiters: dict[LeafID, Priority] = field(default_factory=dict) - previous: Optional[Leaf] = None + previous: Leaf | None = None _for_loop_depths: list[int] = field(default_factory=list) _lambda_argument_depths: list[int] = field(default_factory=list) invisible: list[Leaf] = field(default_factory=list) @@ -211,7 +211,7 @@ def maybe_decrement_after_lambda_arguments(self, leaf: Leaf) -> bool: return False - def get_open_lsqb(self) -> Optional[Leaf]: + def get_open_lsqb(self) -> Leaf | None: """Return the most recent opening square bracket (if any).""" return self.bracket_match.get((self.depth - 1, token.RSQB)) @@ -230,7 +230,7 @@ def is_split_after_delimiter(leaf: Leaf) -> Priority: return 0 -def is_split_before_delimiter(leaf: Leaf, previous: Optional[Leaf] = None) -> Priority: +def is_split_before_delimiter(leaf: Leaf, previous: Leaf 
| None = None) -> Priority: """Return the priority of the `leaf` delimiter, given a line break before it. The delimiter priorities returned here are from those delimiters that would diff --git a/src/black/comments.py b/src/black/comments.py index 9a89c85e779..d7515b028f4 100644 --- a/src/black/comments.py +++ b/src/black/comments.py @@ -2,7 +2,7 @@ from collections.abc import Collection, Iterator from dataclasses import dataclass from functools import lru_cache -from typing import Final, Optional, Union +from typing import Final, Union from black.mode import Mode, Preview from black.nodes import ( @@ -414,7 +414,7 @@ def _handle_regular_fmt_block( # leaf (possibly followed by a DEDENT). hidden_value = hidden_value[:-1] - first_idx: Optional[int] = None + first_idx: int | None = None for ignored in ignored_nodes: index = ignored.remove() if first_idx is None: @@ -445,7 +445,7 @@ def generate_ignored_nodes( if _contains_fmt_directive(comment.value, FMT_SKIP): yield from _generate_ignored_nodes_from_fmt_skip(leaf, comment, mode) return - container: Optional[LN] = container_of(leaf) + container: LN | None = container_of(leaf) while container is not None and container.type != token.ENDMARKER: if is_fmt_on(container, mode=mode): return @@ -483,7 +483,7 @@ def generate_ignored_nodes( container = container.next_sibling -def _find_compound_statement_context(parent: Node) -> Optional[Node]: +def _find_compound_statement_context(parent: Node) -> Node | None: """Return the body node of a compound statement if we should respect fmt: skip. 
This handles one-line compound statements like: diff --git a/src/black/concurrency.py b/src/black/concurrency.py index 53a61456b63..e939dc34681 100644 --- a/src/black/concurrency.py +++ b/src/black/concurrency.py @@ -16,7 +16,7 @@ from concurrent.futures import Executor, ProcessPoolExecutor, ThreadPoolExecutor from multiprocessing import Manager from pathlib import Path -from typing import Any, Optional +from typing import Any from mypy_extensions import mypyc_attr @@ -41,7 +41,7 @@ def maybe_install_uvloop() -> None: pass -def cancel(tasks: Iterable["asyncio.Future[Any]"]) -> None: +def cancel(tasks: Iterable[asyncio.Future[Any]]) -> None: """asyncio signal handler that cancels all `tasks` and reports to stderr.""" err("Aborted!") for task in tasks: @@ -77,7 +77,7 @@ def reformat_many( write_back: WriteBack, mode: Mode, report: Report, - workers: Optional[int], + workers: int | None, no_cache: bool = False, ) -> None: """Reformat multiple files using a ProcessPoolExecutor.""" @@ -133,9 +133,9 @@ async def schedule_formatting( fast: bool, write_back: WriteBack, mode: Mode, - report: "Report", + report: Report, loop: asyncio.AbstractEventLoop, - executor: "Executor", + executor: Executor, no_cache: bool = False, ) -> None: """Run formatting of `sources` in parallel using the provided `executor`. diff --git a/src/black/debug.py b/src/black/debug.py index 939b20eee5e..f051c497d0b 100644 --- a/src/black/debug.py +++ b/src/black/debug.py @@ -44,7 +44,7 @@ def visit_default(self, node: LN) -> Iterator[T]: self.out(f" {node.value!r}", fg="blue", bold=False) @classmethod - def show(cls, code: Union[str, Leaf, Node]) -> None: + def show(cls, code: str | Leaf | Node) -> None: """Pretty-print the lib2to3 AST of a given string of `code`. Convenience method for debugging. 
diff --git a/src/black/files.py b/src/black/files.py index 72c5eddf9c0..17cabb3c5e5 100644 --- a/src/black/files.py +++ b/src/black/files.py @@ -5,7 +5,7 @@ from functools import lru_cache from pathlib import Path from re import Pattern -from typing import TYPE_CHECKING, Any, Optional, Union +from typing import TYPE_CHECKING, Any, Union from mypy_extensions import mypyc_attr from packaging.specifiers import InvalidSpecifier, Specifier, SpecifierSet @@ -33,7 +33,7 @@ @lru_cache -def _load_toml(path: Union[Path, str]) -> dict[str, Any]: +def _load_toml(path: Path | str) -> dict[str, Any]: with open(path, "rb") as f: return tomllib.load(f) @@ -45,7 +45,7 @@ def _cached_resolve(path: Path) -> Path: @lru_cache def find_project_root( - srcs: Sequence[str], stdin_filename: Optional[str] = None + srcs: Sequence[str], stdin_filename: str | None = None ) -> tuple[Path, str]: """Return a directory containing .git, .hg, or pyproject.toml. @@ -96,8 +96,8 @@ def find_project_root( def find_pyproject_toml( - path_search_start: tuple[str, ...], stdin_filename: Optional[str] = None -) -> Optional[str]: + path_search_start: tuple[str, ...], stdin_filename: str | None = None +) -> str | None: """Find the absolute filepath to a pyproject.toml if it exists""" path_project_root, _ = find_project_root(path_search_start, stdin_filename) path_pyproject_toml = path_project_root / "pyproject.toml" @@ -137,7 +137,7 @@ def parse_pyproject_toml(path_config: str) -> dict[str, Any]: def infer_target_version( pyproject_toml: dict[str, Any], -) -> Optional[list[TargetVersion]]: +) -> list[TargetVersion] | None: """Infer Black's target version from the project metadata in pyproject.toml. Supports the PyPA standard format (PEP 621): @@ -160,7 +160,7 @@ def infer_target_version( return None -def parse_req_python_version(requires_python: str) -> Optional[list[TargetVersion]]: +def parse_req_python_version(requires_python: str) -> list[TargetVersion] | None: """Parse a version string (i.e. 
``"3.7"``) to a list of TargetVersion. If parsing fails, will raise a packaging.version.InvalidVersion error. @@ -175,7 +175,7 @@ def parse_req_python_version(requires_python: str) -> Optional[list[TargetVersio return None -def parse_req_python_specifier(requires_python: str) -> Optional[list[TargetVersion]]: +def parse_req_python_specifier(requires_python: str) -> list[TargetVersion] | None: """Parse a specifier string (i.e. ``">=3.7,<3.10"``) to a list of TargetVersion. If parsing fails, will raise a packaging.specifiers.InvalidSpecifier error. @@ -255,7 +255,7 @@ def get_gitignore(root: Path) -> PathSpec: def resolves_outside_root_or_cannot_stat( path: Path, root: Path, - report: Optional[Report] = None, + report: Report | None = None, ) -> bool: """ Returns whether the path is a symbolic link that points outside the @@ -311,7 +311,7 @@ def _path_is_ignored( def path_is_excluded( normalized_path: str, - pattern: Optional[Pattern[str]], + pattern: Pattern[str] | None, ) -> bool: match = pattern.search(normalized_path) if pattern else None return bool(match and match.group(0)) @@ -322,10 +322,10 @@ def gen_python_files( root: Path, include: Pattern[str], exclude: Pattern[str], - extend_exclude: Optional[Pattern[str]], - force_exclude: Optional[Pattern[str]], + extend_exclude: Pattern[str] | None, + force_exclude: Pattern[str] | None, report: Report, - gitignore_dict: Optional[dict[Path, PathSpec]], + gitignore_dict: dict[Path, PathSpec] | None, *, verbose: bool, quiet: bool, diff --git a/src/black/handle_ipynb_magics.py b/src/black/handle_ipynb_magics.py index ba0b3c24963..c84fe6219fb 100644 --- a/src/black/handle_ipynb_magics.py +++ b/src/black/handle_ipynb_magics.py @@ -7,7 +7,7 @@ import secrets from functools import lru_cache from importlib.util import find_spec -from typing import Optional, TypeGuard +from typing import TypeGuard from black.mode import Mode from black.output import out @@ -352,7 +352,7 @@ def _get_str_args(args: list[ast.expr]) -> list[str]: 
@dataclasses.dataclass(frozen=True) class CellMagic: name: str - params: Optional[str] + params: str | None body: str @property @@ -382,7 +382,7 @@ class CellMagicFinder(ast.NodeVisitor): and we look for instances of the latter. """ - def __init__(self, cell_magic: Optional[CellMagic] = None) -> None: + def __init__(self, cell_magic: CellMagic | None = None) -> None: self.cell_magic = cell_magic def visit_Expr(self, node: ast.Expr) -> None: diff --git a/src/black/linegen.py b/src/black/linegen.py index cf171c12c62..3bf599887e7 100644 --- a/src/black/linegen.py +++ b/src/black/linegen.py @@ -8,7 +8,7 @@ from dataclasses import replace from enum import Enum, auto from functools import partial, wraps -from typing import Optional, Union, cast +from typing import Union, cast from black.brackets import ( COMMA_PRIORITY, @@ -303,7 +303,7 @@ def visit_suite(self, node: Node) -> Iterator[Line]: def visit_simple_stmt(self, node: Node) -> Iterator[Line]: """Visit a statement without nested statements.""" - prev_type: Optional[int] = None + prev_type: int | None = None for child in node.children: if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child): wrap_in_parentheses(node, child, visible=False) @@ -667,7 +667,7 @@ def _hugging_power_ops_line_to_string( line: Line, features: Collection[Feature], mode: Mode, -) -> Optional[str]: +) -> str | None: try: return line_to_string(next(hug_power_op(line, features, mode))) except CannotTransform: @@ -849,7 +849,7 @@ def left_hand_split( body_leaves: list[Leaf] = [] head_leaves: list[Leaf] = [] current_leaves = head_leaves - matching_bracket: Optional[Leaf] = None + matching_bracket: Leaf | None = None depth = 0 for index, leaf in enumerate(line.leaves): if index == 2 and leaf.type == token.LSQB: @@ -936,8 +936,8 @@ def _first_right_hand_split( body_leaves: list[Leaf] = [] head_leaves: list[Leaf] = [] current_leaves = tail_leaves - opening_bracket: Optional[Leaf] = None - closing_bracket: Optional[Leaf] = None + 
opening_bracket: Leaf | None = None + closing_bracket: Leaf | None = None for leaf in reversed(line.leaves): if current_leaves is body_leaves: if leaf is opening_bracket: @@ -958,7 +958,7 @@ def _first_right_hand_split( body_leaves.reverse() head_leaves.reverse() - body: Optional[Line] = None + body: Line | None = None if ( Preview.hug_parens_with_braces_and_square_brackets in line.mode and tail_leaves[0].value @@ -1296,7 +1296,7 @@ def split_wrapper( return split_wrapper -def _get_last_non_comment_leaf(line: Line) -> Optional[int]: +def _get_last_non_comment_leaf(line: Line) -> int | None: for leaf_idx in range(len(line.leaves) - 1, 0, -1): if line.leaves[leaf_idx].type != STANDALONE_COMMENT: return leaf_idx @@ -1632,7 +1632,7 @@ def _maybe_wrap_cms_in_parens( or node.children[1].type == syms.atom ): return - colon_index: Optional[int] = None + colon_index: int | None = None for i in range(2, len(node.children)): if node.children[i].type == token.COLON: colon_index = i @@ -1865,8 +1865,8 @@ def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[set[Leaf yield omit length = 4 * line.depth - opening_bracket: Optional[Leaf] = None - closing_bracket: Optional[Leaf] = None + opening_bracket: Leaf | None = None + closing_bracket: Leaf | None = None inner_brackets: set[LeafID] = set() for index, leaf, leaf_length in line.enumerate_with_length(is_reversed=True): length += leaf_length diff --git a/src/black/lines.py b/src/black/lines.py index 1d947ad37b5..cc66ff0e687 100644 --- a/src/black/lines.py +++ b/src/black/lines.py @@ -48,7 +48,7 @@ class Line: bracket_tracker: BracketTracker = field(default_factory=BracketTracker) inside_brackets: bool = False should_split_rhs: bool = False - magic_trailing_comma: Optional[Leaf] = None + magic_trailing_comma: Leaf | None = None def append( self, leaf: Leaf, preformatted: bool = False, track_bracket: bool = False @@ -156,7 +156,7 @@ def is_def(self) -> bool: return False try: - second_leaf: Optional[Leaf] = 
self.leaves[1] + second_leaf: Leaf | None = self.leaves[1] except IndexError: second_leaf = None return (first_leaf.type == token.NAME and first_leaf.value == "def") or ( @@ -219,7 +219,7 @@ def opens_block(self) -> bool: return self.leaves[-1].type == token.COLON def is_fmt_pass_converted( - self, *, first_leaf_matches: Optional[Callable[[Leaf], bool]] = None + self, *, first_leaf_matches: Callable[[Leaf], bool] | None = None ) -> bool: """Is this line converted from fmt off/skip code? @@ -540,10 +540,10 @@ class EmptyLineTracker: """ mode: Mode - previous_line: Optional[Line] = None - previous_block: Optional[LinesBlock] = None + previous_line: Line | None = None + previous_block: LinesBlock | None = None previous_defs: list[Line] = field(default_factory=list) - semantic_leading_comment: Optional[LinesBlock] = None + semantic_leading_comment: LinesBlock | None = None def maybe_empty_lines(self, current_line: Line) -> LinesBlock: """Return the number of extra empty lines before and after the `current_line`. @@ -727,7 +727,7 @@ def _maybe_empty_lines_for_class_or_def( # noqa: C901 return 0, 0 return 1 if user_had_newline else 0, 0 - comment_to_add_newlines: Optional[LinesBlock] = None + comment_to_add_newlines: LinesBlock | None = None if ( self.previous_line.is_comment and self.previous_line.depth == current_line.depth @@ -857,14 +857,14 @@ def is_line_short_enough( # noqa: C901 # is needed to determine nesting level of the MLS. # Includes special case for trailing commas. 
commas: list[int] = [] # tracks number of commas per depth level - multiline_string: Optional[Leaf] = None + multiline_string: Leaf | None = None # store the leaves that contain parts of the MLS multiline_string_contexts: list[LN] = [] - max_level_to_update: Union[int, float] = math.inf # track the depth of the MLS + max_level_to_update: int | float = math.inf # track the depth of the MLS for i, leaf in enumerate(line.leaves): if max_level_to_update == math.inf: - had_comma: Optional[int] = None + had_comma: int | None = None if leaf.bracket_depth + 1 > len(commas): commas.append(0) elif leaf.bracket_depth + 1 < len(commas): @@ -883,7 +883,7 @@ def is_line_short_enough( # noqa: C901 if leaf.bracket_depth <= max_level_to_update and leaf.type == token.COMMA: # Inside brackets, ignore trailing comma # directly after MLS/MLS-containing expression - ignore_ctxs: list[Optional[LN]] = [None] + ignore_ctxs: list[LN | None] = [None] ignore_ctxs += multiline_string_contexts if (line.inside_brackets or leaf.bracket_depth > 0) and ( i != len(line.leaves) - 1 or leaf.prev_sibling not in ignore_ctxs @@ -969,7 +969,7 @@ def can_omit_invisible_parens( # We need optional parens in order to split standalone comments to their own lines # if there are no nested parens around the standalone comments - closing_bracket: Optional[Leaf] = None + closing_bracket: Leaf | None = None for leaf in reversed(line.leaves): if closing_bracket and leaf is closing_bracket.opening_bracket: closing_bracket = None diff --git a/src/black/nodes.py b/src/black/nodes.py index a4780c01c34..a5401d6c0a7 100644 --- a/src/black/nodes.py +++ b/src/black/nodes.py @@ -3,7 +3,7 @@ """ from collections.abc import Iterator -from typing import Final, Generic, Literal, Optional, TypeGuard, TypeVar, Union +from typing import Final, Generic, Literal, TypeGuard, TypeVar, Union from mypy_extensions import mypyc_attr @@ -426,7 +426,7 @@ def make_simple_prefix(nl_count: int, form_feed: bool, empty_line: str = "\n") - return 
empty_line * nl_count -def preceding_leaf(node: Optional[LN]) -> Optional[Leaf]: +def preceding_leaf(node: LN | None) -> Leaf | None: """Return the first leaf that precedes `node`, if any.""" while node: res = node.prev_sibling @@ -444,7 +444,7 @@ def preceding_leaf(node: Optional[LN]) -> Optional[Leaf]: return None -def prev_siblings_are(node: Optional[LN], tokens: list[Optional[NodeType]]) -> bool: +def prev_siblings_are(node: LN | None, tokens: list[NodeType | None]) -> bool: """Return if the `node` and its previous siblings match types against the provided list of tokens; the provided `node`has its type matched against the last element in the list. `None` can be used as the first element to declare that the start of the @@ -460,7 +460,7 @@ def prev_siblings_are(node: Optional[LN], tokens: list[Optional[NodeType]]) -> b return prev_siblings_are(node.prev_sibling, tokens[:-1]) -def parent_type(node: Optional[LN]) -> Optional[NodeType]: +def parent_type(node: LN | None) -> NodeType | None: """ Returns: @node.parent.type, if @node is not None and has a parent. 
@@ -473,9 +473,9 @@ def parent_type(node: Optional[LN]) -> Optional[NodeType]: return node.parent.type -def child_towards(ancestor: Node, descendant: LN) -> Optional[LN]: +def child_towards(ancestor: Node, descendant: LN) -> LN | None: """Return the child of `ancestor` that contains `descendant`.""" - node: Optional[LN] = descendant + node: LN | None = descendant while node and node.parent != ancestor: node = node.parent return node @@ -523,7 +523,7 @@ def container_of(leaf: Leaf) -> LN: return container -def first_leaf_of(node: LN) -> Optional[Leaf]: +def first_leaf_of(node: LN) -> Leaf | None: """Returns the first leaf of the node tree.""" if isinstance(node, Leaf): return node @@ -987,7 +987,7 @@ def wrap_in_parentheses(parent: Node, child: LN, *, visible: bool = True) -> Non parent.insert_child(index, new_child) -def unwrap_singleton_parenthesis(node: LN) -> Optional[LN]: +def unwrap_singleton_parenthesis(node: LN) -> LN | None: """Returns `wrapped` if `node` is of the shape ( wrapped ). Parenthesis can be optional. 
Returns None otherwise""" @@ -1047,7 +1047,7 @@ def is_part_of_annotation(leaf: Leaf) -> bool: return get_annotation_type(leaf) is not None -def first_leaf(node: LN) -> Optional[Leaf]: +def first_leaf(node: LN) -> Leaf | None: """Returns the first leaf of the ancestor node.""" if isinstance(node, Leaf): return node @@ -1057,7 +1057,7 @@ def first_leaf(node: LN) -> Optional[Leaf]: return first_leaf(node.children[0]) -def last_leaf(node: LN) -> Optional[Leaf]: +def last_leaf(node: LN) -> Leaf | None: """Returns the last leaf of the ancestor node.""" if isinstance(node, Leaf): return node diff --git a/src/black/output.py b/src/black/output.py index 0dbd74e5e22..76c28c344a9 100644 --- a/src/black/output.py +++ b/src/black/output.py @@ -6,14 +6,14 @@ import json import re import tempfile -from typing import Any, Optional +from typing import Any from click import echo, style from mypy_extensions import mypyc_attr @mypyc_attr(patchable=True) -def _out(message: Optional[str] = None, nl: bool = True, **styles: Any) -> None: +def _out(message: str | None = None, nl: bool = True, **styles: Any) -> None: if message is not None: if "bold" not in styles: styles["bold"] = True @@ -22,7 +22,7 @@ def _out(message: Optional[str] = None, nl: bool = True, **styles: Any) -> None: @mypyc_attr(patchable=True) -def _err(message: Optional[str] = None, nl: bool = True, **styles: Any) -> None: +def _err(message: str | None = None, nl: bool = True, **styles: Any) -> None: if message is not None: if "fg" not in styles: styles["fg"] = "red" @@ -31,11 +31,11 @@ def _err(message: Optional[str] = None, nl: bool = True, **styles: Any) -> None: @mypyc_attr(patchable=True) -def out(message: Optional[str] = None, nl: bool = True, **styles: Any) -> None: +def out(message: str | None = None, nl: bool = True, **styles: Any) -> None: _out(message, nl=nl, **styles) -def err(message: Optional[str] = None, nl: bool = True, **styles: Any) -> None: +def err(message: str | None = None, nl: bool = True, 
**styles: Any) -> None: _err(message, nl=nl, **styles) diff --git a/src/black/ranges.py b/src/black/ranges.py index 26407cc7cfd..d7e003db83f 100644 --- a/src/black/ranges.py +++ b/src/black/ranges.py @@ -3,7 +3,6 @@ import difflib from collections.abc import Collection, Iterator, Sequence from dataclasses import dataclass -from typing import Union from black.nodes import ( LN, @@ -396,7 +395,7 @@ def _leaf_line_end(leaf: Leaf) -> int: return leaf.lineno + str(leaf).count("\n") -def _get_line_range(node_or_nodes: Union[LN, list[LN]]) -> set[int]: +def _get_line_range(node_or_nodes: LN | list[LN]) -> set[int]: """Returns the line range of this node or list of nodes.""" if isinstance(node_or_nodes, list): nodes = node_or_nodes diff --git a/src/black/trans.py b/src/black/trans.py index de24d723e1e..0cb6f6270c8 100644 --- a/src/black/trans.py +++ b/src/black/trans.py @@ -7,7 +7,7 @@ from collections import defaultdict from collections.abc import Callable, Collection, Iterable, Iterator, Sequence from dataclasses import dataclass -from typing import Any, ClassVar, Final, Literal, Optional, TypeVar, Union +from typing import Any, ClassVar, Final, Literal, TypeVar, Union from mypy_extensions import trait @@ -1261,7 +1261,7 @@ def _get_max_string_length(self, line: Line, string_idx: int) -> int: return max_string_length @staticmethod - def _prefer_paren_wrap_match(LL: list[Leaf]) -> Optional[int]: + def _prefer_paren_wrap_match(LL: list[Leaf]) -> int | None: """ Returns: string_idx such that @LL[string_idx] is equal to our target (i.e. @@ -1760,7 +1760,7 @@ def _get_illegal_split_indices(self, string: str) -> set[Index]: illegal_indices.update(range(begin, end)) return illegal_indices - def _get_break_idx(self, string: str, max_break_idx: int) -> Optional[int]: + def _get_break_idx(self, string: str, max_break_idx: int) -> int | None: """ This method contains the algorithm that StringSplitter uses to determine which character to split each string at. 
@@ -1985,7 +1985,7 @@ def do_splitter_match(self, line: Line) -> TMatchResult: return TErr("This line does not contain any non-atomic strings.") @staticmethod - def _return_match(LL: list[Leaf]) -> Optional[int]: + def _return_match(LL: list[Leaf]) -> int | None: """ Returns: string_idx such that @LL[string_idx] is equal to our target (i.e. @@ -2010,7 +2010,7 @@ def _return_match(LL: list[Leaf]) -> Optional[int]: return None @staticmethod - def _else_match(LL: list[Leaf]) -> Optional[int]: + def _else_match(LL: list[Leaf]) -> int | None: """ Returns: string_idx such that @LL[string_idx] is equal to our target (i.e. @@ -2037,7 +2037,7 @@ def _else_match(LL: list[Leaf]) -> Optional[int]: return None @staticmethod - def _assert_match(LL: list[Leaf]) -> Optional[int]: + def _assert_match(LL: list[Leaf]) -> int | None: """ Returns: string_idx such that @LL[string_idx] is equal to our target (i.e. @@ -2072,7 +2072,7 @@ def _assert_match(LL: list[Leaf]) -> Optional[int]: return None @staticmethod - def _assign_match(LL: list[Leaf]) -> Optional[int]: + def _assign_match(LL: list[Leaf]) -> int | None: """ Returns: string_idx such that @LL[string_idx] is equal to our target (i.e. @@ -2119,7 +2119,7 @@ def _assign_match(LL: list[Leaf]) -> Optional[int]: return None @staticmethod - def _dict_or_lambda_match(LL: list[Leaf]) -> Optional[int]: + def _dict_or_lambda_match(LL: list[Leaf]) -> int | None: """ Returns: string_idx such that @LL[string_idx] is equal to our target (i.e. 
diff --git a/src/blackd/client.py b/src/blackd/client.py index 99a44a1b928..c36759a7607 100644 --- a/src/blackd/client.py +++ b/src/blackd/client.py @@ -1,5 +1,3 @@ -from typing import Optional - import aiohttp from aiohttp.typedefs import StrOrURL @@ -12,15 +10,15 @@ class BlackDClient: def __init__( self, url: StrOrURL = "http://localhost:9090", - line_length: Optional[int] = None, + line_length: int | None = None, skip_source_first_line: bool = False, skip_string_normalization: bool = False, skip_magic_trailing_comma: bool = False, preview: bool = False, fast: bool = False, - python_variant: Optional[str] = None, + python_variant: str | None = None, diff: bool = False, - headers: Optional[dict[str, str]] = None, + headers: dict[str, str] | None = None, ): """ Initialize a BlackDClient object. diff --git a/src/blib2to3/pgen2/driver.py b/src/blib2to3/pgen2/driver.py index 7fb305d31be..083a20d2d3e 100644 --- a/src/blib2to3/pgen2/driver.py +++ b/src/blib2to3/pgen2/driver.py @@ -25,7 +25,7 @@ from contextlib import contextmanager from dataclasses import dataclass, field from logging import Logger -from typing import IO, Any, Optional, Union, cast +from typing import IO, Any, Union, cast from blib2to3.pgen2.grammar import Grammar from blib2to3.pgen2.tokenize import TokenInfo @@ -40,7 +40,7 @@ @dataclass class ReleaseRange: start: int - end: Optional[int] = None + end: int | None = None tokens: list[Any] = field(default_factory=list) def lock(self) -> None: @@ -106,7 +106,7 @@ def can_advance(self, to: int) -> bool: class Driver: - def __init__(self, grammar: Grammar, logger: Optional[Logger] = None) -> None: + def __init__(self, grammar: Grammar, logger: Logger | None = None) -> None: self.grammar = grammar if logger is None: logger = logging.getLogger(__name__) @@ -185,7 +185,7 @@ def parse_tokens(self, tokens: Iterable[TokenInfo], debug: bool = False) -> NL: return p.rootnode def parse_file( - self, filename: Path, encoding: Optional[str] = None, debug: bool = False 
+ self, filename: Path, encoding: str | None = None, debug: bool = False ) -> NL: """Parse a file and return the syntax tree.""" with open(filename, encoding=encoding) as stream: @@ -227,7 +227,7 @@ def _partially_consume_prefix(self, prefix: str, column: int) -> tuple[str, str] return "".join(lines), current_line -def _generate_pickle_name(gt: Path, cache_dir: Optional[Path] = None) -> str: +def _generate_pickle_name(gt: Path, cache_dir: Path | None = None) -> str: head, tail = os.path.splitext(gt) if tail == ".txt": tail = "" @@ -240,10 +240,10 @@ def _generate_pickle_name(gt: Path, cache_dir: Optional[Path] = None) -> str: def load_grammar( gt: str = "Grammar.txt", - gp: Optional[str] = None, + gp: str | None = None, save: bool = True, force: bool = False, - logger: Optional[Logger] = None, + logger: Logger | None = None, ) -> Grammar: """Load the grammar (maybe from a pickle).""" if logger is None: @@ -273,7 +273,7 @@ def _newer(a: str, b: str) -> bool: def load_packaged_grammar( - package: str, grammar_source: str, cache_dir: Optional[Path] = None + package: str, grammar_source: str, cache_dir: Path | None = None ) -> grammar.Grammar: """Normally, loads a pickled grammar by doing pkgutil.get_data(package, pickled_grammar) diff --git a/src/blib2to3/pgen2/parse.py b/src/blib2to3/pgen2/parse.py index 06b3790b115..8e9b44a04d3 100644 --- a/src/blib2to3/pgen2/parse.py +++ b/src/blib2to3/pgen2/parse.py @@ -12,7 +12,7 @@ from collections.abc import Callable, Iterator from contextlib import contextmanager -from typing import TYPE_CHECKING, Any, Optional, Union, cast +from typing import TYPE_CHECKING, Any, Union, cast from blib2to3.pgen2.grammar import Grammar from blib2to3.pytree import NL, Context, Leaf, Node, RawNode, convert @@ -98,8 +98,8 @@ def add_token(self, tok_type: int, tok_val: str, raw: bool = False) -> None: self.parser.addtoken(tok_type, tok_val, self.context) def determine_route( - self, value: Optional[str] = None, force: bool = False - ) -> 
Optional[int]: + self, value: str | None = None, force: bool = False + ) -> int | None: alive_ilabels = self.ilabels if len(alive_ilabels) == 0: *_, most_successful_ilabel = self._dead_ilabels @@ -116,7 +116,7 @@ class ParseError(Exception): """Exception to signal the parser is stuck.""" def __init__( - self, msg: str, type: Optional[int], value: Optional[str], context: Context + self, msg: str, type: int | None, value: str | None, context: Context ) -> None: Exception.__init__( self, f"{msg}: type={type!r}, value={value!r}, context={context!r}" @@ -157,7 +157,7 @@ class Parser: """ - def __init__(self, grammar: Grammar, convert: Optional[Convert] = None) -> None: + def __init__(self, grammar: Grammar, convert: Convert | None = None) -> None: """Constructor. The grammar argument is a grammar.Grammar instance; see the @@ -195,9 +195,9 @@ def __init__(self, grammar: Grammar, convert: Optional[Convert] = None) -> None: # See note in docstring above. TL;DR this is ignored. self.convert = convert or lam_sub self.is_backtracking = False - self.last_token: Optional[int] = None + self.last_token: int | None = None - def setup(self, proxy: "TokenProxy", start: Optional[int] = None) -> None: + def setup(self, proxy: "TokenProxy", start: int | None = None) -> None: """Prepare for parsing. This *must* be called before starting to parse. 
@@ -218,7 +218,7 @@ def setup(self, proxy: "TokenProxy", start: Optional[int] = None) -> None: newnode: RawNode = (start, None, None, []) stackentry = (self.grammar.dfas[start], 0, newnode) self.stack: list[tuple[DFAS, int, RawNode]] = [stackentry] - self.rootnode: Optional[NL] = None + self.rootnode: NL | None = None self.used_names: set[str] = set() self.proxy = proxy self.last_token = None diff --git a/src/blib2to3/pgen2/pgen.py b/src/blib2to3/pgen2/pgen.py index dc76c999e98..7e6cdb480da 100644 --- a/src/blib2to3/pgen2/pgen.py +++ b/src/blib2to3/pgen2/pgen.py @@ -3,7 +3,7 @@ import os from collections.abc import Iterator, Sequence -from typing import IO, Any, NoReturn, Optional, Union +from typing import IO, Any, NoReturn, Union from blib2to3.pgen2 import grammar, token, tokenize from blib2to3.pgen2.tokenize import TokenInfo @@ -19,9 +19,9 @@ class ParserGenerator: filename: Path stream: IO[str] generator: Iterator[TokenInfo] - first: dict[str, Optional[dict[str, int]]] + first: dict[str, dict[str, int] | None] - def __init__(self, filename: Path, stream: Optional[IO[str]] = None) -> None: + def __init__(self, filename: Path, stream: IO[str] | None = None) -> None: close_stream = None if stream is None: stream = open(filename, encoding="utf-8") @@ -163,7 +163,7 @@ def calcfirst(self, name: str) -> None: def parse(self) -> tuple[dict[str, list["DFAState"]], str]: dfas = {} - startsymbol: Optional[str] = None + startsymbol: str | None = None # MSTART: (NEWLINE | RULE)* ENDMARKER while self.type != token.ENDMARKER: while self.type == token.NEWLINE: @@ -333,7 +333,7 @@ def parse_atom(self) -> tuple["NFAState", "NFAState"]: f"expected (...) 
or NAME or STRING, got {self.type}/{self.value}" ) - def expect(self, type: int, value: Optional[Any] = None) -> str: + def expect(self, type: int, value: Any | None = None) -> str: if self.type != type or (value is not None and self.value != value): self.raise_error(f"expected {type}/{value}, got {self.type}/{self.value}") value = self.value @@ -354,12 +354,12 @@ def raise_error(self, msg: str) -> NoReturn: class NFAState: - arcs: list[tuple[Optional[str], "NFAState"]] + arcs: list[tuple[str | None, "NFAState"]] def __init__(self) -> None: self.arcs = [] # list of (label, NFAState) pairs - def addarc(self, next: "NFAState", label: Optional[str] = None) -> None: + def addarc(self, next: "NFAState", label: str | None = None) -> None: assert label is None or isinstance(label, str) assert isinstance(next, NFAState) self.arcs.append((label, next)) diff --git a/src/blib2to3/pgen2/tokenize.py b/src/blib2to3/pgen2/tokenize.py index 2109757bb5e..18503973804 100644 --- a/src/blib2to3/pgen2/tokenize.py +++ b/src/blib2to3/pgen2/tokenize.py @@ -29,7 +29,6 @@ import sys from collections.abc import Iterator -from typing import Optional from blib2to3.pgen2.grammar import Grammar from blib2to3.pgen2.token import ( @@ -104,7 +103,7 @@ class TokenError(Exception): ... 
def transform_whitespace( - token: pytokens.Token, source: str, prev_token: Optional[pytokens.Token] + token: pytokens.Token, source: str, prev_token: pytokens.Token | None ) -> pytokens.Token: r""" Black treats `\\\n` at the end of a line as a 'NL' token, while it @@ -142,12 +141,12 @@ def transform_whitespace( return token -def tokenize(source: str, grammar: Optional[Grammar] = None) -> Iterator[TokenInfo]: +def tokenize(source: str, grammar: Grammar | None = None) -> Iterator[TokenInfo]: lines = source.split("\n") lines += [""] # For newline tokens in files that don't end in a newline line, column = 1, 0 - prev_token: Optional[pytokens.Token] = None + prev_token: pytokens.Token | None = None try: for token in pytokens.tokenize(source): token = transform_whitespace(token, source, prev_token) diff --git a/src/blib2to3/pytree.py b/src/blib2to3/pytree.py index 01229743253..cf44a43e54d 100644 --- a/src/blib2to3/pytree.py +++ b/src/blib2to3/pytree.py @@ -24,10 +24,10 @@ HUGE: int = 0x7FFFFFFF # maximum repeat count, default max -_type_reprs: dict[int, Union[str, int]] = {} +_type_reprs: dict[int, str | int] = {} -def type_repr(type_num: int) -> Union[str, int]: +def type_repr(type_num: int) -> str | int: global _type_reprs if not _type_reprs: from . 
import pygram @@ -125,7 +125,7 @@ def pre_order(self) -> Iterator[NL]: """ raise NotImplementedError - def replace(self, new: Union[NL, list[NL]]) -> None: + def replace(self, new: NL | list[NL]) -> None: """Replace this node with a new one in the parent.""" assert self.parent is not None, str(self) assert new is not None @@ -149,7 +149,7 @@ def replace(self, new: Union[NL, list[NL]]) -> None: x.parent = self.parent self.parent = None - def get_lineno(self) -> Optional[int]: + def get_lineno(self) -> int | None: """Return the line number which generated the invocant node.""" node = self while not isinstance(node, Leaf): @@ -165,7 +165,7 @@ def changed(self) -> None: self.parent.changed() self.was_changed = True - def remove(self) -> Optional[int]: + def remove(self) -> int | None: """ Remove the node from the tree. Returns the position of the node in its parent's children before it was removed. @@ -181,7 +181,7 @@ def remove(self) -> Optional[int]: return None @property - def next_sibling(self) -> Optional[NL]: + def next_sibling(self) -> NL | None: """ The node immediately following the invocant in their parent's children list. If the invocant does not have a next sibling, it is None @@ -195,7 +195,7 @@ def next_sibling(self) -> Optional[NL]: return self.parent.next_sibling_map[id(self)] @property - def prev_sibling(self) -> Optional[NL]: + def prev_sibling(self) -> NL | None: """ The node immediately preceding the invocant in their parent's children list. If the invocant does not have a previous sibling, it is None. 
@@ -232,16 +232,16 @@ def get_suffix(self) -> str: class Node(Base): """Concrete implementation for interior nodes.""" - fixers_applied: Optional[list[Any]] - used_names: Optional[set[str]] + fixers_applied: list[Any] | None + used_names: set[str] | None def __init__( self, type: int, children: list[NL], - context: Optional[Any] = None, - prefix: Optional[str] = None, - fixers_applied: Optional[list[Any]] = None, + context: Any | None = None, + prefix: str | None = None, + fixers_applied: list[Any] | None = None, ) -> None: """ Initializer. @@ -349,15 +349,15 @@ def append_child(self, child: NL) -> None: self.invalidate_sibling_maps() def invalidate_sibling_maps(self) -> None: - self.prev_sibling_map: Optional[dict[int, Optional[NL]]] = None - self.next_sibling_map: Optional[dict[int, Optional[NL]]] = None + self.prev_sibling_map: dict[int, NL | None] | None = None + self.next_sibling_map: dict[int, NL | None] | None = None def update_sibling_maps(self) -> None: - _prev: dict[int, Optional[NL]] = {} - _next: dict[int, Optional[NL]] = {} + _prev: dict[int, NL | None] = {} + _next: dict[int, NL | None] = {} self.prev_sibling_map = _prev self.next_sibling_map = _next - previous: Optional[NL] = None + previous: NL | None = None for current in self.children: _prev[id(current)] = previous _next[id(previous)] = current @@ -374,7 +374,7 @@ class Leaf(Base): bracket_depth: int # Changed later in brackets.py opening_bracket: Optional["Leaf"] = None - used_names: Optional[set[str]] + used_names: set[str] | None _prefix = "" # Whitespace and comments preceding this token in the input lineno: int = 0 # Line where this token starts in the input column: int = 0 # Column where this token starts in the input @@ -387,8 +387,8 @@ def __init__( self, type: int, value: str, - context: Optional[Context] = None, - prefix: Optional[str] = None, + context: Context | None = None, + prefix: str | None = None, fixers_applied: list[Any] = [], opening_bracket: Optional["Leaf"] = None, 
fmt_pass_converted_first_leaf: Optional["Leaf"] = None, @@ -407,7 +407,7 @@ def __init__( self.value = value if prefix is not None: self._prefix = prefix - self.fixers_applied: Optional[list[Any]] = fixers_applied[:] + self.fixers_applied: list[Any] | None = fixers_applied[:] self.children = [] self.opening_bracket = opening_bracket self.fmt_pass_converted_first_leaf = fmt_pass_converted_first_leaf @@ -507,10 +507,10 @@ class BasePattern: """ # Defaults for instance variables - type: Optional[int] + type: int | None type = None # Node type (token if < 256, symbol if >= 256) content: Any = None # Optional content matching pattern - name: Optional[str] = None # Optional name used to store match in results dict + name: str | None = None # Optional name used to store match in results dict def __new__(cls, *args, **kwds): """Constructor that prevents BasePattern from being instantiated.""" @@ -535,7 +535,7 @@ def optimize(self) -> "BasePattern": """ return self - def match(self, node: NL, results: Optional[_Results] = None) -> bool: + def match(self, node: NL, results: _Results | None = None) -> bool: """ Does this pattern exactly match a node? @@ -549,7 +549,7 @@ def match(self, node: NL, results: Optional[_Results] = None) -> bool: if self.type is not None and node.type != self.type: return False if self.content is not None: - r: Optional[_Results] = None + r: _Results | None = None if results is not None: r = {} if not self._submatch(node, r): @@ -561,7 +561,7 @@ def match(self, node: NL, results: Optional[_Results] = None) -> bool: results[self.name] = node return True - def match_seq(self, nodes: list[NL], results: Optional[_Results] = None) -> bool: + def match_seq(self, nodes: list[NL], results: _Results | None = None) -> bool: """ Does this pattern exactly match a sequence of nodes? 
@@ -585,9 +585,9 @@ def generate_matches(self, nodes: list[NL]) -> Iterator[tuple[int, _Results]]: class LeafPattern(BasePattern): def __init__( self, - type: Optional[int] = None, - content: Optional[str] = None, - name: Optional[str] = None, + type: int | None = None, + content: str | None = None, + name: str | None = None, ) -> None: """ Initializer. Takes optional type, content, and name. @@ -635,9 +635,9 @@ class NodePattern(BasePattern): def __init__( self, - type: Optional[int] = None, - content: Optional[Iterable[str]] = None, - name: Optional[str] = None, + type: int | None = None, + content: Iterable[str] | None = None, + name: str | None = None, ) -> None: """ Initializer. Takes optional type, content, and name. @@ -716,10 +716,10 @@ class WildcardPattern(BasePattern): def __init__( self, - content: Optional[str] = None, + content: str | None = None, min: int = 0, max: int = HUGE, - name: Optional[str] = None, + name: str | None = None, ) -> None: """ Initializer. @@ -908,7 +908,7 @@ def _recursive_matches(self, nodes, count) -> Iterator[tuple[int, _Results]]: class NegatedPattern(BasePattern): - def __init__(self, content: Optional[BasePattern] = None) -> None: + def __init__(self, content: BasePattern | None = None) -> None: """ Initializer. 
diff --git a/tests/test_black.py b/tests/test_black.py index 3b3c602e3a5..9927a126c21 100644 --- a/tests/test_black.py +++ b/tests/test_black.py @@ -20,7 +20,7 @@ from pathlib import Path, WindowsPath from platform import system from tempfile import TemporaryDirectory -from typing import Any, Optional, TypeVar, Union +from typing import Any, TypeVar from unittest.mock import MagicMock, patch import click @@ -1335,10 +1335,8 @@ def test_reformat_one_with_stdin_empty(self) -> None: def _new_wrapper( output: io.StringIO, io_TextIOWrapper: type[io.TextIOWrapper] - ) -> Callable[[Any, Any], Union[io.StringIO, io.TextIOWrapper]]: - def get_output( - *args: Any, **kwargs: Any - ) -> Union[io.StringIO, io.TextIOWrapper]: + ) -> Callable[[Any, Any], io.StringIO | io.TextIOWrapper]: + def get_output(*args: Any, **kwargs: Any) -> io.StringIO | io.TextIOWrapper: if args == (sys.stdout.buffer,): # It's `format_stdin_to_stdout()` calling `io.TextIOWrapper()`, # return our mock object. @@ -2440,15 +2438,15 @@ def test_cache_key(self) -> None: def assert_collected_sources( - src: Sequence[Union[str, Path]], - expected: Sequence[Union[str, Path]], + src: Sequence[str | Path], + expected: Sequence[str | Path], *, - root: Optional[Path] = None, - exclude: Optional[str] = None, - include: Optional[str] = None, - extend_exclude: Optional[str] = None, - force_exclude: Optional[str] = None, - stdin_filename: Optional[str] = None, + root: Path | None = None, + exclude: str | None = None, + include: str | None = None, + extend_exclude: str | None = None, + force_exclude: str | None = None, + stdin_filename: str | None = None, ) -> None: gs_src = tuple(str(Path(s)) for s in src) gs_expected = [Path(s) for s in expected] diff --git a/tests/test_docs.py b/tests/test_docs.py index 6b69055e94d..e09124acb2e 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -8,7 +8,6 @@ from collections.abc import Sequence from itertools import islice from pathlib import Path -from typing import 
Optional import pytest @@ -19,7 +18,7 @@ def check_feature_list( lines: Sequence[str], expected_feature_names: set[str], label: str -) -> Optional[str]: +) -> str | None: start_index = lines.index(f"(labels/{label}-features)=\n") if start_index == -1: return ( diff --git a/tests/util.py b/tests/util.py index a1b7f87df54..0acce4bed2b 100644 --- a/tests/util.py +++ b/tests/util.py @@ -9,7 +9,7 @@ from dataclasses import dataclass, field, replace from functools import partial from pathlib import Path -from typing import Any, Optional +from typing import Any import black from black.const import DEFAULT_LINE_LENGTH @@ -45,7 +45,7 @@ class TestCaseArgs: mode: black.Mode = field(default_factory=black.Mode) fast: bool = False - minimum_version: Optional[tuple[int, int]] = None + minimum_version: tuple[int, int] | None = None lines: Collection[tuple[int, int]] = () no_preview_line_length_1: bool = False @@ -96,7 +96,7 @@ def assert_format( mode: black.Mode = DEFAULT_MODE, *, fast: bool = False, - minimum_version: Optional[tuple[int, int]] = None, + minimum_version: tuple[int, int] | None = None, lines: Collection[tuple[int, int]] = (), no_preview_line_length_1: bool = False, ) -> None: @@ -161,11 +161,11 @@ def assert_format( def _assert_format_inner( source: str, - expected: Optional[str] = None, + expected: str | None = None, mode: black.Mode = DEFAULT_MODE, *, fast: bool = False, - minimum_version: Optional[tuple[int, int]] = None, + minimum_version: tuple[int, int] | None = None, lines: Collection[tuple[int, int]] = (), ) -> None: actual = black.format_str(source, mode=mode, lines=lines) From 916185b4278cbc3d6a1349964569d55a81a0766d Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> Date: Thu, 13 Nov 2025 21:35:25 +0200 Subject: [PATCH 3/4] Apply suggestions from code review --- scripts/migrate-black.py | 2 +- src/blib2to3/pgen2/driver.py | 2 +- src/blib2to3/pgen2/parse.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) 
diff --git a/scripts/migrate-black.py b/scripts/migrate-black.py index 3c1bb71125b..c8faac41994 100755 --- a/scripts/migrate-black.py +++ b/scripts/migrate-black.py @@ -40,7 +40,7 @@ def blackify(base_branch: str, black_command: str, logger: logging.Logger) -> in git("checkout", base_branch, f"-b{current_branch}-black") - for last_commit, commit in zip(commits, commits[1:], strict=True): + for last_commit, commit in zip(commits, commits[1:], strict=False): allow_empty = ( b"--allow-empty" in run(["git", "apply", "-h"], stdout=PIPE).stdout ) diff --git a/src/blib2to3/pgen2/driver.py b/src/blib2to3/pgen2/driver.py index 083a20d2d3e..924c4bdca1e 100644 --- a/src/blib2to3/pgen2/driver.py +++ b/src/blib2to3/pgen2/driver.py @@ -25,7 +25,7 @@ from contextlib import contextmanager from dataclasses import dataclass, field from logging import Logger -from typing import IO, Any, Union, cast +from typing import Any, Union, cast from blib2to3.pgen2.grammar import Grammar from blib2to3.pgen2.tokenize import TokenInfo diff --git a/src/blib2to3/pgen2/parse.py b/src/blib2to3/pgen2/parse.py index 8e9b44a04d3..4efce755c24 100644 --- a/src/blib2to3/pgen2/parse.py +++ b/src/blib2to3/pgen2/parse.py @@ -12,7 +12,7 @@ from collections.abc import Callable, Iterator from contextlib import contextmanager -from typing import TYPE_CHECKING, Any, Union, cast +from typing import TYPE_CHECKING, Union, cast from blib2to3.pgen2.grammar import Grammar from blib2to3.pytree import NL, Context, Leaf, Node, RawNode, convert From fd648c720143ff3ed8a047a63c5550b95eff0682 Mon Sep 17 00:00:00 2001 From: Cooper Ry Lees Date: Sun, 16 Nov 2025 20:40:54 -0600 Subject: [PATCH 4/4] Fix optional.py typing so we can merge --- tests/optional.py | 27 ++++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/tests/optional.py b/tests/optional.py index ccaf3a388d9..f9bceb6f9ff 100644 --- a/tests/optional.py +++ b/tests/optional.py @@ -18,7 +18,7 @@ import logging import re from functools 
import lru_cache -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any import pytest from pytest import StashKey @@ -54,14 +54,27 @@ def pytest_configure(config: "Config") -> None: Use the syntax in https://docs.pytest.org/en/stable/mark.html#registering-marks. """ - ot_ini = config.inicfg.get("optional-tests") or [] - ot_markers = set() + # Extract the configured optional-tests from pytest's ini config in a + # version-agnostic way. Depending on pytest version, the value can be a + # string, a list of strings, or a ConfigValue wrapper (with a `.value` attr). + raw_ot_ini: Any = config.inicfg.get("optional-tests") + ot_ini_lines: list[str] = [] + if raw_ot_ini: + value = getattr(raw_ot_ini, "value", raw_ot_ini) + if isinstance(value, str): + ot_ini_lines = value.strip().split("\n") + elif isinstance(value, list): + # Best-effort coercion to strings; pytest inis are textual. + ot_ini_lines = [str(v) for v in value] + else: + # Fallback: ignore unexpected shapes (non-iterable, etc.). + ot_ini_lines = [] + + ot_markers: set[str] = set() ot_run: set[str] = set() - if isinstance(ot_ini, str): - ot_ini = ot_ini.strip().split("\n") marker_re = re.compile(r"^\s*(?P<no>no_)?(?P<marker>\w+)(:\s*(?P<description>.*))?") - # getattr shim here is so that we support both pytest>=9 and pytest<9 - for ot in getattr(ot_ini, "value", ot_ini): + # Iterate over configured markers discovered above. + for ot in ot_ini_lines: m = marker_re.match(ot) if not m: raise ValueError(f"{ot!r} doesn't match pytest marker syntax")