From 609365496657faeaf0c2390feb660d0aa3d95368 Mon Sep 17 00:00:00 2001 From: Hao Wang Date: Sun, 29 Dec 2024 01:31:07 +0800 Subject: [PATCH 1/3] use startswith tuple for speed --- src/blib2to3/pgen2/tokenize.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/blib2to3/pgen2/tokenize.py b/src/blib2to3/pgen2/tokenize.py index 97dd92b06f0..407c184dd74 100644 --- a/src/blib2to3/pgen2/tokenize.py +++ b/src/blib2to3/pgen2/tokenize.py @@ -221,7 +221,7 @@ def _combinations(*l: str) -> set[str]: | {f"{prefix}'" for prefix in _strprefixes | _fstring_prefixes} | {f'{prefix}"' for prefix in _strprefixes | _fstring_prefixes} ) -fstring_prefix: Final = ( +fstring_prefix: Final = tuple( {f"{prefix}'" for prefix in _fstring_prefixes} | {f'{prefix}"' for prefix in _fstring_prefixes} | {f"{prefix}'''" for prefix in _fstring_prefixes} @@ -459,7 +459,7 @@ def untokenize(iterable: Iterable[TokenInfo]) -> str: def is_fstring_start(token: str) -> bool: - return builtins.any(token.startswith(prefix) for prefix in fstring_prefix) + return token.startswith(fstring_prefix) def _split_fstring_start_and_middle(token: str) -> tuple[str, str]: From bc7e300db72dfd276613c746d3b5b6f64443d82c Mon Sep 17 00:00:00 2001 From: Hao Wang Date: Mon, 30 Dec 2024 00:51:18 +0800 Subject: [PATCH 2/3] update changelog --- CHANGES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES.md b/CHANGES.md index d2955d2df0a..613d25d39a7 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -43,6 +43,7 @@ ### Performance +- Speed up the `is_fstring_start` function in `blib2to3` tokenization using generic python `startswith` function (#4541) ### Output From 42fa1f33c1d356f95cc913ec882005642c778d37 Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Sun, 29 Dec 2024 17:08:41 -0800 Subject: [PATCH 3/3] Update CHANGES.md --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 613d25d39a7..77fe17c03d3 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -43,7 
+43,7 @@ ### Performance -- Speed up the `is_fstring_start` function in `blib2to3` tokenization using generic python `startswith` function (#4541) +- Speed up the `is_fstring_start` function in Black's tokenizer (#4541) ### Output