diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 66fd399115a8..2b043c128938 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -11,7 +11,7 @@ repos:
       args: [--fix=lf]
   - id: check-case-conflict
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.3 # must match requirements-tests.txt
+    rev: v0.9.6 # must match requirements-tests.txt
     hooks:
       - id: ruff
         name: Run ruff on stubs, tests and scripts
diff --git a/lib/ts_utils/metadata.py b/lib/ts_utils/metadata.py
index 793b3b7a6d1e..7d08fab244ca 100644
--- a/lib/ts_utils/metadata.py
+++ b/lib/ts_utils/metadata.py
@@ -250,7 +250,7 @@ def read_metadata(distribution: str) -> StubMetadata:
         f"Invalid upstream_repository for {distribution!r}: "
         "URLs for GitHub repositories always have two parts in their paths"
     )
-    assert num_url_path_parts == 2, bad_github_url_msg
+    assert num_url_path_parts == 2, bad_github_url_msg # noqa: PLR2004 # astral-sh/ruff#10009

     obsolete_since: object = data.get("obsolete_since")
     assert isinstance(obsolete_since, (str, type(None)))
@@ -330,7 +330,7 @@ class PackageDependencies(NamedTuple):

 @cache
 def get_pypi_name_to_typeshed_name_mapping() -> Mapping[str, str]:
-    return {read_metadata(dir.name).stub_distribution: dir.name for dir in STUBS_PATH.iterdir()}
+    return {read_metadata(directory.name).stub_distribution: directory.name for directory in STUBS_PATH.iterdir()}


 @cache
diff --git a/lib/ts_utils/paths.py b/lib/ts_utils/paths.py
index 63119231720d..3252c0477bd6 100644
--- a/lib/ts_utils/paths.py
+++ b/lib/ts_utils/paths.py
@@ -35,5 +35,4 @@ def test_cases_path(distribution_name: str) -> Path:
 def allowlists_path(distribution_name: str) -> Path:
     if distribution_name == "stdlib":
         return tests_path("stdlib") / "stubtest_allowlists"
-    else:
-        return tests_path(distribution_name)
+    return tests_path(distribution_name)
diff --git a/lib/ts_utils/utils.py b/lib/ts_utils/utils.py
index 66d48bd78a7e..df6c4119c6b4 100644
--- a/lib/ts_utils/utils.py
+++ b/lib/ts_utils/utils.py
@@ -14,7 +14,7 @@ from packaging.requirements import Requirement

 try:
-    from termcolor import colored as colored # pyright: ignore[reportAssignmentType]
+    from termcolor import colored as colored # pyright: ignore[reportAssignmentType] # noqa: PLC0414
 except ImportError:

     def colored(text: str, color: str | None = None, **kwargs: Any) -> str: # type: ignore[misc] # noqa: ARG001
@@ -119,8 +119,8 @@ def parse_stdlib_versions_file() -> SupportedVersionsDict:
     result: dict[str, tuple[VersionTuple, VersionTuple]] = {}
     with VERSIONS_PATH.open(encoding="UTF-8") as f:
         for line in f:
-            line = strip_comments(line)
-            if line == "":
+            line = strip_comments(line) # noqa: PLW2901
+            if not line:
                 continue
             m = VERSION_LINE_RE.match(line)
             assert m, f"invalid VERSIONS line: {line}"
@@ -193,8 +193,7 @@ def allowlists(distribution_name: str) -> list[str]:

     if distribution_name == "stdlib":
         return ["common.txt", platform_allowlist, version_allowlist, combined_allowlist, local_version_allowlist]
-    else:
-        return ["stubtest_allowlist.txt", platform_allowlist]
+    return ["stubtest_allowlist.txt", platform_allowlist]


 # ====================================================================
diff --git a/pyproject.toml b/pyproject.toml
index 39f196cdb595..73a3b6720ac0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,7 +12,8 @@ force-exclude = ".*_pb2.pyi"
 line-length = 130
 # Oldest supported Python version
 target-version = "py38"
-fix = true
+preview = true # Help catch typing-related lint issues early
+# fix = true
 exclude = [
     # virtual environment
environment ".env", @@ -37,15 +38,31 @@ exclude = ["**/test_cases/**/*.py"] # tell ruff not to flag these as e.g. "unused noqa comments" external = ["F821", "NQA", "Y"] select = [ + # "PTH", # TODO ! + # "TD", # TODO ! + "A", # flake8-builtins "ARG", # flake8-unused-arguments + "ASYNC", # flake8-async "B", # flake8-bugbear + "BLE", # flake8-blind-except + "C4", # flake8-comprehensions "D", # pydocstyle + "DOC", # pydoclint + "DTZ", # flake8-datetimez "EXE", # flake8-executable "FA", # flake8-future-annotations + "FBT", # flake8-boolean-trap + "FLY", # flynt "I", # isort + "ISC", # flake8-implicit-str-concat "N", # pep8-naming "PGH", # pygrep-hooks + "PIE", # flake8-pie + "PL", # Pylint + "RSE", # flake8-raise "RUF", # Ruff-specific and unused-noqa + "S", # flake8-bandit + "SLOT", # flake8-slots "TRY", # tryceratops "UP", # pyupgrade "YTT", # flake8-2020 @@ -55,11 +72,6 @@ select = [ "W", # pycodestyle Warning # Only include flake8-annotations rules that are autofixable. Otherwise leave this to mypy+pyright "ANN2", - # Don't include TC rules that create a TYPE_CHECKING block or stringifies annotations - "TC004", # Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting. - "TC005", # Found empty type-checking block - # "TC008", # TODO: Enable when out of preview - "TC010", # Invalid string member in `X | Y`-style union type # Most refurb rules are in preview and can be opinionated, # consider them individually as they come out of preview (last check: 0.8.4) "FURB105", # Unnecessary empty string passed to `print` @@ -94,11 +106,34 @@ select = [ # "PYI061", # TODO: Enable when out of preview "PYI062", # Duplicate literal member `{}` "PYI064", # `Final[Literal[{literal}]]` can be replaced with a bare Final + # flake8-simplify, excluding rules that can reduce performance or readability due to long line formatting + "SIM101", # Multiple `isinstance` calls for `{name}`, merge into a single call + "SIM103", # Return the condition `{condition}` directly + "SIM107", # Don't use return in `try-except` and `finally` + "SIM109", # Use `{replacement}` instead of multiple equality comparisons + "SIM112", # Use capitalized environment variable `{expected}` instead of `{actual}` + "SIM113", # Use `enumerate()` for index variable `{index}` in `for` loop + "SIM114", # Combine `if` branches using logical `or` operator + "SIM115", # Use a context manager for opening files + "SIM118", # Use key `{operator}` dict instead of key `{operator} dict.keys()` + "SIM2", # flake8-simplify conditional ordering rules + "SIM300", # Yoda condition detected + "SIM401", # Use `{contents}` instead of an if block + "SIM910", # Use `{expected}` instead of `{actual}` (dict-get-with-none-default) + "SIM911", # Use `{expected}` instead of `{actual}` (zip-dict-keys-and-values) + # Don't include TC rules that create a TYPE_CHECKING block or stringifies annotations + "TC004", # Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting. + "TC005", # Found empty type-checking block + # "TC008", # TODO: Enable when out of preview + "TC010", # Invalid string member in `X | Y`-style union type ] extend-safe-fixes = [ "UP036", # Remove unnecessary `sys.version_info` blocks ] ignore = [ + # TODO + "ASYNC221", # I don't know how to improve subprocess.check_call calls to satisfy this + "RUF036", # None not at the end of the type annotation. 
     ###
     # Rules that can conflict with the formatter (Black)
     # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
@@ -112,18 +147,40 @@ ignore = [
     ###
     # We're not a library, no need to document everything
     "D1", # Missing docstring in ...
+    # We want D211: No blank lines allowed before class docstring
+    "D203", # 1 blank line required before class docstring
     # Doesn't support split "summary line"
     "D205", # 1 blank line required between summary line and description
-    # Used for direct, non-subclass type comparison, for example: `type(val) is str`
-    # see https://github.com/astral-sh/ruff/issues/6465
-    "E721", # Do not compare types, use `isinstance()`
-    # Mostly from scripts and tests, it's ok to have messages passed directly to exceptions
+    # We want D212: Multi-line docstring summary should start at the first line
+    "D213", # Multi-line docstring summary should start at the second line
+    "D401", # First line of docstring should be in imperative mood
+    # Return/yield type is enough documentation for us
+    "DOC201", # return is not documented in docstring
+    "DOC402", # yield is not documented in docstring
+    # We're not a public library; users are contributors who already read the code directly, so clear error messages are sufficient
+    "DOC501", # Raised exception missing from docstring
+    # Prefer explicit, but allow implicit multiline
+    # (hence lint.flake8-implicit-str-concat.allow-multiline isn't set to false)
+    "ISC003", # Explicitly concatenated string should be implicitly concatenated
+    # Python 3.11 introduced "zero cost" exception handling, our tests & scripts run on modern Python versions
+    "PERF203", # try-except within a loop incurs performance overhead
+    "PLR09", # Too many ...
+    # Typeshed tests and scripts are never run in optimized mode
+    "S101", # Use of assert detected
+    # We use subprocess a lot in scripts and tests
+    "S404", # subprocess module is possibly insecure
+    # Prone to false positives astral-sh/ruff#4045
+    "S603", # subprocess call: check for execution of untrusted input
+    # Full paths would make cross-environment compatibility a nightmare
+    "S607", # Starting a process with a partial executable path
     "TRY003", # Avoid specifying long messages outside the exception class
     # Slower and more verbose https://github.com/astral-sh/ruff/issues/7871
     "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
     ###
     # False-positives, but already checked by type-checkers
     ###
+    # Configuring namespace-packages = ["scripts/sync_protobuf"] doesn't work?
+    "PLC2701", # Private name import {name} from external module {module}
     # Ruff doesn't support multi-file analysis yet: https://github.com/astral-sh/ruff/issues/5295
     "RUF013", # PEP 484 prohibits implicit `Optional`
 ]
@@ -133,11 +190,20 @@ ignore = [
     # A lot of stubs are incomplete on purpose, and that's configured through pyright
     # Some ANN204 (special method) are autofixable in stubs, but not all.
     "ANN2", # Missing return type annotation for ...
+    # Rules that are out of the control of stub authors:
+    "A001", # builtin-variable-shadowing
+    "A002", # builtin-argument-shadowing
+    "A004", # builtin-import-shadowing
+    "F403", # `from . import *` used; unable to detect undefined names
+    "PIE796", # Enum contains duplicate value
+    "PLC2701", # Private name import from external module # https://github.com/astral-sh/ruff/issues/15294 and https://github.com/astral-sh/ruff/issues/15295
+    "S105", # Possible hardcoded password assigned
+    "S106", # Possible hardcoded password assigned to argument
+    "S107", # Possible hardcoded password assigned to function default
+    "S3", # Use of insecure ...
     # Most pep8-naming rules don't apply for third-party stubs like typeshed.
     # N811 to N814 could apply, but we often use them to disambiguate a name whilst making it look like a more common one
     "N8",
-    # Rules that are out of the control of stub authors:
-    "F403", # `from . import *` used; unable to detect undefined names
     # Stubs can sometimes re-export entire modules.
     # Issues with using a star-imported name will be caught by type-checkers.
     "F405", # may be undefined, or defined from star imports
diff --git a/requirements-tests.txt b/requirements-tests.txt
index f49905636e83..6bd0689d278f 100644
--- a/requirements-tests.txt
+++ b/requirements-tests.txt
@@ -13,7 +13,7 @@ packaging==24.2
 pathspec>=0.11.1
 pre-commit
 # Required by create_baseline_stubs.py. Must match .pre-commit-config.yaml.
-ruff==0.9.3
+ruff==0.9.6
 stubdefaulter==0.1.0
 termcolor>=2.3
 tomli==2.2.1
diff --git a/scripts/create_baseline_stubs.py b/scripts/create_baseline_stubs.py
index 46de29b026c7..f59d04d242c3 100755
--- a/scripts/create_baseline_stubs.py
+++ b/scripts/create_baseline_stubs.py
@@ -18,6 +18,7 @@
 import subprocess
 import sys
 import urllib.parse
+from http import HTTPStatus
 from importlib.metadata import distribution

 import aiohttp
@@ -45,34 +46,33 @@ def get_installed_package_info(project: str) -> tuple[str, str] | None:

     Return (normalized project name, installed version) if successful.
""" - r = subprocess.run(["pip", "freeze"], capture_output=True, text=True, check=True) - return search_pip_freeze_output(project, r.stdout) + return search_pip_freeze_output(project, subprocess.check_output(["pip", "freeze"], text=True)) def run_stubgen(package: str, output: str) -> None: print(f"Running stubgen: stubgen -o {output} -p {package}") - subprocess.run(["stubgen", "-o", output, "-p", package, "--export-less"], check=True) + subprocess.check_call(["stubgen", "-o", output, "-p", package, "--export-less"]) def run_stubdefaulter(stub_dir: str) -> None: print(f"Running stubdefaulter: stubdefaulter --packages {stub_dir}") - subprocess.run(["stubdefaulter", "--packages", stub_dir]) + subprocess.run(["stubdefaulter", "--packages", stub_dir], check=False) def run_black(stub_dir: str) -> None: print(f"Running Black: black {stub_dir}") - subprocess.run(["pre-commit", "run", "black", "--files", *glob.iglob(f"{stub_dir}/**/*.pyi")]) + subprocess.run(["pre-commit", "run", "black", "--files", *glob.iglob(f"{stub_dir}/**/*.pyi")], check=False) def run_ruff(stub_dir: str) -> None: print(f"Running Ruff: ruff check {stub_dir} --fix-only") - subprocess.run([sys.executable, "-m", "ruff", "check", stub_dir, "--fix-only"]) + subprocess.run([sys.executable, "-m", "ruff", "check", stub_dir, "--fix-only"], check=False) async def get_project_urls_from_pypi(project: str, session: aiohttp.ClientSession) -> dict[str, str]: pypi_root = f"https://pypi.org/pypi/{urllib.parse.quote(project)}" async with session.get(f"{pypi_root}/json") as response: - if response.status != 200: + if response.status != HTTPStatus.OK: return {} j: dict[str, dict[str, dict[str, str]]] j = await response.json() @@ -90,24 +90,23 @@ async def get_upstream_repo_url(project: str) -> str | None: # Order the project URLs so that we put the ones # that are most likely to point to the source code first - urls_to_check: list[str] = [] url_names_probably_pointing_to_source = ("Source", "Repository", "Homepage") - for url_name in url_names_probably_pointing_to_source: - if url := project_urls.get(url_name): - urls_to_check.append(url) + urls_to_check: list[str] = [ + url for url in (project_urls.get(url_name) for url_name in url_names_probably_pointing_to_source) if url + ] urls_to_check.extend( url for url_name, url in project_urls.items() if url_name not in url_names_probably_pointing_to_source ) for url in urls_to_check: # Remove `www.`; replace `http://` with `https://` - url = re.sub(r"^(https?://)?(www\.)?", "https://", url) + url = re.sub(r"^(https?://)?(www\.)?", "https://", url) # noqa: PLW2901 netloc = urllib.parse.urlparse(url).netloc if netloc in {"gitlab.com", "github.com", "bitbucket.org", "foss.heptapod.net"}: # truncate to https://site.com/user/repo upstream_repo_url = "/".join(url.split("/")[:5]) async with session.get(upstream_repo_url) as response: - if response.status == 200: + if response.status == HTTPStatus.OK: return upstream_repo_url return None diff --git a/scripts/stubsabot.py b/scripts/stubsabot.py index f121bed5f5f0..a5cb37fec23f 100755 --- a/scripts/stubsabot.py +++ b/scripts/stubsabot.py @@ -3,7 +3,6 @@ import argparse import asyncio -import contextlib import datetime import enum import functools @@ -92,7 +91,7 @@ class PypiInfo: def get_release(self, *, version: VersionString) -> PypiReleaseDownload: # prefer wheels, since it's what most users will get / it's pretty easy to mess up MANIFEST - release_info = sorted(self.releases[version], key=lambda x: bool(x["packagetype"] == "bdist_wheel"))[-1] + release_info 
         return PypiReleaseDownload(
             distribution=self.distribution,
             url=release_info["url"],
@@ -248,8 +247,7 @@ async def find_first_release_with_py_typed(pypi_info: PypiInfo, *, session: aioh


 def get_updated_version_spec(spec: Specifier, version: packaging.version.Version) -> Specifier:
-    """
-    Given the old specifier and an updated version, returns an updated specifier that has the
+    """Given the old specifier and an updated version, returns an updated specifier that has the
     specificity of the old specifier, but matches the updated version.

     For example:
@@ -293,8 +291,7 @@ class GitHubInfo:


 async def get_github_repo_info(session: aiohttp.ClientSession, stub_info: StubMetadata) -> GitHubInfo | None:
-    """
-    If the project represented by `stub_info` is hosted on GitHub,
+    """If the project represented by `stub_info` is hosted on GitHub,
     return information regarding the project as it exists on GitHub.

     Else, return None.
@@ -305,10 +302,10 @@ async def get_github_repo_info(session: aiohttp.ClientSession, stub_info: StubMe
     split_url = urllib.parse.urlsplit(stub_info.upstream_repository)
     if split_url.netloc == "github.com":
         url_path = split_url.path.strip("/")
-        assert len(Path(url_path).parts) == 2
+        assert len(Path(url_path).parts) == 2 # noqa: PLR2004 # astral-sh/ruff#10009
         github_tags_info_url = f"https://api.github.com/repos/{url_path}/tags"
         async with session.get(github_tags_info_url, headers=get_github_api_headers()) as response:
-            if response.status == 200:
+            if response.status == HTTPStatus.OK:
                 tags: list[dict[str, Any]] = await response.json()
                 assert isinstance(tags, list)
                 return GitHubInfo(repo_path=url_path, tags=tags)
@@ -340,8 +337,10 @@ async def get_diff_info(
         # Some packages in typeshed have tag names
         # that are invalid to be passed to the Version() constructor,
         # e.g. v.1.4.2
-        with contextlib.suppress(packaging.version.InvalidVersion):
+        try:
             versions_to_tags[packaging.version.Version(tag_name)] = tag_name
+        except packaging.version.InvalidVersion:
+            pass

     try:
         new_tag = versions_to_tags[pypi_version]
@@ -374,8 +373,7 @@ class DiffAnalysis:

     @property
     def runtime_definitely_has_consistent_directory_structure_with_typeshed(self) -> bool:
-        """
-        If 0 .py files in the GitHub diff exist in typeshed's stubs,
+        """If 0 .py files in the GitHub diff exist in typeshed's stubs,
         there's a possibility that the .py files might be found
         in a different directory at runtime.

@@ -623,10 +621,8 @@ def latest_commit_is_different_to_last_commit_on_origin(branch: str) -> bool:
         # If the number of lines is >1,
         # it indicates that something about our commit is different to the last commit
         # (Could be the commit "content", or the commit message).
-        commit_comparison = subprocess.run(
-            ["git", "range-diff", f"origin/{branch}~1..origin/{branch}", "HEAD~1..HEAD"], check=True, capture_output=True
-        )
-        return len(commit_comparison.stdout.splitlines()) > 1
+        commit_comparison = subprocess.check_output(["git", "range-diff", f"origin/{branch}~1..origin/{branch}", "HEAD~1..HEAD"])
+        return len(commit_comparison.splitlines()) > 1
     except subprocess.CalledProcessError:
         # origin/branch does not exist
         return True
@@ -660,7 +656,7 @@ def get_update_pr_body(update: Update, metadata: Mapping[str, Any]) -> str:
         body += f"\n\n{update.diff_analysis}"

     stubtest_settings: dict[str, Any] = metadata.get("tool", {}).get("stubtest", {})
-    stubtest_will_run = not stubtest_settings.get("skip", False)
+    stubtest_will_run = not stubtest_settings.get("skip")
     if stubtest_will_run:
         body += textwrap.dedent(
             """
@@ -747,14 +743,11 @@ async def main() -> None:
     parser.add_argument("distributions", nargs="*", help="Distributions to update, default = all")
     args = parser.parse_args()

-    if args.distributions:
-        dists_to_update = args.distributions
-    else:
-        dists_to_update = [path.name for path in STUBS_PATH.iterdir()]
+    dists_to_update = args.distributions or [path.name for path in STUBS_PATH.iterdir()]

     if args.action_level > ActionLevel.nothing:
-        subprocess.run(["git", "update-index", "--refresh"], capture_output=True)
-        diff_result = subprocess.run(["git", "diff-index", "HEAD", "--name-only"], text=True, capture_output=True)
+        subprocess.run(["git", "update-index", "--refresh"], capture_output=True, check=False)
+        diff_result = subprocess.run(["git", "diff-index", "HEAD", "--name-only"], text=True, capture_output=True, check=False)
         if diff_result.returncode:
             print("Unexpected exception!")
             print(diff_result.stdout)
@@ -765,15 +758,12 @@ async def main() -> None:
         print(f"Cannot run stubsabot, as uncommitted changes are present in {changed_files}!")
         sys.exit(1)

-    if args.action_level > ActionLevel.fork:
-        if os.environ.get("GITHUB_TOKEN") is None:
-            raise ValueError("GITHUB_TOKEN environment variable must be set")
+    if args.action_level > ActionLevel.fork and os.environ.get("GITHUB_TOKEN") is None:
+        raise ValueError("GITHUB_TOKEN environment variable must be set")

     denylist = {"gdb"} # gdb is not a pypi distribution

-    original_branch = subprocess.run(
-        ["git", "branch", "--show-current"], text=True, capture_output=True, check=True
-    ).stdout.strip()
+    original_branch = subprocess.check_output(["git", "branch", "--show-current"], text=True).strip()

     if args.action_level >= ActionLevel.local:
         subprocess.check_call(["git", "fetch", "--prune", "--all"])
diff --git a/scripts/sync_protobuf/_utils.py b/scripts/sync_protobuf/_utils.py
index 0c49c5a6fa9a..2ef0db58a9e2 100644
--- a/scripts/sync_protobuf/_utils.py
+++ b/scripts/sync_protobuf/_utils.py
@@ -18,9 +18,11 @@


 def download_file(url: str, destination: StrPath) -> None:
+    if not url.startswith(("http:", "https:")):
+        raise ValueError("URL must start with 'http:' or 'https:'")
     print(f"Downloading '{url}' to '{destination}'")
     resp: HTTPResponse
-    with urlopen(url) as resp, open(destination, "wb") as file:
+    with urlopen(url) as resp, open(destination, "wb") as file: # noqa: S310 # Validated
         file.write(resp.read())


@@ -34,9 +36,7 @@ def run_protoc(
     proto_paths: Iterable[StrPath], mypy_out: StrPath, proto_globs: Iterable[str], cwd: StrOrBytesPath | None = None
 ) -> str:
     """TODO: Describe parameters and return."""
-    protoc_version = (
-        subprocess.run([sys.executable, "-m", "grpc_tools.protoc", "--version"], capture_output=True).stdout.decode().strip()
-    )
+    protoc_version = subprocess.check_output([sys.executable, "-m", "grpc_tools.protoc", "--version"], text=True).strip()
     print()
     print(protoc_version)
     protoc_args = [
@@ -46,5 +46,5 @@ def run_protoc(
         *proto_globs,
     ]
     print("Running: protoc\n " + "\n ".join(protoc_args) + "\n")
-    subprocess.run((sys.executable, "-m", "grpc_tools.protoc", *protoc_args), cwd=cwd, check=True)
+    subprocess.check_call((sys.executable, "-m", "grpc_tools.protoc", *protoc_args), cwd=cwd)
     return protoc_version
diff --git a/scripts/sync_protobuf/google_protobuf.py b/scripts/sync_protobuf/google_protobuf.py
old mode 100755
new mode 100644
index ee238f82618d..4bd727bd4a03
--- a/scripts/sync_protobuf/google_protobuf.py
+++ b/scripts/sync_protobuf/google_protobuf.py
@@ -33,7 +33,7 @@

 def extract_python_version(file_path: Path) -> str:
     """Extract the Python version from https://github.com/protocolbuffers/protobuf/blob/main/version.json ."""
-    with open(file_path) as file:
+    with open(file_path, encoding="utf-8") as file:
         data: dict[str, Any] = json.load(file)
     # The root key will be the protobuf source code version
     version = next(iter(data.values()))["languages"]["python"]
@@ -42,18 +42,16 @@ def extract_python_version(file_path: Path) -> str:


 def extract_proto_file_paths(temp_dir: Path) -> list[str]:
-    """
-    Roughly reproduce the subset of .proto files on the public interface
+    """Roughly reproduce the subset of .proto files on the public interface
     as described in py_proto_library calls in
     https://github.com/protocolbuffers/protobuf/blob/main/python/dist/BUILD.bazel .
     """
-    with open(temp_dir / EXTRACTED_PACKAGE_DIR / "python" / "dist" / "BUILD.bazel") as file:
+    with open(temp_dir / EXTRACTED_PACKAGE_DIR / "python" / "dist" / "BUILD.bazel", encoding="utf-8") as file:
         matched_lines = filter(None, (re.search(PROTO_FILE_PATTERN, line) for line in file))
-        proto_files = [
+        return [
             EXTRACTED_PACKAGE_DIR + "/src/google/protobuf/" + match.group(1).replace("compiler_", "compiler/") + ".proto"
             for match in matched_lines
         ]
-    return proto_files


 def main() -> None:
@@ -90,7 +88,7 @@ def main() -> None:
     print("Updated protobuf/METADATA.toml")

     # Run pre-commit to cleanup the stubs
-    subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi")))
+    subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi")), check=False)


 if __name__ == "__main__":
diff --git a/scripts/sync_protobuf/s2clientprotocol.py b/scripts/sync_protobuf/s2clientprotocol.py
old mode 100755
new mode 100644
index 989f57a4cd8d..cee68e1edea9
--- a/scripts/sync_protobuf/s2clientprotocol.py
+++ b/scripts/sync_protobuf/s2clientprotocol.py
@@ -69,7 +69,7 @@ def main() -> None:
     print("Updated s2clientprotocol/METADATA.toml")

     # Run pre-commit to cleanup the stubs
-    subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi")))
+    subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi")), check=False)


 if __name__ == "__main__":
diff --git a/scripts/sync_protobuf/tensorflow.py b/scripts/sync_protobuf/tensorflow.py
old mode 100755
new mode 100644
index b26ee90ccabf..3c84980f1cd8
--- a/scripts/sync_protobuf/tensorflow.py
+++ b/scripts/sync_protobuf/tensorflow.py
@@ -72,7 +72,7 @@ def post_creation() -> None:

     for path in STUBS_FOLDER.rglob("*_pb2.pyi"):
         print(f"Fixing imports in '{path}'")
-        with open(path) as file:
+        with open(path, encoding="utf-8") as file:
             filedata = file.read()

         # Replace the target string
@@ -80,7 +80,7 @@ def post_creation() -> None:
         filedata = re.sub(XLA_IMPORT_PATTERN, "\\1tensorflow.compiler.xla.", filedata)

         # Write the file out again
-        with open(path, "w") as file:
+        with open(path, "w", encoding="utf-8") as file:
             file.write(filedata)

     print()
@@ -137,7 +137,7 @@ def main() -> None:
     print("Updated tensorflow/METADATA.toml")

     # Run pre-commit to cleanup the stubs
-    subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi")))
+    subprocess.run((sys.executable, "-m", "pre_commit", "run", "--files", *STUBS_FOLDER.rglob("*_pb2.pyi")), check=False)


 if __name__ == "__main__":
diff --git a/stdlib/_typeshed/__init__.pyi b/stdlib/_typeshed/__init__.pyi
index 7201819b25ed..66847840688e 100644
--- a/stdlib/_typeshed/__init__.pyi
+++ b/stdlib/_typeshed/__init__.pyi
@@ -325,9 +325,9 @@ class structseq(Generic[_T_co]):
     # The second parameter will accept a dict of any kind without raising an exception,
     # but only has any meaning if you supply it a dict where the keys are strings.
     # https://github.com/python/typeshed/pull/6560#discussion_r767149830
-    def __new__(cls: type[Self], sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> Self: ...
+    def __new__(cls: type[Self], sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> Self: ... # noqa: PYI019
     if sys.version_info >= (3, 13):
-        def __replace__(self: Self, **kwargs: Any) -> Self: ...
+        def __replace__(self: Self, **kwargs: Any) -> Self: ... # noqa: PYI019

 # Superset of typing.AnyStr that also includes LiteralString
 AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString) # noqa: Y001
diff --git a/stdlib/ast.pyi b/stdlib/ast.pyi
index 7a4438a33fbc..1a6e4ff0d4c2 100644
--- a/stdlib/ast.pyi
+++ b/stdlib/ast.pyi
@@ -1494,11 +1494,11 @@ if sys.version_info >= (3, 10):

     class MatchSingleton(pattern):
         __match_args__ = ("value",)
-        value: Literal[True, False] | None
-        def __init__(self, value: Literal[True, False] | None, **kwargs: Unpack[_Attributes[int]]) -> None: ...
+        value: bool | None
+        def __init__(self, value: bool | None, **kwargs: Unpack[_Attributes[int]]) -> None: ...
         if sys.version_info >= (3, 14):
-            def __replace__(self, *, value: Literal[True, False] | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
+            def __replace__(self, *, value: bool | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...

     class MatchSequence(pattern):
         __match_args__ = ("patterns",)
diff --git a/stdlib/asyncio/__init__.pyi b/stdlib/asyncio/__init__.pyi
index 7c3ac6ede4fe..2b40439f83a8 100644
--- a/stdlib/asyncio/__init__.pyi
+++ b/stdlib/asyncio/__init__.pyi
@@ -410,94 +410,7 @@ if sys.platform == "win32":
             "WindowsSelectorEventLoopPolicy", # from windows_events
             "WindowsProactorEventLoopPolicy", # from windows_events
         )
-    elif sys.version_info >= (3, 10):
-        __all__ = (
-            "BaseEventLoop", # from base_events
-            "Server", # from base_events
-            "coroutine", # from coroutines
-            "iscoroutinefunction", # from coroutines
-            "iscoroutine", # from coroutines
-            "AbstractEventLoopPolicy", # from events
-            "AbstractEventLoop", # from events
-            "AbstractServer", # from events
-            "Handle", # from events
-            "TimerHandle", # from events
-            "get_event_loop_policy", # from events
-            "set_event_loop_policy", # from events
-            "get_event_loop", # from events
-            "set_event_loop", # from events
-            "new_event_loop", # from events
-            "get_child_watcher", # from events
-            "set_child_watcher", # from events
-            "_set_running_loop", # from events
-            "get_running_loop", # from events
-            "_get_running_loop", # from events
-            "CancelledError", # from exceptions
-            "InvalidStateError", # from exceptions
-            "TimeoutError", # from exceptions
-            "IncompleteReadError", # from exceptions
-            "LimitOverrunError", # from exceptions
-            "SendfileNotAvailableError", # from exceptions
-            "Future", # from futures
-            "wrap_future", # from futures
-            "isfuture", # from futures
-            "Lock", # from locks
-            "Event", # from locks
-            "Condition", # from locks
-            "Semaphore", # from locks
-            "BoundedSemaphore", # from locks
-            "BaseProtocol", # from protocols
-            "Protocol", # from protocols
-            "DatagramProtocol", # from protocols
-            "SubprocessProtocol", # from protocols
-            "BufferedProtocol", # from protocols
-            "run", # from runners
-            "Queue", # from queues
-            "PriorityQueue", # from queues
-            "LifoQueue", # from queues
-            "QueueFull", # from queues
-            "QueueEmpty", # from queues
-            "StreamReader", # from streams
-            "StreamWriter", # from streams
-            "StreamReaderProtocol", # from streams
-            "open_connection", # from streams
-            "start_server", # from streams
-            "create_subprocess_exec", # from subprocess
-            "create_subprocess_shell", # from subprocess
-            "Task", # from tasks
-            "create_task", # from tasks
-            "FIRST_COMPLETED", # from tasks
-            "FIRST_EXCEPTION", # from tasks
-            "ALL_COMPLETED", # from tasks
-            "wait", # from tasks
-            "wait_for", # from tasks
-            "as_completed", # from tasks
-            "sleep", # from tasks
-            "gather", # from tasks
-            "shield", # from tasks
-            "ensure_future", # from tasks
-            "run_coroutine_threadsafe", # from tasks
-            "current_task", # from tasks
-            "all_tasks", # from tasks
-            "_register_task", # from tasks
-            "_unregister_task", # from tasks
-            "_enter_task", # from tasks
-            "_leave_task", # from tasks
-            "to_thread", # from threads
-            "BaseTransport", # from transports
-            "ReadTransport", # from transports
-            "WriteTransport", # from transports
-            "Transport", # from transports
-            "DatagramTransport", # from transports
-            "SubprocessTransport", # from transports
-            "SelectorEventLoop", # from windows_events
-            "ProactorEventLoop", # from windows_events
-            "IocpProactor", # from windows_events
-            "DefaultEventLoopPolicy", # from windows_events
-            "WindowsSelectorEventLoopPolicy", # from windows_events
-            "WindowsProactorEventLoopPolicy", # from windows_events
-        )
     elif sys.version_info >= (3, 9):
         __all__ = (
             "BaseEventLoop", # from base_events
"Server", # from base_events @@ -669,7 +582,7 @@ if sys.platform == "win32": "WindowsSelectorEventLoopPolicy", # from windows_events "WindowsProactorEventLoopPolicy", # from windows_events ) -else: +else: # noqa: PLR5501 if sys.version_info >= (3, 14): __all__ = ( "BaseEventLoop", # from base_events @@ -1059,98 +972,7 @@ else: "ThreadedChildWatcher", # from unix_events "DefaultEventLoopPolicy", # from unix_events ) - elif sys.version_info >= (3, 10): - __all__ = ( - "BaseEventLoop", # from base_events - "Server", # from base_events - "coroutine", # from coroutines - "iscoroutinefunction", # from coroutines - "iscoroutine", # from coroutines - "AbstractEventLoopPolicy", # from events - "AbstractEventLoop", # from events - "AbstractServer", # from events - "Handle", # from events - "TimerHandle", # from events - "get_event_loop_policy", # from events - "set_event_loop_policy", # from events - "get_event_loop", # from events - "set_event_loop", # from events - "new_event_loop", # from events - "get_child_watcher", # from events - "set_child_watcher", # from events - "_set_running_loop", # from events - "get_running_loop", # from events - "_get_running_loop", # from events - "CancelledError", # from exceptions - "InvalidStateError", # from exceptions - "TimeoutError", # from exceptions - "IncompleteReadError", # from exceptions - "LimitOverrunError", # from exceptions - "SendfileNotAvailableError", # from exceptions - "Future", # from futures - "wrap_future", # from futures - "isfuture", # from futures - "Lock", # from locks - "Event", # from locks - "Condition", # from locks - "Semaphore", # from locks - "BoundedSemaphore", # from locks - "BaseProtocol", # from protocols - "Protocol", # from protocols - "DatagramProtocol", # from protocols - "SubprocessProtocol", # from protocols - "BufferedProtocol", # from protocols - "run", # from runners - "Queue", # from queues - "PriorityQueue", # from queues - "LifoQueue", # from queues - "QueueFull", # from queues - "QueueEmpty", # from queues - "StreamReader", # from streams - "StreamWriter", # from streams - "StreamReaderProtocol", # from streams - "open_connection", # from streams - "start_server", # from streams - "open_unix_connection", # from streams - "start_unix_server", # from streams - "create_subprocess_exec", # from subprocess - "create_subprocess_shell", # from subprocess - "Task", # from tasks - "create_task", # from tasks - "FIRST_COMPLETED", # from tasks - "FIRST_EXCEPTION", # from tasks - "ALL_COMPLETED", # from tasks - "wait", # from tasks - "wait_for", # from tasks - "as_completed", # from tasks - "sleep", # from tasks - "gather", # from tasks - "shield", # from tasks - "ensure_future", # from tasks - "run_coroutine_threadsafe", # from tasks - "current_task", # from tasks - "all_tasks", # from tasks - "_register_task", # from tasks - "_unregister_task", # from tasks - "_enter_task", # from tasks - "_leave_task", # from tasks - "to_thread", # from threads - "BaseTransport", # from transports - "ReadTransport", # from transports - "WriteTransport", # from transports - "Transport", # from transports - "DatagramTransport", # from transports - "SubprocessTransport", # from transports - "SelectorEventLoop", # from unix_events - "AbstractChildWatcher", # from unix_events - "SafeChildWatcher", # from unix_events - "FastChildWatcher", # from unix_events - "PidfdChildWatcher", # from unix_events - "MultiLoopChildWatcher", # from unix_events - "ThreadedChildWatcher", # from unix_events - "DefaultEventLoopPolicy", # from unix_events - ) - elif 
     elif sys.version_info >= (3, 9):
         __all__ = (
             "BaseEventLoop", # from base_events
             "Server", # from base_events
diff --git a/stdlib/builtins.pyi b/stdlib/builtins.pyi
index b0912b8872d7..2d93aab119b8 100644
--- a/stdlib/builtins.pyi
+++ b/stdlib/builtins.pyi
@@ -90,14 +90,14 @@ _T2 = TypeVar("_T2")
 _T3 = TypeVar("_T3")
 _T4 = TypeVar("_T4")
 _T5 = TypeVar("_T5")
-_SupportsNextT = TypeVar("_SupportsNextT", bound=SupportsNext[Any], covariant=True)
-_SupportsAnextT = TypeVar("_SupportsAnextT", bound=SupportsAnext[Any], covariant=True)
+_SupportsNextT_co = TypeVar("_SupportsNextT_co", bound=SupportsNext[Any], covariant=True)
+_SupportsAnextT_co = TypeVar("_SupportsAnextT_co", bound=SupportsAnext[Any], covariant=True)
 _AwaitableT = TypeVar("_AwaitableT", bound=Awaitable[Any])
 _AwaitableT_co = TypeVar("_AwaitableT_co", bound=Awaitable[Any], covariant=True)
 _P = ParamSpec("_P")
-_StartT = TypeVar("_StartT", covariant=True, default=Any)
-_StopT = TypeVar("_StopT", covariant=True, default=Any)
-_StepT = TypeVar("_StepT", covariant=True, default=Any)
+_StartT_co = TypeVar("_StartT_co", covariant=True, default=Any)
+_StopT_co = TypeVar("_StopT_co", covariant=True, default=Any)
+_StepT_co = TypeVar("_StepT_co", covariant=True, default=Any)

 class object:
     __doc__: str | None
@@ -940,13 +940,13 @@ class bool(int):
     def __invert__(self) -> int: ...

 @final
-class slice(Generic[_StartT, _StopT, _StepT]):
+class slice(Generic[_StartT_co, _StopT_co, _StepT_co]):
     @property
-    def start(self) -> _StartT: ...
+    def start(self) -> _StartT_co: ...
     @property
-    def step(self) -> _StepT: ...
+    def step(self) -> _StepT_co: ...
     @property
-    def stop(self) -> _StopT: ...
+    def stop(self) -> _StopT_co: ...
     @overload
     def __new__(cls, stop: int | None, /) -> slice[int | MaybeNone, int | MaybeNone, int | MaybeNone]: ...
     @overload
@@ -1303,7 +1303,7 @@ class _PathLike(Protocol[AnyStr_co]):
     def __fspath__(self) -> AnyStr_co: ...

 if sys.version_info >= (3, 10):
-    def aiter(async_iterable: SupportsAiter[_SupportsAnextT], /) -> _SupportsAnextT: ...
+    def aiter(async_iterable: SupportsAiter[_SupportsAnextT_co], /) -> _SupportsAnextT_co: ...

     class _SupportsSynchronousAnext(Protocol[_AwaitableT_co]):
         def __anext__(self) -> _AwaitableT_co: ...
@@ -1465,7 +1465,7 @@ class _GetItemIterable(Protocol[_T_co]):
     def __getitem__(self, i: int, /) -> _T_co: ...

 @overload
-def iter(object: SupportsIter[_SupportsNextT], /) -> _SupportsNextT: ...
+def iter(object: SupportsIter[_SupportsNextT_co], /) -> _SupportsNextT_co: ...
 @overload
 def iter(object: _GetItemIterable[_T], /) -> Iterator[_T]: ...
 @overload
@@ -1672,17 +1672,17 @@ def print(
     *values: object, sep: str | None = " ", end: str | None = "\n", file: _SupportsWriteAndFlush[str] | None = None, flush: bool
 ) -> None: ...

-_E = TypeVar("_E", contravariant=True)
-_M = TypeVar("_M", contravariant=True)
+_E_contra = TypeVar("_E_contra", contravariant=True)
+_M_contra = TypeVar("_M_contra", contravariant=True)

-class _SupportsPow2(Protocol[_E, _T_co]):
-    def __pow__(self, other: _E, /) -> _T_co: ...
+class _SupportsPow2(Protocol[_E_contra, _T_co]):
+    def __pow__(self, other: _E_contra, /) -> _T_co: ...

-class _SupportsPow3NoneOnly(Protocol[_E, _T_co]):
-    def __pow__(self, other: _E, modulo: None = None, /) -> _T_co: ...
+class _SupportsPow3NoneOnly(Protocol[_E_contra, _T_co]):
+    def __pow__(self, other: _E_contra, modulo: None = None, /) -> _T_co: ...
-class _SupportsPow3(Protocol[_E, _M, _T_co]):
-    def __pow__(self, other: _E, modulo: _M, /) -> _T_co: ...
+class _SupportsPow3(Protocol[_E_contra, _M_contra, _T_co]):
+    def __pow__(self, other: _E_contra, modulo: _M_contra, /) -> _T_co: ...

 _SupportsSomeKindOfPow = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed
     _SupportsPow2[Any, Any] | _SupportsPow3NoneOnly[Any, Any] | _SupportsPow3[Any, Any, Any]
 )
@@ -1718,11 +1718,11 @@ def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = None) ->
 @overload
 def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> complex: ...
 @overload
-def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap]
+def pow(base: _SupportsPow2[_E_contra, _T_co], exp: _E_contra, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap]
 @overload
-def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap]
+def pow(base: _SupportsPow3NoneOnly[_E_contra, _T_co], exp: _E_contra, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap]
 @overload
-def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M) -> _T_co: ...
+def pow(base: _SupportsPow3[_E_contra, _M_contra, _T_co], exp: _E_contra, mod: _M_contra) -> _T_co: ...
 @overload
 def pow(base: _SupportsSomeKindOfPow, exp: float, mod: None = None) -> Any: ...
 @overload
diff --git a/stdlib/contextlib.pyi b/stdlib/contextlib.pyi
index e1d5f91faf5b..38f921ae9d62 100644
--- a/stdlib/contextlib.pyi
+++ b/stdlib/contextlib.pyi
@@ -32,7 +32,7 @@ _T = TypeVar("_T")
 _T_co = TypeVar("_T_co", covariant=True)
 _T_io = TypeVar("_T_io", bound=IO[str] | None)
 _ExitT_co = TypeVar("_ExitT_co", covariant=True, bound=bool | None, default=bool | None)
-_G = TypeVar("_G", bound=Generator[Any, Any, Any] | AsyncGenerator[Any, Any], covariant=True)
+_G_co = TypeVar("_G_co", bound=Generator[Any, Any, Any] | AsyncGenerator[Any, Any], covariant=True)
 _P = ParamSpec("_P")
 _R = TypeVar("_R")

@@ -72,11 +72,11 @@ class ContextDecorator:
     def _recreate_cm(self) -> Self: ...
     def __call__(self, func: Callable[_P, _R]) -> _WrappedCallable[_P, _R]: ...

-class _GeneratorContextManagerBase(Generic[_G]):
+class _GeneratorContextManagerBase(Generic[_G_co]):
     # Ideally this would use ParamSpec, but that requires (*args, **kwargs), which this isn't. see #6676
-    def __init__(self, func: Callable[..., _G], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ...
-    gen: _G
-    func: Callable[..., _G]
+    def __init__(self, func: Callable[..., _G_co], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ...
+    gen: _G_co
+    func: Callable[..., _G_co]
     args: tuple[Any, ...]
     kwds: dict[str, Any]
diff --git a/stdlib/hmac.pyi b/stdlib/hmac.pyi
index efd649ec39a8..dbcdf614d3e0 100644
--- a/stdlib/hmac.pyi
+++ b/stdlib/hmac.pyi
@@ -1,5 +1,5 @@
 import sys
-from _hashlib import HASH as _HashlibHash
+from _hashlib import HASH as _HashlibHash # Not a constant
 from _typeshed import ReadableBuffer, SizedBuffer
 from collections.abc import Callable
 from types import ModuleType
diff --git a/stdlib/importlib/readers.pyi b/stdlib/importlib/readers.pyi
index 41d7af966d58..8f6074a16738 100644
--- a/stdlib/importlib/readers.pyi
+++ b/stdlib/importlib/readers.pyi
@@ -12,9 +12,9 @@ from typing import Literal, NoReturn, TypeVar
 from typing_extensions import Never

 if sys.version_info >= (3, 11):
-    import importlib.resources.abc as abc
+    from importlib.resources import abc
 else:
-    import importlib.abc as abc
+    from importlib import abc

 if sys.version_info >= (3, 10):
     if sys.version_info >= (3, 11):
diff --git a/stdlib/inspect.pyi b/stdlib/inspect.pyi
index 43b3dd529887..229eb2135690 100644
--- a/stdlib/inspect.pyi
+++ b/stdlib/inspect.pyi
@@ -143,8 +143,8 @@ if sys.version_info >= (3, 11):
 _P = ParamSpec("_P")
 _T = TypeVar("_T")
 _F = TypeVar("_F", bound=Callable[..., Any])
-_T_cont = TypeVar("_T_cont", contravariant=True)
-_V_cont = TypeVar("_V_cont", contravariant=True)
+_T_contra = TypeVar("_T_contra", contravariant=True)
+_V_contra = TypeVar("_V_contra", contravariant=True)

 #
 # Types and members
@@ -228,11 +228,11 @@ def isasyncgenfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, AsyncGe
 @overload
 def isasyncgenfunction(obj: object) -> TypeGuard[Callable[..., AsyncGeneratorType[Any, Any]]]: ...

-class _SupportsSet(Protocol[_T_cont, _V_cont]):
-    def __set__(self, instance: _T_cont, value: _V_cont, /) -> None: ...
+class _SupportsSet(Protocol[_T_contra, _V_contra]):
+    def __set__(self, instance: _T_contra, value: _V_contra, /) -> None: ...

-class _SupportsDelete(Protocol[_T_cont]):
-    def __delete__(self, instance: _T_cont, /) -> None: ...
+class _SupportsDelete(Protocol[_T_contra]):
+    def __delete__(self, instance: _T_contra, /) -> None: ...

 def isasyncgen(object: object) -> TypeIs[AsyncGeneratorType[Any, Any]]: ...
 def istraceback(object: object) -> TypeIs[TracebackType]: ...
diff --git a/stdlib/mmap.pyi b/stdlib/mmap.pyi
index c9b8358cde6c..bd9e7361b6e7 100644
--- a/stdlib/mmap.pyi
+++ b/stdlib/mmap.pyi
@@ -33,23 +33,22 @@ PAGESIZE: int
 class mmap:
     if sys.platform == "win32":
         def __init__(self, fileno: int, length: int, tagname: str | None = ..., access: int = ..., offset: int = ...) -> None: ...
+    elif sys.version_info >= (3, 13):
+        def __new__(
+            cls,
+            fileno: int,
+            length: int,
+            flags: int = ...,
+            prot: int = ...,
+            access: int = ...,
+            offset: int = ...,
+            *,
+            trackfd: bool = True,
+        ) -> Self: ...
     else:
-        if sys.version_info >= (3, 13):
-            def __new__(
-                cls,
-                fileno: int,
-                length: int,
-                flags: int = ...,
-                prot: int = ...,
-                access: int = ...,
-                offset: int = ...,
-                *,
-                trackfd: bool = True,
-            ) -> Self: ...
-        else:
-            def __new__(
-                cls, fileno: int, length: int, flags: int = ..., prot: int = ..., access: int = ..., offset: int = ...
-            ) -> Self: ...
+        def __new__(
+            cls, fileno: int, length: int, flags: int = ..., prot: int = ..., access: int = ..., offset: int = ...
+        ) -> Self: ...

     def close(self) -> None: ...
     def flush(self, offset: int = ..., size: int = ...) -> None: ...
diff --git a/stdlib/multiprocessing/connection.pyi b/stdlib/multiprocessing/connection.pyi
index 9998239d3119..cd4fa102c0f3 100644
--- a/stdlib/multiprocessing/connection.pyi
+++ b/stdlib/multiprocessing/connection.pyi
@@ -12,10 +12,10 @@ __all__ = ["Client", "Listener", "Pipe", "wait"]
 _Address: TypeAlias = str | tuple[str, int]

 # Defaulting to Any to avoid forcing generics on a lot of pre-existing code
-_SendT = TypeVar("_SendT", contravariant=True, default=Any)
-_RecvT = TypeVar("_RecvT", covariant=True, default=Any)
+_SendT_contra = TypeVar("_SendT_contra", contravariant=True, default=Any)
+_RecvT_co = TypeVar("_RecvT_co", covariant=True, default=Any)

-class _ConnectionBase(Generic[_SendT, _RecvT]):
+class _ConnectionBase(Generic[_SendT_contra, _RecvT_co]):
     def __init__(self, handle: SupportsIndex, readable: bool = True, writable: bool = True) -> None: ...
     @property
     def closed(self) -> bool: ... # undocumented
@@ -26,10 +26,10 @@ class _ConnectionBase(Generic[_SendT, _RecvT]):
     def fileno(self) -> int: ...
     def close(self) -> None: ...
     def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: ...
-    def send(self, obj: _SendT) -> None: ...
+    def send(self, obj: _SendT_contra) -> None: ...
     def recv_bytes(self, maxlength: int | None = None) -> bytes: ...
     def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: ...
-    def recv(self) -> _RecvT: ...
+    def recv(self) -> _RecvT_co: ...
     def poll(self, timeout: float | None = 0.0) -> bool: ...
     def __enter__(self) -> Self: ...
     def __exit__(
@@ -37,10 +37,10 @@ class _ConnectionBase(Generic[_SendT, _RecvT]):
     ) -> None: ...
     def __del__(self) -> None: ...

-class Connection(_ConnectionBase[_SendT, _RecvT]): ...
+class Connection(_ConnectionBase[_SendT_contra, _RecvT_co]): ...

 if sys.platform == "win32":
-    class PipeConnection(_ConnectionBase[_SendT, _RecvT]): ...
+    class PipeConnection(_ConnectionBase[_SendT_contra, _RecvT_co]): ...

 class Listener:
     def __init__(
@@ -66,8 +66,8 @@ else:
 def answer_challenge(connection: Connection[Any, Any], authkey: bytes) -> None: ...

 def wait(
-    object_list: Iterable[Connection[_SendT, _RecvT] | socket.socket | int], timeout: float | None = None
-) -> list[Connection[_SendT, _RecvT] | socket.socket | int]: ...
+    object_list: Iterable[Connection[_SendT_contra, _RecvT_co] | socket.socket | int], timeout: float | None = None
+) -> list[Connection[_SendT_contra, _RecvT_co] | socket.socket | int]: ...
 def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection[Any, Any]: ...

 # N.B. Keep this in sync with multiprocessing.context.BaseContext.Pipe.
diff --git a/stdlib/sre_parse.pyi b/stdlib/sre_parse.pyi
index c242bd2a065f..dbf43ded2590 100644
--- a/stdlib/sre_parse.pyi
+++ b/stdlib/sre_parse.pyi
@@ -2,7 +2,7 @@ import sys
 from collections.abc import Iterable
 from re import Match, Pattern as _Pattern
 from sre_constants import *
-from sre_constants import _NamedIntConstant as _NIC, error as _Error
+from sre_constants import _NamedIntConstant as _NIC, error as _error
 from typing import Any, overload
 from typing_extensions import TypeAlias

@@ -74,7 +74,7 @@ class Tokenizer:
     def pos(self) -> int: ...
     def tell(self) -> int: ...
     def seek(self, index: int) -> None: ...
-    def error(self, msg: str, offset: int = 0) -> _Error: ...
+    def error(self, msg: str, offset: int = 0) -> _error: ...

     if sys.version_info >= (3, 12):
         def checkgroupname(self, name: str, offset: int) -> None: ...
diff --git a/stdlib/typing.pyi b/stdlib/typing.pyi
index 7c1b171a730b..8b2e538f261d 100644
--- a/stdlib/typing.pyi
+++ b/stdlib/typing.pyi
@@ -1,4 +1,4 @@
-# Since this module defines "overload" it is not recognized by Ruff as typing.overload
+# Since this module defines "overload", it is not recognized by Ruff as typing.overload
 # ruff: noqa: F811
 # TODO: The collections import is required, otherwise mypy crashes.
 # https://github.com/python/mypy/issues/16744
@@ -510,15 +510,15 @@ class Awaitable(Protocol[_T_co]):
     def __await__(self) -> Generator[Any, Any, _T_co]: ...

 # Non-default variations to accommodate couroutines, and `AwaitableGenerator` having a 4th type parameter.
-_SendT_contra_nd = TypeVar("_SendT_contra_nd", contravariant=True)
-_ReturnT_co_nd = TypeVar("_ReturnT_co_nd", covariant=True)
+_SendT_nd_contra = TypeVar("_SendT_nd_contra", contravariant=True)
+_ReturnT_nd_co = TypeVar("_ReturnT_nd_co", covariant=True)

-class Coroutine(Awaitable[_ReturnT_co_nd], Generic[_YieldT_co, _SendT_contra_nd, _ReturnT_co_nd]):
+class Coroutine(Awaitable[_ReturnT_nd_co], Generic[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co]):
     __name__: str
     __qualname__: str

     @abstractmethod
-    def send(self, value: _SendT_contra_nd, /) -> _YieldT_co: ...
+    def send(self, value: _SendT_nd_contra, /) -> _YieldT_co: ...
     @overload
     @abstractmethod
     def throw(
@@ -534,9 +534,9 @@ class Coroutine(Awaitable[_ReturnT_co_nd], Generic[_YieldT_co, _SendT_contra_nd,
 # The parameters correspond to Generator, but the 4th is the original type.
 @type_check_only
 class AwaitableGenerator(
-    Awaitable[_ReturnT_co_nd],
-    Generator[_YieldT_co, _SendT_contra_nd, _ReturnT_co_nd],
-    Generic[_YieldT_co, _SendT_contra_nd, _ReturnT_co_nd, _S],
+    Awaitable[_ReturnT_nd_co],
+    Generator[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co],
+    Generic[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co, _S],
     metaclass=ABCMeta,
 ): ...

@@ -957,7 +957,7 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta):
     def setdefault(self, k: _Never, default: object) -> object: ...
     # Mypy plugin hook for 'pop' expects that 'default' has a type variable type.
     def pop(self, k: _Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse]
-    def update(self: _T, m: _T, /) -> None: ...
+    def update(self, m: typing_extensions.Self, /) -> None: ...
     def __delitem__(self, k: _Never) -> None: ...
     def items(self) -> dict_items[str, object]: ...
     def keys(self) -> dict_keys[str, object]: ...
diff --git a/stdlib/typing_extensions.pyi b/stdlib/typing_extensions.pyi
index 33af1a388aa5..2ba7eddb69ef 100644
--- a/stdlib/typing_extensions.pyi
+++ b/stdlib/typing_extensions.pyi
@@ -236,7 +236,7 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta):
     def setdefault(self, k: Never, default: object) -> object: ...
     # Mypy plugin hook for 'pop' expects that 'default' has a type variable type.
     def pop(self, k: Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse]
-    def update(self: _T, m: _T, /) -> None: ...
+    def update(self, m: Self, /) -> None: ...
     def items(self) -> dict_items[str, object]: ...
     def keys(self) -> dict_keys[str, object]: ...
     def values(self) -> dict_values[str, object]: ...
diff --git a/stdlib/xml/dom/pulldom.pyi b/stdlib/xml/dom/pulldom.pyi
index 50250de5cb2f..51f34e1fb08b 100644
--- a/stdlib/xml/dom/pulldom.pyi
+++ b/stdlib/xml/dom/pulldom.pyi
@@ -21,14 +21,14 @@ _Node: TypeAlias = Document | Element | Text

 _Event: TypeAlias = tuple[
     Literal[
-        Literal["START_ELEMENT"],
-        Literal["END_ELEMENT"],
-        Literal["COMMENT"],
-        Literal["START_DOCUMENT"],
-        Literal["END_DOCUMENT"],
-        Literal["PROCESSING_INSTRUCTION"],
-        Literal["IGNORABLE_WHITESPACE"],
-        Literal["CHARACTERS"],
+        "START_ELEMENT",
+        "END_ELEMENT",
+        "COMMENT",
+        "START_DOCUMENT",
+        "END_DOCUMENT",
+        "PROCESSING_INSTRUCTION",
+        "IGNORABLE_WHITESPACE",
+        "CHARACTERS",
     ],
     _Node,
 ]
diff --git a/stubs/PyYAML/yaml/representer.pyi b/stubs/PyYAML/yaml/representer.pyi
index c84c56218871..e3360eae7493 100644
--- a/stubs/PyYAML/yaml/representer.pyi
+++ b/stubs/PyYAML/yaml/representer.pyi
@@ -3,6 +3,7 @@ from _typeshed import Incomplete, ReadableBuffer, SupportsItems
 from collections.abc import Callable, Iterable, Mapping
 from types import BuiltinFunctionType, FunctionType, ModuleType
 from typing import Any, ClassVar, NoReturn, TypeVar
+from typing_extensions import Self

 from yaml.error import YAMLError as YAMLError
 from yaml.nodes import MappingNode as MappingNode, Node as Node, ScalarNode as ScalarNode, SequenceNode as SequenceNode
@@ -25,9 +26,9 @@ class BaseRepresenter:
     def represent(self, data) -> None: ...
     def represent_data(self, data) -> Node: ...
     @classmethod
-    def add_representer(cls: type[_R], data_type: type[_T], representer: Callable[[_R, _T], Node]) -> None: ...
+    def add_representer(cls, data_type: type[_T], representer: Callable[[Self, _T], Node]) -> None: ...
     @classmethod
-    def add_multi_representer(cls: type[_R], data_type: type[_T], representer: Callable[[_R, _T], Node]) -> None: ...
+    def add_multi_representer(cls, data_type: type[_T], representer: Callable[[Self, _T], Node]) -> None: ...
     def represent_scalar(self, tag: str, value, style: str | None = None) -> ScalarNode: ...
     def represent_sequence(self, tag: str, sequence: Iterable[Any], flow_style: bool | None = None) -> SequenceNode: ...
     def represent_mapping(
diff --git a/stubs/WTForms/wtforms/validators.pyi b/stubs/WTForms/wtforms/validators.pyi
index a5d1c4ca6304..cbe04b288fc4 100644
--- a/stubs/WTForms/wtforms/validators.pyi
+++ b/stubs/WTForms/wtforms/validators.pyi
@@ -42,7 +42,7 @@ __all__ = (
     "Disabled",
 )

-_ValuesT = TypeVar("_ValuesT", bound=Collection[Any], contravariant=True)
+_ValuesT_contra = TypeVar("_ValuesT_contra", bound=Collection[Any], contravariant=True)

 class ValidationError(ValueError):
     def __init__(self, message: str = "", *args: object) -> None: ...
@@ -150,9 +150,13 @@ class AnyOf:
     @overload
     def __init__(self, values: Collection[Any], message: str | None = None, values_formatter: None = None) -> None: ...
     @overload
-    def __init__(self, values: _ValuesT, message: str | None, values_formatter: Callable[[_ValuesT], str]) -> None: ...
+    def __init__(
+        self, values: _ValuesT_contra, message: str | None, values_formatter: Callable[[_ValuesT_contra], str]
+    ) -> None: ...
     @overload
-    def __init__(self, values: _ValuesT, message: str | None = None, *, values_formatter: Callable[[_ValuesT], str]) -> None: ...
+    def __init__(
+        self, values: _ValuesT_contra, message: str | None = None, *, values_formatter: Callable[[_ValuesT_contra], str]
+    ) -> None: ...
     def __call__(self, form: BaseForm, field: Field) -> None: ...
     @staticmethod
     def default_values_formatter(values: Iterable[object]) -> str: ...
@@ -164,9 +168,13 @@ class NoneOf:
     @overload
     def __init__(self, values: Collection[Any], message: str | None = None, values_formatter: None = None) -> None: ...
     @overload
-    def __init__(self, values: _ValuesT, message: str | None, values_formatter: Callable[[_ValuesT], str]) -> None: ...
+    def __init__(
+        self, values: _ValuesT_contra, message: str | None, values_formatter: Callable[[_ValuesT_contra], str]
+    ) -> None: ...
     @overload
-    def __init__(self, values: _ValuesT, message: str | None = None, *, values_formatter: Callable[[_ValuesT], str]) -> None: ...
+    def __init__(
+        self, values: _ValuesT_contra, message: str | None = None, *, values_formatter: Callable[[_ValuesT_contra], str]
+    ) -> None: ...
     def __call__(self, form: BaseForm, field: Field) -> None: ...
     @staticmethod
     def default_values_formatter(v: Iterable[object]) -> str: ...
diff --git a/stubs/gevent/gevent/_threading.pyi b/stubs/gevent/gevent/_threading.pyi
index 0eeaaf454716..93b3d155a443 100644
--- a/stubs/gevent/gevent/_threading.pyi
+++ b/stubs/gevent/gevent/_threading.pyi
@@ -1,4 +1,4 @@
-from _thread import LockType, allocate_lock as Lock
+from _thread import LockType, allocate_lock as Lock # Shouldn't apply to re-exports
 from typing import Generic, NewType, TypeVar

 __all__ = ["Lock", "Queue", "EmptyTimeout"]
diff --git a/stubs/gevent/gevent/hub.pyi b/stubs/gevent/gevent/hub.pyi
index b949c9817ae0..44a8282ae316 100644
--- a/stubs/gevent/gevent/hub.pyi
+++ b/stubs/gevent/gevent/hub.pyi
@@ -29,7 +29,7 @@ class _DefaultReturnProperty(Protocol[_T]):
     @overload
     def __get__(self, obj: object, owner: type[object] | None = None) -> _T: ...
     def __set__(self, obj: object, value: _T | None) -> None: ...
-    def __del__(self, obj: object) -> None: ...
+    def __del__(self) -> None: ...

 def spawn_raw(function: Callable[..., object], *args: object, **kwargs: object) -> greenlet.greenlet: ...
 def sleep(seconds: float = 0, ref: bool = True) -> None: ...
diff --git a/stubs/gevent/gevent/libev/corecext.pyi b/stubs/gevent/gevent/libev/corecext.pyi
index 994793ee8a25..35a46bbf3655 100644
--- a/stubs/gevent/gevent/libev/corecext.pyi
+++ b/stubs/gevent/gevent/libev/corecext.pyi
@@ -5,9 +5,9 @@ from types import TracebackType
 from typing import Any
 from typing_extensions import ParamSpec

-import gevent.libev.watcher as watcher
 from gevent._ffi.loop import _ErrorHandler
 from gevent._types import _Callback
+from gevent.libev import watcher

 # this c extension is only available on posix
 if sys.platform != "win32":
diff --git a/stubs/gevent/gevent/libev/corecffi.pyi b/stubs/gevent/gevent/libev/corecffi.pyi
index 4dc59a997f30..e814fb9b90b3 100644
--- a/stubs/gevent/gevent/libev/corecffi.pyi
+++ b/stubs/gevent/gevent/libev/corecffi.pyi
@@ -2,8 +2,8 @@ import sys
 from _typeshed import FileDescriptor
 from collections.abc import Sequence

-import gevent.libev.watcher as watcher
 from gevent._ffi.loop import AbstractLoop
+from gevent.libev import watcher

 def get_version() -> str: ...
 def get_header_version() -> str: ...
diff --git a/stubs/gevent/gevent/libuv/loop.pyi b/stubs/gevent/gevent/libuv/loop.pyi
index e5d539acb3c0..950b82118375 100644
--- a/stubs/gevent/gevent/libuv/loop.pyi
+++ b/stubs/gevent/gevent/libuv/loop.pyi
@@ -2,9 +2,9 @@ import sys
 from _typeshed import FileDescriptor
 from typing import NamedTuple

-import gevent.libuv.watcher as watcher
 from gevent._ffi.loop import AbstractLoop
 from gevent._types import _IoWatcher
+from gevent.libuv import watcher

 def get_version() -> str: ...
 def get_header_version() -> str: ...
diff --git a/stubs/mysqlclient/MySQLdb/cursors.pyi b/stubs/mysqlclient/MySQLdb/cursors.pyi index f572c38e1ab3..a659540c913f 100644 --- a/stubs/mysqlclient/MySQLdb/cursors.pyi +++ b/stubs/mysqlclient/MySQLdb/cursors.pyi @@ -10,7 +10,7 @@ _Arguments: TypeAlias = dict[str, _Literal] | dict[bytes, _Literal] | Iterable[_ RE_INSERT_VALUES: Pattern[str] class BaseCursor: - from ._exceptions import ( + from ._exceptions import ( # noqa: PLC0415 DatabaseError as DatabaseError, DataError as DataError, Error as Error, diff --git a/stubs/networkx/networkx/algorithms/operators/binary.pyi b/stubs/networkx/networkx/algorithms/operators/binary.pyi index ef7f4943ca52..52b58c2b19d7 100644 --- a/stubs/networkx/networkx/algorithms/operators/binary.pyi +++ b/stubs/networkx/networkx/algorithms/operators/binary.pyi @@ -13,12 +13,12 @@ def difference(G, H): ... @_dispatchable def symmetric_difference(G, H): ... -_X = TypeVar("_X", bound=Hashable, covariant=True) -_Y = TypeVar("_Y", bound=Hashable, covariant=True) +_X_co = TypeVar("_X_co", bound=Hashable, covariant=True) +_Y_co = TypeVar("_Y_co", bound=Hashable, covariant=True) # GT = TypeVar('GT', bound=Graph[_Node]) # TODO: This does not handle the cases when graphs of different types are passed which is allowed @_dispatchable -def compose(G: DiGraph[_X], H: DiGraph[_Y]) -> DiGraph[_X | _Y]: ... +def compose(G: DiGraph[_X_co], H: DiGraph[_Y_co]) -> DiGraph[_X_co | _Y_co]: ... @_dispatchable -def union(G: DiGraph[_X], H: DiGraph[_Y], rename=()) -> DiGraph[_X | _Y]: ... +def union(G: DiGraph[_X_co], H: DiGraph[_Y_co], rename=()) -> DiGraph[_X_co | _Y_co]: ... diff --git a/stubs/pexpect/pexpect/socket_pexpect.pyi b/stubs/pexpect/pexpect/socket_pexpect.pyi index f3c8d42d0b56..56aa0fde21cb 100644 --- a/stubs/pexpect/pexpect/socket_pexpect.pyi +++ b/stubs/pexpect/pexpect/socket_pexpect.pyi @@ -1,5 +1,5 @@ from collections.abc import Iterable -from socket import socket as Socket +from socket import socket from typing import AnyStr from .spawnbase import SpawnBase, _Logfile @@ -9,14 +9,14 @@ __all__ = ["SocketSpawn"] class SocketSpawn(SpawnBase[AnyStr]): args: None command: None - socket: Socket + socket: socket child_fd: int closed: bool name: str use_poll: bool def __init__( self, - socket: Socket, + socket: socket, args: None = None, timeout: float | None = 30, maxread: int = 2000, diff --git a/stubs/protobuf/google/protobuf/internal/containers.pyi b/stubs/protobuf/google/protobuf/internal/containers.pyi index aaa970439216..0ce35d235676 100644 --- a/stubs/protobuf/google/protobuf/internal/containers.pyi +++ b/stubs/protobuf/google/protobuf/internal/containers.pyi @@ -1,5 +1,6 @@ from collections.abc import Callable, Iterable, Iterator, MutableMapping, Sequence from typing import Any, Protocol, SupportsIndex, TypeVar, overload +from typing_extensions import Self from google.protobuf.descriptor import Descriptor from google.protobuf.internal.message_listener import MessageListener @@ -33,7 +34,7 @@ class RepeatedScalarFieldContainer(BaseContainer[_ScalarV]): def append(self, value: _ScalarV) -> None: ... def insert(self, key: int, value: _ScalarV) -> None: ... def extend(self, elem_seq: Iterable[_ScalarV] | None) -> None: ... - def MergeFrom(self: _M, other: _M | Iterable[_ScalarV]) -> None: ... + def MergeFrom(self, other: Self | Iterable[_ScalarV]) -> None: ... def remove(self, elem: _ScalarV) -> None: ... def pop(self, key: int = -1) -> _ScalarV: ... 
@overload @@ -49,7 +50,7 @@ class RepeatedCompositeFieldContainer(BaseContainer[_MessageV]): def append(self, value: _MessageV) -> None: ... def insert(self, key: int, value: _MessageV) -> None: ... def extend(self, elem_seq: Iterable[_MessageV]) -> None: ... - def MergeFrom(self: _M, other: _M | Iterable[_MessageV]) -> None: ... + def MergeFrom(self, other: Self | Iterable[_MessageV]) -> None: ... def remove(self, elem: _MessageV) -> None: ... def pop(self, key: int = -1) -> _MessageV: ... def __delitem__(self, key: int | slice) -> None: ... @@ -73,7 +74,7 @@ class ScalarMap(MutableMapping[_K, _ScalarV]): def get(self, key: _K, default: None = None) -> _ScalarV | None: ... @overload def get(self, key: _K, default: _ScalarV | _T) -> _ScalarV | _T: ... - def MergeFrom(self: _M, other: _M): ... + def MergeFrom(self, other: Self): ... def InvalidateIterators(self) -> None: ... def GetEntryClass(self) -> GeneratedProtocolMessageType: ... @@ -96,6 +97,6 @@ class MessageMap(MutableMapping[_K, _MessageV]): @overload def get(self, key: _K, default: _MessageV | _T) -> _MessageV | _T: ... def get_or_create(self, key: _K) -> _MessageV: ... - def MergeFrom(self: _M, other: _M): ... + def MergeFrom(self, other: Self): ... def InvalidateIterators(self) -> None: ... def GetEntryClass(self) -> GeneratedProtocolMessageType: ... diff --git a/stubs/protobuf/google/protobuf/message.pyi b/stubs/protobuf/google/protobuf/message.pyi index 819ad7aad5d2..f4ab9b3accaf 100644 --- a/stubs/protobuf/google/protobuf/message.pyi +++ b/stubs/protobuf/google/protobuf/message.pyi @@ -27,11 +27,11 @@ class Message: def SerializePartialToString(self, *, deterministic: bool = ...) -> bytes: ... def ListFields(self) -> Sequence[tuple[FieldDescriptor, Any]]: ... # The TypeVar must be bound to `Message` or we get mypy errors, so we cannot use `Self` for `HasExtension` & `ClearExtension` - def HasExtension(self: _M, field_descriptor: _ExtensionFieldDescriptor[_M, Any]) -> bool: ... - def ClearExtension(self: _M, field_descriptor: _ExtensionFieldDescriptor[_M, Any]) -> None: ... + def HasExtension(self, field_descriptor: _ExtensionFieldDescriptor[Self, Any]) -> bool: ... + def ClearExtension(self, field_descriptor: _ExtensionFieldDescriptor[Self, Any]) -> None: ... # The TypeVar must be bound to `Message` or we get mypy errors, so we cannot use `Self` for `Extensions` @property - def Extensions(self: _M) -> _ExtensionDict[_M]: ... + def Extensions(self) -> _ExtensionDict[Self]: ... def ByteSize(self) -> int: ... @classmethod def FromString(cls, s: bytes) -> Self: ... diff --git a/stubs/pyserial/serial/tools/miniterm.pyi b/stubs/pyserial/serial/tools/miniterm.pyi index b53d8d074a61..3fc0bff77aa9 100644 --- a/stubs/pyserial/serial/tools/miniterm.pyi +++ b/stubs/pyserial/serial/tools/miniterm.pyi @@ -8,10 +8,10 @@ from typing_extensions import Self from serial import Serial -_AnyStr_T = TypeVar("_AnyStr_T", contravariant=True) +_AnyStr_T_contra = TypeVar("_AnyStr_T_contra", contravariant=True) @type_check_only -class _SupportsWriteAndFlush(SupportsWrite[_AnyStr_T], SupportsFlush, Protocol): ... +class _SupportsWriteAndFlush(SupportsWrite[_AnyStr_T_contra], SupportsFlush, Protocol): ... 
@type_check_only class _SupportsRead(Protocol): diff --git a/stubs/python-xlib/Xlib/protocol/rq.pyi b/stubs/python-xlib/Xlib/protocol/rq.pyi index b429499d38a0..2fa6b4edf021 100644 --- a/stubs/python-xlib/Xlib/protocol/rq.pyi +++ b/stubs/python-xlib/Xlib/protocol/rq.pyi @@ -1,8 +1,6 @@ from _typeshed import ConvertibleToInt, SliceableBuffer, Unused from array import array - -# Avoid name collision with List.type -from builtins import type as Type +from builtins import type as Type # Avoid name collision with List.type from collections.abc import Callable, Iterable, Sequence from typing import Any, Final, Literal, SupportsIndex, TypeVar, overload, type_check_only from typing_extensions import LiteralString, TypeAlias diff --git a/stubs/pyxdg/xdg/Menu.pyi b/stubs/pyxdg/xdg/Menu.pyi index 77738a4615a7..256334668478 100644 --- a/stubs/pyxdg/xdg/Menu.pyi +++ b/stubs/pyxdg/xdg/Menu.pyi @@ -94,7 +94,7 @@ class MenuEntry: TYPE_SYSTEM: Literal["System"] TYPE_BOTH: Literal["Both"] DesktopEntry: DesktopEntry - Show: Literal[True, False, "Deleted", "NoDisplay", "Hidden", "Empty", "NotShowIn", "NoExec"] + Show: Literal["Deleted", "NoDisplay", "Hidden", "Empty", "NotShowIn", "NoExec"] | bool Visible: Literal[1, 0, "Deleted", "NoDisplay", "Hidden", "Empty", "NotShowIn", "NoExec"] Original: MenuEntry | None Parents: list[Menu] diff --git a/stubs/regex/regex/regex.pyi b/stubs/regex/regex/regex.pyi index 8b35a370ad8c..642bdd8468e7 100644 --- a/stubs/regex/regex/regex.pyi +++ b/stubs/regex/regex/regex.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import ReadableBuffer +from _typeshed import ReadableBuffer, Unused from collections.abc import Callable, Mapping from typing import Any, AnyStr, Generic, Literal, TypeVar, final, overload from typing_extensions import Self @@ -568,7 +568,7 @@ class Pattern(Generic[AnyStr]): timeout: float | None = None, ) -> _regex.Scanner[bytes]: ... def __copy__(self) -> Self: ... - def __deepcopy__(self) -> Self: ... + def __deepcopy__(self, memo: Unused) -> Self: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @@ -647,6 +647,6 @@ class Match(Generic[AnyStr]): @overload def __getitem__(self, key: int | str, /) -> AnyStr | Any: ... def __copy__(self) -> Self: ... - def __deepcopy__(self) -> Self: ... + def __deepcopy__(self, memo: Unused) -> Self: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... diff --git a/stubs/tensorflow/tensorflow/data/__init__.pyi b/stubs/tensorflow/tensorflow/data/__init__.pyi index 0ff8a7ea6a91..305043f7248f 100644 --- a/stubs/tensorflow/tensorflow/data/__init__.pyi +++ b/stubs/tensorflow/tensorflow/data/__init__.pyi @@ -14,21 +14,21 @@ from tensorflow.dtypes import DType from tensorflow.io import _CompressionTypes from tensorflow.python.trackable.base import Trackable -_T1 = TypeVar("_T1", covariant=True) +_T1_co = TypeVar("_T1_co", covariant=True) _T2 = TypeVar("_T2") _T3 = TypeVar("_T3") -class Iterator(_Iterator[_T1], Trackable, ABC): +class Iterator(_Iterator[_T1_co], Trackable, ABC): @property @abstractmethod def element_spec(self) -> ContainerGeneric[TypeSpec[Any]]: ... @abstractmethod - def get_next(self) -> _T1: ... + def get_next(self) -> _T1_co: ... @abstractmethod - def get_next_as_optional(self) -> tf.experimental.Optional[_T1]: ... + def get_next_as_optional(self) -> tf.experimental.Optional[_T1_co]: ... -class Dataset(ABC, Generic[_T1]): - def apply(self, transformation_func: Callable[[Dataset[_T1]], Dataset[_T2]]) -> Dataset[_T2]: ... 
+class Dataset(ABC, Generic[_T1_co]): + def apply(self, transformation_func: Callable[[Dataset[_T1_co]], Dataset[_T2]]) -> Dataset[_T2]: ... def as_numpy_iterator(self) -> Iterator[np.ndarray[Any, Any]]: ... def batch( self, @@ -37,10 +37,10 @@ class Dataset(ABC, Generic[_T1]): num_parallel_calls: int | None = None, deterministic: bool | None = None, name: str | None = None, - ) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... def bucket_by_sequence_length( self, - element_length_func: Callable[[_T1], ScalarTensorCompatible], + element_length_func: Callable[[_T1_co], ScalarTensorCompatible], bucket_boundaries: Sequence[int], bucket_batch_sizes: Sequence[int], padded_shapes: ContainerGeneric[tf.TensorShape | TensorCompatible] | None = None, @@ -49,14 +49,14 @@ class Dataset(ABC, Generic[_T1]): no_padding: bool = False, drop_remainder: bool = False, name: str | None = None, - ) -> Dataset[_T1]: ... - def cache(self, filename: str = "", name: str | None = None) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... + def cache(self, filename: str = "", name: str | None = None) -> Dataset[_T1_co]: ... def cardinality(self) -> int: ... @staticmethod def choose_from_datasets( datasets: Sequence[Dataset[_T2]], choice_dataset: Dataset[tf.Tensor], stop_on_empty_dataset: bool = True ) -> Dataset[_T2]: ... - def concatenate(self, dataset: Dataset[_T1], name: str | None = None) -> Dataset[_T1]: ... + def concatenate(self, dataset: Dataset[_T1_co], name: str | None = None) -> Dataset[_T1_co]: ... @staticmethod def counter( start: ScalarTensorCompatible = 0, step: ScalarTensorCompatible = 1, dtype: DType = ..., name: str | None = None @@ -64,9 +64,9 @@ class Dataset(ABC, Generic[_T1]): @property @abstractmethod def element_spec(self) -> ContainerGeneric[TypeSpec[Any]]: ... - def enumerate(self, start: ScalarTensorCompatible = 0, name: str | None = None) -> Dataset[tuple[int, _T1]]: ... - def filter(self, predicate: Callable[[_T1], bool | tf.Tensor], name: str | None = None) -> Dataset[_T1]: ... - def flat_map(self, map_func: Callable[[_T1], Dataset[_T2]], name: str | None = None) -> Dataset[_T2]: ... + def enumerate(self, start: ScalarTensorCompatible = 0, name: str | None = None) -> Dataset[tuple[int, _T1_co]]: ... + def filter(self, predicate: Callable[[_T1_co], bool | tf.Tensor], name: str | None = None) -> Dataset[_T1_co]: ... + def flat_map(self, map_func: Callable[[_T1_co], Dataset[_T2]], name: str | None = None) -> Dataset[_T2]: ... # PEP 646 can be used here for a more precise type when better supported. @staticmethod def from_generator( @@ -81,26 +81,26 @@ class Dataset(ABC, Generic[_T1]): def from_tensors(tensors: Any, name: str | None = None) -> Dataset[Any]: ... @staticmethod def from_tensor_slices(tensors: TensorCompatible, name: str | None = None) -> Dataset[Any]: ... - def get_single_element(self, name: str | None = None) -> _T1: ... + def get_single_element(self, name: str | None = None) -> _T1_co: ... def group_by_window( self, - key_func: Callable[[_T1], tf.Tensor], - reduce_func: Callable[[tf.Tensor, Dataset[_T1]], Dataset[_T2]], + key_func: Callable[[_T1_co], tf.Tensor], + reduce_func: Callable[[tf.Tensor, Dataset[_T1_co]], Dataset[_T2]], window_size: ScalarTensorCompatible | None = None, window_size_func: Callable[[tf.Tensor], tf.Tensor] | None = None, name: str | None = None, ) -> Dataset[_T2]: ... - def ignore_errors(self, log_warning: bool = False, name: str | None = None) -> Dataset[_T1]: ... 
+ def ignore_errors(self, log_warning: bool = False, name: str | None = None) -> Dataset[_T1_co]: ... def interleave( self, - map_func: Callable[[_T1], Dataset[_T2]], + map_func: Callable[[_T1_co], Dataset[_T2]], cycle_length: int | None = None, block_length: int | None = None, num_parallel_calls: int | None = None, deterministic: bool | None = None, name: str | None = None, ) -> Dataset[_T2]: ... - def __iter__(self) -> Iterator[_T1]: ... + def __iter__(self) -> Iterator[_T1_co]: ... @staticmethod def list_files( file_pattern: str | Sequence[str] | TensorCompatible, @@ -134,8 +134,8 @@ class Dataset(ABC, Generic[_T1]): padding_values: ContainerGeneric[ScalarTensorCompatible] | None = None, drop_remainder: bool = False, name: str | None = None, - ) -> Dataset[_T1]: ... - def prefetch(self, buffer_size: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... + def prefetch(self, buffer_size: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1_co]: ... def ragged_batch( self, batch_size: ScalarTensorCompatible, @@ -162,62 +162,62 @@ class Dataset(ABC, Generic[_T1]): ) -> Dataset[tf.Tensor]: ... def rebatch( self, batch_size: ScalarTensorCompatible, drop_remainder: bool = False, name: str | None = None - ) -> Dataset[_T1]: ... - def reduce(self, initial_state: _T2, reduce_func: Callable[[_T2, _T1], _T2], name: str | None = None) -> _T2: ... + ) -> Dataset[_T1_co]: ... + def reduce(self, initial_state: _T2, reduce_func: Callable[[_T2, _T1_co], _T2], name: str | None = None) -> _T2: ... def rejection_resample( self, - class_func: Callable[[_T1], ScalarTensorCompatible], + class_func: Callable[[_T1_co], ScalarTensorCompatible], target_dist: TensorCompatible, initial_dist: TensorCompatible | None = None, seed: int | None = None, name: str | None = None, - ) -> Dataset[_T1]: ... - def repeat(self, count: ScalarTensorCompatible | None = None, name: str | None = None) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... + def repeat(self, count: ScalarTensorCompatible | None = None, name: str | None = None) -> Dataset[_T1_co]: ... @staticmethod def sample_from_datasets( - datasets: Sequence[Dataset[_T1]], + datasets: Sequence[Dataset[_T1_co]], weights: TensorCompatible | None = None, seed: int | None = None, stop_on_empty_dataset: bool = False, rerandomize_each_iteration: bool | None = None, - ) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... # Incomplete as tf.train.CheckpointOptions not yet covered. def save( self, path: str, compression: _CompressionTypes = None, - shard_func: Callable[[_T1], int] | None = None, + shard_func: Callable[[_T1_co], int] | None = None, checkpoint_args: Incomplete | None = None, ) -> None: ... def scan( - self, initial_state: _T2, scan_func: Callable[[_T2, _T1], tuple[_T2, _T3]], name: str | None = None + self, initial_state: _T2, scan_func: Callable[[_T2, _T1_co], tuple[_T2, _T3]], name: str | None = None ) -> Dataset[_T3]: ... def shard( self, num_shards: ScalarTensorCompatible, index: ScalarTensorCompatible, name: str | None = None - ) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... def shuffle( self, buffer_size: ScalarTensorCompatible, seed: int | None = None, reshuffle_each_iteration: bool = True, name: str | None = None, - ) -> Dataset[_T1]: ... - def skip(self, count: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... + def skip(self, count: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1_co]: ... 
def snapshot( self, path: str, compression: _CompressionTypes = "AUTO", - reader_func: Callable[[Dataset[Dataset[_T1]]], Dataset[_T1]] | None = None, - shard_func: Callable[[_T1], ScalarTensorCompatible] | None = None, + reader_func: Callable[[Dataset[Dataset[_T1_co]]], Dataset[_T1_co]] | None = None, + shard_func: Callable[[_T1_co], ScalarTensorCompatible] | None = None, name: str | None = None, - ) -> Dataset[_T1]: ... + ) -> Dataset[_T1_co]: ... def sparse_batch( self, batch_size: ScalarTensorCompatible, row_shape: tf.TensorShape | TensorCompatible, name: str | None = None ) -> Dataset[tf.SparseTensor]: ... - def take(self, count: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1]: ... - def take_while(self, predicate: Callable[[_T1], ScalarTensorCompatible], name: str | None = None) -> Dataset[_T1]: ... - def unbatch(self, name: str | None = None) -> Dataset[_T1]: ... - def unique(self, name: str | None = None) -> Dataset[_T1]: ... + def take(self, count: ScalarTensorCompatible, name: str | None = None) -> Dataset[_T1_co]: ... + def take_while(self, predicate: Callable[[_T1_co], ScalarTensorCompatible], name: str | None = None) -> Dataset[_T1_co]: ... + def unbatch(self, name: str | None = None) -> Dataset[_T1_co]: ... + def unique(self, name: str | None = None) -> Dataset[_T1_co]: ... def window( self, size: ScalarTensorCompatible, @@ -225,8 +225,8 @@ class Dataset(ABC, Generic[_T1]): stride: ScalarTensorCompatible = 1, drop_remainder: bool = False, name: str | None = None, - ) -> Dataset[Dataset[_T1]]: ... - def with_options(self, options: Options, name: str | None = None) -> Dataset[_T1]: ... + ) -> Dataset[Dataset[_T1_co]]: ... + def with_options(self, options: Options, name: str | None = None) -> Dataset[_T1_co]: ... @overload @staticmethod def zip( diff --git a/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi b/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi index cc59692e72ec..92d1f6bcb9ed 100644 --- a/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi +++ b/stubs/tensorflow/tensorflow/distribute/experimental/coordinator.pyi @@ -3,10 +3,10 @@ from typing import Generic, TypeVar from tensorflow._aliases import AnyArray -_Value = TypeVar("_Value", covariant=True) +_Value_co = TypeVar("_Value_co", covariant=True) -class RemoteValue(Generic[_Value]): +class RemoteValue(Generic[_Value_co]): def fetch(self) -> AnyArray: ... - def get(self) -> _Value: ... + def get(self) -> _Value_co: ... def __getattr__(name: str) -> Incomplete: ... diff --git a/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi b/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi index a2a2211b56cf..21e4606f6680 100644 --- a/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi +++ b/stubs/tensorflow/tensorflow/keras/layers/__init__.pyi @@ -11,8 +11,8 @@ from tensorflow.keras.constraints import Constraint from tensorflow.keras.initializers import _Initializer from tensorflow.keras.regularizers import Regularizer, _Regularizer -_InputT = TypeVar("_InputT", contravariant=True) -_OutputT = TypeVar("_OutputT", covariant=True) +_InputT_contra = TypeVar("_InputT_contra", contravariant=True) +_OutputT_co = TypeVar("_OutputT_co", covariant=True) class InputSpec: dtype: str | None @@ -39,9 +39,9 @@ class InputSpec: # Most layers have input and output type of just Tensor and when we support default type variables, # maybe worth trying. 
-class Layer(tf.Module, Generic[_InputT, _OutputT]): +class Layer(tf.Module, Generic[_InputT_contra, _OutputT_co]): # The most general type is ContainerGeneric[InputSpec] as it really - # depends on _InputT. For most Layers it is just InputSpec + # depends on _InputT_contra. For most Layers it is just InputSpec # though. Maybe describable with HKT? input_spec: InputSpec | Any @@ -65,11 +65,13 @@ class Layer(tf.Module, Generic[_InputT, _OutputT]): # *args/**kwargs are allowed, but have obscure footguns and tensorflow documentation discourages their usage. # First argument will automatically be cast to layer's compute dtype, but any other tensor arguments will not be. # Also various tensorflow tools/apis can misbehave if they encounter a layer with *args/**kwargs. - def __call__(self, inputs: _InputT, *, training: bool = False, mask: TensorCompatible | None = None) -> _OutputT: ... - def call(self, inputs: _InputT, /) -> _OutputT: ... + def __call__( + self, inputs: _InputT_contra, *, training: bool = False, mask: TensorCompatible | None = None + ) -> _OutputT_co: ... + def call(self, inputs: _InputT_contra, /) -> _OutputT_co: ... - # input_shape's real type depends on _InputT, but we can't express that without HKT. - # For example _InputT tf.Tensor -> tf.TensorShape, _InputT dict[str, tf.Tensor] -> dict[str, tf.TensorShape]. + # input_shape's real type depends on _InputT_contra, but we can't express that without HKT. + # For example _InputT_contra tf.Tensor -> tf.TensorShape, _InputT_contra dict[str, tf.Tensor] -> dict[str, tf.TensorShape]. def build(self, input_shape: Any, /) -> None: ... @overload def compute_output_shape(self: Layer[tf.Tensor, tf.Tensor], input_shape: tf.TensorShape, /) -> tf.TensorShape: ... diff --git a/stubs/tensorflow/tensorflow/keras/models.pyi b/stubs/tensorflow/tensorflow/keras/models.pyi index e024ed5c26fb..262bd49c897f 100644 --- a/stubs/tensorflow/tensorflow/keras/models.pyi +++ b/stubs/tensorflow/tensorflow/keras/models.pyi @@ -9,14 +9,14 @@ import numpy.typing as npt import tensorflow as tf from tensorflow import Variable from tensorflow._aliases import ContainerGeneric, ShapeLike, TensorCompatible -from tensorflow.keras.layers import Layer, _InputT, _OutputT +from tensorflow.keras.layers import Layer, _InputT_contra, _OutputT_co from tensorflow.keras.optimizers import Optimizer _Loss: TypeAlias = str | tf.keras.losses.Loss | Callable[[TensorCompatible, TensorCompatible], tf.Tensor] _Metric: TypeAlias = str | tf.keras.metrics.Metric | Callable[[TensorCompatible, TensorCompatible], tf.Tensor] | None # Missing keras.src.backend.tensorflow.trainer.TensorFlowTrainer as a base class, which is not exposed by tensorflow -class Model(Layer[_InputT, _OutputT]): +class Model(Layer[_InputT_contra, _OutputT_co]): _train_counter: tf.Variable _test_counter: tf.Variable optimizer: Optimizer | None @@ -27,13 +27,15 @@ class Model(Layer[_InputT, _OutputT]): ) -> tf.Tensor | None: ... stop_training: bool - def __new__(cls, *args: Any, **kwargs: Any) -> Model[_InputT, _OutputT]: ... + def __new__(cls, *args: Any, **kwargs: Any) -> Model[_InputT_contra, _OutputT_co]: ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... def __setattr__(self, name: str, value: Any) -> None: ... def __reduce__(self): ... def build(self, input_shape: ShapeLike) -> None: ... - def __call__(self, inputs: _InputT, *, training: bool = False, mask: TensorCompatible | None = None) -> _OutputT: ... 
- def call(self, inputs: _InputT, training: bool | None = None, mask: TensorCompatible | None = None) -> _OutputT: ... + def __call__( + self, inputs: _InputT_contra, *, training: bool = False, mask: TensorCompatible | None = None + ) -> _OutputT_co: ... + def call(self, inputs: _InputT_contra, training: bool | None = None, mask: TensorCompatible | None = None) -> _OutputT_co: ... # Ideally loss/metrics/output would share the same structure but higher kinded types are not supported. def compile( self, @@ -106,8 +108,8 @@ class Model(Layer[_InputT, _OutputT]): return_dict: bool = False, **kwargs: Any, ) -> float | list[float]: ... - def predict_step(self, data: _InputT) -> _OutputT: ... - def make_predict_function(self, force: bool = False) -> Callable[[tf.data.Iterator[Incomplete]], _OutputT]: ... + def predict_step(self, data: _InputT_contra) -> _OutputT_co: ... + def make_predict_function(self, force: bool = False) -> Callable[[tf.data.Iterator[Incomplete]], _OutputT_co]: ... def predict( self, x: TensorCompatible | tf.data.Dataset[Incomplete], @@ -115,7 +117,7 @@ class Model(Layer[_InputT, _OutputT]): verbose: Literal["auto", 0, 1, 2] = "auto", steps: int | None = None, callbacks: list[tf.keras.callbacks.Callback] | None = None, - ) -> _OutputT: ... + ) -> _OutputT_co: ... def reset_metrics(self) -> None: ... def train_on_batch( self, @@ -132,7 +134,7 @@ class Model(Layer[_InputT, _OutputT]): sample_weight: npt.NDArray[np.float64] | None = None, return_dict: bool = False, ) -> float | list[float]: ... - def predict_on_batch(self, x: Iterator[_InputT]) -> npt.NDArray[Incomplete]: ... + def predict_on_batch(self, x: Iterator[_InputT_contra]) -> npt.NDArray[Incomplete]: ... @property def trainable_weights(self) -> list[Variable]: ... @property diff --git a/stubs/tensorflow/tensorflow/saved_model/__init__.pyi b/stubs/tensorflow/tensorflow/saved_model/__init__.pyi index 538e8acabdbe..203d144c751f 100644 --- a/stubs/tensorflow/tensorflow/saved_model/__init__.pyi +++ b/stubs/tensorflow/tensorflow/saved_model/__init__.pyi @@ -10,7 +10,7 @@ from tensorflow.saved_model.experimental import VariablePolicy from tensorflow.types.experimental import ConcreteFunction, PolymorphicFunction _P = ParamSpec("_P") -_R = TypeVar("_R", covariant=True) +_R_co = TypeVar("_R_co", covariant=True) class Asset: @property @@ -77,10 +77,10 @@ class SaveOptions: def contains_saved_model(export_dir: str | Path) -> bool: ... -class _LoadedAttributes(Generic[_P, _R]): - signatures: Mapping[str, ConcreteFunction[_P, _R]] +class _LoadedAttributes(Generic[_P, _R_co]): + signatures: Mapping[str, ConcreteFunction[_P, _R_co]] -class _LoadedModel(AutoTrackable, _LoadedAttributes[_P, _R]): +class _LoadedModel(AutoTrackable, _LoadedAttributes[_P, _R_co]): variables: list[tf.Variable] trainable_variables: list[tf.Variable] # TF1 model artifact specific diff --git a/stubs/tensorflow/tensorflow/types/experimental.pyi b/stubs/tensorflow/tensorflow/types/experimental.pyi index 15d2ee854093..5d4a792cb2e3 100644 --- a/stubs/tensorflow/tensorflow/types/experimental.pyi +++ b/stubs/tensorflow/tensorflow/types/experimental.pyi @@ -7,23 +7,23 @@ import tensorflow as tf from tensorflow._aliases import ContainerGeneric _P = ParamSpec("_P") -_R = TypeVar("_R", covariant=True) +_R_co = TypeVar("_R_co", covariant=True) -class Callable(Generic[_P, _R], metaclass=abc.ABCMeta): - def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... 
+class Callable(Generic[_P, _R_co], metaclass=abc.ABCMeta): + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: ... -class ConcreteFunction(Callable[_P, _R], metaclass=abc.ABCMeta): - def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... +class ConcreteFunction(Callable[_P, _R_co], metaclass=abc.ABCMeta): + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: ... -class PolymorphicFunction(Callable[_P, _R], metaclass=abc.ABCMeta): +class PolymorphicFunction(Callable[_P, _R_co], metaclass=abc.ABCMeta): @overload @abc.abstractmethod - def get_concrete_function(self, *args: _P.args, **kwargs: _P.kwargs) -> ConcreteFunction[_P, _R]: ... + def get_concrete_function(self, *args: _P.args, **kwargs: _P.kwargs) -> ConcreteFunction[_P, _R_co]: ... @overload @abc.abstractmethod def get_concrete_function( self, *args: ContainerGeneric[tf.TypeSpec[Any]], **kwargs: ContainerGeneric[tf.TypeSpec[Any]] - ) -> ConcreteFunction[_P, _R]: ... + ) -> ConcreteFunction[_P, _R_co]: ... def experimental_get_compiler_ir(self, *args, **kwargs): ... GenericFunction = PolymorphicFunction diff --git a/tests/check_typeshed_structure.py b/tests/check_typeshed_structure.py index 81adb8c74269..0be9f1a1011e 100755 --- a/tests/check_typeshed_structure.py +++ b/tests/check_typeshed_structure.py @@ -1,7 +1,6 @@ #!/usr/bin/env python3 -""" -Check that the typeshed repository contains the correct files in the +"""Check that the typeshed repository contains the correct files in the correct places, and that various configuration files are correct. """ diff --git a/tests/mypy_test.py b/tests/mypy_test.py index 15cd44bb8b8a..7a44f2527709 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -122,7 +122,7 @@ class TestConfig: def log(args: TestConfig, *varargs: object) -> None: - if args.verbose >= 2: + if args.verbose >= 2: # noqa: PLR2004 # astral-sh/ruff#10009 print(colored(" ".join(map(str, varargs)), "blue")) @@ -214,7 +214,7 @@ def run_mypy( env_vars = dict(os.environ) if mypypath is not None: env_vars["MYPYPATH"] = mypypath - with tempfile.NamedTemporaryFile("w+") as temp: + with tempfile.NamedTemporaryFile("w+", encoding="utf-8") as temp: temp.write("[mypy]\n") for dist_conf in configurations: temp.write(f"[mypy-{dist_conf.module_name}]\n") @@ -256,7 +256,7 @@ def run_mypy( mypy_command = [python_path, "-m", "mypy", *mypy_args] if args.verbose: print(colored(f"running {' '.join(mypy_command)}", "blue")) - result = subprocess.run(mypy_command, capture_output=True, text=True, env=env_vars) + result = subprocess.run(mypy_command, capture_output=True, text=True, env=env_vars, check=False) if result.returncode: print_error(f"failure (exit code {result.returncode})\n") if result.stdout: @@ -265,16 +265,15 @@ def run_mypy( print_error(result.stderr) if non_types_dependencies and args.verbose: print("Ran with the following environment:") - subprocess.run(["uv", "pip", "freeze"], env={**os.environ, "VIRTUAL_ENV": str(venv_dir)}) + subprocess.run(["uv", "pip", "freeze"], env={**os.environ, "VIRTUAL_ENV": str(venv_dir)}, check=False) print() else: print_success_msg() if result.returncode == 0: return MypyResult.SUCCESS - elif result.returncode == 1: + if result.returncode == 1: return MypyResult.FAILURE - else: - return MypyResult.CRASH + return MypyResult.CRASH def add_third_party_files( @@ -338,7 +337,7 @@ def test_third_party_distribution( def test_stdlib(args: TestConfig) -> TestResult: files: list[Path] = [] for file in STDLIB_PATH.iterdir(): - if file.name in ("VERSIONS", TESTS_DIR) 
or file.name.startswith("."): continue add_files(files, file, args) @@ -413,7 +412,7 @@ def setup_venv_for_external_requirements_set( uv_command = ["uv", "venv", str(venv_dir)] if not args.verbose: uv_command.append("--quiet") - subprocess.run(uv_command, check=True) + subprocess.check_call(uv_command) return requirements_set, venv_dir @@ -427,7 +426,7 @@ def install_requirements_for_venv(venv_dir: Path, args: TestConfig, external_req else: uv_command.append("--quiet") try: - subprocess.run(uv_command, check=True, text=True, env={**os.environ, "VIRTUAL_ENV": str(venv_dir)}) + subprocess.check_call(uv_command, text=True, env={**os.environ, "VIRTUAL_ENV": str(venv_dir)}) except subprocess.CalledProcessError as e: print(e.stderr) raise @@ -587,15 +586,15 @@ def test_typeshed(args: TestConfig, tempdir: Path) -> TestSummary: def main() -> None: args = parser.parse_args(namespace=CommandLineArgs()) - versions = args.python_version or SUPPORTED_VERSIONS - platforms = args.platform or [sys.platform] - filter = args.filter or DIRECTORIES_TO_TEST - exclude = args.exclude or [] + args.python_version = args.python_version or SUPPORTED_VERSIONS + args.platform = args.platform or [sys.platform] + args.filter = args.filter or DIRECTORIES_TO_TEST + args.exclude = args.exclude or [] summary = TestSummary() with tempfile.TemporaryDirectory() as td: td_path = Path(td) - for version, platform in product(versions, platforms): - config = TestConfig(args.verbose, filter, exclude, version, platform) + for version, platform in product(args.python_version, args.platform): + config = TestConfig(args.verbose, args.filter, args.exclude, version, platform) version_summary = test_typeshed(args=config, tempdir=td_path) summary.merge(version_summary) diff --git a/tests/pyright_test.py b/tests/pyright_test.py index 46986a112b4d..21bdf404896f 100755 --- a/tests/pyright_test.py +++ b/tests/pyright_test.py @@ -24,7 +24,7 @@ def main() -> None: sys.exit(1) try: - subprocess.run([npx, "--version"]) + subprocess.check_call([npx, "--version"]) except OSError: print("error running npx; is Node.js installed?", file=sys.stderr) sys.exit(1) @@ -40,7 +40,7 @@ def main() -> None: command = [npx, f"pyright@{pyright_version}"] + sys.argv[1:] print_command(command) - ret = subprocess.run(command).returncode + ret = subprocess.run(command, check=False).returncode sys.exit(ret) diff --git a/tests/pytype_test.py b/tests/pytype_test.py index 7e3eeb7354bb..84b1f7750392 100755 --- a/tests/pytype_test.py +++ b/tests/pytype_test.py @@ -87,7 +87,7 @@ def run_pytype(*, filename: str, python_version: str, missing_modules: Iterable[ with pytype_config.verbosity_from(options): ast = loader.load_file(_get_module_name(filename), filename) loader.finish_and_verify_ast(ast) - except Exception: + except Exception: # noqa: BLE001 # We do want to catch any and all exceptions here stderr = traceback.format_exc() else: stderr = None @@ -172,7 +172,7 @@ def _get_pkgs_associated_with_requirement(req_name: str) -> list[str]: toplevel_txt_contents = dist.read_text("top_level.txt") if toplevel_txt_contents is None: if dist.files is None: - raise RuntimeError("Can't read find the packages associated with requirement {req_name!r}") + raise RuntimeError(f"Can't find the packages associated with requirement {req_name!r}") maybe_modules = [f.parts[0] if len(f.parts) > 1 else inspect.getmodulename(f) for f in dist.files] packages = [name for name in maybe_modules if name is not None and "."
not in name] else: @@ -208,14 +208,14 @@ def get_missing_modules(files_to_test: Sequence[str]) -> Iterable[str]: test_dir = os.path.dirname(__file__) exclude_list = os.path.join(test_dir, "pytype_exclude_list.txt") - with open(exclude_list) as f: + with open(exclude_list, encoding="utf-8") as f: excluded_files = f.readlines() for fi in excluded_files: if not fi.startswith("stubs/"): # Skips comments, empty lines, and stdlib files, which are in # the exclude list because pytype has its own version. continue - unused_stubs_prefix, unused_pkg, mod_path = fi.split("/", 2) # pyright: ignore[reportUnusedVariable] + _stubs_prefix, _pkg, mod_path = fi.split("/", 2) # pyright: ignore[reportUnusedVariable] missing_modules.add(os.path.splitext(mod_path)[0]) return missing_modules diff --git a/tests/regr_test.py b/tests/regr_test.py index 248708b90d64..319a75d2b84c 100755 --- a/tests/regr_test.py +++ b/tests/regr_test.py @@ -144,7 +144,7 @@ def setup_testcase_dir(package: DistributionTests, tempdir: Path, verbosity: Ver if requirements.external_pkgs: venv_location = str(tempdir / VENV_DIR) - subprocess.run(["uv", "venv", venv_location], check=True, capture_output=True) + subprocess.check_output(["uv", "venv", venv_location]) ext_requirements = [str(r) for r in requirements.external_pkgs] uv_command = ["uv", "pip", "install", get_mypy_req(), *ext_requirements] if sys.platform == "win32": @@ -224,7 +224,7 @@ def run_testcases( msg += f"{description}: MYPYPATH not set" msg += "\n" verbose_log(msg) - return subprocess.run(mypy_command, capture_output=True, text=True, env=env_vars) + return subprocess.run(mypy_command, capture_output=True, text=True, env=env_vars, check=False) @dataclass(frozen=True) diff --git a/tests/runtests.py b/tests/runtests.py index e0aad9e95bd2..e52fa5380a00 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -29,8 +29,7 @@ def _parse_jsonc(json_text: str) -> str: # strip comments from the file lines = [line for line in json_text.split("\n") if not line.strip().startswith("//")] # strip trailing commas from the file - valid_json = re.sub(r",(\s*?[\}\]])", r"\1", "\n".join(lines)) - return valid_json + return re.sub(r",(\s*?[\}\]])", r"\1", "\n".join(lines)) def _get_strict_params(stub_path: str) -> list[str]: @@ -65,7 +64,7 @@ def main() -> None: python_version: str = args.python_version path_tokens = Path(path).parts - if len(path_tokens) != 2: + if len(path_tokens) != 2: # noqa: PLR2004 # astral-sh/ruff#10009 parser.error("'path' argument should be in format <folder>/<stub>.") folder, stub = path_tokens if folder not in {"stdlib", "stubs"}: @@ -76,10 +75,10 @@ def main() -> None: pytype_result: subprocess.CompletedProcess[bytes] | None = None print("\nRunning pre-commit...") - pre_commit_result = subprocess.run(["pre-commit", "run", "--files", *Path(path).rglob("*")]) + pre_commit_result = subprocess.run(["pre-commit", "run", "--files", *Path(path).rglob("*")], check=False) print("\nRunning check_typeshed_structure.py...") - check_structure_result = subprocess.run([sys.executable, "tests/check_typeshed_structure.py"]) + check_structure_result = subprocess.run([sys.executable, "tests/check_typeshed_structure.py"], check=False) strict_params = _get_strict_params(path) print(f"\nRunning Pyright ({'stricter' if strict_params else 'base' } configs) for Python {python_version}...") pyright_result = subprocess.run( [sys.executable, "tests/pyright_test.py", path, "--pythonversion", python_version, *strict_params], stderr=subprocess.PIPE, text=True, + check=False, ) if
re.match(_NPX_ERROR_PATTERN, pyright_result.stderr): print(_NPX_ERROR_MESSAGE) @@ -98,31 +98,30 @@ def main() -> None: pyright_skipped = False print(f"\nRunning mypy for Python {python_version}...") - mypy_result = subprocess.run([sys.executable, "tests/mypy_test.py", path, "--python-version", python_version]) + mypy_result = subprocess.run([sys.executable, "tests/mypy_test.py", path, "--python-version", python_version], check=False) # If mypy failed, stubtest will fail without any helpful error if mypy_result.returncode == 0: if folder == "stdlib": print("\nRunning stubtest...") - stubtest_result = subprocess.run([sys.executable, "tests/stubtest_stdlib.py", stub]) + stubtest_result = subprocess.run([sys.executable, "tests/stubtest_stdlib.py", stub], check=False) + elif run_stubtest: + print("\nRunning stubtest...") + stubtest_result = subprocess.run([sys.executable, "tests/stubtest_third_party.py", stub], check=False) else: - if run_stubtest: - print("\nRunning stubtest...") - stubtest_result = subprocess.run([sys.executable, "tests/stubtest_third_party.py", stub]) - else: - print( - colored( - f"\nSkipping stubtest for {stub!r}..." - + "\nNOTE: Running third-party stubtest involves downloading and executing arbitrary code from PyPI." - + f"\nOnly run stubtest if you trust the {stub!r} package.", - "yellow", - ) + print( + colored( + f"\nSkipping stubtest for {stub!r}..." + + "\nNOTE: Running third-party stubtest involves downloading and executing arbitrary code from PyPI." + + f"\nOnly run stubtest if you trust the {stub!r} package.", + "yellow", ) + ) else: print(colored("\nSkipping stubtest since mypy failed.", "yellow")) if find_spec("pytype"): print("\nRunning pytype...") - pytype_result = subprocess.run([sys.executable, "tests/pytype_test.py", path]) + pytype_result = subprocess.run([sys.executable, "tests/pytype_test.py", path], check=False) else: print(colored("\nSkipping pytype on Windows. You need to install it first: `pip install pytype`.", "yellow")) @@ -144,7 +143,7 @@ def main() -> None: "-p", _TESTCASES_CONFIG_FILE, ] - pyright_testcases_result = subprocess.run(command, stderr=subprocess.PIPE, text=True) + pyright_testcases_result = subprocess.run(command, stderr=subprocess.PIPE, text=True, check=False) if re.match(_NPX_ERROR_PATTERN, pyright_testcases_result.stderr): print(_NPX_ERROR_MESSAGE) pyright_testcases_returncode = 0 @@ -159,6 +158,7 @@ def main() -> None: [sys.executable, "tests/regr_test.py", "stdlib" if folder == "stdlib" else stub, "--python-version", python_version], stderr=subprocess.PIPE, text=True, + check=False, ) # No test means they all ran successfully (0 out of 0). Not all 3rd-party stubs have regression tests. if "No test cases found" in regr_test_result.stderr: diff --git a/tests/stubtest_stdlib.py b/tests/stubtest_stdlib.py index a2d2afa90ba2..1a0718ee8cb4 100755 --- a/tests/stubtest_stdlib.py +++ b/tests/stubtest_stdlib.py @@ -37,7 +37,7 @@ def run_stubtest(typeshed_dir: Path) -> int: cmd += ["--ignore-positional-only"] print(" ".join(cmd), file=sys.stderr) try: - subprocess.run(cmd, check=True) + subprocess.check_call(cmd) except subprocess.CalledProcessError as e: print( "\nNB: stubtest output depends on the Python version (and system) it is run with. 
" diff --git a/tests/stubtest_third_party.py b/tests/stubtest_third_party.py index cf6f213d9736..218abfe78832 100755 --- a/tests/stubtest_third_party.py +++ b/tests/stubtest_third_party.py @@ -125,13 +125,11 @@ def run_stubtest( stubtest_env = os.environ | {"MYPYPATH": mypypath, "MYPY_FORCE_COLOR": "1"} # Perform some black magic in order to run stubtest inside uWSGI - if dist_name == "uWSGI": - if not setup_uwsgi_stubtest_command(dist, venv_dir, stubtest_cmd): - return False + if dist_name == "uWSGI" and not setup_uwsgi_stubtest_command(dist, venv_dir, stubtest_cmd): + return False - if dist_name == "gdb": - if not setup_gdb_stubtest_command(venv_dir, stubtest_cmd): - return False + if dist_name == "gdb" and not setup_gdb_stubtest_command(venv_dir, stubtest_cmd): + return False try: subprocess.run(stubtest_cmd, env=stubtest_env, check=True, capture_output=True) @@ -148,11 +146,11 @@ def run_stubtest( print_divider() print("Python version: ", end="", flush=True) - ret = subprocess.run([sys.executable, "-VV"], capture_output=True) + ret = subprocess.run([sys.executable, "-VV"], capture_output=True, check=False) print_command_output(ret) print("\nRan with the following environment:") - ret = subprocess.run([pip_exe, "freeze", "--all"], capture_output=True) + ret = subprocess.run([pip_exe, "freeze", "--all"], capture_output=True, check=False) print_command_output(ret) if keep_tmp_dir: print("Path to virtual environment:", venv_dir, flush=True) @@ -164,7 +162,7 @@ def run_stubtest( print() else: print(f"Re-running stubtest with --generate-allowlist.\nAdd the following to {main_allowlist_path}:") - ret = subprocess.run([*stubtest_cmd, "--generate-allowlist"], env=stubtest_env, capture_output=True) + ret = subprocess.run([*stubtest_cmd, "--generate-allowlist"], env=stubtest_env, capture_output=True, check=False) print_command_output(ret) print_divider() @@ -189,8 +187,7 @@ def run_stubtest( def setup_gdb_stubtest_command(venv_dir: Path, stubtest_cmd: list[str]) -> bool: - """ - Use wrapper scripts to run stubtest inside gdb. + """Use wrapper scripts to run stubtest inside gdb. The wrapper script is used to pass the arguments to the gdb script. """ if sys.platform == "win32": @@ -390,10 +387,7 @@ def main() -> NoReturn: parser.add_argument("dists", metavar="DISTRIBUTION", type=str, nargs=argparse.ZERO_OR_MORE) args = parser.parse_args() - if len(args.dists) == 0: - dists = sorted(STUBS_PATH.iterdir()) - else: - dists = [STUBS_PATH / d for d in args.dists] + dists = sorted(STUBS_PATH.iterdir()) if len(args.dists) == 0 else [STUBS_PATH / d for d in args.dists] result = 0 for i, dist in enumerate(dists): diff --git a/tests/typecheck_typeshed.py b/tests/typecheck_typeshed.py index 2fa853ad7d13..a70ddf9b4a83 100755 --- a/tests/typecheck_typeshed.py +++ b/tests/typecheck_typeshed.py @@ -72,7 +72,7 @@ def run_mypy_as_subprocess(directory: str, platform: str, version: str) -> Retur "--custom-typeshed-dir", ".", ] - result = subprocess.run(command, capture_output=True, text=True) + result = subprocess.run(command, capture_output=True, text=True, check=False) if result.stderr: print_error(result.stderr) if result.stdout: