From 1a2baff6e42fe00719787fb5eb0b977ceb5737d2 Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Thu, 5 Sep 2024 13:15:27 +0100 Subject: [PATCH 01/36] First commit for using the official gherkin parser (trying to maintain public API and current codebase as much as possible) --- poetry.lock | 15 +- pyproject.toml | 1 + src/pytest_bdd/parser.py | 441 +++++++--------------- src/pytest_bdd/scenario.py | 7 +- src/pytest_bdd/steps.py | 2 - src/pytest_bdd/types.py | 8 - tests/feature/test_background.py | 6 +- tests/feature/test_multiline.py | 96 +---- tests/feature/test_no_scenario.py | 2 +- tests/feature/test_outline.py | 6 +- tests/feature/test_scenario.py | 46 --- tests/feature/test_scenarios.py | 2 + tests/feature/test_steps.py | 104 +++-- tests/feature/test_tags.py | 63 ---- tests/feature/test_wrong.py | 2 +- tests/generation/test_generate_missing.py | 6 - tests/steps/test_common.py | 22 -- 17 files changed, 243 insertions(+), 586 deletions(-) diff --git a/poetry.lock b/poetry.lock index b72242e53..f082c5147 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "cachetools" @@ -155,6 +155,17 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1 testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] +[[package]] +name = "gherkin-official" +version = "29.0.0" +description = "Gherkin parser (official, by Cucumber team)" +optional = false +python-versions = "*" +files = [ + {file = "gherkin_official-29.0.0-py3-none-any.whl", hash = "sha256:26967b0d537a302119066742669e0e8b663e632769330be675457ae993e1d1bc"}, + {file = "gherkin_official-29.0.0.tar.gz", hash = "sha256:dbea32561158f02280d7579d179b019160d072ce083197625e2f80a6776bb9eb"}, +] + [[package]] name = "iniconfig" version = "2.0.0" @@ -554,4 +565,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = ">=3.8" -content-hash = "b40d47067f444deec4964404014795593f1b602f8a2f6376279bb5a27d5e18be" +content-hash = "6b52d5b35db2892ae49a2d655a8f19fb430b59b3f8c4dc6881526f0729424580" diff --git a/pyproject.toml b/pyproject.toml index f8464ec77..7d874a5a0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,6 +42,7 @@ parse-type = "*" pytest = ">=6.2.0" typing-extensions = "*" packaging = "*" +gherkin-official = "^29.0.0" [tool.poetry.group.dev.dependencies] tox = ">=4.11.3" diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index 533bb4ff1..18a2928d8 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -1,205 +1,47 @@ from __future__ import annotations +import linecache import os.path import re import textwrap -import typing from collections import OrderedDict from dataclasses import dataclass, field -from functools import cached_property -from typing import cast +from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple -from . 
import exceptions, types +from gherkin.errors import CompositeParserException +from gherkin.parser import Parser +from gherkin.token_scanner import TokenScanner + +from .exceptions import FeatureError +from .types import GIVEN, THEN, WHEN -SPLIT_LINE_RE = re.compile(r"(?") COMMENT_RE = re.compile(r"(^|(?<=\s))#") -STEP_PREFIXES = [ - ("Feature: ", types.FEATURE), - ("Scenario Outline: ", types.SCENARIO_OUTLINE), - ("Examples:", types.EXAMPLES), - ("Scenario: ", types.SCENARIO), - ("Background:", types.BACKGROUND), - ("Given ", types.GIVEN), - ("When ", types.WHEN), - ("Then ", types.THEN), - ("@", types.TAG), - # Continuation of the previously mentioned step type - ("And ", None), - ("But ", None), -] - -TYPES_WITH_DESCRIPTIONS = [types.FEATURE, types.SCENARIO, types.SCENARIO_OUTLINE] - -if typing.TYPE_CHECKING: - from typing import Any, Iterable, Mapping, Match, Sequence - - -def split_line(line: str) -> list[str]: - """Split the given Examples line. - - :param str|unicode line: Feature file Examples line. - - :return: List of strings. - """ - return [cell.replace("\\|", "|").strip() for cell in SPLIT_LINE_RE.split(line)[1:-1]] - - -def parse_line(line: str) -> tuple[str, str]: - """Parse step line to get the step prefix (Scenario, Given, When, Then or And) and the actual step name. - - :param line: Line of the Feature file. - - :return: `tuple` in form ("", ""). - """ - for prefix, _ in STEP_PREFIXES: - if line.startswith(prefix): - return prefix.strip(), line[len(prefix) :].strip() - return "", line def strip_comments(line: str) -> str: - """Remove comments. - - :param str line: Line of the Feature file. - - :return: Stripped line. - """ + """Remove comments from a line of text.""" if res := COMMENT_RE.search(line): line = line[: res.start()] return line.strip() -def get_step_type(line: str) -> str | None: - """Detect step type by the beginning of the line. - - :param str line: Line of the Feature file. 
- - :return: SCENARIO, GIVEN, WHEN, THEN, or `None` if can't be detected. - """ - for prefix, _type in STEP_PREFIXES: - if line.startswith(prefix): - return _type - return None - - def parse_feature(basedir: str, filename: str, encoding: str = "utf-8") -> Feature: - """Parse the feature file. - - :param str basedir: Feature files base directory. - :param str filename: Relative path to the feature file. - :param str encoding: Feature file encoding (utf-8 by default). - """ - __tracebackhide__ = True + """Parse a feature file into a Feature object.""" abs_filename = os.path.abspath(os.path.join(basedir, filename)) rel_filename = os.path.join(os.path.basename(basedir), filename) - feature = Feature( - scenarios=OrderedDict(), - filename=abs_filename, - rel_filename=rel_filename, - line_number=1, - name=None, - tags=set(), - background=None, - description="", - ) - scenario: ScenarioTemplate | None = None - mode: str | None = None - prev_mode = None - description: list[str] = [] - step = None - multiline_step = False - prev_line = None - with open(abs_filename, encoding=encoding) as f: - content = f.read() - - for line_number, line in enumerate(content.splitlines(), start=1): - unindented_line = line.lstrip() - line_indent = len(line) - len(unindented_line) - if step and (step.indent < line_indent or ((not unindented_line) and multiline_step)): - multiline_step = True - # multiline step, so just add line and continue - step.add_line(line) - continue - else: - step = None - multiline_step = False - stripped_line = line.strip() - clean_line = strip_comments(line) - if not clean_line and (not prev_mode or prev_mode not in TYPES_WITH_DESCRIPTIONS): - # Blank lines are included in feature and scenario descriptions - continue - mode = get_step_type(clean_line) or mode - - allowed_prev_mode = (types.BACKGROUND, types.GIVEN, types.WHEN) - - if not scenario and prev_mode not in allowed_prev_mode and mode in types.STEP_TYPES: - raise exceptions.FeatureError( - "Step definition 
outside of a Scenario or a Background", line_number, clean_line, filename - ) - - if mode == types.FEATURE: - if prev_mode is None or prev_mode == types.TAG: - _, feature.name = parse_line(clean_line) - feature.line_number = line_number - feature.tags = get_tags(prev_line) - elif prev_mode == types.FEATURE: - # Do not include comments in descriptions - if not stripped_line.startswith("#"): - description.append(clean_line) - else: - raise exceptions.FeatureError( - "Multiple features are not allowed in a single feature file", - line_number, - clean_line, - filename, - ) - - prev_mode = mode - - # Remove Feature, Given, When, Then, And - keyword, parsed_line = parse_line(clean_line) - - if mode in [types.SCENARIO, types.SCENARIO_OUTLINE]: - # Lines between the scenario declaration - # and the scenario's first step line - # are considered part of the scenario description. - if scenario and not keyword: - # Do not include comments in descriptions - if not stripped_line.startswith("#"): - scenario.add_description_line(clean_line) - continue - tags = get_tags(prev_line) - scenario = ScenarioTemplate( - feature=feature, - name=parsed_line, - line_number=line_number, - tags=tags, - templated=mode == types.SCENARIO_OUTLINE, - ) - feature.scenarios[parsed_line] = scenario - elif mode == types.BACKGROUND: - feature.background = Background(feature=feature, line_number=line_number) - elif mode == types.EXAMPLES: - mode = types.EXAMPLES_HEADERS - scenario.examples.line_number = line_number - elif mode == types.EXAMPLES_HEADERS: - scenario.examples.set_param_names([l for l in split_line(parsed_line) if l]) - mode = types.EXAMPLE_LINE - elif mode == types.EXAMPLE_LINE: - scenario.examples.add_example(list(split_line(stripped_line))) - elif mode and mode not in (types.FEATURE, types.TAG): - step = Step(name=parsed_line, type=mode, indent=line_indent, line_number=line_number, keyword=keyword) - if feature.background and not scenario: - feature.background.add_step(step) - else: - 
scenario = cast(ScenarioTemplate, scenario) - scenario.add_step(step) - prev_line = clean_line - - feature.description = "\n".join(description).strip() - return feature + file_contents = f.read() + try: + gherkin_document = Parser().parse(TokenScanner(file_contents)) + except CompositeParserException as e: + raise FeatureError( + e.args[0], + e.errors[0].location["line"], + linecache.getline(abs_filename, e.errors[0].location["line"]).rstrip("\n"), + abs_filename, + ) from e + return dict_to_feature(abs_filename, rel_filename, gherkin_document) @dataclass(eq=False) @@ -215,20 +57,37 @@ class Feature: @dataclass(eq=False) -class ScenarioTemplate: - """A scenario template. +class Examples: + line_number: int | None = None + name: str | None = None + example_params: list[str] = field(default_factory=list) + examples: list[Sequence[str]] = field(default_factory=list) - Created when parsing the feature file, it will then be combined with the examples to create a Scenario. - """ + def set_param_names(self, keys: Iterable[str]) -> None: + self.example_params = [str(key) for key in keys] + + def add_example(self, values: Sequence[str]) -> None: + self.examples.append([str(value) if value is not None else "" for value in values]) + + def as_contexts(self) -> Iterable[dict[str, Any]]: + for row in self.examples: + assert len(self.example_params) == len(row) + yield dict(zip(self.example_params, row)) + def __bool__(self) -> bool: + return bool(self.examples) + + +@dataclass(eq=False) +class ScenarioTemplate: feature: Feature name: str line_number: int templated: bool + description: str | None = None tags: set[str] = field(default_factory=set) - examples: Examples | None = field(default_factory=lambda: Examples()) _steps: list[Step] = field(init=False, default_factory=list) - _description_lines: list[str] = field(init=False, default_factory=list) + examples: Examples | None = field(default_factory=Examples) def add_step(self, step: Step) -> None: step.scenario = self @@ 
-236,24 +95,20 @@ def add_step(self, step: Step) -> None: @property def steps(self) -> list[Step]: - background = self.feature.background - return (background.steps if background else []) + self._steps + return (self.feature.background.steps if self.feature.background else []) + self._steps def render(self, context: Mapping[str, Any]) -> Scenario: background_steps = self.feature.background.steps if self.feature.background else [] - if not self.templated: - scenario_steps = self._steps - else: - scenario_steps = [ - Step( - name=step.render(context), - type=step.type, - indent=step.indent, - line_number=step.line_number, - keyword=step.keyword, - ) - for step in self._steps - ] + scenario_steps = [ + Step( + name=step.render(context), + type=step.type, + indent=step.indent, + line_number=step.line_number, + keyword=step.keyword, + ) + for step in self._steps + ] steps = background_steps + scenario_steps return Scenario( feature=self.feature, @@ -261,22 +116,9 @@ def render(self, context: Mapping[str, Any]) -> Scenario: line_number=self.line_number, steps=steps, tags=self.tags, - description=self._description_lines, + description=self.description, ) - def add_description_line(self, description_line): - """Add a description line to the scenario. - :param str description_line: - """ - self._description_lines.append(description_line) - - @property - def description(self): - """Get the scenario's description. 
- :return: The scenario description - """ - return "\n".join(self._description_lines) - @dataclass(eq=False) class Scenario: @@ -284,8 +126,8 @@ class Scenario: name: str line_number: int steps: list[Step] + description: str | None = None tags: set[str] = field(default_factory=set) - description: list[str] = field(default_factory=list) @dataclass(eq=False) @@ -307,50 +149,7 @@ def __init__(self, name: str, type: str, indent: int, line_number: int, keyword: self.line_number = line_number self.keyword = keyword - self.failed = False - self.scenario = None - self.background = None - self.lines = [] - - def add_line(self, line: str) -> None: - """Add line to the multiple step. - - :param str line: Line of text - the continuation of the step name. - """ - self.lines.append(line) - self._invalidate_full_name_cache() - - @cached_property - def full_name(self) -> str: - multilines_content = textwrap.dedent("\n".join(self.lines)) if self.lines else "" - - # Remove the multiline quotes, if present. - multilines_content = re.sub( - pattern=r'^"""\n(?P.*)\n"""$', - repl=r"\g", - string=multilines_content, - flags=re.DOTALL, # Needed to make the "." 
match also new lines - ) - - lines = [self._name] + [multilines_content] - return "\n".join(lines).strip() - - def _invalidate_full_name_cache(self) -> None: - """Invalidate the full_name cache.""" - if "full_name" in self.__dict__: - del self.full_name - - @property - def name(self) -> str: - return self.full_name - - @name.setter - def name(self, value: str) -> None: - self._name = value - self._invalidate_full_name_cache() - def __str__(self) -> str: - """Full step name including the type.""" return f'{self.type.capitalize()} "{self.name}"' @property @@ -358,9 +157,9 @@ def params(self) -> tuple[str, ...]: return tuple(frozenset(STEP_PARAM_RE.findall(self.name))) def render(self, context: Mapping[str, Any]) -> str: - def replacer(m: Match): + def replacer(m: re.Match) -> str: varname = m.group(1) - return str(context[varname]) + return str(context.get(varname, f"")) return STEP_PARAM_RE.sub(replacer, self.name) @@ -372,48 +171,94 @@ class Background: steps: list[Step] = field(init=False, default_factory=list) def add_step(self, step: Step) -> None: - """Add step to the background.""" step.background = self self.steps.append(step) -@dataclass(eq=False) -class Examples: - """Example table.""" - - line_number: int | None = field(default=None) - name: str | None = field(default=None) - - example_params: list[str] = field(init=False, default_factory=list) - examples: list[Sequence[str]] = field(init=False, default_factory=list) - - def set_param_names(self, keys: Iterable[str]) -> None: - self.example_params = [str(key) for key in keys] - - def add_example(self, values: Sequence[str]) -> None: - self.examples.append(values) - - def as_contexts(self) -> Iterable[dict[str, Any]]: - if not self.examples: - return - - header, rows = self.example_params, self.examples - - for row in rows: - assert len(header) == len(row) - yield dict(zip(header, row)) - - def __bool__(self) -> bool: - return bool(self.examples) - +def dict_to_feature(abs_filename: str, rel_filename: str, 
data: dict) -> Feature: + def get_tag_names(tag_data: list[dict]) -> set[str]: + return {tag["name"].lstrip("@") for tag in tag_data} + + def get_step_type(keyword: str) -> str | None: + return { + "given": GIVEN, + "when": WHEN, + "then": THEN, + }.get(keyword) + + def parse_steps(steps_data: list[dict]) -> list[Step]: + steps = [] + current_step_type = None + for step_data in steps_data: + keyword = step_data["keyword"].strip().lower() + current_step_type = get_step_type(keyword) or current_step_type + name = strip_comments(step_data["text"]) + if "docString" in step_data: + doc_string = textwrap.dedent(step_data["docString"]["content"]) + name = f"{name}\n{doc_string}" + steps.append( + Step( + name=name, + type=current_step_type, + indent=step_data["location"]["column"] - 1, + line_number=step_data["location"]["line"], + keyword=keyword.title(), + ) + ) + return steps + + def parse_scenario(scenario_data: dict, feature: Feature) -> ScenarioTemplate: + scenario = ScenarioTemplate( + feature=feature, + name=strip_comments(scenario_data["name"]), + line_number=scenario_data["location"]["line"], + templated=False, + tags=get_tag_names(scenario_data["tags"]), + description=textwrap.dedent(scenario_data.get("description", "")), + ) + for step in parse_steps(scenario_data["steps"]): + scenario.add_step(step) + + if "examples" in scenario_data: + for example_data in scenario_data["examples"]: + examples = Examples( + line_number=example_data["location"]["line"], + name=example_data["name"], + ) + param_names = [cell["value"] for cell in example_data["tableHeader"]["cells"]] + examples.set_param_names(param_names) + for row in example_data["tableBody"]: + values = [cell["value"] or "" for cell in row["cells"]] + examples.add_example(values) + scenario.examples = examples + + return scenario + + def parse_background(background_data: dict, feature: Feature) -> Background: + background = Background( + feature=feature, + line_number=background_data["location"]["line"], + ) 
+ background.steps = parse_steps(background_data["steps"]) + return background -def get_tags(line: str | None) -> set[str]: - """Get tags out of the given line. + feature_data = data["feature"] + feature = Feature( + scenarios=OrderedDict(), + filename=abs_filename, + rel_filename=rel_filename, + name=strip_comments(feature_data["name"]), + tags=get_tag_names(feature_data["tags"]), + background=None, + line_number=feature_data["location"]["line"], + description=textwrap.dedent(feature_data.get("description", "")), + ) - :param str line: Feature file text line. + for child in feature_data["children"]: + if "background" in child: + feature.background = parse_background(child["background"], feature) + elif "scenario" in child: + scenario = parse_scenario(child["scenario"], feature) + feature.scenarios[scenario.name] = scenario - :return: List of tags. - """ - if not line or not line.strip().startswith("@"): - return set() - return {tag.lstrip("@") for tag in line.strip().split(" @") if len(tag) > 1} + return feature diff --git a/src/pytest_bdd/scenario.py b/src/pytest_bdd/scenario.py index 709288139..80c6a0283 100644 --- a/src/pytest_bdd/scenario.py +++ b/src/pytest_bdd/scenario.py @@ -17,7 +17,7 @@ import logging import os import re -from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, TypeVar, cast +from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Optional, TypeVar, cast import pytest from _pytest.fixtures import FixtureDef, FixtureManager, FixtureRequest, call_fixture_func @@ -90,7 +90,7 @@ def iterparentnodeids(nodeid: str) -> Iterator[str]: """ SEP = "/" pos = 0 - first_colons: Optional[int] = nodeid.find("::") + first_colons: int | None = nodeid.find("::") if first_colons == -1: first_colons = None # The root Session node - always present. @@ -312,6 +312,7 @@ def scenario( :param str feature_name: Feature file name. Absolute or relative to the configured feature base path. :param str scenario_name: Scenario name. 
:param str encoding: Feature file encoding. + :param features_base_dir: Optional base dir location for locating feature files. If not set, it will try and resolve using property set in .ini file, then the caller_module_path. """ __tracebackhide__ = True scenario_name = scenario_name @@ -347,7 +348,7 @@ def get_features_base_dir(caller_module_path: str) -> str: def get_from_ini(key: str, default: str) -> str: """Get value from ini config. Return default if value has not been set. - Use if the default value is dynamic. Otherwise set default on addini call. + Use if the default value is dynamic. Otherwise, set default on addini call. """ config = CONFIG_STACK[-1] value = config.getini(key) diff --git a/src/pytest_bdd/steps.py b/src/pytest_bdd/steps.py index 7642a6e84..81967fa11 100644 --- a/src/pytest_bdd/steps.py +++ b/src/pytest_bdd/steps.py @@ -43,10 +43,8 @@ def _(article): from typing import Any, Callable, Iterable, Literal, TypeVar import pytest -from _pytest.fixtures import FixtureRequest from typing_extensions import ParamSpec -from . 
import compat from .parser import Step from .parsers import StepParser, get_parser from .types import GIVEN, THEN, WHEN diff --git a/src/pytest_bdd/types.py b/src/pytest_bdd/types.py index 8faf940a4..66f20df68 100644 --- a/src/pytest_bdd/types.py +++ b/src/pytest_bdd/types.py @@ -2,16 +2,8 @@ from __future__ import annotations -FEATURE = "feature" -SCENARIO_OUTLINE = "scenario outline" -EXAMPLES = "examples" -EXAMPLES_HEADERS = "example headers" -EXAMPLE_LINE = "example line" -SCENARIO = "scenario" -BACKGROUND = "background" GIVEN = "given" WHEN = "when" THEN = "then" -TAG = "tag" STEP_TYPES = (GIVEN, WHEN, THEN) diff --git a/tests/feature/test_background.py b/tests/feature/test_background.py index be0490e83..4f7fc0c86 100644 --- a/tests/feature/test_background.py +++ b/tests/feature/test_background.py @@ -2,14 +2,16 @@ import textwrap -FEATURE = """\ +FEATURE = '''\ Feature: Background support Background: Given foo has a value "bar" And a background step with multiple lines: + """ one two + """ Scenario: Basic usage @@ -21,7 +23,7 @@ Then foo should have value "dummy" And foo should not have value "bar" -""" +''' STEPS = r"""\ import re diff --git a/tests/feature/test_multiline.py b/tests/feature/test_multiline.py index 2d531b5d5..ff407e487 100644 --- a/tests/feature/test_multiline.py +++ b/tests/feature/test_multiline.py @@ -24,52 +24,7 @@ ''' ), "Some\n\nExtra\nLines", - ), - ( - textwrap.dedent( - """\ - Feature: Multiline - Scenario: Multiline step using sub indentation - Given I have a step with: - Some - - Extra - Lines - Then the text should be parsed with correct indentation - """ - ), - "Some\n\nExtra\nLines", - ), - ( - textwrap.dedent( - """\ - Feature: Multiline - Scenario: Multiline step using sub indentation - Given I have a step with: - Some - - Extra - Lines - - Then the text should be parsed with correct indentation - """ - ), - " Some\n\n Extra\nLines", - ), - ( - textwrap.dedent( - """\ - Feature: Multiline - Scenario: Multiline step using sub 
indentation - Given I have a step with: - Some - Extra - Lines - - """ - ), - "Some\nExtra\nLines", - ), + ) ], ) def test_multiline(pytester, feature_text, expected_text): @@ -104,52 +59,3 @@ def _(text): ) result = pytester.runpytest() result.assert_outcomes(passed=1) - - -def test_multiline_wrong_indent(pytester): - """Multiline step using sub indentation wrong indent.""" - - pytester.makefile( - ".feature", - multiline=textwrap.dedent( - """\ - - Feature: Multiline - Scenario: Multiline step using sub indentation wrong indent - Given I have a step with: - Some - - Extra - Lines - Then the text should be parsed with correct indentation - - """ - ), - ) - - pytester.makepyfile( - textwrap.dedent( - """\ - from pytest_bdd import parsers, given, then, scenario - - - @scenario("multiline.feature", "Multiline step using sub indentation wrong indent") - def test_multiline(request): - pass - - - @given(parsers.parse("I have a step with:\\n{{text}}"), target_fixture="text") - def _(text): - return text - - - @then("the text should be parsed with correct indentation") - def _(text): - assert text == expected_text - - """ - ) - ) - result = pytester.runpytest() - result.assert_outcomes(failed=1) - result.stdout.fnmatch_lines("*StepDefinitionNotFoundError: Step definition is not found:*") diff --git a/tests/feature/test_no_scenario.py b/tests/feature/test_no_scenario.py index f3bcd7d3c..5eb68e11c 100644 --- a/tests/feature/test_no_scenario.py +++ b/tests/feature/test_no_scenario.py @@ -27,4 +27,4 @@ def test_no_scenarios(pytester): ) ) result = pytester.runpytest() - result.stdout.fnmatch_lines(["*FeatureError: Step definition outside of a Scenario or a Background.*"]) + result.stdout.fnmatch_lines(["*FeatureError*"]) diff --git a/tests/feature/test_outline.py b/tests/feature/test_outline.py index c8bfe9c48..b1a635bc9 100644 --- a/tests/feature/test_outline.py +++ b/tests/feature/test_outline.py @@ -171,7 +171,7 @@ def test_outline_with_escaped_pipes(pytester): 
pytester.makefile( ".feature", outline=textwrap.dedent( - r"""\ + r""" Feature: Outline With Special characters Scenario Outline: Outline with escaped pipe character @@ -217,6 +217,6 @@ def _(string): r"bork |", r"bork||bork", r"|", - r"bork \\", - r"bork \\|", + "bork \\", + "bork \\|", ] diff --git a/tests/feature/test_scenario.py b/tests/feature/test_scenario.py index f494d8cef..c23d5e040 100644 --- a/tests/feature/test_scenario.py +++ b/tests/feature/test_scenario.py @@ -146,49 +146,3 @@ def _(): ) result = pytester.runpytest_subprocess(*pytest_params) result.assert_outcomes(passed=1) - - -def test_angular_brakets_are_not_parsed(pytester): - """Test that angular brackets are not parsed for "Scenario"s. - - (They should be parsed only when used in "Scenario Outline") - - """ - pytester.makefile( - ".feature", - simple=""" - Feature: Simple feature - Scenario: Simple scenario - Given I have a - Then pass - - Scenario Outline: Outlined scenario - Given I have a templated - Then pass - - Examples: - | foo | - | bar | - """, - ) - pytester.makepyfile( - """ - from pytest_bdd import scenarios, given, then, parsers - - scenarios("simple.feature") - - @given("I have a ") - def _(): - return "tag" - - @given(parsers.parse("I have a templated {foo}")) - def _(foo): - return "foo" - - @then("pass") - def _(): - pass - """ - ) - result = pytester.runpytest() - result.assert_outcomes(passed=2) diff --git a/tests/feature/test_scenarios.py b/tests/feature/test_scenarios.py index ccfcf14a2..9e0407c4f 100644 --- a/tests/feature/test_scenarios.py +++ b/tests/feature/test_scenarios.py @@ -26,6 +26,7 @@ def _(): features.joinpath("test.feature").write_text( textwrap.dedent( """ +Feature: Test scenarios Scenario: Test scenario Given I have a bar """ @@ -37,6 +38,7 @@ def _(): subfolder.joinpath("test.feature").write_text( textwrap.dedent( """ +Feature: Test scenarios Scenario: Test subfolder scenario Given I have a bar diff --git a/tests/feature/test_steps.py 
b/tests/feature/test_steps.py index 30b731c0a..95a3a9893 100644 --- a/tests/feature/test_steps.py +++ b/tests/feature/test_steps.py @@ -354,6 +354,7 @@ def test_step_hooks(pytester): pytester.makefile( ".feature", test=""" +Feature: StepHandler hooks Scenario: When step has hook on failure Given I have a bar When it fails @@ -471,16 +472,21 @@ def test_step_trace(pytester): pytester.makefile( ".feature", test=""" - Scenario: When step has failure - Given I have a bar - When it fails + Feature: StepHandler hooks + Scenario: When step has hook on failure + Given I have a bar + When it fails - Scenario: When step is not found - Given not found + Scenario: When step's dependency a has failure + Given I have a bar + When it's dependency fails - Scenario: When step validation error happens - Given foo - And foo + Scenario: When step is not found + Given not found + + Scenario: When step validation error happens + Given foo + And foo """, ) pytester.makepyfile( @@ -489,19 +495,27 @@ def test_step_trace(pytester): from pytest_bdd import given, when, scenario @given('I have a bar') - def _(): + def i_have_bar(): return 'bar' @when('it fails') - def _(): + def when_it_fails(): raise Exception('when fails') - @scenario('test.feature', 'When step has failure') - def test_when_fails_inline(): + @pytest.fixture + def dependency(): + raise Exception('dependency fails') + + @when("it's dependency fails") + def when_dependency_fails(dependency): pass - @scenario('test.feature', 'When step has failure') - def test_when_fails_decorated(): + @scenario('test.feature', "When step's dependency a has failure") + def test_when_dependency_fails(): + pass + + @scenario('test.feature', 'When step has hook on failure') + def test_when_fails(): pass @scenario('test.feature', 'When step is not found') @@ -509,7 +523,7 @@ def test_when_not_found(): pass @when('foo') - def _(): + def foo(): return 'foo' @scenario('test.feature', 'When step validation error happens') @@ -517,25 +531,47 @@ def 
test_when_step_validation_error(): pass """ ) - result = pytester.runpytest("-k test_when_fails_inline", "-vv") - result.assert_outcomes(failed=1) - result.stdout.fnmatch_lines(["*test_when_fails_inline*FAILED"]) - assert "INTERNALERROR" not in result.stdout.str() - - result = pytester.runpytest("-k test_when_fails_decorated", "-vv") - result.assert_outcomes(failed=1) - result.stdout.fnmatch_lines(["*test_when_fails_decorated*FAILED"]) - assert "INTERNALERROR" not in result.stdout.str() - - result = pytester.runpytest("-k test_when_not_found", "-vv") - result.assert_outcomes(failed=1) - result.stdout.fnmatch_lines(["*test_when_not_found*FAILED"]) - assert "INTERNALERROR" not in result.stdout.str() - - result = pytester.runpytest("-k test_when_step_validation_error", "-vv") - result.assert_outcomes(failed=1) - result.stdout.fnmatch_lines(["*test_when_step_validation_error*FAILED"]) - assert "INTERNALERROR" not in result.stdout.str() + reprec = pytester.inline_run("-k test_when_fails") + reprec.assertoutcome(failed=1) + + calls = reprec.getcalls("pytest_bdd_before_scenario") + assert calls[0].request + + calls = reprec.getcalls("pytest_bdd_after_scenario") + assert calls[0].request + + calls = reprec.getcalls("pytest_bdd_before_step") + assert calls[0].request + + calls = reprec.getcalls("pytest_bdd_before_step_call") + assert calls[0].request + + calls = reprec.getcalls("pytest_bdd_after_step") + assert calls[0].request + + calls = reprec.getcalls("pytest_bdd_step_error") + assert calls[0].request + + reprec = pytester.inline_run("-k test_when_not_found") + reprec.assertoutcome(failed=1) + + calls = reprec.getcalls("pytest_bdd_step_func_lookup_error") + assert calls[0].request + + reprec = pytester.inline_run("-k test_when_step_validation_error") + reprec.assertoutcome(failed=1) + + reprec = pytester.inline_run("-k test_when_dependency_fails", "-vv") + reprec.assertoutcome(failed=1) + + calls = reprec.getcalls("pytest_bdd_before_step") + assert len(calls) == 2 + + 
calls = reprec.getcalls("pytest_bdd_before_step_call") + assert len(calls) == 1 + + calls = reprec.getcalls("pytest_bdd_step_error") + assert calls[0].request def test_steps_with_yield(pytester): diff --git a/tests/feature/test_tags.py b/tests/feature/test_tags.py index f1dea8035..20a64dc8e 100644 --- a/tests/feature/test_tags.py +++ b/tests/feature/test_tags.py @@ -4,8 +4,6 @@ import pytest -from pytest_bdd.parser import get_tags - def test_tags_selector(pytester): """Test tests selection by tags.""" @@ -162,51 +160,6 @@ def _(): result.stdout.fnmatch_lines(["*= 1 skipped, 1 xpassed * =*"]) -def test_tag_with_spaces(pytester): - pytester.makefile( - ".ini", - pytest=textwrap.dedent( - """ - [pytest] - markers = - test with spaces - """ - ), - ) - pytester.makeconftest( - """ - import pytest - - @pytest.hookimpl(tryfirst=True) - def pytest_bdd_apply_tag(tag, function): - assert tag == 'test with spaces' - """ - ) - pytester.makefile( - ".feature", - test=""" - Feature: Tag with spaces - - @test with spaces - Scenario: Tags - Given I have a bar - """, - ) - pytester.makepyfile( - """ - from pytest_bdd import given, scenarios - - @given('I have a bar') - def _(): - return 'bar' - - scenarios('test.feature') - """ - ) - result = pytester.runpytest_subprocess() - result.stdout.fnmatch_lines(["*= 1 passed * =*"]) - - def test_at_in_scenario(pytester): pytester.makefile( ".feature", @@ -238,19 +191,3 @@ def _(): strict_option = "--strict-markers" result = pytester.runpytest_subprocess(strict_option) result.stdout.fnmatch_lines(["*= 2 passed * =*"]) - - -@pytest.mark.parametrize( - "line, expected", - [ - ("@foo @bar", {"foo", "bar"}), - ("@with spaces @bar", {"with spaces", "bar"}), - ("@double @double", {"double"}), - (" @indented", {"indented"}), - (None, set()), - ("foobar", set()), - ("", set()), - ], -) -def test_get_tags(line, expected): - assert get_tags(line) == expected diff --git a/tests/feature/test_wrong.py b/tests/feature/test_wrong.py index 
f8c405439..002cd671c 100644 --- a/tests/feature/test_wrong.py +++ b/tests/feature/test_wrong.py @@ -50,4 +50,4 @@ def test_wrong(): ) result = pytester.runpytest() result.assert_outcomes(errors=1) - result.stdout.fnmatch_lines("*FeatureError: Multiple features are not allowed in a single feature file.*") + result.stdout.fnmatch_lines("*FeatureError: *") diff --git a/tests/generation/test_generate_missing.py b/tests/generation/test_generate_missing.py index d6be9be6f..4d02e0f4c 100644 --- a/tests/generation/test_generate_missing.py +++ b/tests/generation/test_generate_missing.py @@ -29,11 +29,9 @@ def test_generate_missing(pytester): Scenario: Scenario tests which are already bound to the tests stay as is Given I have a bar - Scenario: Code is generated for scenarios which are not bound to any tests Given I have a bar - Scenario: Code is generated for scenario steps which are not yet defined(implemented) Given I have a custom bar """ @@ -80,10 +78,6 @@ def test_missing_steps(): ] ) - result.stdout.fnmatch_lines( - ['Step Given "I have a foobar" is not defined in the background of the feature "Missing code generation" *'] - ) - result.stdout.fnmatch_lines(["Please place the code above to the test file(s):"]) diff --git a/tests/steps/test_common.py b/tests/steps/test_common.py index 7108aaab5..1342b6d25 100644 --- a/tests/steps/test_common.py +++ b/tests/steps/test_common.py @@ -316,25 +316,3 @@ def _(n): objects = collect_dumped_objects(result) assert objects == ["foo", ("foo parametrized", 1), "foo", ("foo parametrized", 2), "foo", ("foo parametrized", 3)] - - -def test_step_name_is_cached(): - """Test that the step name is cached and not re-computed eache time.""" - step = parser.Step(name="step name", type="given", indent=8, line_number=3, keyword="Given") - assert step.name == "step name" - - # manipulate the step name directly and validate the cache value is still returned - step._name = "incorrect step name" - assert step.name == "step name" - - # change the 
step name using the property and validate the cache has been invalidated - step.name = "new step name" - assert step.name == "new step name" - - # manipulate the step lines and validate the cache value is still returned - step.lines.append("step line 1") - assert step.name == "new step name" - - # add a step line and validate the cache has been invalidated - step.add_line("step line 2") - assert step.name == "new step name\nstep line 1\nstep line 2" From 65c06e42569da133511cfa64b10ab86c65c280ad Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Thu, 5 Sep 2024 13:24:54 +0100 Subject: [PATCH 02/36] Improve docstrings in parser.py --- src/pytest_bdd/parser.py | 206 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 204 insertions(+), 2 deletions(-) diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index 18a2928d8..c6d819a52 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -20,14 +20,33 @@ def strip_comments(line: str) -> str: - """Remove comments from a line of text.""" + """Remove comments from a line of text. + + Args: + line (str): The line of text from which to remove comments. + + Returns: + str: The line of text without comments, with leading and trailing whitespace removed. + """ if res := COMMENT_RE.search(line): line = line[: res.start()] return line.strip() def parse_feature(basedir: str, filename: str, encoding: str = "utf-8") -> Feature: - """Parse a feature file into a Feature object.""" + """Parse a feature file into a Feature object. + + Args: + basedir (str): The base directory of the feature file. + filename (str): The name of the feature file. + encoding (str): The encoding of the feature file (default is "utf-8"). + + Returns: + Feature: A Feature object representing the parsed feature file. + + Raises: + FeatureError: If there is an error parsing the feature file. 
+ """ abs_filename = os.path.abspath(os.path.join(basedir, filename)) rel_filename = os.path.join(os.path.basename(basedir), filename) with open(abs_filename, encoding=encoding) as f: @@ -46,6 +65,19 @@ def parse_feature(basedir: str, filename: str, encoding: str = "utf-8") -> Featu @dataclass(eq=False) class Feature: + """Represents a feature parsed from a feature file. + + Attributes: + scenarios (OrderedDict[str, ScenarioTemplate]): A dictionary of scenarios in the feature. + filename (str): The absolute path of the feature file. + rel_filename (str): The relative path of the feature file. + name (Optional[str]): The name of the feature. + tags (set[str]): A set of tags associated with the feature. + background (Optional[Background]): The background steps for the feature, if any. + line_number (int): The line number where the feature starts in the file. + description (str): The description of the feature. + """ + scenarios: OrderedDict[str, ScenarioTemplate] filename: str rel_filename: str @@ -58,28 +90,70 @@ class Feature: @dataclass(eq=False) class Examples: + """Represents examples used in scenarios for parameterization. + + Attributes: + line_number (Optional[int]): The line number where the examples start. + name (Optional[str]): The name of the examples. + example_params (List[str]): The names of the parameters for the examples. + examples (List[Sequence[str]]): The list of example rows. + """ + line_number: int | None = None name: str | None = None example_params: list[str] = field(default_factory=list) examples: list[Sequence[str]] = field(default_factory=list) def set_param_names(self, keys: Iterable[str]) -> None: + """Set the parameter names for the examples. + + Args: + keys (Iterable[str]): The parameter names to set. + """ self.example_params = [str(key) for key in keys] def add_example(self, values: Sequence[str]) -> None: + """Add a new example row. + + Args: + values (Sequence[str]): The values for the example row. 
+ """ self.examples.append([str(value) if value is not None else "" for value in values]) def as_contexts(self) -> Iterable[dict[str, Any]]: + """Generate contexts for the examples. + + Yields: + Dict[str, Any]: A dictionary mapping parameter names to their values for each example row. + """ for row in self.examples: assert len(self.example_params) == len(row) yield dict(zip(self.example_params, row)) def __bool__(self) -> bool: + """Check if there are any examples. + + Returns: + bool: True if there are examples, False otherwise. + """ return bool(self.examples) @dataclass(eq=False) class ScenarioTemplate: + """Represents a scenario template within a feature. + + Attributes: + feature (Feature): The feature to which this scenario belongs. + name (str): The name of the scenario. + line_number (int): The line number where the scenario starts in the file. + templated (bool): Whether the scenario is templated. + description (Optional[str]): The description of the scenario. + tags (set[str]): A set of tags associated with the scenario. + _steps (List[Step]): The list of steps in the scenario (internal use only). + examples (Optional[Examples]): The examples used for parameterization in the scenario. + """ + feature: Feature name: str line_number: int @@ -90,14 +164,32 @@ class ScenarioTemplate: examples: Examples | None = field(default_factory=Examples) def add_step(self, step: Step) -> None: + """Add a step to the scenario. + + Args: + step (Step): The step to add. + """ step.scenario = self self._steps.append(step) @property def steps(self) -> list[Step]: + """Get all steps for the scenario, including background steps. + + Returns: + List[Step]: A list of steps, including any background steps from the feature. + """ return (self.feature.background.steps if self.feature.background else []) + self._steps def render(self, context: Mapping[str, Any]) -> Scenario: + """Render the scenario with the given context. 
+ + Args: + context (Mapping[str, Any]): The context for rendering steps. + + Returns: + Scenario: A Scenario object with steps rendered based on the context. + """ background_steps = self.feature.background.steps if self.feature.background else [] scenario_steps = [ Step( @@ -122,6 +214,17 @@ def render(self, context: Mapping[str, Any]) -> Scenario: @dataclass(eq=False) class Scenario: + """Represents a scenario with steps. + + Attributes: + feature (Feature): The feature to which this scenario belongs. + name (str): The name of the scenario. + line_number (int): The line number where the scenario starts in the file. + steps (List[Step]): The list of steps in the scenario. + description (Optional[str]): The description of the scenario. + tags (set[str]): A set of tags associated with the scenario. + """ + feature: Feature name: str line_number: int @@ -132,6 +235,20 @@ class Scenario: @dataclass(eq=False) class Step: + """Represents a step within a scenario or background. + + Attributes: + type (str): The type of step (e.g., 'given', 'when', 'then'). + _name (str): The name of the step. + line_number (int): The line number where the step starts in the file. + indent (int): The indentation level of the step. + keyword (str): The keyword used for the step (e.g., 'Given', 'When', 'Then'). + failed (bool): Whether the step has failed (internal use only). + scenario (Optional[ScenarioTemplate]): The scenario to which this step belongs (internal use only). + background (Optional[Background]): The background to which this step belongs (internal use only). + lines (List[str]): Additional lines for the step (internal use only). + """ + type: str _name: str line_number: int @@ -143,6 +260,15 @@ class Step: lines: list[str] = field(init=False, default_factory=list) def __init__(self, name: str, type: str, indent: int, line_number: int, keyword: str) -> None: + """Initialize a step. + + Args: + name (str): The name of the step. 
+ type (str): The type of the step (e.g., 'given', 'when', 'then'). + indent (int): The indentation level of the step. + line_number (int): The line number where the step starts in the file. + keyword (str): The keyword used for the step (e.g., 'Given', 'When', 'Then'). + """ self.name = name self.type = type self.indent = indent @@ -150,13 +276,32 @@ def __init__(self, name: str, type: str, indent: int, line_number: int, keyword: self.keyword = keyword def __str__(self) -> str: + """Return a string representation of the step. + + Returns: + str: A string representation of the step. + """ return f'{self.type.capitalize()} "{self.name}"' @property def params(self) -> tuple[str, ...]: + """Get the parameters in the step name. + + Returns: + Tuple[str, ...]: A tuple of parameter names found in the step name. + """ return tuple(frozenset(STEP_PARAM_RE.findall(self.name))) def render(self, context: Mapping[str, Any]) -> str: + """Render the step name with the given context. + + Args: + context (Mapping[str, Any]): The context for rendering the step name. + + Returns: + str: The rendered step name with parameters replaced by their values from the context. + """ + def replacer(m: re.Match) -> str: varname = m.group(1) return str(context.get(varname, f"")) @@ -166,20 +311,60 @@ def replacer(m: re.Match) -> str: @dataclass(eq=False) class Background: + """Represents the background steps for a feature. + + Attributes: + feature (Feature): The feature to which this background belongs. + line_number (int): The line number where the background starts in the file. + steps (List[Step]): The list of steps in the background. + """ + feature: Feature line_number: int steps: list[Step] = field(init=False, default_factory=list) def add_step(self, step: Step) -> None: + """Add a step to the background. + + Args: + step (Step): The step to add. 
+ """ step.background = self self.steps.append(step) def dict_to_feature(abs_filename: str, rel_filename: str, data: dict) -> Feature: + """Convert a dictionary representation of a feature into a Feature object. + + Args: + abs_filename (str): The absolute path of the feature file. + rel_filename (str): The relative path of the feature file. + data (dict): The dictionary containing the feature data. + + Returns: + Feature: A Feature object representing the parsed feature data. + """ + def get_tag_names(tag_data: list[dict]) -> set[str]: + """Extract tag names from tag data. + + Args: + tag_data (List[dict]): The tag data to extract names from. + + Returns: + set[str]: A set of tag names. + """ return {tag["name"].lstrip("@") for tag in tag_data} def get_step_type(keyword: str) -> str | None: + """Map a step keyword to its corresponding type. + + Args: + keyword (str): The keyword for the step (e.g., 'given', 'when', 'then'). + + Returns: + str | None: The type of the step, or None if the keyword is unknown. + """ return { "given": GIVEN, "when": WHEN, @@ -187,6 +372,14 @@ def get_step_type(keyword: str) -> str | None: }.get(keyword) def parse_steps(steps_data: list[dict]) -> list[Step]: + """Parse a list of step data into Step objects. + + Args: + steps_data (List[dict]): The list of step data. + + Returns: + List[Step]: A list of Step objects. + """ steps = [] current_step_type = None for step_data in steps_data: @@ -208,6 +401,15 @@ def parse_steps(steps_data: list[dict]) -> list[Step]: return steps def parse_scenario(scenario_data: dict, feature: Feature) -> ScenarioTemplate: + """Parse a scenario data dictionary into a ScenarioTemplate object. + + Args: + scenario_data (dict): The dictionary containing scenario data. + feature (Feature): The feature to which this scenario belongs. + + Returns: + ScenarioTemplate: A ScenarioTemplate object representing the parsed scenario. 
+ """ scenario = ScenarioTemplate( feature=feature, name=strip_comments(scenario_data["name"]), From abe5e7935651bf6ab23cc88d1e450807734b4343 Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Thu, 5 Sep 2024 13:25:20 +0100 Subject: [PATCH 03/36] Improve docstrings in parser.py --- src/pytest_bdd/parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index c6d819a52..7d6bade20 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -6,7 +6,7 @@ import textwrap from collections import OrderedDict from dataclasses import dataclass, field -from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple +from typing import Any, Iterable, List, Mapping, Optional, Sequence from gherkin.errors import CompositeParserException from gherkin.parser import Parser From 240ac6d2a89d9618aee4fc0668c8d5dd29506b1c Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Thu, 5 Sep 2024 18:41:47 +0100 Subject: [PATCH 04/36] Fix issues and create a FeatureParser class to consolidate parsing logic --- src/pytest_bdd/feature.py | 4 +- src/pytest_bdd/parser.py | 136 ++++++++++++++++----------------- tests/feature/test_scenario.py | 46 +++++++++++ tests/feature/test_steps.py | 2 +- 4 files changed, 116 insertions(+), 72 deletions(-) diff --git a/src/pytest_bdd/feature.py b/src/pytest_bdd/feature.py index 54a15e3af..ee4bd90b8 100644 --- a/src/pytest_bdd/feature.py +++ b/src/pytest_bdd/feature.py @@ -29,7 +29,7 @@ import glob import os.path -from .parser import Feature, parse_feature +from .parser import Feature, FeatureParser # Global features dictionary features: dict[str, Feature] = {} @@ -52,7 +52,7 @@ def get_feature(base_path: str, filename: str, encoding: str = "utf-8") -> Featu full_name = os.path.abspath(os.path.join(base_path, filename)) feature = features.get(full_name) if not feature: - feature = parse_feature(base_path, filename, encoding=encoding) + feature = 
FeatureParser(base_path, filename, encoding).parse() features[full_name] = feature return feature diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index 7d6bade20..fca4fa600 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -6,7 +6,7 @@ import textwrap from collections import OrderedDict from dataclasses import dataclass, field -from typing import Any, Iterable, List, Mapping, Optional, Sequence +from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence from gherkin.errors import CompositeParserException from gherkin.parser import Parser @@ -33,36 +33,6 @@ def strip_comments(line: str) -> str: return line.strip() -def parse_feature(basedir: str, filename: str, encoding: str = "utf-8") -> Feature: - """Parse a feature file into a Feature object. - - Args: - basedir (str): The base directory of the feature file. - filename (str): The name of the feature file. - encoding (str): The encoding of the feature file (default is "utf-8"). - - Returns: - Feature: A Feature object representing the parsed feature file. - - Raises: - FeatureError: If there is an error parsing the feature file. - """ - abs_filename = os.path.abspath(os.path.join(basedir, filename)) - rel_filename = os.path.join(os.path.basename(basedir), filename) - with open(abs_filename, encoding=encoding) as f: - file_contents = f.read() - try: - gherkin_document = Parser().parse(TokenScanner(file_contents)) - except CompositeParserException as e: - raise FeatureError( - e.args[0], - e.errors[0].location["line"], - linecache.getline(abs_filename, e.errors[0].location["line"]).rstrip("\n"), - abs_filename, - ) from e - return dict_to_feature(abs_filename, rel_filename, gherkin_document) - - @dataclass(eq=False) class Feature: """Represents a feature parsed from a feature file. 
@@ -293,18 +263,19 @@ def params(self) -> tuple[str, ...]: return tuple(frozenset(STEP_PARAM_RE.findall(self.name))) def render(self, context: Mapping[str, Any]) -> str: - """Render the step name with the given context. + """Render the step name with the given context, but avoid replacing text inside angle brackets if context is missing. Args: context (Mapping[str, Any]): The context for rendering the step name. Returns: - str: The rendered step name with parameters replaced by their values from the context. + str: The rendered step name with parameters replaced only if they exist in the context. """ def replacer(m: re.Match) -> str: varname = m.group(1) - return str(context.get(varname, f"")) + # If the context contains the variable, replace it. Otherwise, leave it unchanged. + return str(context.get(varname, f"<{varname}>")) return STEP_PARAM_RE.sub(replacer, self.name) @@ -333,18 +304,21 @@ def add_step(self, step: Step) -> None: self.steps.append(step) -def dict_to_feature(abs_filename: str, rel_filename: str, data: dict) -> Feature: - """Convert a dictionary representation of a feature into a Feature object. +class FeatureParser: + """Converts a feature file into a Feature object. Args: - abs_filename (str): The absolute path of the feature file. - rel_filename (str): The relative path of the feature file. - data (dict): The dictionary containing the feature data. - - Returns: - Feature: A Feature object representing the parsed feature data. + basedir (str): The basedir for locating feature files. + filename (str): The filename of the feature file. + encoding (str): File encoding of the feature file to parse. """ + def __init__(self, basedir: str, filename: str, encoding: str = "utf-8"): + self.abs_filename = os.path.abspath(os.path.join(basedir, filename)) + self.rel_filename = os.path.join(os.path.basename(basedir), filename) + self.encoding = encoding + + @staticmethod def get_tag_names(tag_data: list[dict]) -> set[str]: """Extract tag names from tag data. 
@@ -356,6 +330,7 @@ def get_tag_names(tag_data: list[dict]) -> set[str]: """ return {tag["name"].lstrip("@") for tag in tag_data} + @staticmethod def get_step_type(keyword: str) -> str | None: """Map a step keyword to its corresponding type. @@ -371,7 +346,7 @@ def get_step_type(keyword: str) -> str | None: "then": THEN, }.get(keyword) - def parse_steps(steps_data: list[dict]) -> list[Step]: + def parse_steps(self, steps_data: list[dict]) -> list[Step]: """Parse a list of step data into Step objects. Args: @@ -384,7 +359,7 @@ def parse_steps(steps_data: list[dict]) -> list[Step]: current_step_type = None for step_data in steps_data: keyword = step_data["keyword"].strip().lower() - current_step_type = get_step_type(keyword) or current_step_type + current_step_type = self.get_step_type(keyword) or current_step_type name = strip_comments(step_data["text"]) if "docString" in step_data: doc_string = textwrap.dedent(step_data["docString"]["content"]) @@ -400,7 +375,7 @@ def parse_steps(steps_data: list[dict]) -> list[Step]: ) return steps - def parse_scenario(scenario_data: dict, feature: Feature) -> ScenarioTemplate: + def parse_scenario(self, scenario_data: dict, feature: Feature) -> ScenarioTemplate: """Parse a scenario data dictionary into a ScenarioTemplate object. 
Args: @@ -415,10 +390,10 @@ def parse_scenario(scenario_data: dict, feature: Feature) -> ScenarioTemplate: name=strip_comments(scenario_data["name"]), line_number=scenario_data["location"]["line"], templated=False, - tags=get_tag_names(scenario_data["tags"]), + tags=self.get_tag_names(scenario_data["tags"]), description=textwrap.dedent(scenario_data.get("description", "")), ) - for step in parse_steps(scenario_data["steps"]): + for step in self.parse_steps(scenario_data["steps"]): scenario.add_step(step) if "examples" in scenario_data: @@ -436,31 +411,54 @@ def parse_scenario(scenario_data: dict, feature: Feature) -> ScenarioTemplate: return scenario - def parse_background(background_data: dict, feature: Feature) -> Background: + def parse_background(self, background_data: dict, feature: Feature) -> Background: background = Background( feature=feature, line_number=background_data["location"]["line"], ) - background.steps = parse_steps(background_data["steps"]) + background.steps = self.parse_steps(background_data["steps"]) return background - feature_data = data["feature"] - feature = Feature( - scenarios=OrderedDict(), - filename=abs_filename, - rel_filename=rel_filename, - name=strip_comments(feature_data["name"]), - tags=get_tag_names(feature_data["tags"]), - background=None, - line_number=feature_data["location"]["line"], - description=textwrap.dedent(feature_data.get("description", "")), - ) - - for child in feature_data["children"]: - if "background" in child: - feature.background = parse_background(child["background"], feature) - elif "scenario" in child: - scenario = parse_scenario(child["scenario"], feature) - feature.scenarios[scenario.name] = scenario - - return feature + def _parse_feature_file(self) -> dict: + """Parse a feature file into a Feature object. + + Returns: + Dict: A Gherkin document representation of the feature file. + + Raises: + FeatureError: If there is an error parsing the feature file. 
+ """ + with open(self.abs_filename, encoding=self.encoding) as f: + file_contents = f.read() + try: + return Parser().parse(TokenScanner(file_contents)) + except CompositeParserException as e: + raise FeatureError( + e.args[0], + e.errors[0].location["line"], + linecache.getline(self.abs_filename, e.errors[0].location["line"]).rstrip("\n"), + self.abs_filename, + ) from e + + def parse(self): + data = self._parse_feature_file() + feature_data = data["feature"] + feature = Feature( + scenarios=OrderedDict(), + filename=self.abs_filename, + rel_filename=self.rel_filename, + name=strip_comments(feature_data["name"]), + tags=self.get_tag_names(feature_data["tags"]), + background=None, + line_number=feature_data["location"]["line"], + description=textwrap.dedent(feature_data.get("description", "")), + ) + + for child in feature_data["children"]: + if "background" in child: + feature.background = self.parse_background(child["background"], feature) + elif "scenario" in child: + scenario = self.parse_scenario(child["scenario"], feature) + feature.scenarios[scenario.name] = scenario + + return feature diff --git a/tests/feature/test_scenario.py b/tests/feature/test_scenario.py index c23d5e040..669d45caf 100644 --- a/tests/feature/test_scenario.py +++ b/tests/feature/test_scenario.py @@ -146,3 +146,49 @@ def _(): ) result = pytester.runpytest_subprocess(*pytest_params) result.assert_outcomes(passed=1) + + +def test_angular_brackets_are_not_parsed(pytester): + """Test that angular brackets are not parsed for "Scenario"s. 
+ + (They should be parsed only when used in "Scenario Outline") + + """ + pytester.makefile( + ".feature", + simple=""" + Feature: Simple feature + Scenario: Simple scenario + Given I have a + Then pass + + Scenario Outline: Outlined scenario + Given I have a templated + Then pass + + Examples: + | foo | + | bar | + """, + ) + pytester.makepyfile( + """ + from pytest_bdd import scenarios, given, then, parsers + + scenarios("simple.feature") + + @given("I have a ") + def _(): + return "tag" + + @given(parsers.parse("I have a templated {foo}")) + def _(foo): + return "foo" + + @then("pass") + def _(): + pass + """ + ) + result = pytester.runpytest() + result.assert_outcomes(passed=2) diff --git a/tests/feature/test_steps.py b/tests/feature/test_steps.py index 95a3a9893..94c11bcc3 100644 --- a/tests/feature/test_steps.py +++ b/tests/feature/test_steps.py @@ -523,7 +523,7 @@ def test_when_not_found(): pass @when('foo') - def foo(): + def _(): return 'foo' @scenario('test.feature', 'When step validation error happens') From e7b5326a43830c5eea0ab79585769347a79f6fce Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Fri, 6 Sep 2024 10:34:17 +0100 Subject: [PATCH 05/36] Forgot to go back and implement the templated bool --- src/pytest_bdd/parser.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index fca4fa600..2c0641cf7 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -295,7 +295,7 @@ class Background: steps: list[Step] = field(init=False, default_factory=list) def add_step(self, step: Step) -> None: - """Add a step to the background. + """Add a step to txhe background. Args: step (Step): The step to add. @@ -385,18 +385,19 @@ def parse_scenario(self, scenario_data: dict, feature: Feature) -> ScenarioTempl Returns: ScenarioTemplate: A ScenarioTemplate object representing the parsed scenario. 
""" + templated = "examples" in scenario_data scenario = ScenarioTemplate( feature=feature, name=strip_comments(scenario_data["name"]), line_number=scenario_data["location"]["line"], - templated=False, + templated=templated, tags=self.get_tag_names(scenario_data["tags"]), description=textwrap.dedent(scenario_data.get("description", "")), ) for step in self.parse_steps(scenario_data["steps"]): scenario.add_step(step) - if "examples" in scenario_data: + if templated: for example_data in scenario_data["examples"]: examples = Examples( line_number=example_data["location"]["line"], From 2f3e029200bc13b42dd98d03dc6f150ab1912fa5 Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Fri, 6 Sep 2024 10:41:25 +0100 Subject: [PATCH 06/36] Remove unused import --- src/pytest_bdd/scenario.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytest_bdd/scenario.py b/src/pytest_bdd/scenario.py index 80c6a0283..870ae014f 100644 --- a/src/pytest_bdd/scenario.py +++ b/src/pytest_bdd/scenario.py @@ -17,7 +17,7 @@ import logging import os import re -from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Optional, TypeVar, cast +from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, TypeVar, cast import pytest from _pytest.fixtures import FixtureDef, FixtureManager, FixtureRequest, call_fixture_func From cc9b37fc7f108479ce26cf0b9b2638e88553f21f Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Fri, 6 Sep 2024 15:49:49 +0100 Subject: [PATCH 07/36] Move Gherkin parsing to pydantic models for easier future reference of available data and implementing features. 
--- poetry.lock | 162 ++++++++++++++++++++++++++++++- pyproject.toml | 1 + src/pytest_bdd/gherkin_parser.py | 131 +++++++++++++++++++++++++ src/pytest_bdd/parser.py | 111 ++++++++++----------- 4 files changed, 343 insertions(+), 62 deletions(-) create mode 100644 src/pytest_bdd/gherkin_parser.py diff --git a/poetry.lock b/poetry.lock index f082c5147..2d77af476 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,19 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + [[package]] name = "cachetools" version = "5.3.3" @@ -395,6 +409,130 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pydantic" +version = "2.9.0" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.9.0-py3-none-any.whl", hash = "sha256:f66a7073abd93214a20c5f7b32d56843137a7a2e70d02111f3be287035c45370"}, + {file = "pydantic-2.9.0.tar.gz", hash = "sha256:c7a8a9fdf7d100afa49647eae340e2d23efa382466a8d177efcd1381e9be5598"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.23.2" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] +tzdata = {version = "*", markers = "python_version >= \"3.9\""} + +[package.extras] +email = ["email-validator (>=2.0.0)"] + 
+[[package]] +name = "pydantic-core" +version = "2.23.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.23.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7d0324a35ab436c9d768753cbc3c47a865a2cbc0757066cb864747baa61f6ece"}, + {file = "pydantic_core-2.23.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:276ae78153a94b664e700ac362587c73b84399bd1145e135287513442e7dfbc7"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c7aa318da542cdcc60d4a648377ffe1a2ef0eb1e996026c7f74507b720a78"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf842265a3a820ebc6388b963ead065f5ce8f2068ac4e1c713ef77a67b71f7c"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae90b9e50fe1bd115b24785e962b51130340408156d34d67b5f8f3fa6540938e"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ae65fdfb8a841556b52935dfd4c3f79132dc5253b12c0061b96415208f4d622"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c8aa40f6ca803f95b1c1c5aeaee6237b9e879e4dfb46ad713229a63651a95fb"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c53100c8ee5a1e102766abde2158077d8c374bee0639201f11d3032e3555dfbc"}, + {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6b9dd6aa03c812017411734e496c44fef29b43dba1e3dd1fa7361bbacfc1354"}, + {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b18cf68255a476b927910c6873d9ed00da692bb293c5b10b282bd48a0afe3ae2"}, + {file = "pydantic_core-2.23.2-cp310-none-win32.whl", hash = "sha256:e460475719721d59cd54a350c1f71c797c763212c836bf48585478c5514d2854"}, + {file 
= "pydantic_core-2.23.2-cp310-none-win_amd64.whl", hash = "sha256:5f3cf3721eaf8741cffaf092487f1ca80831202ce91672776b02b875580e174a"}, + {file = "pydantic_core-2.23.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7ce8e26b86a91e305858e018afc7a6e932f17428b1eaa60154bd1f7ee888b5f8"}, + {file = "pydantic_core-2.23.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e9b24cca4037a561422bf5dc52b38d390fb61f7bfff64053ce1b72f6938e6b2"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:753294d42fb072aa1775bfe1a2ba1012427376718fa4c72de52005a3d2a22178"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:257d6a410a0d8aeb50b4283dea39bb79b14303e0fab0f2b9d617701331ed1515"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8319e0bd6a7b45ad76166cc3d5d6a36c97d0c82a196f478c3ee5346566eebfd"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a05c0240f6c711eb381ac392de987ee974fa9336071fb697768dfdb151345ce"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d5b0ff3218858859910295df6953d7bafac3a48d5cd18f4e3ed9999efd2245f"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:96ef39add33ff58cd4c112cbac076726b96b98bb8f1e7f7595288dcfb2f10b57"}, + {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0102e49ac7d2df3379ef8d658d3bc59d3d769b0bdb17da189b75efa861fc07b4"}, + {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6612c2a844043e4d10a8324c54cdff0042c558eef30bd705770793d70b224aa"}, + {file = "pydantic_core-2.23.2-cp311-none-win32.whl", hash = "sha256:caffda619099cfd4f63d48462f6aadbecee3ad9603b4b88b60cb821c1b258576"}, + {file = "pydantic_core-2.23.2-cp311-none-win_amd64.whl", hash 
= "sha256:6f80fba4af0cb1d2344869d56430e304a51396b70d46b91a55ed4959993c0589"}, + {file = "pydantic_core-2.23.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c83c64d05ffbbe12d4e8498ab72bdb05bcc1026340a4a597dc647a13c1605ec"}, + {file = "pydantic_core-2.23.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6294907eaaccf71c076abdd1c7954e272efa39bb043161b4b8aa1cd76a16ce43"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a801c5e1e13272e0909c520708122496647d1279d252c9e6e07dac216accc41"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cc0c316fba3ce72ac3ab7902a888b9dc4979162d320823679da270c2d9ad0cad"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b06c5d4e8701ac2ba99a2ef835e4e1b187d41095a9c619c5b185c9068ed2a49"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82764c0bd697159fe9947ad59b6db6d7329e88505c8f98990eb07e84cc0a5d81"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b1a195efd347ede8bcf723e932300292eb13a9d2a3c1f84eb8f37cbbc905b7f"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7efb12e5071ad8d5b547487bdad489fbd4a5a35a0fc36a1941517a6ad7f23e0"}, + {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5dd0ec5f514ed40e49bf961d49cf1bc2c72e9b50f29a163b2cc9030c6742aa73"}, + {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:820f6ee5c06bc868335e3b6e42d7ef41f50dfb3ea32fbd523ab679d10d8741c0"}, + {file = "pydantic_core-2.23.2-cp312-none-win32.whl", hash = "sha256:3713dc093d5048bfaedbba7a8dbc53e74c44a140d45ede020dc347dda18daf3f"}, + {file = "pydantic_core-2.23.2-cp312-none-win_amd64.whl", hash = 
"sha256:e1895e949f8849bc2757c0dbac28422a04be031204df46a56ab34bcf98507342"}, + {file = "pydantic_core-2.23.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:da43cbe593e3c87d07108d0ebd73771dc414488f1f91ed2e204b0370b94b37ac"}, + {file = "pydantic_core-2.23.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:64d094ea1aa97c6ded4748d40886076a931a8bf6f61b6e43e4a1041769c39dd2"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084414ffe9a85a52940b49631321d636dadf3576c30259607b75516d131fecd0"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:043ef8469f72609c4c3a5e06a07a1f713d53df4d53112c6d49207c0bd3c3bd9b"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3649bd3ae6a8ebea7dc381afb7f3c6db237fc7cebd05c8ac36ca8a4187b03b30"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6db09153d8438425e98cdc9a289c5fade04a5d2128faff8f227c459da21b9703"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5668b3173bb0b2e65020b60d83f5910a7224027232c9f5dc05a71a1deac9f960"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c7b81beaf7c7ebde978377dc53679c6cba0e946426fc7ade54251dfe24a7604"}, + {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ae579143826c6f05a361d9546446c432a165ecf1c0b720bbfd81152645cb897d"}, + {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:19f1352fe4b248cae22a89268720fc74e83f008057a652894f08fa931e77dced"}, + {file = "pydantic_core-2.23.2-cp313-none-win32.whl", hash = "sha256:e1a79ad49f346aa1a2921f31e8dbbab4d64484823e813a002679eaa46cba39e1"}, + {file = "pydantic_core-2.23.2-cp313-none-win_amd64.whl", hash = 
"sha256:582871902e1902b3c8e9b2c347f32a792a07094110c1bca6c2ea89b90150caac"}, + {file = "pydantic_core-2.23.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:743e5811b0c377eb830150d675b0847a74a44d4ad5ab8845923d5b3a756d8100"}, + {file = "pydantic_core-2.23.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6650a7bbe17a2717167e3e23c186849bae5cef35d38949549f1c116031b2b3aa"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56e6a12ec8d7679f41b3750ffa426d22b44ef97be226a9bab00a03365f217b2b"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810ca06cca91de9107718dc83d9ac4d2e86efd6c02cba49a190abcaf33fb0472"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:785e7f517ebb9890813d31cb5d328fa5eda825bb205065cde760b3150e4de1f7"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ef71ec876fcc4d3bbf2ae81961959e8d62f8d74a83d116668409c224012e3af"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50ac34835c6a4a0d456b5db559b82047403c4317b3bc73b3455fefdbdc54b0a"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16b25a4a120a2bb7dab51b81e3d9f3cde4f9a4456566c403ed29ac81bf49744f"}, + {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:41ae8537ad371ec018e3c5da0eb3f3e40ee1011eb9be1da7f965357c4623c501"}, + {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07049ec9306ec64e955b2e7c40c8d77dd78ea89adb97a2013d0b6e055c5ee4c5"}, + {file = "pydantic_core-2.23.2-cp38-none-win32.whl", hash = "sha256:086c5db95157dc84c63ff9d96ebb8856f47ce113c86b61065a066f8efbe80acf"}, + {file = "pydantic_core-2.23.2-cp38-none-win_amd64.whl", hash = "sha256:67b6655311b00581914aba481729971b88bb8bc7996206590700a3ac85e457b8"}, + 
{file = "pydantic_core-2.23.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:358331e21a897151e54d58e08d0219acf98ebb14c567267a87e971f3d2a3be59"}, + {file = "pydantic_core-2.23.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d9f15ffe68bcd3898b0ad7233af01b15c57d91cd1667f8d868e0eacbfe3f87"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0123655fedacf035ab10c23450163c2f65a4174f2bb034b188240a6cf06bb123"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6e3ccebdbd6e53474b0bb7ab8b88e83c0cfe91484b25e058e581348ee5a01a5"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc535cb898ef88333cf317777ecdfe0faac1c2a3187ef7eb061b6f7ecf7e6bae"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aab9e522efff3993a9e98ab14263d4e20211e62da088298089a03056980a3e69"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05b366fb8fe3d8683b11ac35fa08947d7b92be78ec64e3277d03bd7f9b7cda79"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7568f682c06f10f30ef643a1e8eec4afeecdafde5c4af1b574c6df079e96f96c"}, + {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cdd02a08205dc90238669f082747612cb3c82bd2c717adc60f9b9ecadb540f80"}, + {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a2ab4f410f4b886de53b6bddf5dd6f337915a29dd9f22f20f3099659536b2f6"}, + {file = "pydantic_core-2.23.2-cp39-none-win32.whl", hash = "sha256:0448b81c3dfcde439551bb04a9f41d7627f676b12701865c8a2574bcea034437"}, + {file = "pydantic_core-2.23.2-cp39-none-win_amd64.whl", hash = "sha256:4cebb9794f67266d65e7e4cbe5dcf063e29fc7b81c79dc9475bd476d9534150e"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:e758d271ed0286d146cf7c04c539a5169a888dd0b57026be621547e756af55bc"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f477d26183e94eaafc60b983ab25af2a809a1b48ce4debb57b343f671b7a90b6"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da3131ef2b940b99106f29dfbc30d9505643f766704e14c5d5e504e6a480c35e"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329a721253c7e4cbd7aad4a377745fbcc0607f9d72a3cc2102dd40519be75ed2"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7706e15cdbf42f8fab1e6425247dfa98f4a6f8c63746c995d6a2017f78e619ae"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e64ffaf8f6e17ca15eb48344d86a7a741454526f3a3fa56bc493ad9d7ec63936"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dd59638025160056687d598b054b64a79183f8065eae0d3f5ca523cde9943940"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:12625e69b1199e94b0ae1c9a95d000484ce9f0182f9965a26572f054b1537e44"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d813fd871b3d5c3005157622ee102e8908ad6011ec915a18bd8fde673c4360e"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1eb37f7d6a8001c0f86dc8ff2ee8d08291a536d76e49e78cda8587bb54d8b329"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce7eaf9a98680b4312b7cebcdd9352531c43db00fca586115845df388f3c465"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f087879f1ffde024dd2788a30d55acd67959dcf6c431e9d3682d1c491a0eb474"}, + {file = 
"pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ce883906810b4c3bd90e0ada1f9e808d9ecf1c5f0b60c6b8831d6100bcc7dd6"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a8031074a397a5925d06b590121f8339d34a5a74cfe6970f8a1124eb8b83f4ac"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23af245b8f2f4ee9e2c99cb3f93d0e22fb5c16df3f2f643f5a8da5caff12a653"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c57e493a0faea1e4c38f860d6862ba6832723396c884fbf938ff5e9b224200e2"}, + {file = "pydantic_core-2.23.2.tar.gz", hash = "sha256:95d6bf449a1ac81de562d65d180af5d8c19672793c81877a2eda8fde5d08f2fd"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pygments" version = "2.17.2" @@ -542,6 +680,28 @@ files = [ {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + [[package]] name = "virtualenv" version = "20.25.1" @@ -565,4 +725,4 @@ test = ["covdefaults 
(>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = ">=3.8" -content-hash = "6b52d5b35db2892ae49a2d655a8f19fb430b59b3f8c4dc6881526f0729424580" +content-hash = "14509c113eb897776c0adc8e930775188765786b3dd9022359ba19733833b363" diff --git a/pyproject.toml b/pyproject.toml index 7d874a5a0..a5d9a15c2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,6 +43,7 @@ pytest = ">=6.2.0" typing-extensions = "*" packaging = "*" gherkin-official = "^29.0.0" +pydantic = "^2.9.0" [tool.poetry.group.dev.dependencies] tox = ">=4.11.3" diff --git a/src/pytest_bdd/gherkin_parser.py b/src/pytest_bdd/gherkin_parser.py new file mode 100644 index 000000000..35ac61a97 --- /dev/null +++ b/src/pytest_bdd/gherkin_parser.py @@ -0,0 +1,131 @@ +import linecache +from pathlib import Path +from typing import List, Optional, Union + +from gherkin.errors import CompositeParserException +from gherkin.parser import Parser +from gherkin.token_scanner import TokenScanner +from pydantic import BaseModel + + +class Location(BaseModel): + column: int + line: int + + +class Comment(BaseModel): + location: Location + text: str + + +class Cell(BaseModel): + location: Location + value: str + + +class Row(BaseModel): + id: str + location: Location + cells: List[Cell] + + +class DataTable(BaseModel): + name: Optional[str] = None + location: Location + tableHeader: Optional[Row] = None + tableBody: Optional[List[Row]] = None + + +class DocString(BaseModel): + content: str + delimiter: str + location: Location + + +class Step(BaseModel): + id: str + keyword: str + keywordType: str + location: Location + text: str + dataTable: Optional[DataTable] = None + docString: Optional[DocString] = None + + +class Tag(BaseModel): + id: str + location: Location + name: str + + +class Scenario(BaseModel): + id: str + keyword: str + location: Location + name: str + description: str + steps: List[Step] + tags: List[Tag] + examples: Optional[List[DataTable]] = None + + 
+class Rule(BaseModel): + id: str + keyword: str + location: Location + name: str + description: str + tags: List[Tag] + children: List[Scenario] + + +class Background(BaseModel): + id: str + keyword: str + location: Location + name: str + description: str + steps: List[Step] + + +class Child(BaseModel): + background: Optional[Background] = None + rule: Optional[Rule] = None + scenario: Optional[Scenario] = None + + +class Feature(BaseModel): + keyword: str + location: Location + tags: List[Tag] + name: str + description: str + children: List[Child] + + +class GherkinDocument(BaseModel): + feature: Feature + comments: List[Comment] + + +class GherkinParser: + def __init__(self, abs_filename: str = None, encoding: str = "utf-8"): + self.abs_filename = Path(abs_filename) if abs_filename else None + self.encoding = encoding + + with open(self.abs_filename, encoding=self.encoding) as f: + self.feature_file_text = f.read() + try: + self.gherkin_data = Parser().parse(TokenScanner(self.feature_file_text)) + except CompositeParserException as e: + from src.pytest_bdd import exceptions + + raise exceptions.FeatureError( + e.args[0], + e.errors[0].location["line"], + linecache.getline(str(self.abs_filename), e.errors[0].location["line"]).rstrip("\n"), + self.abs_filename, + ) from e + + def to_gherkin_document(self) -> GherkinDocument: + return GherkinDocument(**self.gherkin_data) diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index 2c0641cf7..b411575ab 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -1,6 +1,5 @@ from __future__ import annotations -import linecache import os.path import re import textwrap @@ -8,11 +7,12 @@ from dataclasses import dataclass, field from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence -from gherkin.errors import CompositeParserException -from gherkin.parser import Parser -from gherkin.token_scanner import TokenScanner - -from .exceptions import FeatureError +from .gherkin_parser 
import Background as GherkinBackground +from .gherkin_parser import Feature as GherkinFeature +from .gherkin_parser import GherkinDocument, GherkinParser +from .gherkin_parser import Scenario as GherkinScenario +from .gherkin_parser import Step as GherkinStep +from .gherkin_parser import Tag as GherkinTag from .types import GIVEN, THEN, WHEN STEP_PARAM_RE = re.compile(r"<(.+?)>") @@ -295,7 +295,7 @@ class Background: steps: list[Step] = field(init=False, default_factory=list) def add_step(self, step: Step) -> None: - """Add a step to txhe background. + """Add a step to the background. Args: step (Step): The step to add. @@ -319,7 +319,7 @@ def __init__(self, basedir: str, filename: str, encoding: str = "utf-8"): self.encoding = encoding @staticmethod - def get_tag_names(tag_data: list[dict]) -> set[str]: + def get_tag_names(tag_data: list[GherkinTag]) -> set[str]: """Extract tag names from tag data. Args: @@ -328,7 +328,7 @@ def get_tag_names(tag_data: list[dict]) -> set[str]: Returns: set[str]: A set of tag names. """ - return {tag["name"].lstrip("@") for tag in tag_data} + return {tag.name.lstrip("@") for tag in tag_data} @staticmethod def get_step_type(keyword: str) -> str | None: @@ -346,7 +346,7 @@ def get_step_type(keyword: str) -> str | None: "then": THEN, }.get(keyword) - def parse_steps(self, steps_data: list[dict]) -> list[Step]: + def parse_steps(self, steps_data: list[GherkinStep]) -> list[Step]: """Parse a list of step data into Step objects. 
Args: @@ -358,24 +358,24 @@ def parse_steps(self, steps_data: list[dict]) -> list[Step]: steps = [] current_step_type = None for step_data in steps_data: - keyword = step_data["keyword"].strip().lower() + keyword = step_data.keyword.strip().lower() current_step_type = self.get_step_type(keyword) or current_step_type - name = strip_comments(step_data["text"]) - if "docString" in step_data: - doc_string = textwrap.dedent(step_data["docString"]["content"]) + name = strip_comments(step_data.text) + if step_data.docString: + doc_string = textwrap.dedent(step_data.docString.content) name = f"{name}\n{doc_string}" steps.append( Step( name=name, type=current_step_type, - indent=step_data["location"]["column"] - 1, - line_number=step_data["location"]["line"], + indent=step_data.location.column - 1, + line_number=step_data.location.line, keyword=keyword.title(), ) ) return steps - def parse_scenario(self, scenario_data: dict, feature: Feature) -> ScenarioTemplate: + def parse_scenario(self, scenario_data: GherkinScenario, feature: Feature) -> ScenarioTemplate: """Parse a scenario data dictionary into a ScenarioTemplate object. Args: @@ -385,42 +385,41 @@ def parse_scenario(self, scenario_data: dict, feature: Feature) -> ScenarioTempl Returns: ScenarioTemplate: A ScenarioTemplate object representing the parsed scenario. 
""" - templated = "examples" in scenario_data + templated = bool(scenario_data.examples) scenario = ScenarioTemplate( feature=feature, - name=strip_comments(scenario_data["name"]), - line_number=scenario_data["location"]["line"], + name=strip_comments(scenario_data.name), + line_number=scenario_data.location.line, templated=templated, - tags=self.get_tag_names(scenario_data["tags"]), - description=textwrap.dedent(scenario_data.get("description", "")), + tags=self.get_tag_names(scenario_data.tags), + description=textwrap.dedent(scenario_data.description), ) - for step in self.parse_steps(scenario_data["steps"]): + for step in self.parse_steps(scenario_data.steps): scenario.add_step(step) - if templated: - for example_data in scenario_data["examples"]: - examples = Examples( - line_number=example_data["location"]["line"], - name=example_data["name"], - ) - param_names = [cell["value"] for cell in example_data["tableHeader"]["cells"]] - examples.set_param_names(param_names) - for row in example_data["tableBody"]: - values = [cell["value"] or "" for cell in row["cells"]] - examples.add_example(values) - scenario.examples = examples + for example_data in scenario_data.examples: + examples = Examples( + line_number=example_data.location.line, + name=example_data.name, + ) + param_names = [cell.value for cell in example_data.tableHeader.cells] + examples.set_param_names(param_names) + for row in example_data.tableBody: + values = [cell.value or "" for cell in row.cells] + examples.add_example(values) + scenario.examples = examples return scenario - def parse_background(self, background_data: dict, feature: Feature) -> Background: + def parse_background(self, background_data: GherkinBackground, feature: Feature) -> Background: background = Background( feature=feature, - line_number=background_data["location"]["line"], + line_number=background_data.location.line, ) - background.steps = self.parse_steps(background_data["steps"]) + background.steps = 
self.parse_steps(background_data.steps) return background - def _parse_feature_file(self) -> dict: + def _parse_feature_file(self) -> GherkinDocument: """Parse a feature file into a Feature object. Returns: @@ -429,37 +428,27 @@ def _parse_feature_file(self) -> dict: Raises: FeatureError: If there is an error parsing the feature file. """ - with open(self.abs_filename, encoding=self.encoding) as f: - file_contents = f.read() - try: - return Parser().parse(TokenScanner(file_contents)) - except CompositeParserException as e: - raise FeatureError( - e.args[0], - e.errors[0].location["line"], - linecache.getline(self.abs_filename, e.errors[0].location["line"]).rstrip("\n"), - self.abs_filename, - ) from e + return GherkinParser(self.abs_filename, self.encoding).to_gherkin_document() def parse(self): - data = self._parse_feature_file() - feature_data = data["feature"] + gherkin_doc: GherkinDocument = self._parse_feature_file() + feature_data: GherkinFeature = gherkin_doc.feature feature = Feature( scenarios=OrderedDict(), filename=self.abs_filename, rel_filename=self.rel_filename, - name=strip_comments(feature_data["name"]), - tags=self.get_tag_names(feature_data["tags"]), + name=strip_comments(feature_data.name), + tags=self.get_tag_names(feature_data.tags), background=None, - line_number=feature_data["location"]["line"], - description=textwrap.dedent(feature_data.get("description", "")), + line_number=feature_data.location.line, + description=textwrap.dedent(feature_data.description), ) - for child in feature_data["children"]: - if "background" in child: - feature.background = self.parse_background(child["background"], feature) - elif "scenario" in child: - scenario = self.parse_scenario(child["scenario"], feature) + for child in feature_data.children: + if child.background: + feature.background = self.parse_background(child.background, feature) + elif child.scenario: + scenario = self.parse_scenario(child.scenario, feature) feature.scenarios[scenario.name] = scenario 
return feature From 4e17ccb21714fd8bd0ed3d94bac2dee8beb4965f Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Fri, 6 Sep 2024 17:27:00 +0100 Subject: [PATCH 08/36] Move the calculating of given/when/then to pydantic models, as well as removing tabbing from docstring in steps (aka multiline steps) --- src/pytest_bdd/gherkin_parser.py | 47 +++++++++++++++++++++++++++++--- src/pytest_bdd/parser.py | 15 +++------- 2 files changed, 47 insertions(+), 15 deletions(-) diff --git a/src/pytest_bdd/gherkin_parser.py b/src/pytest_bdd/gherkin_parser.py index 35ac61a97..b2b248004 100644 --- a/src/pytest_bdd/gherkin_parser.py +++ b/src/pytest_bdd/gherkin_parser.py @@ -1,11 +1,15 @@ import linecache +import textwrap from pathlib import Path -from typing import List, Optional, Union +from typing import List, Optional from gherkin.errors import CompositeParserException from gherkin.parser import Parser from gherkin.token_scanner import TokenScanner -from pydantic import BaseModel +from pydantic import BaseModel, field_validator, model_validator + +from . 
import exceptions +from .types import STEP_TYPES class Location(BaseModel): @@ -41,6 +45,10 @@ class DocString(BaseModel): delimiter: str location: Location + @field_validator("content", mode="before") + def dedent_content(cls, value: str) -> str: + return textwrap.dedent(value) + class Step(BaseModel): id: str @@ -51,6 +59,18 @@ class Step(BaseModel): dataTable: Optional[DataTable] = None docString: Optional[DocString] = None + @field_validator("keyword", mode="before") + def normalize_keyword(cls, value: str) -> str: + return value.lower().strip() + + @property + def given_when_then(self) -> str: + return self._gwt + + @given_when_then.setter + def given_when_then(self, gwt: str) -> None: + self._gwt = gwt + class Tag(BaseModel): id: str @@ -68,6 +88,12 @@ class Scenario(BaseModel): tags: List[Tag] examples: Optional[List[DataTable]] = None + @model_validator(mode="after") + def process_steps(cls, instance): + steps = instance.steps + instance.steps = _compute_given_when_then(steps) + return instance + class Rule(BaseModel): id: str @@ -87,6 +113,12 @@ class Background(BaseModel): description: str steps: List[Step] + @model_validator(mode="after") + def process_steps(cls, instance): + steps = instance.steps + instance.steps = _compute_given_when_then(steps) + return instance + class Child(BaseModel): background: Optional[Background] = None @@ -108,6 +140,15 @@ class GherkinDocument(BaseModel): comments: List[Comment] +def _compute_given_when_then(steps: list[Step]) -> list[Step]: + last_gwt = None + for step in steps: + if step.keyword in STEP_TYPES: + last_gwt = step.keyword + step.given_when_then = last_gwt + return steps + + class GherkinParser: def __init__(self, abs_filename: str = None, encoding: str = "utf-8"): self.abs_filename = Path(abs_filename) if abs_filename else None @@ -118,8 +159,6 @@ def __init__(self, abs_filename: str = None, encoding: str = "utf-8"): try: self.gherkin_data = Parser().parse(TokenScanner(self.feature_file_text)) except 
CompositeParserException as e: - from src.pytest_bdd import exceptions - raise exceptions.FeatureError( e.args[0], e.errors[0].location["line"], diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index b411575ab..1c4adc0f4 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -220,7 +220,7 @@ class Step: """ type: str - _name: str + name: str line_number: int indent: int keyword: str @@ -356,21 +356,17 @@ def parse_steps(self, steps_data: list[GherkinStep]) -> list[Step]: List[Step]: A list of Step objects. """ steps = [] - current_step_type = None for step_data in steps_data: - keyword = step_data.keyword.strip().lower() - current_step_type = self.get_step_type(keyword) or current_step_type name = strip_comments(step_data.text) if step_data.docString: - doc_string = textwrap.dedent(step_data.docString.content) - name = f"{name}\n{doc_string}" + name = f"{name}\n{step_data.docString.content}" steps.append( Step( name=name, - type=current_step_type, + type=step_data.given_when_then, indent=step_data.location.column - 1, line_number=step_data.location.line, - keyword=keyword.title(), + keyword=step_data.keyword.title(), ) ) return steps @@ -424,9 +420,6 @@ def _parse_feature_file(self) -> GherkinDocument: Returns: Dict: A Gherkin document representation of the feature file. - - Raises: - FeatureError: If there is an error parsing the feature file. 
""" return GherkinParser(self.abs_filename, self.encoding).to_gherkin_document() From 57b9e55723aa4f7a32d4accb70547039182ca168 Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Fri, 6 Sep 2024 17:31:50 +0100 Subject: [PATCH 09/36] Fix silly mistakes --- src/pytest_bdd/gherkin_parser.py | 2 +- src/pytest_bdd/parser.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/pytest_bdd/gherkin_parser.py b/src/pytest_bdd/gherkin_parser.py index b2b248004..2d6b35d91 100644 --- a/src/pytest_bdd/gherkin_parser.py +++ b/src/pytest_bdd/gherkin_parser.py @@ -151,7 +151,7 @@ def _compute_given_when_then(steps: list[Step]) -> list[Step]: class GherkinParser: def __init__(self, abs_filename: str = None, encoding: str = "utf-8"): - self.abs_filename = Path(abs_filename) if abs_filename else None + self.abs_filename = Path(abs_filename) self.encoding = encoding with open(self.abs_filename, encoding=self.encoding) as f: diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index 1c4adc0f4..57c8e0a99 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -346,7 +346,8 @@ def get_step_type(keyword: str) -> str | None: "then": THEN, }.get(keyword) - def parse_steps(self, steps_data: list[GherkinStep]) -> list[Step]: + @staticmethod + def parse_steps(steps_data: list[GherkinStep]) -> list[Step]: """Parse a list of step data into Step objects. 
Args: From ff1a92606f12fee4e6c69d5a74f924784f375960 Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Fri, 6 Sep 2024 18:50:33 +0100 Subject: [PATCH 10/36] Fix type hints for py3.8 --- src/pytest_bdd/gherkin_parser.py | 2 +- src/pytest_bdd/parser.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pytest_bdd/gherkin_parser.py b/src/pytest_bdd/gherkin_parser.py index 2d6b35d91..f26e69e8c 100644 --- a/src/pytest_bdd/gherkin_parser.py +++ b/src/pytest_bdd/gherkin_parser.py @@ -140,7 +140,7 @@ class GherkinDocument(BaseModel): comments: List[Comment] -def _compute_given_when_then(steps: list[Step]) -> list[Step]: +def _compute_given_when_then(steps: List[Step]) -> List[Step]: last_gwt = None for step in steps: if step.keyword in STEP_TYPES: diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index 57c8e0a99..4a9c8e289 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -338,7 +338,7 @@ def get_step_type(keyword: str) -> str | None: keyword (str): The keyword for the step (e.g., 'given', 'when', 'then'). Returns: - str | None: The type of the step, or None if the keyword is unknown. + Optional[str]: The type of the step, or None if the keyword is unknown. """ return { "given": GIVEN, From 21afdb162b4d8c11132f067ee6b64b7cbb1f0f88 Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Sun, 8 Sep 2024 22:59:23 +0100 Subject: [PATCH 11/36] Response to feedback --- poetry.lock | 515 ++++++++++++------------------- pyproject.toml | 2 +- src/pytest_bdd/gherkin_parser.py | 367 ++++++++++++++-------- src/pytest_bdd/parser.py | 5 +- src/pytest_bdd/steps.py | 2 +- tests/feature/test_outline.py | 4 +- tests/feature/test_steps.py | 12 +- 7 files changed, 452 insertions(+), 455 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2d77af476..b447dd0b1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,28 +1,33 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" +name = "attrs" +version = "24.2.0" +description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "cachetools" -version = "5.3.3" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = 
"sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] @@ -49,63 +54,83 @@ files = [ [[package]] name = "coverage" -version = "7.4.3" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, - {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, - {file = 
"coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, - {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, - {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, - {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, - {file 
= "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, - {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, - {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, - {file = 
"coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, - {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, - {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, - {file = 
"coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, - {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, - {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, - {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, - {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = 
"coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = 
"sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = 
"coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = 
"coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = 
"coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + 
{file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] @@ -127,13 +152,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -141,13 +166,13 @@ test = ["pytest (>=6)"] [[package]] name = "execnet" -version = "2.0.2" +version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, - {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = 
"sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, ] [package.extras] @@ -155,19 +180,19 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "filelock" -version = "3.13.1" +version = "3.16.0" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, + {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "gherkin-official" @@ -193,13 +218,13 @@ files = [ [[package]] name = "mako" -version = "1.3.2" +version = "1.3.5" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.2-py3-none-any.whl", hash = "sha256:32a99d70754dfce237019d17ffe4a282d2d3351b9c476e90d8a60e63f133b80c"}, - {file = "Mako-1.3.2.tar.gz", hash = "sha256:2a0c8ad7f6274271b3bb7467dd37cf9cc6dab4bc19cb69a4ef10669402de698e"}, + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, ] [package.dependencies] @@ -281,44 +306,44 @@ files = [ [[package]] name = "mypy" -version = "1.9.0" +version = "1.11.2" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = 
"mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, + {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, + {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, + {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, + {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, + {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, + {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, + {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, + {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, + {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, + {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, + {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, + {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, + {file = 
"mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, + {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, + {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, + {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, + {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -339,35 +364,35 @@ files = [ [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] name = "parse" -version = "1.20.1" +version = "1.20.2" description = "parse() is the opposite of format()" optional = false python-versions = "*" files = [ - {file = "parse-1.20.1-py2.py3-none-any.whl", hash = "sha256:76ddd5214255ae711db4c512be636151fbabaa948c6f30115aecc440422ca82c"}, - {file = "parse-1.20.1.tar.gz", hash = 
"sha256:09002ca350ad42e76629995f71f7b518670bcf93548bdde3684fd55d2be51975"}, + {file = "parse-1.20.2-py2.py3-none-any.whl", hash = "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558"}, + {file = "parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce"}, ] [[package]] name = "parse-type" -version = "0.6.2" +version = "0.6.3" description = "Simplifies to build parse types based on the parse module" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*" +python-versions = "!=3.0.*,!=3.1.*,>=2.7" files = [ - {file = "parse_type-0.6.2-py2.py3-none-any.whl", hash = "sha256:06d39a8b70fde873eb2a131141a0e79bb34a432941fb3d66fad247abafc9766c"}, - {file = "parse_type-0.6.2.tar.gz", hash = "sha256:79b1f2497060d0928bc46016793f1fca1057c4aacdf15ef876aa48d75a73a355"}, + {file = "parse_type-0.6.3-py2.py3-none-any.whl", hash = "sha256:8d94a52e0197fbad63fee8f70df16e6ed689e5e4f105b705c9afa7a30397a5aa"}, + {file = "parse_type-0.6.3.tar.gz", hash = "sha256:8e99d2f52fab2f0f1f3d68ba9d026060140bf0e53680aada0111fb27b2f0e93a"}, ] [package.dependencies] @@ -375,207 +400,83 @@ parse = {version = ">=1.18.0", markers = "python_version >= \"3.0\""} six = ">=1.15" [package.extras] -develop = ["build (>=0.5.1)", "coverage (>=4.4)", "pylint", "pytest (<5.0)", "pytest (>=5.0)", "pytest-cov", "pytest-html (>=1.19.0)", "ruff", "tox (>=2.8,<4.0)", "twine (>=1.13.0)", "virtualenv (<20.22.0)", "virtualenv (>=20.0.0)"] +develop = ["build (>=0.5.1)", "coverage (>=4.4)", "pylint", "pytest (<5.0)", "pytest (>=5.0)", "pytest-cov", "pytest-html (>=1.19.0)", "ruff", "setuptools", "tox (>=2.8,<4.0)", "twine (>=1.13.0)", "virtualenv (<20.22.0)", "virtualenv (>=20.0.0)"] docs = ["Sphinx (>=1.6)", "sphinx-bootstrap-theme (>=0.6.0)"] testing = ["pytest (<5.0)", "pytest (>=5.0)", "pytest-html (>=1.19.0)"] [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific 
dirs, e.g. a \"user data dir\"." +version = "4.3.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.3.2-py3-none-any.whl", hash = "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617"}, + {file = "platformdirs-4.3.2.tar.gz", hash = "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] 
-name = "pydantic" -version = "2.9.0" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.9.0-py3-none-any.whl", hash = "sha256:f66a7073abd93214a20c5f7b32d56843137a7a2e70d02111f3be287035c45370"}, - {file = "pydantic-2.9.0.tar.gz", hash = "sha256:c7a8a9fdf7d100afa49647eae340e2d23efa382466a8d177efcd1381e9be5598"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.23.2" -typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, -] -tzdata = {version = "*", markers = "python_version >= \"3.9\""} - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.23.2" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.23.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7d0324a35ab436c9d768753cbc3c47a865a2cbc0757066cb864747baa61f6ece"}, - {file = "pydantic_core-2.23.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:276ae78153a94b664e700ac362587c73b84399bd1145e135287513442e7dfbc7"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c7aa318da542cdcc60d4a648377ffe1a2ef0eb1e996026c7f74507b720a78"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf842265a3a820ebc6388b963ead065f5ce8f2068ac4e1c713ef77a67b71f7c"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae90b9e50fe1bd115b24785e962b51130340408156d34d67b5f8f3fa6540938e"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ae65fdfb8a841556b52935dfd4c3f79132dc5253b12c0061b96415208f4d622"}, - {file = 
"pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c8aa40f6ca803f95b1c1c5aeaee6237b9e879e4dfb46ad713229a63651a95fb"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c53100c8ee5a1e102766abde2158077d8c374bee0639201f11d3032e3555dfbc"}, - {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6b9dd6aa03c812017411734e496c44fef29b43dba1e3dd1fa7361bbacfc1354"}, - {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b18cf68255a476b927910c6873d9ed00da692bb293c5b10b282bd48a0afe3ae2"}, - {file = "pydantic_core-2.23.2-cp310-none-win32.whl", hash = "sha256:e460475719721d59cd54a350c1f71c797c763212c836bf48585478c5514d2854"}, - {file = "pydantic_core-2.23.2-cp310-none-win_amd64.whl", hash = "sha256:5f3cf3721eaf8741cffaf092487f1ca80831202ce91672776b02b875580e174a"}, - {file = "pydantic_core-2.23.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7ce8e26b86a91e305858e018afc7a6e932f17428b1eaa60154bd1f7ee888b5f8"}, - {file = "pydantic_core-2.23.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e9b24cca4037a561422bf5dc52b38d390fb61f7bfff64053ce1b72f6938e6b2"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:753294d42fb072aa1775bfe1a2ba1012427376718fa4c72de52005a3d2a22178"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:257d6a410a0d8aeb50b4283dea39bb79b14303e0fab0f2b9d617701331ed1515"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8319e0bd6a7b45ad76166cc3d5d6a36c97d0c82a196f478c3ee5346566eebfd"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a05c0240f6c711eb381ac392de987ee974fa9336071fb697768dfdb151345ce"}, - {file = 
"pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d5b0ff3218858859910295df6953d7bafac3a48d5cd18f4e3ed9999efd2245f"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:96ef39add33ff58cd4c112cbac076726b96b98bb8f1e7f7595288dcfb2f10b57"}, - {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0102e49ac7d2df3379ef8d658d3bc59d3d769b0bdb17da189b75efa861fc07b4"}, - {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6612c2a844043e4d10a8324c54cdff0042c558eef30bd705770793d70b224aa"}, - {file = "pydantic_core-2.23.2-cp311-none-win32.whl", hash = "sha256:caffda619099cfd4f63d48462f6aadbecee3ad9603b4b88b60cb821c1b258576"}, - {file = "pydantic_core-2.23.2-cp311-none-win_amd64.whl", hash = "sha256:6f80fba4af0cb1d2344869d56430e304a51396b70d46b91a55ed4959993c0589"}, - {file = "pydantic_core-2.23.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c83c64d05ffbbe12d4e8498ab72bdb05bcc1026340a4a597dc647a13c1605ec"}, - {file = "pydantic_core-2.23.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6294907eaaccf71c076abdd1c7954e272efa39bb043161b4b8aa1cd76a16ce43"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a801c5e1e13272e0909c520708122496647d1279d252c9e6e07dac216accc41"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cc0c316fba3ce72ac3ab7902a888b9dc4979162d320823679da270c2d9ad0cad"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b06c5d4e8701ac2ba99a2ef835e4e1b187d41095a9c619c5b185c9068ed2a49"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82764c0bd697159fe9947ad59b6db6d7329e88505c8f98990eb07e84cc0a5d81"}, - {file = 
"pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b1a195efd347ede8bcf723e932300292eb13a9d2a3c1f84eb8f37cbbc905b7f"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7efb12e5071ad8d5b547487bdad489fbd4a5a35a0fc36a1941517a6ad7f23e0"}, - {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5dd0ec5f514ed40e49bf961d49cf1bc2c72e9b50f29a163b2cc9030c6742aa73"}, - {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:820f6ee5c06bc868335e3b6e42d7ef41f50dfb3ea32fbd523ab679d10d8741c0"}, - {file = "pydantic_core-2.23.2-cp312-none-win32.whl", hash = "sha256:3713dc093d5048bfaedbba7a8dbc53e74c44a140d45ede020dc347dda18daf3f"}, - {file = "pydantic_core-2.23.2-cp312-none-win_amd64.whl", hash = "sha256:e1895e949f8849bc2757c0dbac28422a04be031204df46a56ab34bcf98507342"}, - {file = "pydantic_core-2.23.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:da43cbe593e3c87d07108d0ebd73771dc414488f1f91ed2e204b0370b94b37ac"}, - {file = "pydantic_core-2.23.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:64d094ea1aa97c6ded4748d40886076a931a8bf6f61b6e43e4a1041769c39dd2"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084414ffe9a85a52940b49631321d636dadf3576c30259607b75516d131fecd0"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:043ef8469f72609c4c3a5e06a07a1f713d53df4d53112c6d49207c0bd3c3bd9b"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3649bd3ae6a8ebea7dc381afb7f3c6db237fc7cebd05c8ac36ca8a4187b03b30"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6db09153d8438425e98cdc9a289c5fade04a5d2128faff8f227c459da21b9703"}, - {file = 
"pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5668b3173bb0b2e65020b60d83f5910a7224027232c9f5dc05a71a1deac9f960"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c7b81beaf7c7ebde978377dc53679c6cba0e946426fc7ade54251dfe24a7604"}, - {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ae579143826c6f05a361d9546446c432a165ecf1c0b720bbfd81152645cb897d"}, - {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:19f1352fe4b248cae22a89268720fc74e83f008057a652894f08fa931e77dced"}, - {file = "pydantic_core-2.23.2-cp313-none-win32.whl", hash = "sha256:e1a79ad49f346aa1a2921f31e8dbbab4d64484823e813a002679eaa46cba39e1"}, - {file = "pydantic_core-2.23.2-cp313-none-win_amd64.whl", hash = "sha256:582871902e1902b3c8e9b2c347f32a792a07094110c1bca6c2ea89b90150caac"}, - {file = "pydantic_core-2.23.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:743e5811b0c377eb830150d675b0847a74a44d4ad5ab8845923d5b3a756d8100"}, - {file = "pydantic_core-2.23.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6650a7bbe17a2717167e3e23c186849bae5cef35d38949549f1c116031b2b3aa"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56e6a12ec8d7679f41b3750ffa426d22b44ef97be226a9bab00a03365f217b2b"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810ca06cca91de9107718dc83d9ac4d2e86efd6c02cba49a190abcaf33fb0472"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:785e7f517ebb9890813d31cb5d328fa5eda825bb205065cde760b3150e4de1f7"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ef71ec876fcc4d3bbf2ae81961959e8d62f8d74a83d116668409c224012e3af"}, - {file = 
"pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50ac34835c6a4a0d456b5db559b82047403c4317b3bc73b3455fefdbdc54b0a"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16b25a4a120a2bb7dab51b81e3d9f3cde4f9a4456566c403ed29ac81bf49744f"}, - {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:41ae8537ad371ec018e3c5da0eb3f3e40ee1011eb9be1da7f965357c4623c501"}, - {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07049ec9306ec64e955b2e7c40c8d77dd78ea89adb97a2013d0b6e055c5ee4c5"}, - {file = "pydantic_core-2.23.2-cp38-none-win32.whl", hash = "sha256:086c5db95157dc84c63ff9d96ebb8856f47ce113c86b61065a066f8efbe80acf"}, - {file = "pydantic_core-2.23.2-cp38-none-win_amd64.whl", hash = "sha256:67b6655311b00581914aba481729971b88bb8bc7996206590700a3ac85e457b8"}, - {file = "pydantic_core-2.23.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:358331e21a897151e54d58e08d0219acf98ebb14c567267a87e971f3d2a3be59"}, - {file = "pydantic_core-2.23.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d9f15ffe68bcd3898b0ad7233af01b15c57d91cd1667f8d868e0eacbfe3f87"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0123655fedacf035ab10c23450163c2f65a4174f2bb034b188240a6cf06bb123"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6e3ccebdbd6e53474b0bb7ab8b88e83c0cfe91484b25e058e581348ee5a01a5"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc535cb898ef88333cf317777ecdfe0faac1c2a3187ef7eb061b6f7ecf7e6bae"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aab9e522efff3993a9e98ab14263d4e20211e62da088298089a03056980a3e69"}, - {file = 
"pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05b366fb8fe3d8683b11ac35fa08947d7b92be78ec64e3277d03bd7f9b7cda79"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7568f682c06f10f30ef643a1e8eec4afeecdafde5c4af1b574c6df079e96f96c"}, - {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cdd02a08205dc90238669f082747612cb3c82bd2c717adc60f9b9ecadb540f80"}, - {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a2ab4f410f4b886de53b6bddf5dd6f337915a29dd9f22f20f3099659536b2f6"}, - {file = "pydantic_core-2.23.2-cp39-none-win32.whl", hash = "sha256:0448b81c3dfcde439551bb04a9f41d7627f676b12701865c8a2574bcea034437"}, - {file = "pydantic_core-2.23.2-cp39-none-win_amd64.whl", hash = "sha256:4cebb9794f67266d65e7e4cbe5dcf063e29fc7b81c79dc9475bd476d9534150e"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e758d271ed0286d146cf7c04c539a5169a888dd0b57026be621547e756af55bc"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f477d26183e94eaafc60b983ab25af2a809a1b48ce4debb57b343f671b7a90b6"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da3131ef2b940b99106f29dfbc30d9505643f766704e14c5d5e504e6a480c35e"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329a721253c7e4cbd7aad4a377745fbcc0607f9d72a3cc2102dd40519be75ed2"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7706e15cdbf42f8fab1e6425247dfa98f4a6f8c63746c995d6a2017f78e619ae"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e64ffaf8f6e17ca15eb48344d86a7a741454526f3a3fa56bc493ad9d7ec63936"}, - {file = 
"pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dd59638025160056687d598b054b64a79183f8065eae0d3f5ca523cde9943940"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:12625e69b1199e94b0ae1c9a95d000484ce9f0182f9965a26572f054b1537e44"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d813fd871b3d5c3005157622ee102e8908ad6011ec915a18bd8fde673c4360e"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1eb37f7d6a8001c0f86dc8ff2ee8d08291a536d76e49e78cda8587bb54d8b329"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce7eaf9a98680b4312b7cebcdd9352531c43db00fca586115845df388f3c465"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f087879f1ffde024dd2788a30d55acd67959dcf6c431e9d3682d1c491a0eb474"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ce883906810b4c3bd90e0ada1f9e808d9ecf1c5f0b60c6b8831d6100bcc7dd6"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a8031074a397a5925d06b590121f8339d34a5a74cfe6970f8a1124eb8b83f4ac"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23af245b8f2f4ee9e2c99cb3f93d0e22fb5c16df3f2f643f5a8da5caff12a653"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c57e493a0faea1e4c38f860d6862ba6832723396c884fbf938ff5e9b224200e2"}, - {file = "pydantic_core-2.23.2.tar.gz", hash = "sha256:95d6bf449a1ac81de562d65d180af5d8c19672793c81877a2eda8fde5d08f2fd"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - [[package]] name = "pygments" -version = "2.17.2" +version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] -plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyproject-api" -version = "1.6.1" +version = "1.7.1" description = "API to interact with the python pyproject.toml based projects" optional = false python-versions = ">=3.8" files = [ - {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"}, - {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"}, + {file = "pyproject_api-1.7.1-py3-none-any.whl", hash = "sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb"}, + {file = "pyproject_api-1.7.1.tar.gz", hash = "sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827"}, ] [package.dependencies] -packaging = ">=23.1" +packaging = ">=24.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] -docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "setuptools (>=68.1.2)", "wheel (>=0.41.2)"] +docs = ["furo (>=2024.5.6)", "sphinx-autodoc-typehints (>=2.2.1)"] +testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=70.1)"] [[package]] name = "pytest" 
-version = "8.1.1" +version = "8.3.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, - {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, + {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, ] [package.dependencies] @@ -583,26 +484,26 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.4,<2.0" +pluggy = ">=1.5,<2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-xdist" -version = "3.5.0" +version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"}, - {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"}, + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", hash = 
"sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, ] [package.dependencies] -execnet = ">=1.1" -pytest = ">=6.2.0" +execnet = ">=2.1" +pytest = ">=7.0.0" [package.extras] psutil = ["psutil (>=3.0)"] @@ -633,51 +534,40 @@ files = [ [[package]] name = "tox" -version = "4.14.1" +version = "4.18.1" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.14.1-py3-none-any.whl", hash = "sha256:b03754b6ee6dadc70f2611da82b4ed8f625fcafd247e15d1d0cb056f90a06d3b"}, - {file = "tox-4.14.1.tar.gz", hash = "sha256:f0ad758c3bbf7e237059c929d3595479363c3cdd5a06ac3e49d1dd020ffbee45"}, + {file = "tox-4.18.1-py3-none-any.whl", hash = "sha256:35d472032ee1f73fe20c3e0e73d7073a4e85075c86ff02c576f9fc7c6a15a578"}, + {file = "tox-4.18.1.tar.gz", hash = "sha256:3c0c96bc3a568a5c7e66387a4cfcf8c875b52e09f4d47c9f7a277ec82f1a0b11"}, ] [package.dependencies] -cachetools = ">=5.3.2" +cachetools = ">=5.5" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.13.1" -packaging = ">=23.2" -platformdirs = ">=4.1" -pluggy = ">=1.3" -pyproject-api = ">=1.6.1" +filelock = ">=3.15.4" +packaging = ">=24.1" +platformdirs = ">=4.2.2" +pluggy = ">=1.5" +pyproject-api = ">=1.7.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.25" +virtualenv = ">=20.26.3" [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] -testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=8.0.2)", "distlib (>=0.3.8)", "flaky (>=3.7)", "hatch-vcs (>=0.4)", "hatchling (>=1.21)", "psutil (>=5.9.7)", "pytest (>=7.4.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-xdist (>=3.5)", 
"re-assert (>=1.1)", "time-machine (>=2.13)", "wheel (>=0.42)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-argparse-cli (>=1.17)", "sphinx-autodoc-typehints (>=2.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=24.8)"] +testing = ["build[virtualenv] (>=1.2.2)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=74.1.2)", "time-machine (>=2.15)", "wheel (>=0.44)"] [[package]] name = "types-setuptools" -version = "69.1.0.20240310" +version = "74.1.0.20240907" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.8" files = [ - {file = "types-setuptools-69.1.0.20240310.tar.gz", hash = "sha256:fc0e1082f55c974611bce844b1e5beb2d1a895501f4a464e48305592a4268100"}, - {file = "types_setuptools-69.1.0.20240310-py3-none-any.whl", hash = "sha256:7801245ecaf371d24f1154924c8f1f0efdc53977339bf79886b5b10890af6478"}, -] - -[[package]] -name = "typing-extensions" -version = "4.10.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "types-setuptools-74.1.0.20240907.tar.gz", hash = "sha256:0abdb082552ca966c1e5fc244e4853adc62971f6cd724fb1d8a3713b580e5a65"}, + {file = "types_setuptools-74.1.0.20240907-py3-none-any.whl", hash = "sha256:15b38c8e63ca34f42f6063ff4b1dd662ea20086166d5ad6a102e670a52574120"}, ] [[package]] @@ -691,26 +581,15 @@ files = [ 
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - [[package]] name = "virtualenv" -version = "20.25.1" +version = "20.26.4" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, - {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, + {file = "virtualenv-20.26.4-py3-none-any.whl", hash = "sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55"}, + {file = "virtualenv-20.26.4.tar.gz", hash = "sha256:c17f4e0f3e6036e9f26700446f85c76ab11df65ff6d8a9cbfad9f71aabfcf23c"}, ] [package.dependencies] @@ -719,10 +598,10 @@ filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [metadata] 
lock-version = "2.0" python-versions = ">=3.8" -content-hash = "14509c113eb897776c0adc8e930775188765786b3dd9022359ba19733833b363" +content-hash = "455db4f2d48159aebda8bcbfb75fbf435af1029eeeaeceb1391efc8820f871da" diff --git a/pyproject.toml b/pyproject.toml index a5d9a15c2..ed8d3e3fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ pytest = ">=6.2.0" typing-extensions = "*" packaging = "*" gherkin-official = "^29.0.0" -pydantic = "^2.9.0" +attrs = "^24.2.0" [tool.poetry.group.dev.dependencies] tox = ">=4.11.3" diff --git a/src/pytest_bdd/gherkin_parser.py b/src/pytest_bdd/gherkin_parser.py index f26e69e8c..bb93f7c75 100644 --- a/src/pytest_bdd/gherkin_parser.py +++ b/src/pytest_bdd/gherkin_parser.py @@ -1,143 +1,259 @@ import linecache import textwrap -from pathlib import Path -from typing import List, Optional +from typing import Any, Dict, List, Optional +import attr from gherkin.errors import CompositeParserException from gherkin.parser import Parser from gherkin.token_scanner import TokenScanner -from pydantic import BaseModel, field_validator, model_validator from . 
import exceptions from .types import STEP_TYPES -class Location(BaseModel): - column: int - line: int +@attr.s +class Location: + column = attr.ib(type=int) + line = attr.ib(type=int) + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Location": + return cls(column=data["column"], line=data["line"]) -class Comment(BaseModel): - location: Location - text: str +@attr.s +class Comment: + location = attr.ib(type=Location) + text = attr.ib(type=str) -class Cell(BaseModel): - location: Location - value: str + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Comment": + return cls(location=Location.from_dict(data["location"]), text=data["text"]) -class Row(BaseModel): - id: str - location: Location - cells: List[Cell] +@attr.s +class Cell: + location = attr.ib(type=Location) + value = attr.ib(type=str) + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Cell": + return cls(location=Location.from_dict(data["location"]), value=_convert_to_raw_string(data["value"])) -class DataTable(BaseModel): - name: Optional[str] = None - location: Location - tableHeader: Optional[Row] = None - tableBody: Optional[List[Row]] = None +@attr.s +class Row: + id = attr.ib(type=str) + location = attr.ib(type=Location) + cells = attr.ib(type=List[Cell]) -class DocString(BaseModel): - content: str - delimiter: str - location: Location + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Row": + return cls( + id=data["id"], + location=Location.from_dict(data["location"]), + cells=[Cell.from_dict(cell) for cell in data["cells"]], + ) - @field_validator("content", mode="before") - def dedent_content(cls, value: str) -> str: - return textwrap.dedent(value) +@attr.s +class DataTable: + location = attr.ib(type=Location) + name = attr.ib(type=Optional[str], default=None) + tableHeader = attr.ib(type=Optional[Row], default=None) + tableBody = attr.ib(type=Optional[List[Row]], factory=list) -class Step(BaseModel): - id: str - keyword: str - keywordType: str - 
location: Location - text: str - dataTable: Optional[DataTable] = None - docString: Optional[DocString] = None + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "DataTable": + return cls( + location=Location.from_dict(data["location"]), + name=data.get("name"), + tableHeader=Row.from_dict(data["tableHeader"]) if data.get("tableHeader") else None, + tableBody=[Row.from_dict(row) for row in data.get("tableBody", [])], + ) - @field_validator("keyword", mode="before") - def normalize_keyword(cls, value: str) -> str: - return value.lower().strip() - - @property - def given_when_then(self) -> str: - return self._gwt - - @given_when_then.setter - def given_when_then(self, gwt: str) -> None: - self._gwt = gwt - - -class Tag(BaseModel): - id: str - location: Location - name: str - - -class Scenario(BaseModel): - id: str - keyword: str - location: Location - name: str - description: str - steps: List[Step] - tags: List[Tag] - examples: Optional[List[DataTable]] = None - - @model_validator(mode="after") - def process_steps(cls, instance): - steps = instance.steps - instance.steps = _compute_given_when_then(steps) - return instance +@attr.s +class DocString: + content = attr.ib(type=str) + delimiter = attr.ib(type=str) + location = attr.ib(type=Location) -class Rule(BaseModel): - id: str - keyword: str - location: Location - name: str - description: str - tags: List[Tag] - children: List[Scenario] + def __attrs_post_init__(self): + self.content = textwrap.dedent(self.content) + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "DocString": + return cls(content=data["content"], delimiter=data["delimiter"], location=Location.from_dict(data["location"])) -class Background(BaseModel): - id: str - keyword: str - location: Location - name: str - description: str - steps: List[Step] - @model_validator(mode="after") - def process_steps(cls, instance): - steps = instance.steps - instance.steps = _compute_given_when_then(steps) - return instance +@attr.s +class Step: 
+ id = attr.ib(type=str) + keyword = attr.ib(type=str) + keywordType = attr.ib(type=str) + location = attr.ib(type=Location) + text = attr.ib(type=str) + dataTable = attr.ib(type=Optional[DataTable], default=None) + docString = attr.ib(type=Optional[DocString], default=None) + def __attrs_post_init__(self): + self.keyword = self.keyword.lower().strip() -class Child(BaseModel): - background: Optional[Background] = None - rule: Optional[Rule] = None - scenario: Optional[Scenario] = None - - -class Feature(BaseModel): - keyword: str - location: Location - tags: List[Tag] - name: str - description: str - children: List[Child] + @property + def given_when_then(self) -> str: + return getattr(self, "_gwt", "") + @given_when_then.setter + def given_when_then(self, gwt: str) -> None: + self._gwt = gwt -class GherkinDocument(BaseModel): - feature: Feature - comments: List[Comment] + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Step": + return cls( + id=data["id"], + keyword=data["keyword"], + keywordType=data["keywordType"], + location=Location.from_dict(data["location"]), + text=data["text"], + dataTable=DataTable.from_dict(data["dataTable"]) if data.get("dataTable") else None, + docString=DocString.from_dict(data["docString"]) if data.get("docString") else None, + ) + + +@attr.s +class Tag: + id = attr.ib(type=str) + location = attr.ib(type=Location) + name = attr.ib(type=str) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Tag": + return cls(id=data["id"], location=Location.from_dict(data["location"]), name=data["name"]) + + +@attr.s +class Scenario: + id = attr.ib(type=str) + keyword = attr.ib(type=str) + location = attr.ib(type=Location) + name = attr.ib(type=str) + description = attr.ib(type=str) + steps = attr.ib(type=List[Step]) + tags = attr.ib(type=List[Tag]) + examples = attr.ib(type=Optional[List[DataTable]], factory=list) + + def __attrs_post_init__(self): + self.steps = _compute_given_when_then(self.steps) + + @classmethod + def 
from_dict(cls, data: Dict[str, Any]) -> "Scenario": + return cls( + id=data["id"], + keyword=data["keyword"], + location=Location.from_dict(data["location"]), + name=data["name"], + description=data["description"], + steps=[Step.from_dict(step) for step in data["steps"]], + tags=[Tag.from_dict(tag) for tag in data["tags"]], + examples=[DataTable.from_dict(example) for example in data.get("examples", [])], + ) + + +@attr.s +class Rule: + id = attr.ib(type=str) + keyword = attr.ib(type=str) + location = attr.ib(type=Location) + name = attr.ib(type=str) + description = attr.ib(type=str) + tags = attr.ib(type=List[Tag]) + children = attr.ib(type=List[Scenario]) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Rule": + return cls( + id=data["id"], + keyword=data["keyword"], + location=Location.from_dict(data["location"]), + name=data["name"], + description=data["description"], + tags=[Tag.from_dict(tag) for tag in data["tags"]], + children=[Scenario.from_dict(child) for child in data["children"]], + ) + + +@attr.s +class Background: + id = attr.ib(type=str) + keyword = attr.ib(type=str) + location = attr.ib(type=Location) + name = attr.ib(type=str) + description = attr.ib(type=str) + steps = attr.ib(type=List[Step]) + + def __attrs_post_init__(self): + self.steps = _compute_given_when_then(self.steps) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Background": + return cls( + id=data["id"], + keyword=data["keyword"], + location=Location.from_dict(data["location"]), + name=data["name"], + description=data["description"], + steps=[Step.from_dict(step) for step in data["steps"]], + ) + + +@attr.s +class Child: + background = attr.ib(type=Optional[Background], default=None) + rule = attr.ib(type=Optional[Rule], default=None) + scenario = attr.ib(type=Optional[Scenario], default=None) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Child": + return cls( + background=Background.from_dict(data["background"]) if 
data.get("background") else None, + rule=Rule.from_dict(data["rule"]) if data.get("rule") else None, + scenario=Scenario.from_dict(data["scenario"]) if data.get("scenario") else None, + ) + + +@attr.s +class Feature: + keyword = attr.ib(type=str) + location = attr.ib(type=Location) + tags = attr.ib(type=List[Tag]) + name = attr.ib(type=str) + description = attr.ib(type=str) + children = attr.ib(type=List[Child]) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Feature": + return cls( + keyword=data["keyword"], + location=Location.from_dict(data["location"]), + tags=[Tag.from_dict(tag) for tag in data["tags"]], + name=data["name"], + description=data["description"], + children=[Child.from_dict(child) for child in data["children"]], + ) + + +@attr.s +class GherkinDocument: + feature = attr.ib(type=Feature) + comments = attr.ib(type=List[Comment]) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "GherkinDocument": + return cls( + feature=Feature.from_dict(data["feature"]), + comments=[Comment.from_dict(comment) for comment in data["comments"]], + ) def _compute_given_when_then(steps: List[Step]) -> List[Step]: @@ -149,22 +265,23 @@ def _compute_given_when_then(steps: List[Step]) -> List[Step]: return steps -class GherkinParser: - def __init__(self, abs_filename: str = None, encoding: str = "utf-8"): - self.abs_filename = Path(abs_filename) - self.encoding = encoding - - with open(self.abs_filename, encoding=self.encoding) as f: - self.feature_file_text = f.read() - try: - self.gherkin_data = Parser().parse(TokenScanner(self.feature_file_text)) - except CompositeParserException as e: - raise exceptions.FeatureError( - e.args[0], - e.errors[0].location["line"], - linecache.getline(str(self.abs_filename), e.errors[0].location["line"]).rstrip("\n"), - self.abs_filename, - ) from e - - def to_gherkin_document(self) -> GherkinDocument: - return GherkinDocument(**self.gherkin_data) +def _convert_to_raw_string(normal_string: str) -> str: + return 
normal_string.replace("\\", "\\\\") + + +def get_gherkin_document(abs_filename: str = None, encoding: str = "utf-8") -> GherkinDocument: + with open(abs_filename, encoding=encoding) as f: + feature_file_text = f.read() + + try: + gherkin_data = Parser().parse(TokenScanner(feature_file_text)) + except CompositeParserException as e: + raise exceptions.FeatureError( + e.args[0], + e.errors[0].location["line"], + linecache.getline(abs_filename, e.errors[0].location["line"]).rstrip("\n"), + abs_filename, + ) from e + + # Assuming gherkin_data is a dictionary with the structure expected by from_dict methods + return GherkinDocument.from_dict(gherkin_data) diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index 4a9c8e289..bd0db5394 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -9,10 +9,11 @@ from .gherkin_parser import Background as GherkinBackground from .gherkin_parser import Feature as GherkinFeature -from .gherkin_parser import GherkinDocument, GherkinParser +from .gherkin_parser import GherkinDocument from .gherkin_parser import Scenario as GherkinScenario from .gherkin_parser import Step as GherkinStep from .gherkin_parser import Tag as GherkinTag +from .gherkin_parser import get_gherkin_document from .types import GIVEN, THEN, WHEN STEP_PARAM_RE = re.compile(r"<(.+?)>") @@ -422,7 +423,7 @@ def _parse_feature_file(self) -> GherkinDocument: Returns: Dict: A Gherkin document representation of the feature file. """ - return GherkinParser(self.abs_filename, self.encoding).to_gherkin_document() + return get_gherkin_document(self.abs_filename, self.encoding) def parse(self): gherkin_doc: GherkinDocument = self._parse_feature_file() diff --git a/src/pytest_bdd/steps.py b/src/pytest_bdd/steps.py index 81967fa11..42b0d6837 100644 --- a/src/pytest_bdd/steps.py +++ b/src/pytest_bdd/steps.py @@ -149,7 +149,7 @@ def step( :return: Decorator function for the step. 
Example: - >>> @step("there is an wallet", target_fixture="wallet") + >>> @step("there is a wallet", target_fixture="wallet") >>> def _() -> dict[str, int]: >>> return {"eur": 0, "usd": 0} diff --git a/tests/feature/test_outline.py b/tests/feature/test_outline.py index b1a635bc9..db591266f 100644 --- a/tests/feature/test_outline.py +++ b/tests/feature/test_outline.py @@ -217,6 +217,6 @@ def _(string): r"bork |", r"bork||bork", r"|", - "bork \\", - "bork \\|", + r"bork \\", + r"bork \\|", ] diff --git a/tests/feature/test_steps.py b/tests/feature/test_steps.py index 94c11bcc3..56af6b154 100644 --- a/tests/feature/test_steps.py +++ b/tests/feature/test_steps.py @@ -361,7 +361,7 @@ def test_step_hooks(pytester): Scenario: When step's dependency a has failure Given I have a bar - When it's dependency fails + When its dependency fails Scenario: When step is not found Given not found @@ -392,7 +392,7 @@ def _(): def dependency(): raise Exception('dependency fails') - @when("it's dependency fails") + @when("its dependency fails") def _(dependency): pass @@ -479,7 +479,7 @@ def test_step_trace(pytester): Scenario: When step's dependency a has failure Given I have a bar - When it's dependency fails + When its dependency fails Scenario: When step is not found Given not found @@ -495,18 +495,18 @@ def test_step_trace(pytester): from pytest_bdd import given, when, scenario @given('I have a bar') - def i_have_bar(): + def _(): return 'bar' @when('it fails') - def when_it_fails(): + def _(): raise Exception('when fails') @pytest.fixture def dependency(): raise Exception('dependency fails') - @when("it's dependency fails") + @when("its dependency fails") def when_dependency_fails(dependency): pass From fec82709c752059dbdbb9aad75f2f447155e73b2 Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Sun, 8 Sep 2024 23:04:01 +0100 Subject: [PATCH 12/36] Another grammar fix --- src/pytest_bdd/scenario.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/src/pytest_bdd/scenario.py b/src/pytest_bdd/scenario.py index 870ae014f..4a939109d 100644 --- a/src/pytest_bdd/scenario.py +++ b/src/pytest_bdd/scenario.py @@ -424,7 +424,7 @@ def _scenario() -> None: for test_name in get_python_name_generator(scenario_name): if test_name not in caller_locals: - # found an unique test name + # found a unique test name caller_locals[test_name] = _scenario break found = True From 6676692d8669b5ea36f64728fd34fcf069ad4cab Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Sun, 8 Sep 2024 23:33:06 +0100 Subject: [PATCH 13/36] Use dataclasses and not attr --- poetry.lock | 21 +--- pyproject.toml | 1 - src/pytest_bdd/gherkin_parser.py | 161 +++++++++++++++---------------- 3 files changed, 81 insertions(+), 102 deletions(-) diff --git a/poetry.lock b/poetry.lock index b447dd0b1..4e8578f0a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,24 +1,5 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = 
["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - [[package]] name = "cachetools" version = "5.5.0" @@ -604,4 +585,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = ">=3.8" -content-hash = "455db4f2d48159aebda8bcbfb75fbf435af1029eeeaeceb1391efc8820f871da" +content-hash = "6b52d5b35db2892ae49a2d655a8f19fb430b59b3f8c4dc6881526f0729424580" diff --git a/pyproject.toml b/pyproject.toml index ed8d3e3fe..7d874a5a0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,6 @@ pytest = ">=6.2.0" typing-extensions = "*" packaging = "*" gherkin-official = "^29.0.0" -attrs = "^24.2.0" [tool.poetry.group.dev.dependencies] tox = ">=4.11.3" diff --git a/src/pytest_bdd/gherkin_parser.py b/src/pytest_bdd/gherkin_parser.py index bb93f7c75..543670803 100644 --- a/src/pytest_bdd/gherkin_parser.py +++ b/src/pytest_bdd/gherkin_parser.py @@ -1,8 +1,8 @@ import linecache import textwrap +from dataclasses import dataclass, field from typing import Any, Dict, List, Optional -import attr from gherkin.errors import CompositeParserException from gherkin.parser import Parser from gherkin.token_scanner import TokenScanner @@ -11,41 +11,41 @@ from .types import STEP_TYPES -@attr.s +@dataclass class Location: - column = attr.ib(type=int) - line = attr.ib(type=int) + column: int + line: int @classmethod def from_dict(cls, data: Dict[str, Any]) -> "Location": return cls(column=data["column"], line=data["line"]) -@attr.s +@dataclass class Comment: - location = attr.ib(type=Location) - text = attr.ib(type=str) + location: Location + text: str @classmethod def from_dict(cls, data: Dict[str, Any]) -> "Comment": return cls(location=Location.from_dict(data["location"]), text=data["text"]) -@attr.s +@dataclass class Cell: - location = attr.ib(type=Location) - value = 
attr.ib(type=str) + location: Location + value: str @classmethod def from_dict(cls, data: Dict[str, Any]) -> "Cell": return cls(location=Location.from_dict(data["location"]), value=_convert_to_raw_string(data["value"])) -@attr.s +@dataclass class Row: - id = attr.ib(type=str) - location = attr.ib(type=Location) - cells = attr.ib(type=List[Cell]) + id: str + location: Location + cells: List[Cell] @classmethod def from_dict(cls, data: Dict[str, Any]) -> "Row": @@ -56,12 +56,12 @@ def from_dict(cls, data: Dict[str, Any]) -> "Row": ) -@attr.s +@dataclass class DataTable: - location = attr.ib(type=Location) - name = attr.ib(type=Optional[str], default=None) - tableHeader = attr.ib(type=Optional[Row], default=None) - tableBody = attr.ib(type=Optional[List[Row]], factory=list) + location: Location + name: Optional[str] = None + tableHeader: Optional[Row] = None + tableBody: Optional[List[Row]] = field(default_factory=list) @classmethod def from_dict(cls, data: Dict[str, Any]) -> "DataTable": @@ -73,13 +73,13 @@ def from_dict(cls, data: Dict[str, Any]) -> "DataTable": ) -@attr.s +@dataclass class DocString: - content = attr.ib(type=str) - delimiter = attr.ib(type=str) - location = attr.ib(type=Location) + content: str + delimiter: str + location: Location - def __attrs_post_init__(self): + def __post_init__(self): self.content = textwrap.dedent(self.content) @classmethod @@ -87,17 +87,17 @@ def from_dict(cls, data: Dict[str, Any]) -> "DocString": return cls(content=data["content"], delimiter=data["delimiter"], location=Location.from_dict(data["location"])) -@attr.s +@dataclass class Step: - id = attr.ib(type=str) - keyword = attr.ib(type=str) - keywordType = attr.ib(type=str) - location = attr.ib(type=Location) - text = attr.ib(type=str) - dataTable = attr.ib(type=Optional[DataTable], default=None) - docString = attr.ib(type=Optional[DocString], default=None) - - def __attrs_post_init__(self): + id: str + keyword: str + keywordType: str + location: Location + text: str + 
dataTable: Optional[DataTable] = None + docString: Optional[DocString] = None + + def __post_init__(self): self.keyword = self.keyword.lower().strip() @property @@ -121,29 +121,29 @@ def from_dict(cls, data: Dict[str, Any]) -> "Step": ) -@attr.s +@dataclass class Tag: - id = attr.ib(type=str) - location = attr.ib(type=Location) - name = attr.ib(type=str) + id: str + location: Location + name: str @classmethod def from_dict(cls, data: Dict[str, Any]) -> "Tag": return cls(id=data["id"], location=Location.from_dict(data["location"]), name=data["name"]) -@attr.s +@dataclass class Scenario: - id = attr.ib(type=str) - keyword = attr.ib(type=str) - location = attr.ib(type=Location) - name = attr.ib(type=str) - description = attr.ib(type=str) - steps = attr.ib(type=List[Step]) - tags = attr.ib(type=List[Tag]) - examples = attr.ib(type=Optional[List[DataTable]], factory=list) - - def __attrs_post_init__(self): + id: str + keyword: str + location: Location + name: str + description: str + steps: List[Step] + tags: List[Tag] + examples: Optional[List[DataTable]] = field(default_factory=list) + + def __post_init__(self): self.steps = _compute_given_when_then(self.steps) @classmethod @@ -160,15 +160,15 @@ def from_dict(cls, data: Dict[str, Any]) -> "Scenario": ) -@attr.s +@dataclass class Rule: - id = attr.ib(type=str) - keyword = attr.ib(type=str) - location = attr.ib(type=Location) - name = attr.ib(type=str) - description = attr.ib(type=str) - tags = attr.ib(type=List[Tag]) - children = attr.ib(type=List[Scenario]) + id: str + keyword: str + location: Location + name: str + description: str + tags: List[Tag] + children: List[Scenario] @classmethod def from_dict(cls, data: Dict[str, Any]) -> "Rule": @@ -183,16 +183,16 @@ def from_dict(cls, data: Dict[str, Any]) -> "Rule": ) -@attr.s +@dataclass class Background: - id = attr.ib(type=str) - keyword = attr.ib(type=str) - location = attr.ib(type=Location) - name = attr.ib(type=str) - description = attr.ib(type=str) - steps = 
attr.ib(type=List[Step]) - - def __attrs_post_init__(self): + id: str + keyword: str + location: Location + name: str + description: str + steps: List[Step] + + def __post_init__(self): self.steps = _compute_given_when_then(self.steps) @classmethod @@ -207,11 +207,11 @@ def from_dict(cls, data: Dict[str, Any]) -> "Background": ) -@attr.s +@dataclass class Child: - background = attr.ib(type=Optional[Background], default=None) - rule = attr.ib(type=Optional[Rule], default=None) - scenario = attr.ib(type=Optional[Scenario], default=None) + background: Optional[Background] = None + rule: Optional[Rule] = None + scenario: Optional[Scenario] = None @classmethod def from_dict(cls, data: Dict[str, Any]) -> "Child": @@ -222,14 +222,14 @@ def from_dict(cls, data: Dict[str, Any]) -> "Child": ) -@attr.s +@dataclass class Feature: - keyword = attr.ib(type=str) - location = attr.ib(type=Location) - tags = attr.ib(type=List[Tag]) - name = attr.ib(type=str) - description = attr.ib(type=str) - children = attr.ib(type=List[Child]) + keyword: str + location: Location + tags: List[Tag] + name: str + description: str + children: List[Child] @classmethod def from_dict(cls, data: Dict[str, Any]) -> "Feature": @@ -243,10 +243,10 @@ def from_dict(cls, data: Dict[str, Any]) -> "Feature": ) -@attr.s +@dataclass class GherkinDocument: - feature = attr.ib(type=Feature) - comments = attr.ib(type=List[Comment]) + feature: Feature + comments: List[Comment] @classmethod def from_dict(cls, data: Dict[str, Any]) -> "GherkinDocument": @@ -283,5 +283,4 @@ def get_gherkin_document(abs_filename: str = None, encoding: str = "utf-8") -> G abs_filename, ) from e - # Assuming gherkin_data is a dictionary with the structure expected by from_dict methods return GherkinDocument.from_dict(gherkin_data) From becfed2749ae89bc6a27fec6ae2b80b85737a997 Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Thu, 12 Sep 2024 21:34:29 +0100 Subject: [PATCH 14/36] Response to feedback --- CHANGES.rst | 4 + 
src/pytest_bdd/exceptions.py | 59 ++++++++- src/pytest_bdd/gherkin_parser.py | 207 +++++++++++++++++++----------- src/pytest_bdd/parser.py | 10 +- tests/feature/test_multiline.py | 8 +- tests/feature/test_no_scenario.py | 2 +- tests/feature/test_outline.py | 3 +- tests/feature/test_wrong.py | 2 +- tests/parser/test.feature | 110 ++++++++++++++++ tests/parser/test_parser.py | 16 +++ 10 files changed, 330 insertions(+), 91 deletions(-) create mode 100644 tests/parser/test.feature create mode 100644 tests/parser/test_parser.py diff --git a/CHANGES.rst b/CHANGES.rst index ec0138271..dba727e7c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,6 +3,10 @@ Changelog Unreleased ---------- +- Use `gherkin-official` parser to replace custom parsing logic. +- Multiline steps must now always use triple-quotes for the additional lines. +- All feature files must now use the keyword `Feature:` to be considered valid. +- Tags can no longer have spaces (e.g. "@tag one" "@tag two" are no longer valid). 7.2.0 ---------- diff --git a/src/pytest_bdd/exceptions.py b/src/pytest_bdd/exceptions.py index a7bf60ff0..df3a34dae 100644 --- a/src/pytest_bdd/exceptions.py +++ b/src/pytest_bdd/exceptions.py @@ -27,11 +27,58 @@ class NoScenariosFound(Exception): """No scenarios found.""" -class FeatureError(Exception): - """Feature parse error.""" +class GherkinParseError(Exception): + """Base class for all Gherkin parsing errors.""" - message = "{0}.\nLine number: {1}.\nLine: {2}.\nFile: {3}" + def __init__(self, message, line, line_content, filename): + super().__init__(message) + self.message = message + self.line = line + self.line_content = line_content + self.filename = filename + self.line = line + self.line_content = line_content + self.filename = filename - def __str__(self) -> str: - """String representation.""" - return self.message.format(*self.args) + def __str__(self): + return f"{self.__class__.__name__}: {self.message}\nLine number: {self.line}\nLine: {self.line_content}\nFile: 
{self.filename}" + + +class FeatureError(GherkinParseError): + pass + + +class BackgroundError(GherkinParseError): + pass + + +class ScenarioOutlineError(GherkinParseError): + pass + + +class ScenarioError(GherkinParseError): + pass + + +class ExamplesError(GherkinParseError): + pass + + +class StepError(GherkinParseError): + pass + + +class TagError(GherkinParseError): + pass + + +class RuleError(GherkinParseError): + pass + + +class DocStringError(GherkinParseError): + pass + + +class TokenError(GherkinParseError): + pass diff --git a/src/pytest_bdd/gherkin_parser.py b/src/pytest_bdd/gherkin_parser.py index 543670803..77658cb5e 100644 --- a/src/pytest_bdd/gherkin_parser.py +++ b/src/pytest_bdd/gherkin_parser.py @@ -1,5 +1,9 @@ +from __future__ import annotations + import linecache +import re import textwrap +import typing from dataclasses import dataclass, field from typing import Any, Dict, List, Optional @@ -8,7 +12,68 @@ from gherkin.token_scanner import TokenScanner from . import exceptions -from .types import STEP_TYPES + +if typing.TYPE_CHECKING: + from typing import Self + + +ERROR_PATTERNS = [ + ( + re.compile(r"expected:.*got 'Feature.*'"), + exceptions.FeatureError, + "Multiple features are not allowed in a single feature file.", + ), + ( + re.compile(r"expected:.*got '(?:Given|When|Then|And|But).*'"), + exceptions.FeatureError, + "Step definition outside of a Scenario or a Background.", + ), + ( + re.compile(r"expected:.*got 'Background.*'"), + exceptions.BackgroundError, + "Multiple 'Background' sections detected. Only one 'Background' is allowed per feature.", + ), + ( + re.compile(r"expected:.*got 'Scenario Outline.*'"), + exceptions.ScenarioOutlineError, + "'Scenario Outline' requires steps before 'Examples'.", + ), + ( + re.compile(r"expected:.*got 'Scenario.*'"), + exceptions.ScenarioError, + "Misplaced or incorrect 'Scenario' keyword. 
Ensure it's correctly placed.", + ), + ( + re.compile(r"expected:.*got 'Examples.*'"), + exceptions.ExamplesError, + "'Examples' must follow a valid 'Scenario Outline' and contain table rows.", + ), + ( + re.compile(r"expected:.*got 'Given.*'"), + exceptions.StepError, + "Improper step keyword detected. Ensure correct order and indentation for steps (Given, When, Then, etc.).", + ), + ( + re.compile(r"expected:.*got 'TagLine.*'"), + exceptions.TagError, + "Tags are misplaced. They should be directly above features, scenarios, or outlines.", + ), + ( + re.compile(r"expected:.*got 'Rule.*'"), + exceptions.RuleError, + "Misplaced or incorrectly formatted 'Rule'. Ensure it follows the feature structure.", + ), + ( + re.compile(r"expected:.*got 'DocString.*'"), + exceptions.DocStringError, + 'DocString must be enclosed in triple quotes ("""). Ensure proper formatting.', + ), + ( + re.compile(r"expected:.*got '.*'"), + exceptions.TokenError, + "Unexpected token found. Check Gherkin syntax near the reported error.", + ), +] @dataclass @@ -17,7 +82,7 @@ class Location: line: int @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "Location": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls(column=data["column"], line=data["line"]) @@ -27,7 +92,7 @@ class Comment: text: str @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "Comment": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls(location=Location.from_dict(data["location"]), text=data["text"]) @@ -37,18 +102,18 @@ class Cell: value: str @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "Cell": - return cls(location=Location.from_dict(data["location"]), value=_convert_to_raw_string(data["value"])) + def from_dict(cls, data: dict[str, Any]) -> Self: + return cls(location=Location.from_dict(data["location"]), value=_to_raw_string(data["value"])) @dataclass class Row: id: str location: Location - cells: List[Cell] + cells: list[Cell] @classmethod - def from_dict(cls, data: 
Dict[str, Any]) -> "Row": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls( id=data["id"], location=Location.from_dict(data["location"]), @@ -59,12 +124,12 @@ def from_dict(cls, data: Dict[str, Any]) -> "Row": @dataclass class DataTable: location: Location - name: Optional[str] = None - tableHeader: Optional[Row] = None - tableBody: Optional[List[Row]] = field(default_factory=list) + name: str | None = None + tableHeader: Row | None = None + tableBody: list[Row] | None = field(default_factory=list) @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "DataTable": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls( location=Location.from_dict(data["location"]), name=data.get("name"), @@ -79,12 +144,13 @@ class DocString: delimiter: str location: Location - def __post_init__(self): - self.content = textwrap.dedent(self.content) - @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "DocString": - return cls(content=data["content"], delimiter=data["delimiter"], location=Location.from_dict(data["location"])) + def from_dict(cls, data: dict[str, Any]) -> Self: + return cls( + content=textwrap.dedent(data["content"]), + delimiter=data["delimiter"], + location=Location.from_dict(data["location"]), + ) @dataclass @@ -94,25 +160,14 @@ class Step: keywordType: str location: Location text: str - dataTable: Optional[DataTable] = None - docString: Optional[DocString] = None - - def __post_init__(self): - self.keyword = self.keyword.lower().strip() - - @property - def given_when_then(self) -> str: - return getattr(self, "_gwt", "") - - @given_when_then.setter - def given_when_then(self, gwt: str) -> None: - self._gwt = gwt + dataTable: DataTable | None = None + docString: DocString | None = None @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "Step": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls( id=data["id"], - keyword=data["keyword"], + keyword=data["keyword"].strip(), keywordType=data["keywordType"], 
location=Location.from_dict(data["location"]), text=data["text"], @@ -128,7 +183,7 @@ class Tag: name: str @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "Tag": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls(id=data["id"], location=Location.from_dict(data["location"]), name=data["name"]) @@ -139,15 +194,12 @@ class Scenario: location: Location name: str description: str - steps: List[Step] - tags: List[Tag] - examples: Optional[List[DataTable]] = field(default_factory=list) - - def __post_init__(self): - self.steps = _compute_given_when_then(self.steps) + steps: list[Step] + tags: list[Tag] + examples: list[DataTable] | None = field(default_factory=list) @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "Scenario": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls( id=data["id"], keyword=data["keyword"], @@ -167,11 +219,11 @@ class Rule: location: Location name: str description: str - tags: List[Tag] - children: List[Scenario] + tags: list[Tag] + children: list[Child] @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "Rule": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls( id=data["id"], keyword=data["keyword"], @@ -179,7 +231,7 @@ def from_dict(cls, data: Dict[str, Any]) -> "Rule": name=data["name"], description=data["description"], tags=[Tag.from_dict(tag) for tag in data["tags"]], - children=[Scenario.from_dict(child) for child in data["children"]], + children=[Child.from_dict(child) for child in data["children"]], ) @@ -190,13 +242,10 @@ class Background: location: Location name: str description: str - steps: List[Step] - - def __post_init__(self): - self.steps = _compute_given_when_then(self.steps) + steps: list[Step] @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "Background": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls( id=data["id"], keyword=data["keyword"], @@ -209,12 +258,12 @@ def from_dict(cls, data: Dict[str, Any]) -> "Background": @dataclass class 
Child: - background: Optional[Background] = None - rule: Optional[Rule] = None - scenario: Optional[Scenario] = None + background: Background | None = None + rule: Rule | None = None + scenario: Scenario | None = None @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "Child": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls( background=Background.from_dict(data["background"]) if data.get("background") else None, rule=Rule.from_dict(data["rule"]) if data.get("rule") else None, @@ -226,13 +275,13 @@ def from_dict(cls, data: Dict[str, Any]) -> "Child": class Feature: keyword: str location: Location - tags: List[Tag] + tags: list[Tag] name: str description: str - children: List[Child] + children: list[Child] @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "Feature": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls( keyword=data["keyword"], location=Location.from_dict(data["location"]), @@ -246,26 +295,17 @@ def from_dict(cls, data: Dict[str, Any]) -> "Feature": @dataclass class GherkinDocument: feature: Feature - comments: List[Comment] + comments: list[Comment] @classmethod - def from_dict(cls, data: Dict[str, Any]) -> "GherkinDocument": + def from_dict(cls, data: dict[str, Any]) -> Self: return cls( feature=Feature.from_dict(data["feature"]), comments=[Comment.from_dict(comment) for comment in data["comments"]], ) -def _compute_given_when_then(steps: List[Step]) -> List[Step]: - last_gwt = None - for step in steps: - if step.keyword in STEP_TYPES: - last_gwt = step.keyword - step.given_when_then = last_gwt - return steps - - -def _convert_to_raw_string(normal_string: str) -> str: +def _to_raw_string(normal_string: str) -> str: return normal_string.replace("\\", "\\\\") @@ -276,11 +316,30 @@ def get_gherkin_document(abs_filename: str = None, encoding: str = "utf-8") -> G try: gherkin_data = Parser().parse(TokenScanner(feature_file_text)) except CompositeParserException as e: - raise exceptions.FeatureError( - e.args[0], 
- e.errors[0].location["line"], - linecache.getline(abs_filename, e.errors[0].location["line"]).rstrip("\n"), - abs_filename, - ) from e - + message = e.args[0] + line = e.errors[0].location["line"] + line_content = linecache.getline(abs_filename, e.errors[0].location["line"]).rstrip("\n") + filename = abs_filename + gherkin_error_handler = GherkinParserErrorHandler() + gherkin_error_handler(message, line, line_content, filename) + # If no patterns matched, raise a generic GherkinParserError + raise exceptions.GherkinParseError(f"Unknown parsing error: {message}", line, line_content, filename) + + # At this point, the `gherkin_data` should be valid if no exception was raised return GherkinDocument.from_dict(gherkin_data) + + +class GherkinParserErrorHandler: + """Parses raw Gherkin parser errors and converts them to human-readable exceptions.""" + + def __call__(self, raw_error: str, line: int, line_content: str, filename: str): + """Map the error message to a specific exception type and raise it.""" + # Split the raw_error into individual lines + error_lines = raw_error.splitlines() + + # Check each line against all error patterns + for error_line in error_lines: + for pattern, exception_class, message in ERROR_PATTERNS: + if pattern.search(error_line): + # If a match is found, raise the corresponding exception with the formatted message + raise exception_class(message, line, line_content, filename) diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index bd0db5394..7a9ba4a7c 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -14,7 +14,7 @@ from .gherkin_parser import Step as GherkinStep from .gherkin_parser import Tag as GherkinTag from .gherkin_parser import get_gherkin_document -from .types import GIVEN, THEN, WHEN +from .types import GIVEN, STEP_TYPES, THEN, WHEN STEP_PARAM_RE = re.compile(r"<(.+?)>") COMMENT_RE = re.compile(r"(^|(?<=\s))#") @@ -210,7 +210,7 @@ class Step: Attributes: type (str): The type of step (e.g., 
'given', 'when', 'then'). - _name (str): The name of the step. + name (str): The name of the step. line_number (int): The line number where the step starts in the file. indent (int): The indentation level of the step. keyword (str): The keyword used for the step (e.g., 'Given', 'When', 'Then'). @@ -358,14 +358,18 @@ def parse_steps(steps_data: list[GherkinStep]) -> list[Step]: List[Step]: A list of Step objects. """ steps = [] + current_type = None for step_data in steps_data: name = strip_comments(step_data.text) if step_data.docString: name = f"{name}\n{step_data.docString.content}" + keyword = step_data.keyword.lower() + if keyword in STEP_TYPES: + current_type = keyword steps.append( Step( name=name, - type=step_data.given_when_then, + type=current_type, indent=step_data.location.column - 1, line_number=step_data.location.line, keyword=step_data.keyword.title(), diff --git a/tests/feature/test_multiline.py b/tests/feature/test_multiline.py index ff407e487..4c4f387f6 100644 --- a/tests/feature/test_multiline.py +++ b/tests/feature/test_multiline.py @@ -10,18 +10,18 @@ [ ( textwrap.dedent( - '''\ + """\ Feature: Multiline Scenario: Multiline step using sub indentation Given I have a step with: - """ + \"\"\" Some Extra Lines - """ + \"\"\" Then the text should be parsed with correct indentation - ''' + """ ), "Some\n\nExtra\nLines", ) diff --git a/tests/feature/test_no_scenario.py b/tests/feature/test_no_scenario.py index 5eb68e11c..f3bcd7d3c 100644 --- a/tests/feature/test_no_scenario.py +++ b/tests/feature/test_no_scenario.py @@ -27,4 +27,4 @@ def test_no_scenarios(pytester): ) ) result = pytester.runpytest() - result.stdout.fnmatch_lines(["*FeatureError*"]) + result.stdout.fnmatch_lines(["*FeatureError: Step definition outside of a Scenario or a Background.*"]) diff --git a/tests/feature/test_outline.py b/tests/feature/test_outline.py index db591266f..875d0674e 100644 --- a/tests/feature/test_outline.py +++ b/tests/feature/test_outline.py @@ -171,8 +171,7 @@ 
def test_outline_with_escaped_pipes(pytester): pytester.makefile( ".feature", outline=textwrap.dedent( - r""" - Feature: Outline With Special characters + r"""Feature: Outline With Special characters Scenario Outline: Outline with escaped pipe character # Just print the string so that we can assert later what it was by reading the output diff --git a/tests/feature/test_wrong.py b/tests/feature/test_wrong.py index 002cd671c..f8c405439 100644 --- a/tests/feature/test_wrong.py +++ b/tests/feature/test_wrong.py @@ -50,4 +50,4 @@ def test_wrong(): ) result = pytester.runpytest() result.assert_outcomes(errors=1) - result.stdout.fnmatch_lines("*FeatureError: *") + result.stdout.fnmatch_lines("*FeatureError: Multiple features are not allowed in a single feature file.*") diff --git a/tests/parser/test.feature b/tests/parser/test.feature new file mode 100644 index 000000000..62950bff4 --- /dev/null +++ b/tests/parser/test.feature @@ -0,0 +1,110 @@ +# This is a comment + +Feature: User login + + As a registered user + I want to be able to log in + So that I can access my account + + Background: + # Background steps run before each scenario + Given the login page is open + + # Scenario within the rule + Scenario: Successful login with valid credentials + Given the user enters a valid username + And the user enters a valid password + When the user clicks the login button + Then the user should see the dashboard + + Scenario Outline: Unsuccessful login with invalid credentials + Given the user enters "" as username + And the user enters "" as password + When the user clicks the login button + Then the user should see an error message "" + + # Examples table provides data for the scenario outline + Examples: + | username | password | error_message | + | invalidUser | wrongPass | Invalid username or password | + | user123 | incorrect | Invalid username or password | + + Scenario: Login with empty username + Given the user enters an empty username + And the user enters a valid 
password + When the user clicks the login button + Then the user should see an error message "Username cannot be empty" + + Scenario: Login with empty password + Given the user enters a valid username + And the user enters an empty password + When the user clicks the login button + Then the user should see an error message "Password cannot be empty" + + Scenario: Login with SQL injection attempt + Given the user enters "admin' OR '1'='1" as username + And the user enters "password" as password + When the user clicks the login button + Then the user should see an error message "Invalid username or password" + + @login @critical + Scenario: Login button disabled for empty fields + Given the user has not entered any username or password + Then the login button should be disabled + + # Tags can be used to categorize scenarios + @smoke + Scenario: Login page loads correctly + Given the login page is loaded + Then the login form should be visible + + # Using Data Tables for more complex data + Scenario: Login with multiple sets of credentials + Given the following users are registered: + | username | password | + | user1 | pass1 | + | user2 | pass2 | + | user3 | pass3 | + When the user tries to log in with the following credentials: + | username | password | + | user1 | pass1 | + | user2 | wrongPass | + Then the login attempts should result in: + | username | result | + | user1 | success | + | user2 | failure | + + # Using Doc Strings for multi-line text + Scenario: Check login error message with detailed explanation + Given the user enters invalid credentials + When the user clicks the login button + Then the user should see the following error message: + """ + Your login attempt was unsuccessful. + Please check your username and password and try again. + If the problem persists, contact support. 
+ """ + + @some-tag + Rule: a sale cannot happen if there is no stock + # Unhappy path + Example: No chocolates left + Given the customer has 100 cents + And there are no chocolate bars in stock + When the customer tries to buy a 1 cent chocolate bar + Then the sale should not happen + + Rule: A sale cannot happen if the customer does not have enough money + # Unhappy path + Example: Not enough money + Given the customer has 100 cents + And there are chocolate bars in stock + When the customer tries to buy a 125 cent chocolate bar + Then the sale should not happen + + # Happy path + Example: Enough money + Given the customer has 100 cents + And there are chocolate bars in stock + When the customer tries to buy a 75 cent chocolate bar + Then the sale should happen diff --git a/tests/parser/test_parser.py b/tests/parser/test_parser.py new file mode 100644 index 000000000..1cac4441e --- /dev/null +++ b/tests/parser/test_parser.py @@ -0,0 +1,16 @@ +from pathlib import Path + +from src.pytest_bdd.gherkin_parser import get_gherkin_document + + +def test_parser(): + # Get the directory of the current file + test_dir = Path(__file__).parent + + # Resolve the path to the feature file relative to the test directory + feature_file = test_dir / "test.feature" + + # Convert to string if necessary, but Path objects are often used directly + feature_file_path = str(feature_file.resolve()) + + get_gherkin_document(feature_file_path) From 2c8455b5199a61c81d62ae224adf470208743114 Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Thu, 12 Sep 2024 21:34:39 +0100 Subject: [PATCH 15/36] Response to feedback --- tests/parser/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 tests/parser/__init__.py diff --git a/tests/parser/__init__.py b/tests/parser/__init__.py new file mode 100644 index 000000000..e69de29bb From c3008c1debe61dd2cc9d6dc799db7048b1467c25 Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Thu, 12 Sep 2024 21:39:15 +0100 Subject: [PATCH 16/36] 
Couple of tidy ups --- tests/feature/test_background.py | 8 ++++---- tests/parser/test_parser.py | 5 ----- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/tests/feature/test_background.py b/tests/feature/test_background.py index 4f7fc0c86..17565ec1e 100644 --- a/tests/feature/test_background.py +++ b/tests/feature/test_background.py @@ -2,16 +2,16 @@ import textwrap -FEATURE = '''\ +FEATURE = """\ Feature: Background support Background: Given foo has a value "bar" And a background step with multiple lines: - """ + \"\"\" one two - """ + \"\"\" Scenario: Basic usage @@ -23,7 +23,7 @@ Then foo should have value "dummy" And foo should not have value "bar" -''' +""" STEPS = r"""\ import re diff --git a/tests/parser/test_parser.py b/tests/parser/test_parser.py index 1cac4441e..029886152 100644 --- a/tests/parser/test_parser.py +++ b/tests/parser/test_parser.py @@ -4,13 +4,8 @@ def test_parser(): - # Get the directory of the current file test_dir = Path(__file__).parent - - # Resolve the path to the feature file relative to the test directory feature_file = test_dir / "test.feature" - - # Convert to string if necessary, but Path objects are often used directly feature_file_path = str(feature_file.resolve()) get_gherkin_document(feature_file_path) From 9c12dbf60f8f73a191098eaecf195383eb446d5a Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Thu, 12 Sep 2024 21:55:19 +0100 Subject: [PATCH 17/36] Forgot to fix background in steps and revert test that was skipped --- src/pytest_bdd/parser.py | 2 ++ tests/generation/test_generate_missing.py | 6 ++++++ 2 files changed, 8 insertions(+) diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index 7a9ba4a7c..701f5c716 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -419,6 +419,8 @@ def parse_background(self, background_data: GherkinBackground, feature: Feature) line_number=background_data.location.line, ) background.steps = self.parse_steps(background_data.steps) + for step in 
background.steps: + step.background = background return background def _parse_feature_file(self) -> GherkinDocument: diff --git a/tests/generation/test_generate_missing.py b/tests/generation/test_generate_missing.py index 4d02e0f4c..d6be9be6f 100644 --- a/tests/generation/test_generate_missing.py +++ b/tests/generation/test_generate_missing.py @@ -29,9 +29,11 @@ def test_generate_missing(pytester): Scenario: Scenario tests which are already bound to the tests stay as is Given I have a bar + Scenario: Code is generated for scenarios which are not bound to any tests Given I have a bar + Scenario: Code is generated for scenario steps which are not yet defined(implemented) Given I have a custom bar """ @@ -78,6 +80,10 @@ def test_missing_steps(): ] ) + result.stdout.fnmatch_lines( + ['Step Given "I have a foobar" is not defined in the background of the feature "Missing code generation" *'] + ) + result.stdout.fnmatch_lines(["Please place the code above to the test file(s):"]) From 93a11ae120f28cd2d5228fd2f0d39f7f0775c279 Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sat, 14 Sep 2024 22:56:07 +0200 Subject: [PATCH 18/36] Fix import (Python < 3.11 compat) --- src/pytest_bdd/gherkin_parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pytest_bdd/gherkin_parser.py b/src/pytest_bdd/gherkin_parser.py index 77658cb5e..2bdd7a6f6 100644 --- a/src/pytest_bdd/gherkin_parser.py +++ b/src/pytest_bdd/gherkin_parser.py @@ -5,7 +5,7 @@ import textwrap import typing from dataclasses import dataclass, field -from typing import Any, Dict, List, Optional +from typing import Any from gherkin.errors import CompositeParserException from gherkin.parser import Parser @@ -14,7 +14,7 @@ from . 
import exceptions if typing.TYPE_CHECKING: - from typing import Self + from typing_extensions import Self ERROR_PATTERNS = [ From fee0eb95802715895e520b585d1c31c630211e3d Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sat, 14 Sep 2024 23:00:57 +0200 Subject: [PATCH 19/36] Remove default to None --- src/pytest_bdd/gherkin_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytest_bdd/gherkin_parser.py b/src/pytest_bdd/gherkin_parser.py index 2bdd7a6f6..daba96d6a 100644 --- a/src/pytest_bdd/gherkin_parser.py +++ b/src/pytest_bdd/gherkin_parser.py @@ -309,7 +309,7 @@ def _to_raw_string(normal_string: str) -> str: return normal_string.replace("\\", "\\\\") -def get_gherkin_document(abs_filename: str = None, encoding: str = "utf-8") -> GherkinDocument: +def get_gherkin_document(abs_filename: str, encoding: str = "utf-8") -> GherkinDocument: with open(abs_filename, encoding=encoding) as f: feature_file_text = f.read() From 7cbfc4757baa99d3993a4ce686e55f32c108c971 Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sun, 15 Sep 2024 11:01:01 +0200 Subject: [PATCH 20/36] Revert string literals to their original form --- tests/feature/test_background.py | 8 +++--- tests/feature/test_description.py | 46 ++++++++++++------------------- tests/feature/test_multiline.py | 8 +++--- 3 files changed, 26 insertions(+), 36 deletions(-) diff --git a/tests/feature/test_background.py b/tests/feature/test_background.py index 17565ec1e..4f7fc0c86 100644 --- a/tests/feature/test_background.py +++ b/tests/feature/test_background.py @@ -2,16 +2,16 @@ import textwrap -FEATURE = """\ +FEATURE = '''\ Feature: Background support Background: Given foo has a value "bar" And a background step with multiple lines: - \"\"\" + """ one two - \"\"\" + """ Scenario: Basic usage @@ -23,7 +23,7 @@ Then foo should have value "dummy" And foo should not have value "bar" -""" +''' STEPS = r"""\ 
import re diff --git a/tests/feature/test_description.py b/tests/feature/test_description.py index 5d0dcb96f..83d3a12b7 100644 --- a/tests/feature/test_description.py +++ b/tests/feature/test_description.py @@ -30,39 +30,29 @@ def test_description(pytester): pytester.makepyfile( textwrap.dedent( - """\ - import textwrap - from pytest_bdd import given, scenario - - @scenario("description.feature", "Description") - def test_description(): - pass + r''' + import textwrap + from pytest_bdd import given, scenario + @scenario("description.feature", "Description") + def test_description(): + pass - @given("I have a bar") - def _(): - return "bar" - def test_feature_description(): - assert test_description.__scenario__.feature.description == textwrap.dedent( - \"\"\"\\ - In order to achieve something - I want something - Because it will be cool + @given("I have a bar") + def _(): + return "bar" + def test_feature_description(): + assert test_description.__scenario__.feature.description == textwrap.dedent( + "In order to achieve something\nI want something\nBecause it will be cool\n\n\nSome description goes here." + ) - Some description goes here.\"\"\" - ) - - def test_scenario_description(): - assert test_description.__scenario__.description == textwrap.dedent( - \"\"\"\\ - Also, the scenario can have a description. 
- - It goes here between the scenario name - and the first step.\"\"\" - ) - """ + def test_scenario_description(): + assert test_description.__scenario__.description == textwrap.dedent( + "Also, the scenario can have a description.\n\nIt goes here between the scenario name\nand the first step.""" + ) + ''' ) ) diff --git a/tests/feature/test_multiline.py b/tests/feature/test_multiline.py index 4c4f387f6..ff407e487 100644 --- a/tests/feature/test_multiline.py +++ b/tests/feature/test_multiline.py @@ -10,18 +10,18 @@ [ ( textwrap.dedent( - """\ + '''\ Feature: Multiline Scenario: Multiline step using sub indentation Given I have a step with: - \"\"\" + """ Some Extra Lines - \"\"\" + """ Then the text should be parsed with correct indentation - """ + ''' ), "Some\n\nExtra\nLines", ) From 2f3acbd0dcf971bded3bfcdb5c769579d8db139f Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Sun, 15 Sep 2024 13:37:36 +0100 Subject: [PATCH 21/36] Response to feedback and make mypy happy. --- src/pytest_bdd/compat.py | 4 +- src/pytest_bdd/cucumber_json.py | 8 +- src/pytest_bdd/exceptions.py | 22 +- src/pytest_bdd/feature.py | 13 +- src/pytest_bdd/generation.py | 6 +- src/pytest_bdd/gherkin_parser.py | 72 +- src/pytest_bdd/gherkin_terminal_reporter.py | 2 +- src/pytest_bdd/parser.py | 55 +- src/pytest_bdd/reporting.py | 11 +- src/pytest_bdd/scenario.py | 12 +- src/pytest_bdd/steps.py | 8 +- src/pytest_bdd/types.py | 11 +- src/pytest_bdd/utils.py | 6 +- tests/feature/test_cucumber_json.py | 4 +- tests/feature/test_report.py | 3 +- tests/parser/refactor_parser.py | 19 + tests/parser/test.feature | 1 - tests/parser/test_errors.py | 253 +++++++ tests/parser/test_parser.py | 764 +++++++++++++++++++- 19 files changed, 1147 insertions(+), 127 deletions(-) create mode 100644 tests/parser/refactor_parser.py create mode 100644 tests/parser/test_errors.py diff --git a/src/pytest_bdd/compat.py b/src/pytest_bdd/compat.py index 079f7de01..f7e5d0f37 100644 --- a/src/pytest_bdd/compat.py +++ 
b/src/pytest_bdd/compat.py @@ -34,7 +34,7 @@ def inject_fixture(request: FixtureRequest, arg: str, value: Any) -> None: else: def getfixturedefs(fixturemanager: FixtureManager, fixturename: str, node: Node) -> Sequence[FixtureDef] | None: - return fixturemanager.getfixturedefs(fixturename, node.nodeid) + return fixturemanager.getfixturedefs(fixturename, node.nodeid) # type: ignore def inject_fixture(request: FixtureRequest, arg: str, value: Any) -> None: """Inject fixture into pytest fixture request. @@ -44,7 +44,7 @@ def inject_fixture(request: FixtureRequest, arg: str, value: Any) -> None: :param value: argument value """ fd = FixtureDef( - fixturemanager=request._fixturemanager, + fixturemanager=request._fixturemanager, # type: ignore baseid=None, argname=arg, func=lambda: value, diff --git a/src/pytest_bdd/cucumber_json.py b/src/pytest_bdd/cucumber_json.py index ab0c4a98e..d51866a6f 100644 --- a/src/pytest_bdd/cucumber_json.py +++ b/src/pytest_bdd/cucumber_json.py @@ -35,14 +35,14 @@ def configure(config: Config) -> None: cucumber_json_path = config.option.cucumber_json_path # prevent opening json log on worker nodes (xdist) if cucumber_json_path and not hasattr(config, "workerinput"): - config._bddcucumberjson = LogBDDCucumberJSON(cucumber_json_path) - config.pluginmanager.register(config._bddcucumberjson) + config._bddcucumberjson = LogBDDCucumberJSON(cucumber_json_path) # type: ignore[attr-defined] + config.pluginmanager.register(config._bddcucumberjson) # type: ignore[attr-defined] def unconfigure(config: Config) -> None: - xml = getattr(config, "_bddcucumberjson", None) + xml = getattr(config, "_bddcucumberjson", None) # type: ignore[attr-defined] if xml is not None: - del config._bddcucumberjson + del config._bddcucumberjson # type: ignore[attr-defined] config.pluginmanager.unregister(xml) diff --git a/src/pytest_bdd/exceptions.py b/src/pytest_bdd/exceptions.py index df3a34dae..4c3fe7d2c 100644 --- a/src/pytest_bdd/exceptions.py +++ 
b/src/pytest_bdd/exceptions.py @@ -15,10 +15,6 @@ class ScenarioNotFound(ScenarioValidationError): """Scenario Not Found.""" -class ExamplesNotValidError(ScenarioValidationError): - """Example table is not valid.""" - - class StepDefinitionNotFoundError(Exception): """Step definition not found.""" @@ -41,7 +37,7 @@ def __init__(self, message, line, line_content, filename): self.filename = filename def __str__(self): - return f"{self.__class__.__name__}: {self.message}\nLine number: {self.line}\nLine: {self.line_content}\nFile: {self.filename}" + return f"{self.message}\nLine number: {self.line}\nLine: {self.line_content}\nFile: {self.filename}" class FeatureError(GherkinParseError): @@ -52,33 +48,17 @@ class BackgroundError(GherkinParseError): pass -class ScenarioOutlineError(GherkinParseError): - pass - - class ScenarioError(GherkinParseError): pass -class ExamplesError(GherkinParseError): - pass - - class StepError(GherkinParseError): pass -class TagError(GherkinParseError): - pass - - class RuleError(GherkinParseError): pass -class DocStringError(GherkinParseError): - pass - - class TokenError(GherkinParseError): pass diff --git a/src/pytest_bdd/feature.py b/src/pytest_bdd/feature.py index ee4bd90b8..3a29a7fcd 100644 --- a/src/pytest_bdd/feature.py +++ b/src/pytest_bdd/feature.py @@ -65,17 +65,16 @@ def get_features(paths: list[str], **kwargs) -> list[Feature]: :return: `list` of `Feature` objects. 
""" seen_names = set() - features = [] + _features = [] for path in paths: if path not in seen_names: seen_names.add(path) if os.path.isdir(path): - features.extend( - get_features(glob.iglob(os.path.join(path, "**", "*.feature"), recursive=True), **kwargs) - ) + file_paths = list(glob.iglob(os.path.join(path, "**", "*.feature"), recursive=True)) + _features.extend(get_features(file_paths, **kwargs)) else: base, name = os.path.split(path) feature = get_feature(base, name, **kwargs) - features.append(feature) - features.sort(key=lambda feature: feature.name or feature.filename) - return features + _features.append(feature) + _features.sort(key=lambda _feature: _feature.name or _feature.filename) + return _features diff --git a/src/pytest_bdd/generation.py b/src/pytest_bdd/generation.py index bfde4a9b7..73988ef8c 100644 --- a/src/pytest_bdd/generation.py +++ b/src/pytest_bdd/generation.py @@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, cast from _pytest._io import TerminalWriter -from mako.lookup import TemplateLookup +from mako.lookup import TemplateLookup # type: ignore from .compat import getfixturedefs from .feature import get_features @@ -181,11 +181,11 @@ def _show_missing_code_main(config: Config, session: Session) -> None: features, scenarios, steps = parse_feature_files(config.option.features) for item in session.items: - if scenario := getattr(item.obj, "__scenario__", None): + if scenario := getattr(item.obj, "__scenario__", None): # type: ignore if scenario in scenarios: scenarios.remove(scenario) for step in scenario.steps: - if _find_step_fixturedef(fm, item, step=step): + if _find_step_fixturedef(fm, item, step=step): # type: ignore try: steps.remove(step) except ValueError: diff --git a/src/pytest_bdd/gherkin_parser.py b/src/pytest_bdd/gherkin_parser.py index 77658cb5e..287a5a7f1 100644 --- a/src/pytest_bdd/gherkin_parser.py +++ b/src/pytest_bdd/gherkin_parser.py @@ -5,16 +5,15 @@ import textwrap import typing from dataclasses import dataclass, 
field -from typing import Any, Dict, List, Optional +from typing import Any -from gherkin.errors import CompositeParserException -from gherkin.parser import Parser -from gherkin.token_scanner import TokenScanner +from gherkin.errors import CompositeParserException # type: ignore +from gherkin.parser import Parser # type: ignore from . import exceptions if typing.TYPE_CHECKING: - from typing import Self + from typing_extensions import Self ERROR_PATTERNS = [ @@ -33,41 +32,21 @@ exceptions.BackgroundError, "Multiple 'Background' sections detected. Only one 'Background' is allowed per feature.", ), - ( - re.compile(r"expected:.*got 'Scenario Outline.*'"), - exceptions.ScenarioOutlineError, - "'Scenario Outline' requires steps before 'Examples'.", - ), ( re.compile(r"expected:.*got 'Scenario.*'"), exceptions.ScenarioError, - "Misplaced or incorrect 'Scenario' keyword. Ensure it's correctly placed.", - ), - ( - re.compile(r"expected:.*got 'Examples.*'"), - exceptions.ExamplesError, - "'Examples' must follow a valid 'Scenario Outline' and contain table rows.", + "Misplaced or incorrect 'Scenario' keyword. Ensure it's correctly placed. There might be a missing Feature section.", ), ( re.compile(r"expected:.*got 'Given.*'"), exceptions.StepError, "Improper step keyword detected. Ensure correct order and indentation for steps (Given, When, Then, etc.).", ), - ( - re.compile(r"expected:.*got 'TagLine.*'"), - exceptions.TagError, - "Tags are misplaced. They should be directly above features, scenarios, or outlines.", - ), ( re.compile(r"expected:.*got 'Rule.*'"), exceptions.RuleError, "Misplaced or incorrectly formatted 'Rule'. Ensure it follows the feature structure.", ), - ( - re.compile(r"expected:.*got 'DocString.*'"), - exceptions.DocStringError, - 'DocString must be enclosed in triple quotes ("""). 
Ensure proper formatting.', - ), ( re.compile(r"expected:.*got '.*'"), exceptions.TokenError, @@ -196,7 +175,7 @@ class Scenario: description: str steps: list[Step] tags: list[Tag] - examples: list[DataTable] | None = field(default_factory=list) + examples: list[DataTable] = field(default_factory=list) @classmethod def from_dict(cls, data: dict[str, Any]) -> Self: @@ -208,7 +187,7 @@ def from_dict(cls, data: dict[str, Any]) -> Self: description=data["description"], steps=[Step.from_dict(step) for step in data["steps"]], tags=[Tag.from_dict(tag) for tag in data["tags"]], - examples=[DataTable.from_dict(example) for example in data.get("examples", [])], + examples=[DataTable.from_dict(example) for example in data["examples"]], ) @@ -309,37 +288,38 @@ def _to_raw_string(normal_string: str) -> str: return normal_string.replace("\\", "\\\\") -def get_gherkin_document(abs_filename: str = None, encoding: str = "utf-8") -> GherkinDocument: +def get_gherkin_document(abs_filename: str, encoding: str = "utf-8") -> GherkinDocument: with open(abs_filename, encoding=encoding) as f: feature_file_text = f.read() try: - gherkin_data = Parser().parse(TokenScanner(feature_file_text)) + gherkin_data = Parser().parse(feature_file_text) except CompositeParserException as e: message = e.args[0] line = e.errors[0].location["line"] line_content = linecache.getline(abs_filename, e.errors[0].location["line"]).rstrip("\n") filename = abs_filename - gherkin_error_handler = GherkinParserErrorHandler() - gherkin_error_handler(message, line, line_content, filename) + handle_gherkin_parser_error(message, line, line_content, filename, e) # If no patterns matched, raise a generic GherkinParserError - raise exceptions.GherkinParseError(f"Unknown parsing error: {message}", line, line_content, filename) + raise exceptions.GherkinParseError(f"Unknown parsing error: {message}", line, line_content, filename) from e # At this point, the `gherkin_data` should be valid if no exception was raised return 
GherkinDocument.from_dict(gherkin_data) -class GherkinParserErrorHandler: - """Parses raw Gherkin parser errors and converts them to human-readable exceptions.""" - - def __call__(self, raw_error: str, line: int, line_content: str, filename: str): - """Map the error message to a specific exception type and raise it.""" - # Split the raw_error into individual lines - error_lines = raw_error.splitlines() - - # Check each line against all error patterns - for error_line in error_lines: - for pattern, exception_class, message in ERROR_PATTERNS: - if pattern.search(error_line): - # If a match is found, raise the corresponding exception with the formatted message +def handle_gherkin_parser_error( + raw_error: str, line: int, line_content: str, filename: str, original_exception: Exception | None = None +): + """Map the error message to a specific exception type and raise it.""" + # Split the raw_error into individual lines + error_lines = raw_error.splitlines() + + # Check each line against all error patterns + for error_line in error_lines: + for pattern, exception_class, message in ERROR_PATTERNS: + if pattern.search(error_line): + # If a match is found, raise the corresponding exception with the formatted message + if original_exception: + raise exception_class(message, line, line_content, filename) from original_exception + else: raise exception_class(message, line, line_content, filename) diff --git a/src/pytest_bdd/gherkin_terminal_reporter.py b/src/pytest_bdd/gherkin_terminal_reporter.py index b26a8a7db..fb73ebb6f 100644 --- a/src/pytest_bdd/gherkin_terminal_reporter.py +++ b/src/pytest_bdd/gherkin_terminal_reporter.py @@ -43,7 +43,7 @@ def configure(config: Config) -> None: raise Exception("gherkin-terminal-reporter is not compatible with 'xdist' plugin.") -class GherkinTerminalReporter(TerminalReporter): +class GherkinTerminalReporter(TerminalReporter): # type: ignore def __init__(self, config: Config) -> None: super().__init__(config) diff --git 
a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index 701f5c716..48fe57d6a 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -7,6 +7,7 @@ from dataclasses import dataclass, field from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence +from .exceptions import StepError from .gherkin_parser import Background as GherkinBackground from .gherkin_parser import Feature as GherkinFeature from .gherkin_parser import GherkinDocument @@ -347,8 +348,7 @@ def get_step_type(keyword: str) -> str | None: "then": THEN, }.get(keyword) - @staticmethod - def parse_steps(steps_data: list[GherkinStep]) -> list[Step]: + def parse_steps(self, steps_data: list[GherkinStep]) -> list[Step]: """Parse a list of step data into Step objects. Args: @@ -357,22 +357,39 @@ def parse_steps(steps_data: list[GherkinStep]) -> list[Step]: Returns: List[Step]: A list of Step objects. """ + + def get_step_content(_gherkin_step): + step_name = strip_comments(_gherkin_step.text) + if _gherkin_step.docString: + step_name = f"{step_name}\n{_gherkin_step.docString.content}" + return step_name + + if not steps_data: + return [] + + first_step = steps_data[0] + if first_step.keyword.lower() not in STEP_TYPES: + raise StepError( + message=f"First step in a scenario or background must start with 'Given', 'When' or 'Then', but got {first_step.keyword}.", + line=first_step.location.line, + line_content=get_step_content(first_step), + filename=self.abs_filename, + ) + steps = [] - current_type = None - for step_data in steps_data: - name = strip_comments(step_data.text) - if step_data.docString: - name = f"{name}\n{step_data.docString.content}" - keyword = step_data.keyword.lower() + current_type = first_step.keyword.lower() + for step in steps_data: + name = get_step_content(step) + keyword = step.keyword.lower() if keyword in STEP_TYPES: current_type = keyword steps.append( Step( name=name, type=current_type, - indent=step_data.location.column - 1, - 
line_number=step_data.location.line, - keyword=step_data.keyword.title(), + indent=step.location.column - 1, + line_number=step.location.line, + keyword=step.keyword.title(), ) ) return steps @@ -404,12 +421,14 @@ def parse_scenario(self, scenario_data: GherkinScenario, feature: Feature) -> Sc line_number=example_data.location.line, name=example_data.name, ) - param_names = [cell.value for cell in example_data.tableHeader.cells] - examples.set_param_names(param_names) - for row in example_data.tableBody: - values = [cell.value or "" for cell in row.cells] - examples.add_example(values) - scenario.examples = examples + if example_data.tableHeader is not None: + param_names = [cell.value for cell in example_data.tableHeader.cells] + examples.set_param_names(param_names) + if example_data.tableBody is not None: + for row in example_data.tableBody: + values = [cell.value or "" for cell in row.cells] + examples.add_example(values) + scenario.examples = examples return scenario @@ -431,7 +450,7 @@ def _parse_feature_file(self) -> GherkinDocument: """ return get_gherkin_document(self.abs_filename, self.encoding) - def parse(self): + def parse(self) -> Feature: gherkin_doc: GherkinDocument = self._parse_feature_file() feature_data: GherkinFeature = gherkin_doc.feature feature = Feature( diff --git a/src/pytest_bdd/reporting.py b/src/pytest_bdd/reporting.py index f0a3d0145..39ae691eb 100644 --- a/src/pytest_bdd/reporting.py +++ b/src/pytest_bdd/reporting.py @@ -137,13 +137,10 @@ def fail(self) -> None: def runtest_makereport(item: Item, call: CallInfo, rep: TestReport) -> None: """Store item in the report object.""" - try: - scenario_report: ScenarioReport = item.__scenario_report__ - except AttributeError: - pass - else: - rep.scenario = scenario_report.serialize() - rep.item = {"name": item.name} + scenario_report = getattr(item, "__scenario_report__", None) + if scenario_report is not None: + rep.scenario = scenario_report.serialize() # type: ignore + rep.item = {"name": 
item.name} # type: ignore def before_scenario(request: FixtureRequest, feature: Feature, scenario: Scenario) -> None: diff --git a/src/pytest_bdd/scenario.py b/src/pytest_bdd/scenario.py index 4a939109d..33db74089 100644 --- a/src/pytest_bdd/scenario.py +++ b/src/pytest_bdd/scenario.py @@ -17,7 +17,7 @@ import logging import os import re -from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, TypeVar, cast +from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, List, Optional, TypeVar, cast import pytest from _pytest.fixtures import FixtureDef, FixtureManager, FixtureRequest, call_fixture_func @@ -62,8 +62,8 @@ def find_fixturedefs_for_step(step: Step, fixturemanager: FixtureManager, node: if not match: continue - fixturedefs = getfixturedefs(fixturemanager, fixturename, node) - if fixturedef not in (fixturedefs or []): + fixturedefs = cast(List[FixtureDef[Any]], getfixturedefs(fixturemanager, fixturename, node) or []) + if fixturedef not in fixturedefs: continue yield fixturedef @@ -295,6 +295,8 @@ def scenario_wrapper(request: FixtureRequest, _pytest_bdd_example: dict[str, str def collect_example_parametrizations( templated_scenario: ScenarioTemplate, ) -> list[ParameterSet] | None: + if templated_scenario.examples is None: + return None if contexts := list(templated_scenario.examples.as_contexts()): return [pytest.param(context, id="-".join(context.values())) for context in contexts] else: @@ -338,14 +340,14 @@ def scenario( def get_features_base_dir(caller_module_path: str) -> str: - d = get_from_ini("bdd_features_base_dir", None) + d = get_from_ini("bdd_features_base_dir") if d is None: return os.path.dirname(caller_module_path) rootdir = CONFIG_STACK[-1].rootpath return os.path.join(rootdir, d) -def get_from_ini(key: str, default: str) -> str: +def get_from_ini(key: str, default: str | None = None) -> str | None: """Get value from ini config. Return default if value has not been set. Use if the default value is dynamic. 
Otherwise, set default on addini call. diff --git a/src/pytest_bdd/steps.py b/src/pytest_bdd/steps.py index 42b0d6837..b4171e3c4 100644 --- a/src/pytest_bdd/steps.py +++ b/src/pytest_bdd/steps.py @@ -171,7 +171,7 @@ def decorator(func: Callable[P, T]) -> Callable[P, T]: def step_function_marker() -> StepFunctionContext: return context - step_function_marker._pytest_bdd_step_context = context + step_function_marker._pytest_bdd_step_context = context # type: ignore caller_locals = get_caller_module_locals(stacklevel=stacklevel) fixture_step_name = find_unique_name( @@ -184,7 +184,7 @@ def step_function_marker() -> StepFunctionContext: def find_unique_name(name: str, seen: Iterable[str]) -> str: - """Find unique name among a set of strings. + """Find a unique name among a set of strings. New names are generated by appending an increasing number at the end of the name. @@ -196,7 +196,11 @@ def find_unique_name(name: str, seen: Iterable[str]) -> str: if name not in seen: return name + # Generate new names with increasing numbers for i in count(1): new_name = f"{name}_{i}" if new_name not in seen: return new_name + + # This line will never be reached, but it's here to satisfy mypy + raise RuntimeError("Unable to find a unique name") diff --git a/src/pytest_bdd/types.py b/src/pytest_bdd/types.py index 66f20df68..ef123b509 100644 --- a/src/pytest_bdd/types.py +++ b/src/pytest_bdd/types.py @@ -2,8 +2,13 @@ from __future__ import annotations -GIVEN = "given" -WHEN = "when" -THEN = "then" +import typing + +if typing.TYPE_CHECKING: + from typing_extensions import Literal + +GIVEN: Literal["given"] = "given" +WHEN: Literal["when"] = "when" +THEN: Literal["then"] = "then" STEP_TYPES = (GIVEN, WHEN, THEN) diff --git a/src/pytest_bdd/utils.py b/src/pytest_bdd/utils.py index efe16ff6c..34263f7ee 100644 --- a/src/pytest_bdd/utils.py +++ b/src/pytest_bdd/utils.py @@ -78,8 +78,8 @@ def collect_dumped_objects(result: RunResult) -> list: def setdefault(obj: object, name: str, default: 
T) -> T: """Just like dict.setdefault, but for objects.""" - try: - return getattr(obj, name) - except AttributeError: + if hasattr(obj, name): + return getattr(obj, name) # type: ignore + else: setattr(obj, name, default) return default diff --git a/tests/feature/test_cucumber_json.py b/tests/feature/test_cucumber_json.py index 71a129768..836d3af5a 100644 --- a/tests/feature/test_cucumber_json.py +++ b/tests/feature/test_cucumber_json.py @@ -8,6 +8,8 @@ from typing import TYPE_CHECKING, Any if TYPE_CHECKING: + from typing import Optional + from _pytest.pytester import Pytester, RunResult @@ -23,7 +25,7 @@ def runandparse(pytester: Pytester, *args: Any) -> tuple[RunResult, list[dict[st class OfType: """Helper object to help compare object type to initialization type""" - def __init__(self, type: type = None) -> None: + def __init__(self, type: type | None = None) -> None: self.type = type def __eq__(self, other: object) -> bool: diff --git a/tests/feature/test_report.py b/tests/feature/test_report.py index bdc89855c..4b943a308 100644 --- a/tests/feature/test_report.py +++ b/tests/feature/test_report.py @@ -1,6 +1,7 @@ """Test scenario reporting.""" import textwrap +from typing import Optional import pytest @@ -8,7 +9,7 @@ class OfType: """Helper object comparison to which is always 'equal'.""" - def __init__(self, type: type = None) -> None: + def __init__(self, type: Optional[type] = None) -> None: self.type = type def __eq__(self, other: object) -> bool: diff --git a/tests/parser/refactor_parser.py b/tests/parser/refactor_parser.py new file mode 100644 index 000000000..c38253c21 --- /dev/null +++ b/tests/parser/refactor_parser.py @@ -0,0 +1,19 @@ +import re + + +def replace_line_numbers(text): + def replace(match): + return f"line={int(match.group(1)) - 1}" + + return re.sub(r"line=(\d+)", replace, text) + + +# Sample usage +if __name__ == "__main__": + with open("./test_parser.py") as file: + content = file.read() + + updated_content = 
replace_line_numbers(content) + + with open("./test_parser.py", "w") as file: + file.write(updated_content) diff --git a/tests/parser/test.feature b/tests/parser/test.feature index 62950bff4..5515bcb14 100644 --- a/tests/parser/test.feature +++ b/tests/parser/test.feature @@ -1,5 +1,4 @@ # This is a comment - Feature: User login As a registered user diff --git a/tests/parser/test_errors.py b/tests/parser/test_errors.py new file mode 100644 index 000000000..93ff90388 --- /dev/null +++ b/tests/parser/test_errors.py @@ -0,0 +1,253 @@ +import textwrap + + +def test_multiple_features_error(pytester): + """Test multiple features in a single feature file.""" + features = pytester.mkdir("features") + features.joinpath("test.feature").write_text( + textwrap.dedent( + """ + Feature: First Feature + Scenario: First Scenario + Given a step + + Feature: Second Feature + Scenario: Second Scenario + Given another step + """ + ), + encoding="utf-8", + ) + pytester.makepyfile( + textwrap.dedent( + """ + from pytest_bdd import scenarios + + scenarios('features') + """ + ) + ) + + result = pytester.runpytest() + result.stdout.fnmatch_lines(["*FeatureError: Multiple features are not allowed in a single feature file.*"]) + + +def test_step_outside_scenario_or_background_error(pytester): + """Test step outside of a Scenario or Background.""" + features = pytester.mkdir("features") + features.joinpath("test.feature").write_text( + textwrap.dedent( + """ + Feature: Invalid Feature + # Step not inside a scenario or background + Given a step that is not inside a scenario or background + + Scenario: A valid scenario + Given a step inside a scenario + + """ + ), + encoding="utf-8", + ) + + pytester.makepyfile( + textwrap.dedent( + """ + from pytest_bdd import scenarios, given + + @given("a step inside a scenario") + def step_inside_scenario(): + pass + + scenarios('features') + """ + ) + ) + + result = pytester.runpytest() + + # Expect the FeatureError for the step outside of scenario or 
background + result.stdout.fnmatch_lines(["*FeatureError: Step definition outside of a Scenario or a Background.*"]) + + +def test_multiple_backgrounds_error(pytester): + """Test multiple backgrounds in a single feature.""" + features = pytester.mkdir("features") + features.joinpath("test.feature").write_text( + textwrap.dedent( + """ + Feature: Feature with multiple backgrounds + Background: First background + Given a first background step + + Background: Second background + Given a second background step + + Scenario: A valid scenario + Given a step in the scenario + """ + ), + encoding="utf-8", + ) + pytester.makepyfile( + textwrap.dedent( + """ + from pytest_bdd import scenarios + + scenarios('features') + """ + ) + ) + + result = pytester.runpytest() + result.stdout.fnmatch_lines( + ["*BackgroundError: Multiple 'Background' sections detected. Only one 'Background' is allowed per feature.*"] + ) + + +def test_misplaced_scenario_error(pytester): + """Test misplaced or incorrect Scenario keywords.""" + features = pytester.mkdir("features") + features.joinpath("test.feature").write_text( + textwrap.dedent( + """ + Scenario: First scenario + Given a step + + Scenario: Misplaced scenario + Given another step + When I have something wrong + """ + ), + encoding="utf-8", + ) + pytester.makepyfile( + textwrap.dedent( + """ + from pytest_bdd import scenarios, given, when + + @given("a step") + def a_step(): + pass + + @given("another step") + def another_step(): + pass + + @when("I have something wrong") + def something_wrong(): + pass + + scenarios('features') + """ + ) + ) + + result = pytester.runpytest() + + # Expect that no ScenarioError will actually be raised here + result.stdout.fnmatch_lines( + [ + "*ScenarioError: Misplaced or incorrect 'Scenario' keyword. Ensure it's correctly placed. 
There might be a missing Feature section.*" + ] + ) + + +def test_misplaced_rule_error(pytester): + """Test misplaced or incorrectly formatted Rule.""" + features = pytester.mkdir("features") + features.joinpath("test.feature").write_text( + textwrap.dedent( + """ + Rule: Misplaced rule + Feature: Feature with misplaced rule + Scenario: A scenario inside a rule + Given a step + """ + ), + encoding="utf-8", + ) + pytester.makepyfile( + textwrap.dedent( + """ + from pytest_bdd import given, scenarios + + scenarios('features') + + @given("a step") + def a_step(): + pass + """ + ) + ) + + result = pytester.runpytest() + result.stdout.fnmatch_lines( + ["*RuleError: Misplaced or incorrectly formatted 'Rule'. Ensure it follows the feature structure.*"] + ) + + +def test_improper_step_error(pytester): + """Test improper step without keyword.""" + features = pytester.mkdir("features") + features.joinpath("test.feature").write_text( + textwrap.dedent( + """ + Feature: Feature with improper step + Scenario: Scenario with improper step + Given a valid step + InvalidStep I have an invalid step + """ + ), + encoding="utf-8", + ) + pytester.makepyfile( + textwrap.dedent( + """ + from pytest_bdd import scenarios + + scenarios('features') + """ + ) + ) + + result = pytester.runpytest() + result.stdout.fnmatch_lines(["*TokenError: Unexpected token found. 
Check Gherkin syntax near the reported error.*"]) + + +def test_improper_initial_keyword(pytester): + """Test first step using incorrect initial keyword.""" + features = pytester.mkdir("features") + features.joinpath("test.feature").write_text( + textwrap.dedent( + """ + Feature: Incorrect initial keyword + + Scenario: No initial Given, When or Then + And foo + """ + ), + encoding="utf-8", + ) + pytester.makepyfile( + textwrap.dedent( + """ + from pytest_bdd import given, scenarios + + scenarios('features') + + @given("foo") + def foo(): + pass + + @then("bar") + def bar(): + pass + """ + ) + ) + + result = pytester.runpytest() + result.stdout.fnmatch_lines( + ["*StepError: First step in a scenario or background must start with 'Given', 'When' or 'Then', but got And.*"] + ) diff --git a/tests/parser/test_parser.py b/tests/parser/test_parser.py index 029886152..26421143d 100644 --- a/tests/parser/test_parser.py +++ b/tests/parser/test_parser.py @@ -1,6 +1,22 @@ from pathlib import Path -from src.pytest_bdd.gherkin_parser import get_gherkin_document +from src.pytest_bdd.gherkin_parser import ( + Background, + Cell, + Child, + Comment, + DataTable, + DocString, + Feature, + GherkinDocument, + Location, + Row, + Rule, + Scenario, + Step, + Tag, + get_gherkin_document, +) def test_parser(): @@ -8,4 +24,748 @@ def test_parser(): feature_file = test_dir / "test.feature" feature_file_path = str(feature_file.resolve()) - get_gherkin_document(feature_file_path) + # Call the function to parse the Gherkin document + gherkin_doc = get_gherkin_document(feature_file_path) + + # Define the expected structure + expected_document = GherkinDocument( + feature=Feature( + keyword="Feature", + location=Location(column=1, line=2), + tags=[], + name="User login", + description=" As a registered user\n I want to be able to log in\n So that I can access my account", + children=[ + Child( + background=Background( + id="1", + keyword="Background", + location=Location(column=3, line=8), + 
name="", + description="", + steps=[ + Step( + id="0", + keyword="Given", + keywordType="Context", + location=Location(column=5, line=10), + text="the login page is open", + dataTable=None, + docString=None, + ) + ], + ), + rule=None, + scenario=None, + ), + Child( + background=None, + rule=None, + scenario=Scenario( + id="6", + keyword="Scenario", + location=Location(column=3, line=13), + name="Successful login with valid credentials", + description="", + steps=[ + Step( + id="2", + keyword="Given", + keywordType="Context", + location=Location(column=5, line=14), + text="the user enters a valid username", + dataTable=None, + docString=None, + ), + Step( + id="3", + keyword="And", + keywordType="Conjunction", + location=Location(column=5, line=15), + text="the user enters a valid password", + dataTable=None, + docString=None, + ), + Step( + id="4", + keyword="When", + keywordType="Action", + location=Location(column=5, line=16), + text="the user clicks the login button", + dataTable=None, + docString=None, + ), + Step( + id="5", + keyword="Then", + keywordType="Outcome", + location=Location(column=5, line=17), + text="the user should see the dashboard", + dataTable=None, + docString=None, + ), + ], + tags=[], + examples=[], + ), + ), + Child( + background=None, + rule=None, + scenario=Scenario( + id="15", + keyword="Scenario Outline", + location=Location(column=3, line=19), + name="Unsuccessful login with invalid credentials", + description="", + steps=[ + Step( + id="7", + keyword="Given", + keywordType="Context", + location=Location(column=5, line=20), + text='the user enters "" as username', + dataTable=None, + docString=None, + ), + Step( + id="8", + keyword="And", + keywordType="Conjunction", + location=Location(column=5, line=21), + text='the user enters "" as password', + dataTable=None, + docString=None, + ), + Step( + id="9", + keyword="When", + keywordType="Action", + location=Location(column=5, line=22), + text="the user clicks the login button", + 
dataTable=None, + docString=None, + ), + Step( + id="10", + keyword="Then", + keywordType="Outcome", + location=Location(column=5, line=23), + text='the user should see an error message ""', + dataTable=None, + docString=None, + ), + ], + tags=[], + examples=[ + DataTable( + location=Location(column=5, line=26), + name="", + tableHeader=Row( + id="11", + location=Location(column=7, line=27), + cells=[ + Cell( + location=Location(column=9, line=27), + value="username", + ), + Cell( + location=Location(column=23, line=27), + value="password", + ), + Cell( + location=Location(column=35, line=27), + value="error_message", + ), + ], + ), + tableBody=[ + Row( + id="12", + location=Location(column=7, line=28), + cells=[ + Cell( + location=Location(column=9, line=28), + value="invalidUser", + ), + Cell( + location=Location(column=23, line=28), + value="wrongPass", + ), + Cell( + location=Location(column=35, line=28), + value="Invalid username or password", + ), + ], + ), + Row( + id="13", + location=Location(column=7, line=29), + cells=[ + Cell( + location=Location(column=9, line=29), + value="user123", + ), + Cell( + location=Location(column=23, line=29), + value="incorrect", + ), + Cell( + location=Location(column=35, line=29), + value="Invalid username or password", + ), + ], + ), + ], + ) + ], + ), + ), + Child( + background=None, + rule=None, + scenario=Scenario( + id="20", + keyword="Scenario", + location=Location(column=3, line=31), + name="Login with empty username", + description="", + steps=[ + Step( + id="16", + keyword="Given", + keywordType="Context", + location=Location(column=5, line=32), + text="the user enters an empty username", + dataTable=None, + docString=None, + ), + Step( + id="17", + keyword="And", + keywordType="Conjunction", + location=Location(column=5, line=33), + text="the user enters a valid password", + dataTable=None, + docString=None, + ), + Step( + id="18", + keyword="When", + keywordType="Action", + location=Location(column=5, line=34), + 
text="the user clicks the login button", + dataTable=None, + docString=None, + ), + Step( + id="19", + keyword="Then", + keywordType="Outcome", + location=Location(column=5, line=35), + text='the user should see an error message "Username cannot be empty"', + dataTable=None, + docString=None, + ), + ], + tags=[], + examples=[], + ), + ), + Child( + background=None, + rule=None, + scenario=Scenario( + id="25", + keyword="Scenario", + location=Location(column=3, line=37), + name="Login with empty password", + description="", + steps=[ + Step( + id="21", + keyword="Given", + keywordType="Context", + location=Location(column=5, line=38), + text="the user enters a valid username", + dataTable=None, + docString=None, + ), + Step( + id="22", + keyword="And", + keywordType="Conjunction", + location=Location(column=5, line=39), + text="the user enters an empty password", + dataTable=None, + docString=None, + ), + Step( + id="23", + keyword="When", + keywordType="Action", + location=Location(column=5, line=40), + text="the user clicks the login button", + dataTable=None, + docString=None, + ), + Step( + id="24", + keyword="Then", + keywordType="Outcome", + location=Location(column=5, line=41), + text='the user should see an error message "Password cannot be empty"', + dataTable=None, + docString=None, + ), + ], + tags=[], + examples=[], + ), + ), + Child( + background=None, + rule=None, + scenario=Scenario( + id="30", + keyword="Scenario", + location=Location(column=3, line=43), + name="Login with SQL injection attempt", + description="", + steps=[ + Step( + id="26", + keyword="Given", + keywordType="Context", + location=Location(column=5, line=44), + text="the user enters \"admin' OR '1'='1\" as username", + dataTable=None, + docString=None, + ), + Step( + id="27", + keyword="And", + keywordType="Conjunction", + location=Location(column=5, line=45), + text='the user enters "password" as password', + dataTable=None, + docString=None, + ), + Step( + id="28", + keyword="When", 
+ keywordType="Action", + location=Location(column=5, line=46), + text="the user clicks the login button", + dataTable=None, + docString=None, + ), + Step( + id="29", + keyword="Then", + keywordType="Outcome", + location=Location(column=5, line=47), + text='the user should see an error message "Invalid username or password"', + dataTable=None, + docString=None, + ), + ], + tags=[], + examples=[], + ), + ), + Child( + background=None, + rule=None, + scenario=Scenario( + id="35", + keyword="Scenario", + location=Location(column=3, line=50), + name="Login button disabled for empty fields", + description="", + steps=[ + Step( + id="31", + keyword="Given", + keywordType="Context", + location=Location(column=5, line=51), + text="the user has not entered any username or password", + dataTable=None, + docString=None, + ), + Step( + id="32", + keyword="Then", + keywordType="Outcome", + location=Location(column=5, line=52), + text="the login button should be disabled", + dataTable=None, + docString=None, + ), + ], + tags=[ + Tag(id="33", location=Location(column=3, line=49), name="@login"), + Tag( + id="34", + location=Location(column=10, line=49), + name="@critical", + ), + ], + examples=[], + ), + ), + Child( + background=None, + rule=None, + scenario=Scenario( + id="39", + keyword="Scenario", + location=Location(column=3, line=56), + name="Login page loads correctly", + description="", + steps=[ + Step( + id="36", + keyword="Given", + keywordType="Context", + location=Location(column=5, line=57), + text="the login page is loaded", + dataTable=None, + docString=None, + ), + Step( + id="37", + keyword="Then", + keywordType="Outcome", + location=Location(column=5, line=58), + text="the login form should be visible", + dataTable=None, + docString=None, + ), + ], + tags=[Tag(id="38", location=Location(column=3, line=55), name="@smoke")], + examples=[], + ), + ), + Child( + background=None, + rule=None, + scenario=Scenario( + id="53", + keyword="Scenario", + 
location=Location(column=3, line=61), + name="Login with multiple sets of credentials", + description="", + steps=[ + Step( + id="44", + keyword="Given", + keywordType="Context", + location=Location(column=5, line=62), + text="the following users are registered:", + dataTable=DataTable( + location=Location(column=7, line=63), + name=None, + tableHeader=None, + tableBody=[], + ), + docString=None, + ), + Step( + id="48", + keyword="When", + keywordType="Action", + location=Location(column=5, line=67), + text="the user tries to log in with the following credentials:", + dataTable=DataTable( + location=Location(column=7, line=68), + name=None, + tableHeader=None, + tableBody=[], + ), + docString=None, + ), + Step( + id="52", + keyword="Then", + keywordType="Outcome", + location=Location(column=5, line=71), + text="the login attempts should result in:", + dataTable=DataTable( + location=Location(column=7, line=72), + name=None, + tableHeader=None, + tableBody=[], + ), + docString=None, + ), + ], + tags=[], + examples=[], + ), + ), + Child( + background=None, + rule=None, + scenario=Scenario( + id="57", + keyword="Scenario", + location=Location(column=3, line=77), + name="Check login error message with detailed explanation", + description="", + steps=[ + Step( + id="54", + keyword="Given", + keywordType="Context", + location=Location(column=5, line=78), + text="the user enters invalid credentials", + dataTable=None, + docString=None, + ), + Step( + id="55", + keyword="When", + keywordType="Action", + location=Location(column=5, line=79), + text="the user clicks the login button", + dataTable=None, + docString=None, + ), + Step( + id="56", + keyword="Then", + keywordType="Outcome", + location=Location(column=5, line=80), + text="the user should see the following error message:", + dataTable=None, + docString=DocString( + content="Your login attempt was unsuccessful.\nPlease check your username and password and try again.\nIf the problem persists, contact support.", + 
delimiter='"""', + location=Location(column=7, line=81), + ), + ), + ], + tags=[], + examples=[], + ), + ), + Child( + background=None, + rule=Rule( + id="64", + keyword="Rule", + location=Location(column=3, line=88), + name="a sale cannot happen if there is no stock", + description="", + tags=[ + Tag( + id="63", + location=Location(column=3, line=87), + name="@some-tag", + ) + ], + children=[ + Child( + background=None, + rule=None, + scenario=Scenario( + id="62", + keyword="Example", + location=Location(column=3, line=90), + name="No chocolates left", + description="", + steps=[ + Step( + id="58", + keyword="Given", + keywordType="Context", + location=Location(column=5, line=91), + text="the customer has 100 cents", + dataTable=None, + docString=None, + ), + Step( + id="59", + keyword="And", + keywordType="Conjunction", + location=Location(column=5, line=92), + text="there are no chocolate bars in stock", + dataTable=None, + docString=None, + ), + Step( + id="60", + keyword="When", + keywordType="Action", + location=Location(column=5, line=93), + text="the customer tries to buy a 1 cent chocolate bar", + dataTable=None, + docString=None, + ), + Step( + id="61", + keyword="Then", + keywordType="Outcome", + location=Location(column=5, line=94), + text="the sale should not happen", + dataTable=None, + docString=None, + ), + ], + tags=[], + examples=[], + ), + ) + ], + ), + scenario=None, + ), + Child( + background=None, + rule=Rule( + id="75", + keyword="Rule", + location=Location(column=3, line=96), + name="A sale cannot happen if the customer does not have enough money", + description="", + tags=[], + children=[ + Child( + background=None, + rule=None, + scenario=Scenario( + id="69", + keyword="Example", + location=Location(column=5, line=98), + name="Not enough money", + description="", + steps=[ + Step( + id="65", + keyword="Given", + keywordType="Context", + location=Location(column=7, line=99), + text="the customer has 100 cents", + dataTable=None, + 
docString=None, + ), + Step( + id="66", + keyword="And", + keywordType="Conjunction", + location=Location(column=7, line=100), + text="there are chocolate bars in stock", + dataTable=None, + docString=None, + ), + Step( + id="67", + keyword="When", + keywordType="Action", + location=Location(column=7, line=101), + text="the customer tries to buy a 125 cent chocolate bar", + dataTable=None, + docString=None, + ), + Step( + id="68", + keyword="Then", + keywordType="Outcome", + location=Location(column=7, line=102), + text="the sale should not happen", + dataTable=None, + docString=None, + ), + ], + tags=[], + examples=[], + ), + ), + Child( + background=None, + rule=None, + scenario=Scenario( + id="74", + keyword="Example", + location=Location(column=5, line=105), + name="Enough money", + description="", + steps=[ + Step( + id="70", + keyword="Given", + keywordType="Context", + location=Location(column=7, line=106), + text="the customer has 100 cents", + dataTable=None, + docString=None, + ), + Step( + id="71", + keyword="And", + keywordType="Conjunction", + location=Location(column=7, line=107), + text="there are chocolate bars in stock", + dataTable=None, + docString=None, + ), + Step( + id="72", + keyword="When", + keywordType="Action", + location=Location(column=7, line=108), + text="the customer tries to buy a 75 cent chocolate bar", + dataTable=None, + docString=None, + ), + Step( + id="73", + keyword="Then", + keywordType="Outcome", + location=Location(column=7, line=109), + text="the sale should happen", + dataTable=None, + docString=None, + ), + ], + tags=[], + examples=[], + ), + ), + ], + ), + scenario=None, + ), + ], + ), + comments=[ + Comment(location=Location(column=1, line=1), text="# This is a comment"), + Comment( + location=Location(column=1, line=9), + text=" # Background steps run before each scenario", + ), + Comment(location=Location(column=1, line=12), text=" # Scenario within the rule"), + Comment( + location=Location(column=1, line=25), + 
text=" # Examples table provides data for the scenario outline", + ), + Comment( + location=Location(column=1, line=54), + text=" # Tags can be used to categorize scenarios", + ), + Comment( + location=Location(column=1, line=60), + text=" # Using Data Tables for more complex data", + ), + Comment( + location=Location(column=1, line=76), + text=" # Using Doc Strings for multi-line text", + ), + Comment(location=Location(column=1, line=89), text=" # Unhappy path"), + Comment(location=Location(column=1, line=97), text=" # Unhappy path"), + Comment(location=Location(column=1, line=104), text=" # Happy path"), + ], + ) + + assert gherkin_doc == expected_document From a3a5195a3bffee5614c9d67721e71e5eb67001c0 Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Mon, 16 Sep 2024 20:49:47 +0200 Subject: [PATCH 22/36] Do not fail the CI job if we can't upload to Codecov. Also bump codecov-action --- .github/workflows/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 611ba0f2a..4377ca8fb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -81,10 +81,10 @@ jobs: coverage combine coverage xml - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v4 with: # Explicitly using the token to avoid Codecov rate limit errors # See https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954 token: ${{ secrets.CODECOV_TOKEN }} - fail_ci_if_error: true + fail_ci_if_error: false verbose: true # optional (default = false) From 656c7ceff64c2d6234302abeb1121cb36eb98abd Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Mon, 16 Sep 2024 20:53:55 +0200 Subject: [PATCH 23/36] Ignore py3.13 failures for now They are not really failures, just deprecation warnings that we treat as errors. 
--- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 4377ca8fb..91233266e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -34,7 +34,7 @@ jobs: - python-version: "3.13-dev" toxfactor: py3.13 ignore-typecheck-outcome: true - ignore-test-outcome: false + ignore-test-outcome: true steps: - uses: actions/checkout@v3 From d11096fdad4928f22cd048bdf41e374726c4a1a5 Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Mon, 16 Sep 2024 21:05:13 +0200 Subject: [PATCH 24/36] Fix matching result in case there are warnings --- tests/feature/test_tags.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/feature/test_tags.py b/tests/feature/test_tags.py index 20a64dc8e..050ad6fae 100644 --- a/tests/feature/test_tags.py +++ b/tests/feature/test_tags.py @@ -157,7 +157,7 @@ def _(): ) result = pytester.runpytest("-rsx") result.stdout.fnmatch_lines(["SKIP*: Not implemented yet"]) - result.stdout.fnmatch_lines(["*= 1 skipped, 1 xpassed * =*"]) + result.stdout.fnmatch_lines(["*= 1 skipped, 1 xpassed*=*"]) def test_at_in_scenario(pytester): From 0a7778271f8ed1bf71cac75a8230d003c86fbdaf Mon Sep 17 00:00:00 2001 From: Jason Allen Date: Sat, 21 Sep 2024 19:24:19 +0100 Subject: [PATCH 25/36] Remove accidentally committed local file. 
Fix test file name --- ...t_gherkin.py => test_no_strict_gherkin.py} | 0 tests/parser/refactor_parser.py | 19 ------------------- 2 files changed, 19 deletions(-) rename tests/feature/{test_no_sctrict_gherkin.py => test_no_strict_gherkin.py} (100%) delete mode 100644 tests/parser/refactor_parser.py diff --git a/tests/feature/test_no_sctrict_gherkin.py b/tests/feature/test_no_strict_gherkin.py similarity index 100% rename from tests/feature/test_no_sctrict_gherkin.py rename to tests/feature/test_no_strict_gherkin.py diff --git a/tests/parser/refactor_parser.py b/tests/parser/refactor_parser.py deleted file mode 100644 index c38253c21..000000000 --- a/tests/parser/refactor_parser.py +++ /dev/null @@ -1,19 +0,0 @@ -import re - - -def replace_line_numbers(text): - def replace(match): - return f"line={int(match.group(1)) - 1}" - - return re.sub(r"line=(\d+)", replace, text) - - -# Sample usage -if __name__ == "__main__": - with open("./test_parser.py") as file: - content = file.read() - - updated_content = replace_line_numbers(content) - - with open("./test_parser.py", "w") as file: - file.write(updated_content) From 093b6a8bdaa4df3d6ffefc813fe6ffd6c71af739 Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sat, 21 Sep 2024 21:58:38 +0200 Subject: [PATCH 26/36] Update release notes --- CHANGES.rst | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index b72a087a4..0df11c0da 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3,16 +3,15 @@ Changelog Unreleased ---------- - +- Use `gherkin-official` parser to replace custom parsing logic. +- Multiline steps must now always use triple-quotes for the additional lines. +- All feature files must now use the keyword `Feature:` to be considered valid. +- Tags can no longer have spaces (e.g. "@tag one" "@tag two" are no longer valid). 
7.3.0
----------
- Fix an issue when only the first Step would inject a fixture, while later steps would not be able to.
- Test against the latest versions of pytest (8.2, 8.3).
-- Use `gherkin-official` parser to replace custom parsing logic.
-- Multiline steps must now always use triple-quotes for the additional lines.
-- All feature files must now use the keyword `Feature:` to be considered valid.
-- Tags can no longer have spaces (e.g. "@tag one" "@tag two" are no longer valid).

7.2.0
----------

From 5f717c4278da21e4b157b980b8688d7ff192eef2 Mon Sep 17 00:00:00 2001
From: Alessio Bogon <778703+youtux@users.noreply.github.com>
Date: Sat, 21 Sep 2024 21:58:54 +0200
Subject: [PATCH 27/36] Require tests to pass on 3.13

---
 .github/workflows/main.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 91233266e..4377ca8fb 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -34,7 +34,7 @@ jobs:
       - python-version: "3.13-dev"
         toxfactor: py3.13
         ignore-typecheck-outcome: true
-        ignore-test-outcome: true
+        ignore-test-outcome: false

     steps:
     - uses: actions/checkout@v3

From 0fe44191c8763895741a74be6e72f03681dfcc18 Mon Sep 17 00:00:00 2001
From: Alessio Bogon <778703+youtux@users.noreply.github.com>
Date: Sat, 21 Sep 2024 22:29:01 +0200
Subject: [PATCH 28/36] Only treat warnings from `pytest_bdd` as exceptions

---
 pytest.ini | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pytest.ini b/pytest.ini
index f4763470e..308cad0fd 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -1,4 +1,5 @@
 [pytest]
 testpaths = tests
 filterwarnings =
-    error
+    # only treat warnings coming from the pytest_bdd package as errors
+    error:::(src)?\.pytest_bdd.*

From 9b263f41ae908d7a1bc9f4080381305ef992997d Mon Sep 17 00:00:00 2001
From: Alessio Bogon <778703+youtux@users.noreply.github.com>
Date: Sat, 21 Sep 2024 22:30:50 +0200
Subject: [PATCH 29/36] Add type annotations

---
 src/pytest_bdd/exceptions.py | 4 
++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pytest_bdd/exceptions.py b/src/pytest_bdd/exceptions.py index 4c3fe7d2c..3db49ab2f 100644 --- a/src/pytest_bdd/exceptions.py +++ b/src/pytest_bdd/exceptions.py @@ -26,7 +26,7 @@ class NoScenariosFound(Exception): class GherkinParseError(Exception): """Base class for all Gherkin parsing errors.""" - def __init__(self, message, line, line_content, filename): + def __init__(self, message: str, line: int, line_content: str, filename: str): super().__init__(message) self.message = message self.line = line @@ -36,7 +36,7 @@ def __init__(self, message, line, line_content, filename): self.line_content = line_content self.filename = filename - def __str__(self): + def __str__(self) -> str: return f"{self.message}\nLine number: {self.line}\nLine: {self.line_content}\nFile: {self.filename}" From 6fe8e4092f384c870b338df3c5d1a810c1970798 Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sat, 21 Sep 2024 22:30:58 +0200 Subject: [PATCH 30/36] remove redundant statements --- src/pytest_bdd/exceptions.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/pytest_bdd/exceptions.py b/src/pytest_bdd/exceptions.py index 3db49ab2f..8e3c223ab 100644 --- a/src/pytest_bdd/exceptions.py +++ b/src/pytest_bdd/exceptions.py @@ -32,9 +32,6 @@ def __init__(self, message: str, line: int, line_content: str, filename: str): self.line = line self.line_content = line_content self.filename = filename - self.line = line - self.line_content = line_content - self.filename = filename def __str__(self) -> str: return f"{self.message}\nLine number: {self.line}\nLine: {self.line_content}\nFile: {self.filename}" From 0573c6fe8a71ee32107d59dcf5cfebe69da8ab4e Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sat, 21 Sep 2024 22:31:44 +0200 Subject: [PATCH 31/36] Fix typing issue --- src/pytest_bdd/compat.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/src/pytest_bdd/compat.py b/src/pytest_bdd/compat.py index 10b98c79f..bdce0074c 100644 --- a/src/pytest_bdd/compat.py +++ b/src/pytest_bdd/compat.py @@ -34,7 +34,7 @@ def inject_fixture(request: FixtureRequest, arg: str, value: Any) -> None: # if there was already one registered, so we need to force its value # to the one we want to inject. fixture_def = request._get_active_fixturedef(arg) - fixture_def.cached_result = (value, None, None) + fixture_def.cached_result = (value, None, None) # type: ignore else: From 8629438559679060dae07f242e4585cb12e930bf Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sat, 21 Sep 2024 22:48:46 +0200 Subject: [PATCH 32/36] Add missing type annotations, and require all files to be typed --- pyproject.toml | 1 + src/pytest_bdd/exceptions.py | 2 +- src/pytest_bdd/feature.py | 6 ++-- src/pytest_bdd/gherkin_parser.py | 2 +- src/pytest_bdd/hooks.py | 47 ++++++++++++++++++++++++++------ src/pytest_bdd/parser.py | 2 +- src/pytest_bdd/scenario.py | 4 +-- 7 files changed, 48 insertions(+), 16 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c3af2324b..ff9714bea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -94,6 +94,7 @@ python_version = "3.8" warn_return_any = true warn_unused_configs = true files = "src/pytest_bdd/**/*.py" +disallow_untyped_defs = true [[tool.mypy.overrides]] module = ["parse", "parse_type"] diff --git a/src/pytest_bdd/exceptions.py b/src/pytest_bdd/exceptions.py index 8e3c223ab..1baf617eb 100644 --- a/src/pytest_bdd/exceptions.py +++ b/src/pytest_bdd/exceptions.py @@ -26,7 +26,7 @@ class NoScenariosFound(Exception): class GherkinParseError(Exception): """Base class for all Gherkin parsing errors.""" - def __init__(self, message: str, line: int, line_content: str, filename: str): + def __init__(self, message: str, line: int, line_content: str, filename: str) -> None: super().__init__(message) self.message = message self.line = line diff 
--git a/src/pytest_bdd/feature.py b/src/pytest_bdd/feature.py index 3a29a7fcd..6a7aa55a1 100644 --- a/src/pytest_bdd/feature.py +++ b/src/pytest_bdd/feature.py @@ -57,7 +57,7 @@ def get_feature(base_path: str, filename: str, encoding: str = "utf-8") -> Featu return feature -def get_features(paths: list[str], **kwargs) -> list[Feature]: +def get_features(paths: list[str], encoding: str = "utf-8") -> list[Feature]: """Get features for given paths. :param list paths: `list` of paths (file or dirs) @@ -71,10 +71,10 @@ def get_features(paths: list[str], **kwargs) -> list[Feature]: seen_names.add(path) if os.path.isdir(path): file_paths = list(glob.iglob(os.path.join(path, "**", "*.feature"), recursive=True)) - _features.extend(get_features(file_paths, **kwargs)) + _features.extend(get_features(file_paths, encoding=encoding)) else: base, name = os.path.split(path) - feature = get_feature(base, name, **kwargs) + feature = get_feature(base, name, encoding=encoding) _features.append(feature) _features.sort(key=lambda _feature: _feature.name or _feature.filename) return _features diff --git a/src/pytest_bdd/gherkin_parser.py b/src/pytest_bdd/gherkin_parser.py index 287a5a7f1..b44af4a27 100644 --- a/src/pytest_bdd/gherkin_parser.py +++ b/src/pytest_bdd/gherkin_parser.py @@ -309,7 +309,7 @@ def get_gherkin_document(abs_filename: str, encoding: str = "utf-8") -> GherkinD def handle_gherkin_parser_error( raw_error: str, line: int, line_content: str, filename: str, original_exception: Exception | None = None -): +) -> None: """Map the error message to a specific exception type and raise it.""" # Split the raw_error into individual lines error_lines = raw_error.splitlines() diff --git a/src/pytest_bdd/hooks.py b/src/pytest_bdd/hooks.py index 9351b2e30..48e1cedbc 100644 --- a/src/pytest_bdd/hooks.py +++ b/src/pytest_bdd/hooks.py @@ -1,40 +1,71 @@ from __future__ import annotations +from collections.abc import Callable + import pytest +from _pytest.fixtures import FixtureRequest + 
+from pytest_bdd.parser import Feature, Scenario, Step """Pytest-bdd pytest hooks.""" -def pytest_bdd_before_scenario(request, feature, scenario): +def pytest_bdd_before_scenario(request: FixtureRequest, feature: Feature, scenario: Scenario) -> object: """Called before scenario is executed.""" -def pytest_bdd_after_scenario(request, feature, scenario): +def pytest_bdd_after_scenario(request: FixtureRequest, feature: Feature, scenario: Scenario) -> object: """Called after scenario is executed.""" -def pytest_bdd_before_step(request, feature, scenario, step, step_func): +def pytest_bdd_before_step( + request: FixtureRequest, feature: Feature, scenario: Scenario, step: Step, step_func: Callable[..., object] +) -> object: """Called before step function is set up.""" -def pytest_bdd_before_step_call(request, feature, scenario, step, step_func, step_func_args): +def pytest_bdd_before_step_call( + request: FixtureRequest, + feature: Feature, + scenario: Scenario, + step: Step, + step_func: Callable[..., object], + step_func_args: dict[str, object], +) -> object: """Called before step function is executed.""" -def pytest_bdd_after_step(request, feature, scenario, step, step_func, step_func_args): +def pytest_bdd_after_step( + request: FixtureRequest, + feature: Feature, + scenario: Scenario, + step: Step, + step_func: Callable[..., object], + step_func_args: dict[str, object], +) -> object: """Called after step function is successfully executed.""" -def pytest_bdd_step_error(request, feature, scenario, step, step_func, step_func_args, exception): +def pytest_bdd_step_error( + request: FixtureRequest, + feature: Feature, + scenario: Scenario, + step: Step, + step_func: Callable[..., object], + step_func_args: dict[str, object], + exception: Exception, +) -> object: """Called when step function failed to execute.""" -def pytest_bdd_step_func_lookup_error(request, feature, scenario, step, exception): +def pytest_bdd_step_func_lookup_error( + request: FixtureRequest, feature: 
Feature, scenario: Scenario, step: Step, exception: Exception +) -> object: """Called when step lookup failed.""" @pytest.hookspec(firstresult=True) -def pytest_bdd_apply_tag(tag, function): +def pytest_bdd_apply_tag(tag: str, function: Callable[..., object]) -> object: """Apply a tag (from a ``.feature`` file) to the given scenario. The default implementation does the equivalent of diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index 48fe57d6a..d6618637e 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -358,7 +358,7 @@ def parse_steps(self, steps_data: list[GherkinStep]) -> list[Step]: List[Step]: A list of Step objects. """ - def get_step_content(_gherkin_step): + def get_step_content(_gherkin_step: GherkinStep) -> str: step_name = strip_comments(_gherkin_step.text) if _gherkin_step.docString: step_name = f"{step_name}\n{_gherkin_step.docString.content}" diff --git a/src/pytest_bdd/scenario.py b/src/pytest_bdd/scenario.py index 33db74089..49d85fb91 100644 --- a/src/pytest_bdd/scenario.py +++ b/src/pytest_bdd/scenario.py @@ -17,7 +17,7 @@ import logging import os import re -from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, List, Optional, TypeVar, cast +from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, List, TypeVar, cast import pytest from _pytest.fixtures import FixtureDef, FixtureManager, FixtureRequest, call_fixture_func @@ -151,7 +151,7 @@ def get_fixture_path(fixture_def: FixtureDef) -> list[str]: del fixturemanager._arg2fixturedefs[bdd_name] -def get_step_function(request, step: Step) -> StepFunctionContext | None: +def get_step_function(request: FixtureRequest, step: Step) -> StepFunctionContext | None: """Get the step function (context) for the given step. We first figure out what's the step fixture name that we have to inject. 
From ace6ca527b4995adf0e1a20cdb19dfc20191da9f Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sat, 21 Sep 2024 22:49:39 +0200 Subject: [PATCH 33/36] mypy is passing, require typecheck outcome to be positive --- .github/workflows/main.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 4377ca8fb..1cbd188fd 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -13,27 +13,27 @@ jobs: include: - python-version: "3.8" toxfactor: py3.8 - ignore-typecheck-outcome: true + ignore-typecheck-outcome: false ignore-test-outcome: false - python-version: "3.9" toxfactor: py3.9 - ignore-typecheck-outcome: true + ignore-typecheck-outcome: false ignore-test-outcome: false - python-version: "3.10" toxfactor: py3.10 - ignore-typecheck-outcome: true + ignore-typecheck-outcome: false ignore-test-outcome: false - python-version: "3.11" toxfactor: py3.11 - ignore-typecheck-outcome: true + ignore-typecheck-outcome: false ignore-test-outcome: false - python-version: "3.12" toxfactor: py3.12 - ignore-typecheck-outcome: true + ignore-typecheck-outcome: false ignore-test-outcome: false - python-version: "3.13-dev" toxfactor: py3.13 - ignore-typecheck-outcome: true + ignore-typecheck-outcome: false ignore-test-outcome: false steps: From c12d8c10332955ff0e0499189ed8f6ac9bf857b0 Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sat, 21 Sep 2024 22:58:11 +0200 Subject: [PATCH 34/36] Remove declaration for field that was never initialised --- src/pytest_bdd/parser.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index d6618637e..a18b93f64 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -218,7 +218,6 @@ class Step: failed (bool): Whether the step has failed (internal use only). 
scenario (Optional[ScenarioTemplate]): The scenario to which this step belongs (internal use only). background (Optional[Background]): The background to which this step belongs (internal use only). - lines (List[str]): Additional lines for the step (internal use only). """ type: str @@ -229,7 +228,6 @@ class Step: failed: bool = field(init=False, default=False) scenario: ScenarioTemplate | None = field(init=False, default=None) background: Background | None = field(init=False, default=None) - lines: list[str] = field(init=False, default_factory=list) def __init__(self, name: str, type: str, indent: int, line_number: int, keyword: str) -> None: """Initialize a step. From 8378a34728eee720e82a9de3ca2f953c999515df Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sat, 21 Sep 2024 23:05:20 +0200 Subject: [PATCH 35/36] Remove unused function --- src/pytest_bdd/parser.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/src/pytest_bdd/parser.py b/src/pytest_bdd/parser.py index a18b93f64..0ff8666a3 100644 --- a/src/pytest_bdd/parser.py +++ b/src/pytest_bdd/parser.py @@ -330,22 +330,6 @@ def get_tag_names(tag_data: list[GherkinTag]) -> set[str]: """ return {tag.name.lstrip("@") for tag in tag_data} - @staticmethod - def get_step_type(keyword: str) -> str | None: - """Map a step keyword to its corresponding type. - - Args: - keyword (str): The keyword for the step (e.g., 'given', 'when', 'then'). - - Returns: - Optional[str]: The type of the step, or None if the keyword is unknown. - """ - return { - "given": GIVEN, - "when": WHEN, - "then": THEN, - }.get(keyword) - def parse_steps(self, steps_data: list[GherkinStep]) -> list[Step]: """Parse a list of step data into Step objects. 
From 572934c7c113734025232842c7e8d3d762859ba3 Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sat, 21 Sep 2024 23:09:50 +0200 Subject: [PATCH 36/36] Update poetry --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 1cbd188fd..1f844e480 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -47,7 +47,7 @@ jobs: - name: Install poetry run: | - python -m pip install poetry==1.8.2 + python -m pip install poetry==1.8.3 - name: Configure poetry run: |