Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 7 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,12 @@ __migrator:
- package1
- package2

# The bot only looks at the host/run requirements of a recipe to determine which
# feedstocks to migrate. This option makes the bot look at build requirements too.
# It is only needed when doing a migration for a package with strong run exports,
# e.g., a compiler.
include_build_requirements: false

# If this key is set to dict, the conda-forge.yml will be modified by the migration
# with the contents of this dict. This can be used to add keys to the conda-forge.yml
# or to change them. You can replace subkeys by using a dot in the key name (e.g., `a.b.c`
Expand All @@ -182,7 +188,7 @@ __migrator:
# natural version ordering. Each changed pin can be mapped to a list
# that determines the ordering. The highest (e.g., item with highest list index)
# version is kept for version migrations.
oridering:
ordering:
pin1:
- value1
- value2
Expand Down
4 changes: 4 additions & 0 deletions conda_forge_tick/make_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,10 @@
"cxx_compiler_stub",
"fortran_compiler_stub",
"cuda_compiler_stub",
"m2w64_c_compiler_stub",
"m2w64_c_stdlib_stub",
"m2w64_cxx_compiler_stub",
"m2w64_fortran_compiler_stub",
]


Expand Down
8 changes: 8 additions & 0 deletions conda_forge_tick/make_migrators.py
Original file line number Diff line number Diff line change
Expand Up @@ -218,6 +218,7 @@ def add_rebuild_migration_yaml(
nominal_pr_limit: int = PR_LIMIT,
max_solver_attempts: int = 3,
force_pr_after_solver_attempts: int = MAX_SOLVER_ATTEMPTS * 2,
include_build_requirements: bool = False,
) -> None:
"""Adds rebuild migrator.

Expand Down Expand Up @@ -245,6 +246,8 @@ def add_rebuild_migration_yaml(
The number of PRs per hour, defaults to 5
force_pr_after_solver_attempts : int, optional
The number of solver attempts after which to force a PR, defaults to 100.
include_build_requirements : bool, optional
    Whether to also check the build requirements (in addition to host/run)
    when matching package_names, defaults to False.
"""

total_graph = create_rebuild_graph(
Expand All @@ -253,6 +256,7 @@ def add_rebuild_migration_yaml(
excluded_feedstocks,
exclude_pinned_pkgs=exclude_pinned_pkgs,
include_noarch=config.get("include_noarch", False),
include_build_requirements=include_build_requirements,
)

# Note at this point the graph is made of all packages that have a
Expand Down Expand Up @@ -439,6 +443,9 @@ def migration_factory(
set(loaded_yaml) | {ly.replace("_", "-") for ly in loaded_yaml}
) & all_package_names
exclude_pinned_pkgs = migrator_config.get("exclude_pinned_pkgs", True)
include_build_requirements = migrator_config.get(
"include_build_requirements", False
)

age = time.time() - loaded_yaml.get("migrator_ts", time.time())
age /= 24 * 60 * 60
Expand Down Expand Up @@ -480,6 +487,7 @@ def migration_factory(
nominal_pr_limit=_pr_limit,
max_solver_attempts=max_solver_attempts,
force_pr_after_solver_attempts=force_pr_after_solver_attempts,
include_build_requirements=include_build_requirements,
)
if skip_solver_checks:
assert not migrators[-1].check_solvable
Expand Down
5 changes: 4 additions & 1 deletion conda_forge_tick/migrators/migration_yaml.py
Original file line number Diff line number Diff line change
Expand Up @@ -648,6 +648,7 @@ def create_rebuild_graph(
excluded_feedstocks: MutableSet[str] = None,
exclude_pinned_pkgs: bool = True,
include_noarch: bool = False,
include_build_requirements: bool = False,
) -> nx.DiGraph:
total_graph = copy.deepcopy(gx)
excluded_feedstocks = set() if excluded_feedstocks is None else excluded_feedstocks
Expand All @@ -669,6 +670,8 @@ def create_rebuild_graph(
host = requirements.get("host", set())
build = requirements.get("build", set())
bh = host or build
if include_build_requirements:
bh = bh | build
only_python = "python" in package_names
inclusion_criteria = bh & set(package_names) and (
include_noarch or not all_noarch(attrs, only_python=only_python)
Expand All @@ -678,7 +681,7 @@ def create_rebuild_graph(
all_reqs = requirements.get("run", set())
if inclusion_criteria:
all_reqs = all_reqs | requirements.get("test", set())
all_reqs = all_reqs | (host or build)
all_reqs = all_reqs | bh
rq = get_deps_from_outputs_lut(
all_reqs,
gx.graph["outputs_lut"],
Expand Down