Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,9 @@ A brief description of the categories of changes:
_default_ visibility of generated targets. See the [docs][python_default_visibility]
for details.

* (wheel) Add support for the `data_files` attribute in the `py_wheel` rule
([#1777](https://github.com/bazelbuild/rules_python/issues/1777))

[0.XX.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.XX.0
[python_default_visibility]: gazelle/README.md#directive-python_default_visibility

Expand Down
15 changes: 15 additions & 0 deletions examples/wheel/BUILD.bazel
Original file line number Diff line number Diff line change
Expand Up @@ -312,6 +312,20 @@ py_wheel(
deps = [":example_pkg"],
)

# Package only extra data files (no py_library deps), exercising the
# `data_files` attribute: entries are installed under the wheel's .data/
# directory using the "scripts/" and "data/" prefixes.
py_wheel(
    name = "minimal_data_files",
    testonly = True,  # Set this to verify the generated .dist target doesn't break things

    # Re-using some files already checked into the repo.
    data_files = {
        "//examples/wheel:NOTICE": "scripts/NOTICE",
        "README.md": "data/target/path/README.md",
    },
    distribution = "minimal_data_files",
    version = "0.0.1",
)

py_test(
name = "wheel_test",
srcs = ["wheel_test.py"],
Expand All @@ -321,6 +335,7 @@ py_test(
":custom_package_root_multi_prefix_reverse_order",
":customized",
":filename_escaping",
":minimal_data_files",
":minimal_with_py_library",
":minimal_with_py_library_with_stamp",
":minimal_with_py_package",
Expand Down
17 changes: 17 additions & 0 deletions examples/wheel/wheel_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -472,6 +472,23 @@ def test_requires_file_and_extra_requires_files(self):
requires,
)

def test_minimal_data_files(self):
    """Verify data_files entries land under <dist>-<version>.data/ in the wheel.

    Checks the exact archive listing so unexpected extra entries also fail.
    """
    filename = self._get_path("minimal_data_files-0.0.1-py3-none-any.whl")

    with zipfile.ZipFile(filename) as zf:
        self.assertAllEntriesHasReproducibleMetadata(zf)
        # Fix: dropped the unused `metadata_file = None` local left over
        # from copy-pasting a neighboring test.
        self.assertEqual(
            zf.namelist(),
            [
                "minimal_data_files-0.0.1.dist-info/WHEEL",
                "minimal_data_files-0.0.1.dist-info/METADATA",
                "minimal_data_files-0.0.1.data/data/target/path/README.md",
                "minimal_data_files-0.0.1.data/scripts/NOTICE",
                "minimal_data_files-0.0.1.dist-info/RECORD",
            ],
        )


# Allow running this test file directly, outside the Bazel test runner.
if __name__ == "__main__":
    unittest.main()
28 changes: 28 additions & 0 deletions python/private/py_wheel.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,7 @@ See [`py_wheel_dist`](#py_wheel_dist) for more info.

_feature_flags = {}

ALLOWED_DATA_FILE_PREFIX = ("purelib", "platlib", "headers", "scripts", "data")
_requirement_attrs = {
"extra_requires": attr.string_list_dict(
doc = ("A mapping of [extras](https://peps.python.org/pep-0508/#extras) options to lists of requirements (similar to `requires`). This attribute " +
Expand Down Expand Up @@ -172,6 +173,11 @@ _other_attrs = {
"classifiers": attr.string_list(
doc = "A list of strings describing the categories for the package. For valid classifiers see https://pypi.org/classifiers",
),
"data_files": attr.label_keyed_string_dict(
doc = ("Any file that is not normally installed inside site-packages goes into the .data directory, named " +
"as the .dist-info directory but with the .data/ extension. Allowed paths: {prefixes}".format(prefixes = ALLOWED_DATA_FILE_PREFIX)),
allow_files = True,
),
"description_content_type": attr.string(
doc = ("The type of contents in description_file. " +
"If not provided, the type will be inferred from the extension of description_file. " +
Expand Down Expand Up @@ -473,6 +479,28 @@ def _py_wheel_impl(ctx):
filename + ";" + target_files[0].path,
)

for target, filename in ctx.attr.data_files.items():
target_files = target.files.to_list()
if len(target_files) != 1:
fail(
"Multi-file target listed in data_files %s",
filename,
)

if filename.partition("/")[0] not in ALLOWED_DATA_FILE_PREFIX:
fail(
"The target data file must start with one of these prefixes: '%s'. Target filepath: '%s'" %
(
",".join(ALLOWED_DATA_FILE_PREFIX),
filename,
),
)
other_inputs.extend(target_files)
args.add(
"--data_files",
filename + ";" + target_files[0].path,
)

ctx.actions.run(
mnemonic = "PyWheel",
inputs = depset(direct = other_inputs, transitive = [inputs_to_package]),
Expand Down
3 changes: 2 additions & 1 deletion python/private/repack_whl.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,8 +150,9 @@ def main(sys_argv):
logging.debug(f"Found dist-info dir: {distinfo_dir}")
record_path = distinfo_dir / "RECORD"
record_contents = record_path.read_text() if record_path.exists() else ""
distribution_prefix = distinfo_dir.with_suffix("").name

with _WhlFile(args.output, mode="w", distinfo_dir=distinfo_dir) as out:
with _WhlFile(args.output, mode="w", distribution_prefix=distribution_prefix) as out:
for p in _files_to_pack(patched_wheel_dir, record_contents):
rel_path = p.relative_to(patched_wheel_dir)
out.add_file(str(rel_path), p)
Expand Down
74 changes: 74 additions & 0 deletions tests/py_wheel/py_wheel_tests.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
"""Test for py_wheel."""

load("@rules_testing//lib:analysis_test.bzl", "analysis_test", "test_suite")
load("@rules_testing//lib:truth.bzl", "matching")
load("@rules_testing//lib:util.bzl", rt_util = "util")
load("//python:packaging.bzl", "py_wheel")
load("//python/private:py_wheel_normalize_pep440.bzl", "normalize_pep440") # buildifier: disable=bzl-visibility
Expand Down Expand Up @@ -46,6 +47,79 @@ def _test_metadata_impl(env, target):

_tests.append(_test_metadata)

def _test_data(name):
    """Analysis test: a valid data_files entry is forwarded to the wheel action."""
    target_name = name + "_data"
    rt_util.helper_target(
        py_wheel,
        name = target_name,
        distribution = "mydist_" + name,
        version = "0.0.0",
        data_files = {"source_name": "scripts/wheel_name"},
    )
    analysis_test(
        name = name,
        impl = _test_data_impl,
        target = target_name,
    )

def _test_data_impl(env, target):
    """Assert the PyWheel action receives the data_files flag and input."""
    wheel_action = env.expect.that_target(target).action_named("PyWheel")
    wheel_action.contains_at_least_args(
        ["--data_files", "scripts/wheel_name;tests/py_wheel/source_name"],
    )
    wheel_action.contains_at_least_inputs(["tests/py_wheel/source_name"])

_tests.append(_test_data)

def _test_data_bad_path(name):
    """Analysis test: a data_files path with a disallowed prefix fails analysis."""
    target_name = name + "_data"
    rt_util.helper_target(
        py_wheel,
        name = target_name,
        distribution = "mydist_" + name,
        version = "0.0.0",
        # "unsupported_path" is not one of the allowed .data/ prefixes.
        data_files = {"source_name": "unsupported_path/wheel_name"},
    )
    analysis_test(
        name = name,
        impl = _test_data_bad_path_impl,
        target = target_name,
        expect_failure = True,
    )

def _test_data_bad_path_impl(env, target):
    """Assert the failure message mentions the required path prefix."""
    failures = env.expect.that_target(target).failures()
    failures.contains_predicate(
        matching.str_matches("target data file must start with"),
    )

_tests.append(_test_data_bad_path)

def _test_data_bad_path_but_right_prefix(name):
    """Analysis test: a prefix that merely starts with an allowed word still fails.

    "scripts2/..." begins with the characters of "scripts" but is not the
    "scripts" directory itself, so it must be rejected.
    """
    target_name = name + "_data"
    rt_util.helper_target(
        py_wheel,
        name = target_name,
        distribution = "mydist_" + name,
        version = "0.0.0",
        data_files = {"source_name": "scripts2/wheel_name"},
    )
    analysis_test(
        name = name,
        impl = _test_data_bad_path_but_right_prefix_impl,
        target = target_name,
        expect_failure = True,
    )

def _test_data_bad_path_but_right_prefix_impl(env, target):
    """Assert the failure message mentions the required path prefix."""
    failures = env.expect.that_target(target).failures()
    failures.contains_predicate(
        matching.str_matches("target data file must start with"),
    )

_tests.append(_test_data_bad_path_but_right_prefix)

def _test_content_type_from_attr(name):
rt_util.helper_target(
py_wheel,
Expand Down
65 changes: 39 additions & 26 deletions tools/wheelmaker.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,29 +102,33 @@ def __init__(
filename,
*,
mode,
distinfo_dir: str | Path,
distribution_prefix: str,
strip_path_prefixes=None,
compression=zipfile.ZIP_DEFLATED,
**kwargs,
):
self._distinfo_dir: str = Path(distinfo_dir).name
self._distribution_prefix = distribution_prefix

self._strip_path_prefixes = strip_path_prefixes or []
# Entries for the RECORD file as (filename, hash, size) tuples.
self._record = []

super().__init__(filename, mode=mode, compression=compression, **kwargs)

def distinfo_path(self, basename):
return f"{self._distinfo_dir}/{basename}"
return f"{self._distribution_prefix}.dist-info/{basename}"

def data_path(self, basename):
    """Return the in-archive path for *basename* under the wheel's .data/ dir."""
    data_dir = self._distribution_prefix + ".data"
    return f"{data_dir}/{basename}"

def add_file(self, package_filename, real_filename):
"""Add given file to the distribution."""

def arcname_from(name):
# Always use unix path separators.
normalized_arcname = name.replace(os.path.sep, "/")
# Don't manipulate names filenames in the .distinfo directory.
if normalized_arcname.startswith(self._distinfo_dir):
# Don't manipulate names filenames in the .distinfo or .data directories.
if normalized_arcname.startswith(self._distribution_prefix):
return normalized_arcname
for prefix in self._strip_path_prefixes:
if normalized_arcname.startswith(prefix):
Expand Down Expand Up @@ -237,11 +241,9 @@ def __init__(
self._wheelname_fragment_distribution_name = escape_filename_distribution_name(
self._name
)
self._distinfo_dir = (
self._wheelname_fragment_distribution_name
+ "-"
+ self._version
+ ".dist-info/"

self._distribution_prefix = (
self._wheelname_fragment_distribution_name + "-" + self._version
)

self._whlfile = None
Expand All @@ -250,7 +252,7 @@ def __enter__(self):
self._whlfile = _WhlFile(
self.filename(),
mode="w",
distinfo_dir=self._distinfo_dir,
distribution_prefix=self._distribution_prefix,
strip_path_prefixes=self._strip_path_prefixes,
)
return self
Expand Down Expand Up @@ -280,6 +282,9 @@ def disttags(self):
def distinfo_path(self, basename):
    """Delegate dist-info path construction to the backing _WhlFile."""
    backing_file = self._whlfile
    return backing_file.distinfo_path(basename)

def data_path(self, basename):
    """Delegate .data/ path construction to the backing _WhlFile."""
    backing_file = self._whlfile
    return backing_file.data_path(basename)

def add_file(self, package_filename, real_filename):
    """Add given file to the distribution by forwarding to the open archive."""
    archive = self._whlfile
    archive.add_file(package_filename, real_filename)
Expand Down Expand Up @@ -436,6 +441,12 @@ def parse_args() -> argparse.Namespace:
help="'filename;real_path' pairs listing extra files to include in"
"dist-info directory. Can be supplied multiple times.",
)
contents_group.add_argument(
"--data_files",
action="append",
help="'filename;real_path' pairs listing data files to include in"
"data directory. Can be supplied multiple times.",
)

build_group = parser.add_argument_group("Building requirements")
build_group.add_argument(
Expand All @@ -452,25 +463,25 @@ def parse_args() -> argparse.Namespace:
return parser.parse_args(sys.argv[1:])


def _parse_file_pairs(content: List[str]) -> List[List[str]]:
"""
Parse ; delimited lists of files into a 2D list.
"""
return [i.split(";", maxsplit=1) for i in content or []]


def main() -> None:
arguments = parse_args()

if arguments.input_file:
input_files = [i.split(";") for i in arguments.input_file]
else:
input_files = []

if arguments.extra_distinfo_file:
extra_distinfo_file = [i.split(";") for i in arguments.extra_distinfo_file]
else:
extra_distinfo_file = []
input_files = _parse_file_pairs(arguments.input_file)
extra_distinfo_file = _parse_file_pairs(arguments.extra_distinfo_file)
data_files = _parse_file_pairs(arguments.data_files)

if arguments.input_file_list:
for input_file in arguments.input_file_list:
with open(input_file) as _file:
input_file_list = _file.read().splitlines()
for _input_file in input_file_list:
input_files.append(_input_file.split(";"))
for input_file in arguments.input_file_list:
with open(input_file) as _file:
input_file_list = _file.read().splitlines()
for _input_file in input_file_list:
input_files.append(_input_file.split(";"))

all_files = get_files_to_package(input_files)
# Sort the files for reproducible order in the archive.
Expand Down Expand Up @@ -570,6 +581,8 @@ def main() -> None:
)

# Sort the files for reproducible order in the archive.
for filename, real_path in sorted(data_files):
maker.add_file(maker.data_path(filename), real_path)
for filename, real_path in sorted(extra_distinfo_file):
maker.add_file(maker.distinfo_path(filename), real_path)

Expand Down