buildcache push: ensure bool arguments for include_* (#35632)

Fixes a bug introduced in 44ed0de8c0, where the push method of binary_distribution
now takes the named arguments include_root and include_dependencies in order to
avoid the **kwargs hole.

But the call site wasn't updated and we passed a dict of keys/values instead of
keyword arguments, which resulted in a call like this:

```
push(include_root={"include_root": True, "include_dependencies": False})
```

This commit fixes that, and adds a test verifying that we push the correct packages.
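For illustration, here is a minimal, standalone sketch of the failure mode. The simplified `push` below is hypothetical and only mimics the shape of the real function in binary_distribution:

```
# Hypothetical, simplified stand-in for bindist.push -- not the real Spack function.
def push(specs, push_url, include_root=True, include_dependencies=True, **kwargs):
    return include_root, include_dependencies

spec_kwargs = {"include_root": True, "include_dependencies": False}

# Passing the dict positionally binds the whole dict to include_root,
# while include_dependencies silently keeps its default:
print(push(["dttop"], "file:///mirror", spec_kwargs))
# -> ({'include_root': True, 'include_dependencies': False}, True)

# The intended call unpacks the dict into keyword arguments:
print(push(["dttop"], "file:///mirror", **spec_kwargs))
# -> (True, False)
```

Because a non-empty dict is truthy, the buggy call pushed the wrong set of packages without raising any error.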
Harmen Stoppels, 2023-02-23 01:44:47 +01:00 (committed by GitHub)
parent e27d3c4f75
commit 3d41b71664
3 changed files with 73 additions and 3 deletions

```
@@ -1332,7 +1332,7 @@ def nodes_to_be_packaged(specs, root=True, dependencies=True):
     return list(filter(packageable, nodes))
 
 
-def push(specs, push_url, include_root=True, include_dependencies=True, **kwargs):
+def push(specs, push_url, include_root: bool = True, include_dependencies: bool = True, **kwargs):
     """Create a binary package for each of the specs passed as input and push them
     to a given push URL.
@@ -1345,6 +1345,10 @@ def push(specs, push_url, include_root=True, include_dependencies=True, **kwargs
         **kwargs: TODO
     """
+    # Be explicit about the argument type
+    if type(include_root) != bool or type(include_dependencies) != bool:
+        raise ValueError("Expected include_root/include_dependencies to be True/False")
     nodes = nodes_to_be_packaged(specs, root=include_root, dependencies=include_dependencies)
     # TODO: This seems to be an easy target for task
```
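With that guard in place, a stray dict (or any other non-bool) passed for either flag fails fast instead of silently selecting the wrong packages. A minimal sketch of the behavior, using a hypothetical `check_flags` helper rather than the real `push`:

```
# Standalone sketch of the added guard -- illustration only, not the Spack module.
def check_flags(include_root, include_dependencies):
    if type(include_root) != bool or type(include_dependencies) != bool:
        raise ValueError("Expected include_root/include_dependencies to be True/False")

check_flags(True, False)  # fine

try:
    # The shape of the old buggy call: a dict bound to include_root.
    check_flags({"include_root": True, "include_dependencies": False}, True)
except ValueError as err:
    print(err)  # Expected include_root/include_dependencies to be True/False
```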

```
@@ -1437,9 +1437,8 @@ def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
     hashes = env.all_hashes() if env else None
     matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
     push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
-    spec_kwargs = {"include_root": True, "include_dependencies": False}
     kwargs = {"force": True, "allow_root": True, "unsigned": unsigned}
-    bindist.push(matches, push_url, spec_kwargs, **kwargs)
+    bindist.push(matches, push_url, include_root=True, include_dependencies=False, **kwargs)
 
 
 def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
```

```
@@ -885,3 +885,70 @@ def urlopen(request: urllib.request.Request):
     with pytest.raises(bindist.FetchIndexError, match="Could not fetch index"):
         fetcher.conditional_fetch()
+
+
+@pytest.mark.parametrize(
+    "root,deps,expected",
+    [
+        (
+            True,
+            True,
+            [
+                "dttop",
+                "dtbuild1",
+                "dtbuild2",
+                "dtlink2",
+                "dtrun2",
+                "dtlink1",
+                "dtlink3",
+                "dtlink4",
+                "dtrun1",
+                "dtlink5",
+                "dtrun3",
+                "dtbuild3",
+            ],
+        ),
+        (
+            False,
+            True,
+            [
+                "dtbuild1",
+                "dtbuild2",
+                "dtlink2",
+                "dtrun2",
+                "dtlink1",
+                "dtlink3",
+                "dtlink4",
+                "dtrun1",
+                "dtlink5",
+                "dtrun3",
+                "dtbuild3",
+            ],
+        ),
+        (True, False, ["dttop"]),
+        (False, False, []),
+    ],
+)
+def test_correct_specs_are_pushed(
+    root, deps, expected, default_mock_concretization, tmpdir, temporary_store, monkeypatch
+):
+    # Concretize dttop and add it to the temporary database (without prefixes)
+    spec = default_mock_concretization("dttop")
+    temporary_store.db.add(spec, directory_layout=None)
+
+    # Create a mirror push url
+    push_url = spack.mirror.Mirror.from_local_path(str(tmpdir)).push_url
+
+    packages_to_push = []
+
+    def fake_build_tarball(node, push_url, **kwargs):
+        assert push_url == push_url
+        assert not kwargs
+        assert isinstance(node, Spec)
+        packages_to_push.append(node.name)
+
+    monkeypatch.setattr(bindist, "_build_tarball", fake_build_tarball)
+
+    bindist.push([spec], push_url, include_root=root, include_dependencies=deps)
+
+    assert packages_to_push == expected
```