[pre-commit.ci] pre-commit autoupdate #5582

Merged · 2 commits · Jan 29, 2025
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -58,7 +58,7 @@ repos:
       # auto format Python codes within docstrings
       - id: blacken-docs
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.6
+    rev: v0.9.3
     hooks:
       # lint & attempt to correct failures (e.g. pyupgrade)
       - id: ruff
@@ -87,7 +87,7 @@ repos:
             tests/
           )
   - repo: https://github.com/python-jsonschema/check-jsonschema
-    rev: 0.30.0
+    rev: 0.31.0
     hooks:
       # verify github syntaxes
       - id: check-github-workflows
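Nearly every hunk in this PR is the same mechanical rewrite: ruff's 0.9 formatter keeps a long assert's condition on the `assert` line and parenthesizes the message, instead of parenthesizing the condition and letting the message trail the closing paren. A minimal sketch of the two layouts (illustrative names, not code from this repository):

```python
actual, expected = 4, 4

# ruff <= 0.8: the condition is parenthesized and the message trails the closing paren
assert (
    actual == expected
), f"mismatch: {actual} != {expected}"

# ruff >= 0.9: the condition stays on the assert line and the message is parenthesized
assert actual == expected, (
    f"mismatch: {actual} != {expected}"
)
```

Both layouts are semantically identical; only the wrapping changes.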
36 changes: 18 additions & 18 deletions conda_build/build.py
@@ -292,9 +292,9 @@ def regex_files_rg(
            # match_line_number = match['data']['line_number']
            # match_absolute_offset = match['data']['absolute_offset']
            if old_stage == "begin":
-                assert (
-                    match_filename_begin == match_filename
-                ), f"{match_filename_begin} != \n {match_filename}"
+                assert match_filename_begin == match_filename, (
+                    f"{match_filename_begin} != \n {match_filename}"
+                )
            if match_filename not in match_records:
                if debug_this:
                    # We could add: #'line': match_line, 'line_number': match_line_number but it would
@@ -975,21 +975,21 @@ def get_all_replacements(variant):
        return []

    repl = variant["replacements"]
-    assert isinstance(
-        repl, dict
-    ), f"Found 'replacements' ({repl}), but it is not a dict"
-    assert (
-        "all_replacements" in repl
-    ), f"Found 'replacements' ({repl}), but it doesn't contain 'all_replacements'"
+    assert isinstance(repl, dict), (
+        f"Found 'replacements' ({repl}), but it is not a dict"
+    )
+    assert "all_replacements" in repl, (
+        f"Found 'replacements' ({repl}), but it doesn't contain 'all_replacements'"
+    )

    repl = repl["all_replacements"]
-    assert isinstance(
-        repl, list
-    ), f"Found 'all_replacements' ({repl}), but it is not a list"
+    assert isinstance(repl, list), (
+        f"Found 'all_replacements' ({repl}), but it is not a list"
+    )
    if repl:
-        assert isinstance(
-            repl[0], dict
-        ), f"Found 'all_replacements[0]' ({repl[0]}), but it is not a dict"
+        assert isinstance(repl[0], dict), (
+            f"Found 'all_replacements[0]' ({repl[0]}), but it is not a dict"
+        )

    return repl

@@ -2654,9 +2654,9 @@ def build(
                    os.path.join(m.config.work_dir, test_script),
                )

-            assert (
-                output_d.get("type") != "conda" or m.final
-            ), f"output metadata for {m.dist()} is not finalized"
+            assert output_d.get("type") != "conda" or m.final, (
+                f"output metadata for {m.dist()} is not finalized"
+            )
            pkg_path = bldpkg_path(m)
            if pkg_path not in built_packages and pkg_path not in new_pkgs:
                log.info(f"Packaging {m.name()}")
2 changes: 1 addition & 1 deletion conda_build/cli/validators.py
@@ -10,7 +10,7 @@

 CONDA_PKG_OR_RECIPE_ERROR_MESSAGE = (
     "\nUnable to parse provided recipe directory or package file.\n\n"
     f"Please make sure this argument is either a valid package \n"
-    f'file ({" or ".join(CONDA_PACKAGE_EXTENSIONS)}) or points to a directory containing recipe.'
+    f"file ({' or '.join(CONDA_PACKAGE_EXTENSIONS)}) or points to a directory containing recipe."
 )

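The quote flip above is a knock-on effect of the same bump: ruff 0.9 formats f-strings, so quotes inside a replacement field no longer pin the outer quote style, and the literal can be normalized to the project's preferred double quotes. A sketch under assumed values (the `extensions` list is hypothetical):

```python
extensions = [".conda", ".tar.bz2"]

# ruff <= 0.8 left the outer single quotes alone to avoid touching the inner double quotes
old = f'file ({" or ".join(extensions)})'
# ruff >= 0.9 normalizes to outer double quotes and rewrites the inner pair to single
new = f"file ({' or '.join(extensions)})"
assert old == new  # the rendered string is identical either way
```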
12 changes: 6 additions & 6 deletions conda_build/metadata.py
@@ -1466,9 +1466,9 @@ def get_value(self, name, default=None, autotype=True):

        section_data = self.get_section(section)
        if isinstance(section_data, dict):
-            assert (
-                not index
-            ), f"Got non-zero index ({index}), but section {section} is not a list."
+            assert not index, (
+                f"Got non-zero index ({index}), but section {section} is not a list."
+            )
        elif isinstance(section_data, list):
            # The 'source' section can be written a list, in which case the name
            # is passed in with an index, e.g. get_value('source/0/git_url')
@@ -1483,9 +1483,9 @@ def get_value(self, name, default=None, autotype=True):
                section_data = {}
            else:
                section_data = section_data[index]
-                assert isinstance(
-                    section_data, dict
-                ), f"Expected {section}/{index} to be a dict"
+                assert isinstance(section_data, dict), (
+                    f"Expected {section}/{index} to be a dict"
+                )

        value = section_data.get(key, default)

2 changes: 1 addition & 1 deletion conda_build/os_utils/liefldd.py
@@ -772,7 +772,7 @@ def _parse_ar_hdr(content, index):
                obj_ends.add(offsets[i])
                if debug_static_archives:
                    print(
-                        f"symname {syms[i]}, offset {offsets[i]}, name {name}, elf? {content[index2:index2 + 4]}"
+                        f"symname {syms[i]}, offset {offsets[i]}, name {name}, elf? {content[index2 : index2 + 4]}"
                    )
            elif name.startswith(b"__.SYMDEF"):
                # Reference:
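The change above is subtler: since ruff 0.9 now formats the code inside f-string replacement fields, this slice picks up the usual rule that bounds written as compound expressions get spaces around the colon. A small runnable sketch with made-up values:

```python
content = b"!<arch>\nexample"
index2 = 1

# simple bounds: no spaces around the slice colon
head = content[0:4]
# compound bound (index2 + 4): spaces on both sides of the colon
window = content[index2 : index2 + 4]
assert window == content[index2:index2 + 4]  # same slice, different layout
```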
6 changes: 3 additions & 3 deletions conda_build/os_utils/pyldd.py
@@ -1263,9 +1263,9 @@ def main_maybe_test():
            else:
                that = this
            print("\n".join(this))
-            assert (
-                set(this) == set(that)
-            ), f"py-ldd result incorrect for {codefile}, this:\n{set(this)}\nvs that:\n{set(that)}"
+            assert set(this) == set(that), (
+                f"py-ldd result incorrect for {codefile}, this:\n{set(this)}\nvs that:\n{set(that)}"
+            )
    else:
        return main(sys.argv)

8 changes: 4 additions & 4 deletions conda_build/skeletons/cran.py
@@ -1577,10 +1577,10 @@ def skeletonize(
        inputs_dict = package_to_inputs_dict(
            output_dir, output_suffix, git_tag, lower_name, None
        )
-        assert (
-            lower_name == inputs_dict["pkg-name"]
-        ), "name {} != inputs_dict['pkg-name'] {}".format(
-            name, inputs_dict["pkg-name"]
+        assert lower_name == inputs_dict["pkg-name"], (
+            "name {} != inputs_dict['pkg-name'] {}".format(
+                name, inputs_dict["pkg-name"]
+            )
        )
        assert lower_name not in package_list
        package_dicts.update(
9 changes: 4 additions & 5 deletions conda_build/skeletons/pypi.py
@@ -595,8 +595,7 @@ def add_parser(repos):
    pypi.add_argument(
        "--pin-numpy",
        action="store_true",
-        help="Ensure that the generated recipe pins the version of numpy"
-        "to CONDA_NPY.",
+        help="Ensure that the generated recipe pins the version of numpyto CONDA_NPY.",
    )

    pypi.add_argument(
@@ -931,9 +930,9 @@ def _spec_from_line(line):
        return name + cc.replace("=", " ")
    elif pc:
        if pc.startswith("~= "):
-            assert (
-                pc.count("~=") == 1
-            ), f"Overly complex 'Compatible release' spec not handled {line}"
+            assert pc.count("~=") == 1, (
+                f"Overly complex 'Compatible release' spec not handled {line}"
+            )
            assert pc.count("."), f"No '.' in 'Compatible release' version {line}"
            ver = pc.replace("~= ", "")
            ver2 = ".".join(ver.split(".")[:-1]) + ".*"
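The first pypi.py hunk above shows another 0.9 style change: implicitly concatenated string literals are joined when they fit on one line. The join is character-for-character, so the long-standing missing space between "numpy" and "to" now reads "numpyto" on a single line instead of hiding at a line break; a quick demonstration:

```python
# the two fragments below concatenate with no separator between them
help_text = "Ensure that the generated recipe pins the version of numpy" "to CONDA_NPY."
assert "numpyto" in help_text  # the space was already missing before the reformat
```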
6 changes: 3 additions & 3 deletions conda_build/skeletons/rpm.py
@@ -333,9 +333,9 @@ def get_repo_dict(repomd_url, data_type, dict_massager, cdt, src_cache):
    cached_path, cached_csum = cache_file(
        src_cache, xmlgz_file, None, cdt["checksummer"]
    )
-    assert (
-        csum == cached_csum
-    ), f"Checksum for {xmlgz_file} does not match value in {repomd_url}"
+    assert csum == cached_csum, (
+        f"Checksum for {xmlgz_file} does not match value in {repomd_url}"
+    )
    with gzip.open(cached_path, "rb") as gz:
        xml_content = gz.read()
        xml_csum = cdt["checksummer"]()
2 changes: 1 addition & 1 deletion conda_build/utils.py
@@ -1130,7 +1130,7 @@ def convert_path_for_cygwin_or_msys2(exe, path):
 def get_skip_message(m: MetaData) -> str:
     return (
         f"Skipped: {m.name()} from {m.path} defines build/skip for this configuration "
-        f"({({k: m.config.variant[k] for k in m.get_used_vars()})})."
+        f"({ ({k: m.config.variant[k] for k in m.get_used_vars()}) })."
     )

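The padding added above appears to come from ruff 0.9's f-string formatting as well: when a replacement field's expression is essentially a brace-delimited literal or comprehension, the formatter seems to pad it with spaces so its braces don't sit flush against the f-string's own delimiters. That reading of the rule is an assumption, but the behavior itself is easy to verify:

```python
m = {"python": "3.12", "numpy": "2.0"}

# flush braces are easy to misread next to the f-string's own delimiters
cramped = f"({({k: m[k] for k in sorted(m)})})."
# ruff >= 0.9 pads the replacement field when its expression is brace-delimited
padded = f"({ ({k: m[k] for k in sorted(m)}) })."
assert cramped == padded  # whitespace inside the field never changes the output
```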
6 changes: 3 additions & 3 deletions tests/cli/test_main_render.py
@@ -44,9 +44,9 @@ def test_render_add_channel(tmp_path: Path) -> None:
        "Expected version number on successful "
        f"rendering, but got only {required_package_details}"
    )
-    assert (
-        required_package_details[1] == "1.0"
-    ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}"
+    assert required_package_details[1] == "1.0", (
+        f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}"
+    )


 def test_render_with_empty_channel_fails(tmp_path: Path, empty_channel: Path) -> None:
36 changes: 18 additions & 18 deletions tests/test_api_build.py
@@ -960,17 +960,17 @@ def test_skip_compile_pyc(testing_config):
        _, ext = os.path.splitext(filename)
        basename = filename.split(".", 1)[0]
        if basename == "skip_compile_pyc":
-            assert (
-                not ext == ".pyc"
-            ), f"a skip_compile_pyc .pyc was compiled: {filename}"
+            assert not ext == ".pyc", (
+                f"a skip_compile_pyc .pyc was compiled: {filename}"
+            )
        if ext == ".pyc":
-            assert (
-                basename == "compile_pyc"
-            ), f"an unexpected .pyc was compiled: {filename}"
+            assert basename == "compile_pyc", (
+                f"an unexpected .pyc was compiled: {filename}"
+            )
            pyc_count = pyc_count + 1
-    assert (
-        pyc_count == 2
-    ), f"there should be 2 .pyc files, instead there were {pyc_count}"
+    assert pyc_count == 2, (
+        f"there should be 2 .pyc files, instead there were {pyc_count}"
+    )


 def test_detect_binary_files_with_prefix(testing_config):
@@ -991,9 +991,9 @@ def test_detect_binary_files_with_prefix(testing_config):
        or entry.endswith('"binary-has-prefix"')
    ]
    assert len(matches) == 1, "binary-has-prefix not recorded in info/has_prefix"
-    assert (
-        " binary " in matches[0]
-    ), "binary-has-prefix not recorded as binary in info/has_prefix"
+    assert " binary " in matches[0], (
+        "binary-has-prefix not recorded as binary in info/has_prefix"
+    )


 def test_skip_detect_binary_files_with_prefix(testing_config):
@@ -1026,9 +1026,9 @@ def test_fix_permissions(testing_config):
    outputs = api.build(recipe, config=testing_config)
    with tarfile.open(outputs[0]) as tf:
        for f in tf.getmembers():
-            assert (
-                f.mode & 0o444 == 0o444
-            ), f"tar member '{f.name}' has invalid (read) mode"
+            assert f.mode & 0o444 == 0o444, (
+                f"tar member '{f.name}' has invalid (read) mode"
+            )


 @pytest.mark.sanity
@@ -1685,9 +1685,9 @@ def test_pin_depends(testing_config):
    assert requires
    if hasattr(requires, "decode"):
        requires = requires.decode()
-    assert re.search(
-        r"python\=[23]\.", requires
-    ), "didn't find pinned python in info/requires"
+    assert re.search(r"python\=[23]\.", requires), (
+        "didn't find pinned python in info/requires"
+    )


 @pytest.mark.sanity
6 changes: 3 additions & 3 deletions tests/test_metadata.py
@@ -348,9 +348,9 @@ def test_hash_build_id(testing_metadata):
        if hdeps_tp == hdeps:
            found = True
            break
-    assert (
-        found
-    ), f"Did not find build that matched {hdeps} when testing each of DEFAULT_SUBDIRS"
+    assert found, (
+        f"Did not find build that matched {hdeps} when testing each of DEFAULT_SUBDIRS"
+    )
    assert testing_metadata.build_id() == hdeps + "_1"

18 changes: 9 additions & 9 deletions tests/test_subpackages.py
@@ -124,9 +124,9 @@ def test_intradependencies(testing_config):
    outputs2 = api.build(recipe, config=testing_config)
    assert len(outputs2) == 11
    outputs2_set = {os.path.basename(p) for p in outputs2}
-    assert (
-        outputs1_set == outputs2_set
-    ), f"pkgs differ :: get_output_file_paths()={outputs1_set} but build()={outputs2_set}"
+    assert outputs1_set == outputs2_set, (
+        f"pkgs differ :: get_output_file_paths()={outputs1_set} but build()={outputs2_set}"
+    )


 def test_git_in_output_version(testing_config, conda_build_test_recipe_envvar: str):
@@ -448,12 +448,12 @@ def test_inherit_build_number(testing_config):
    recipe = os.path.join(subpackage_dir, "_inherit_build_number")
    metadata_tuples = api.render(recipe, config=testing_config)
    for metadata, _, _ in metadata_tuples:
-        assert (
-            "number" in metadata.meta["build"]
-        ), "build number was not inherited at all"
-        assert (
-            int(metadata.meta["build"]["number"]) == 1
-        ), "build number should have been inherited as '1'"
+        assert "number" in metadata.meta["build"], (
+            "build number was not inherited at all"
+        )
+        assert int(metadata.meta["build"]["number"]) == 1, (
+            "build number should have been inherited as '1'"
+        )


 def test_circular_deps_cross(testing_config):
16 changes: 5 additions & 11 deletions tests/test_variants.py
@@ -471,9 +471,9 @@ def test_reduced_hashing_behavior(testing_config):
        finalize=False,
        bypass_env_check=True,
    )[0][0]
-    assert (
-        "c_compiler" in metadata.get_hash_contents()
-    ), "hash contents should contain c_compiler"
+    assert "c_compiler" in metadata.get_hash_contents(), (
+        "hash contents should contain c_compiler"
+    )
    assert re.search(
        "h[0-9a-f]{%d}" % testing_config.hash_length,  # noqa: UP031
        metadata.build_id(),
@@ -886,21 +886,15 @@ def test_find_used_variables_in_text(vars, text, found_vars):
 def test_find_used_variables_in_shell_script(tmp_path: Path) -> None:
    variants = ("FOO", "BAR", "BAZ", "QUX")
    (script := tmp_path / "script.sh").write_text(
-        f"${variants[0]}\n"
-        f"${{{variants[1]}}}\n"
-        f"${{{{{variants[2]}}}}}\n"
-        f"$${variants[3]}\n"
+        f"${variants[0]}\n${{{variants[1]}}}\n${{{{{variants[2]}}}}}\n$${variants[3]}\n"
    )
    assert find_used_variables_in_shell_script(variants, script) == {"FOO", "BAR"}


 def test_find_used_variables_in_batch_script(tmp_path: Path) -> None:
    variants = ("FOO", "BAR", "BAZ", "QUX")
    (script := tmp_path / "script.sh").write_text(
-        f"%{variants[0]}%\n"
-        f"%%{variants[1]}%%\n"
-        f"${variants[2]}\n"
-        f"${{{variants[3]}}}\n"
+        f"%{variants[0]}%\n%%{variants[1]}%%\n${variants[2]}\n${{{variants[3]}}}\n"
    )
    assert find_used_variables_in_batch_script(variants, script) == {"FOO", "BAR"}
