diff --git a/.pylintrc b/.pylintrc
index 1f7fff44c..1ee35ccca 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -95,7 +95,7 @@ confidence=
# be nice, but too much work atm.
#
# 3) Messages related to code formatting
-# Since we use Black to format code automatically, there's no need for
+# Since we use Ruff to format code automatically, there's no need for
# pylint to also check for those things.
#
@@ -146,7 +146,7 @@ disable=,
unused-private-member,
##################################################
- # Formatting-related messages, enforced by Black #
+ # Formatting-related messages, enforced by Ruff #
##################################################
line-too-long,
diff --git a/doc/source/hacking/coding_guidelines.rst b/doc/source/hacking/coding_guidelines.rst
index eafb0c7d9..68e42b36a 100644
--- a/doc/source/hacking/coding_guidelines.rst
+++ b/doc/source/hacking/coding_guidelines.rst
@@ -32,7 +32,7 @@ Approximate PEP-8 Style
~~~~~~~~~~~~~~~~~~~~~~~
Python coding style for BuildStream is approximately `pep8 `_.
-The coding style is automatically enforced by `black `_.
+The coding style is automatically enforced by `ruff `_.
Formatting will be checked automatically when running the testsuite on CI. For
details on how to format your code locally, see :ref:`formatting code `.
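The CI check described above corresponds to a non-modifying run of the formatter; a minimal sketch, assuming ``ruff`` reads the ``[tool.ruff]`` configuration added to ``pyproject.toml`` in this change::

   ruff format --check .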
diff --git a/doc/source/hacking/using_the_testsuite.rst b/doc/source/hacking/using_the_testsuite.rst
index 418bdb7d9..7c30b13e4 100644
--- a/doc/source/hacking/using_the_testsuite.rst
+++ b/doc/source/hacking/using_the_testsuite.rst
@@ -193,7 +193,7 @@ consists of running the ``pylint`` tool, run the following::
Formatting code
~~~~~~~~~~~~~~~
Similar to linting, code formatting is also done via a ``tox`` environment. To
-format the code using the ``black`` tool, run the following::
+format the code using the ``ruff`` tool, run the following::
tox -e format
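As a rough local equivalent (a sketch, assuming a locally installed ``ruff`` that picks up the ``[tool.ruff]`` settings added to ``pyproject.toml`` below), the formatter can also be invoked directly from the repository root::

   ruff format .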
diff --git a/pyproject.toml b/pyproject.toml
index 7c6008b57..45657ab5d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,23 +23,6 @@ requires = [
]
build-backend = "setuptools.build_meta"
-[tool.black]
-line-length = 119
-exclude = '''
-(
- /(
- \.eggs
- | \.git
- | \.mypy_cache
- | \.tox
- | _build
- | build
- | dist
- )/
- | src/buildstream/_protos
-)
-'''
-
[tool.cibuildwheel]
build-frontend = "build"
environment = { BST_BUNDLE_BUILDBOX = "1" }
@@ -64,3 +47,9 @@ skip = [
# The prebuilt BuildBox binaries link against GLibc so will work on manylinux but not musllinux
"*-musllinux_*",
]
+
+[tool.ruff]
+extend-exclude = [
+ "src/buildstream/_protos",
+]
+line-length = 119
diff --git a/setup.py b/setup.py
index fac86b726..921000da7 100755
--- a/setup.py
+++ b/setup.py
@@ -98,9 +98,9 @@ def list_buildbox_binaries():
if missing_binaries:
paths_text = "\n".join([" * {}".format(path) for path in missing_binaries])
print(
- "Expected BuildBox binaries were not found. "
- "Set BST_BUNDLE_BUILDBOX=0 or provide:\n\n"
- "{}\n".format(paths_text),
+ "Expected BuildBox binaries were not found. Set BST_BUNDLE_BUILDBOX=0 or provide:\n\n{}\n".format(
+ paths_text
+ ),
file=sys.stderr,
)
raise SystemExit(1)
diff --git a/src/buildstream/__init__.py b/src/buildstream/__init__.py
index 54d509494..ccb930d50 100644
--- a/src/buildstream/__init__.py
+++ b/src/buildstream/__init__.py
@@ -18,7 +18,6 @@
import os
if "_BST_COMPLETION" not in os.environ:
-
# Special sauce to get the version from versioneer
from ._version import get_versions
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index 7ccc61c6a..eb03c6720 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -37,6 +37,7 @@
from .sandbox._config import SandboxConfig
from ._variables import Variables
+
# An Artifact class to abstract artifact operations
# from the Element class
#
@@ -48,7 +49,6 @@
# weak_key (str): The elements weak cache key
#
class Artifact:
-
version = 2
def __init__(self, element, context, *, strong_key=None, strict_key=None, weak_key=None):
@@ -214,7 +214,6 @@ def cache(
environment,
sandboxconfig,
):
-
context = self._context
element = self._element
size = 0
@@ -336,7 +335,6 @@ def cache(
# if a buildroot is present, not its contents.
#
def cached_buildroot(self):
-
buildroot_digest = self._get_field_digest("buildroot")
if buildroot_digest:
return self._cas.contains_directory(buildroot_digest)
@@ -352,7 +350,6 @@ def cached_buildroot(self):
# (bool): True if artifact was created with buildroot
#
def buildroot_exists(self):
-
artifact = self._get_proto()
return bool(str(artifact.buildroot))
@@ -368,7 +365,6 @@ def buildroot_exists(self):
# if a buildtree is present, not its contents.
#
def cached_buildtree(self):
-
buildtree_digest = self._get_field_digest("buildtree")
if buildtree_digest:
return self._cas.contains_directory(buildtree_digest)
@@ -384,7 +380,6 @@ def cached_buildtree(self):
# (bool): True if artifact was created with buildtree
#
def buildtree_exists(self):
-
artifact = self._get_proto()
return bool(str(artifact.buildtree))
@@ -397,7 +392,6 @@ def buildtree_exists(self):
# are not available.
#
def cached_sources(self):
-
sources_digest = self._get_field_digest("sources")
if sources_digest:
return self._cas.contains_directory(sources_digest)
@@ -412,7 +406,6 @@ def cached_sources(self):
# (dict): The artifacts cached public data
#
def load_public_data(self):
-
# Load the public data from the artifact
artifact = self._get_proto()
with self._cas.open(artifact.public_data) as meta_file:
@@ -429,7 +422,6 @@ def load_public_data(self):
# The stored SandboxConfig object
#
def load_sandbox_config(self) -> SandboxConfig:
-
# Load the sandbox data from the artifact
artifact = self._get_proto()
meta_file = self._cas.objpath(artifact.low_diversity_meta)
@@ -449,7 +441,6 @@ def load_sandbox_config(self) -> SandboxConfig:
# The environment variables
#
def load_environment(self) -> Dict[str, str]:
-
# Load the sandbox data from the artifact
artifact = self._get_proto()
meta_file = self._cas.objpath(artifact.low_diversity_meta)
@@ -469,7 +460,6 @@ def load_environment(self) -> Dict[str, str]:
# The element variables
#
def load_variables(self) -> Variables:
-
# Load the sandbox data from the artifact
artifact = self._get_proto()
meta_file = self._cas.objpath(artifact.high_diversity_meta)
@@ -489,7 +479,6 @@ def load_variables(self) -> Variables:
# (str): Detailed description of the result
#
def load_build_result(self):
-
artifact = self._get_proto()
build_result = (artifact.build_success, artifact.build_error, artifact.build_error_details)
@@ -505,7 +494,6 @@ def load_build_result(self):
# The weak key
#
def get_metadata_keys(self) -> Tuple[str, str, str]:
-
if self._metadata_keys is not None:
return self._metadata_keys
@@ -528,7 +516,6 @@ def get_metadata_keys(self) -> Tuple[str, str, str]:
# (bool): Whether the given artifact was workspaced
#
def get_metadata_workspaced(self):
-
if self._metadata_workspaced is not None:
return self._metadata_workspaced
@@ -547,7 +534,6 @@ def get_metadata_workspaced(self):
# (list): List of which dependencies are workspaced
#
def get_metadata_workspaced_dependencies(self):
-
if self._metadata_workspaced_dependencies is not None:
return self._metadata_workspaced_dependencies
diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py
index a531c453c..654ac27d1 100644
--- a/src/buildstream/_artifactcache.py
+++ b/src/buildstream/_artifactcache.py
@@ -247,7 +247,6 @@ def link_key(self, element, oldkey, newkey):
# missing_blobs (list): The Digests of the blobs to fetch
#
def fetch_missing_blobs(self, project, missing_blobs):
-
index_remotes, _ = self.get_remotes(project.name, False)
for remote in index_remotes:
if not missing_blobs:
@@ -389,7 +388,6 @@ def _push_artifact_blobs(self, artifact, artifact_digest, remote):
# artifact already existing.
#
def _push_artifact_proto(self, element, artifact, artifact_digest, remote):
-
artifact_proto = artifact._get_proto()
keys = list(utils._deduplicate([artifact_proto.strong_key, artifact_proto.weak_key]))
diff --git a/src/buildstream/_artifactelement.py b/src/buildstream/_artifactelement.py
index 7a1c336dd..d52175919 100644
--- a/src/buildstream/_artifactelement.py
+++ b/src/buildstream/_artifactelement.py
@@ -39,7 +39,6 @@
# ref (str): The artifact ref
#
class ArtifactElement(Element):
-
# A hash of ArtifactElement by ref
__instantiated_artifacts: Dict[str, "ArtifactElement"] = {}
@@ -70,7 +69,6 @@ def __init__(self, context, ref):
#
@classmethod
def new_from_artifact_name(cls, artifact_name: str, context: "Context", task: Optional["Task"] = None):
-
# Initial lookup for already loaded artifact.
with suppress(KeyError):
return cls.__instantiated_artifacts[artifact_name]
diff --git a/src/buildstream/_artifactproject.py b/src/buildstream/_artifactproject.py
index 3defd4e0d..19d6beba0 100644
--- a/src/buildstream/_artifactproject.py
+++ b/src/buildstream/_artifactproject.py
@@ -37,11 +37,9 @@
# project_name: The name of this project
#
class ArtifactProject(Project):
-
__loaded_artifact_projects = {} # type: Dict[str, ArtifactProject]
def __init__(self, project_name: str, context: Context):
-
#
# Chain up to the Project constructor, and allow it to initialize
# without loading anything
diff --git a/src/buildstream/_assetcache.py b/src/buildstream/_assetcache.py
index 59fe1506a..402dfeb8f 100644
--- a/src/buildstream/_assetcache.py
+++ b/src/buildstream/_assetcache.py
@@ -321,7 +321,6 @@ def __init__(self, context):
# Release resources used by AssetCache.
#
def release_resources(self):
-
# Close all remotes and their gRPC channels
for remote in self._remotes.values():
if remote.index:
@@ -338,7 +337,6 @@ def release_resources(self):
# project_specs: List of specs for each project
#
def setup_remotes(self, specs: Iterable[RemoteSpec], project_specs: Dict[str, List[RemoteSpec]]):
-
# Hold on to the project specs
self._project_specs = project_specs
@@ -387,7 +385,6 @@ def get_remotes(self, project_name: str, push: bool) -> Tuple[List[AssetRemote],
index_remotes = []
storage_remotes = []
for spec in project_specs:
-
if push and not spec.push:
continue
diff --git a/src/buildstream/_cas/casdprocessmanager.py b/src/buildstream/_cas/casdprocessmanager.py
index fd1692d9f..a8a805aa6 100644
--- a/src/buildstream/_cas/casdprocessmanager.py
+++ b/src/buildstream/_cas/casdprocessmanager.py
@@ -77,7 +77,7 @@ def __init__(
*,
reserved=None,
low_watermark=None,
- local_jobs=None
+ local_jobs=None,
):
os.makedirs(path, exist_ok=True)
@@ -159,7 +159,7 @@ def __init__(
stdout=logfile_fp,
stderr=subprocess.STDOUT,
env=self.__buildbox_casd_env(),
- **process_group_kwargs
+ **process_group_kwargs,
)
self._casd_channel = None
diff --git a/src/buildstream/_cas/casremote.py b/src/buildstream/_cas/casremote.py
index e7d812167..828ad0bcf 100644
--- a/src/buildstream/_cas/casremote.py
+++ b/src/buildstream/_cas/casremote.py
@@ -81,7 +81,6 @@ def _configure_protocols(self):
# (CASRemoteError): if there was an error
#
def push_message(self, message):
-
message_buffer = message.SerializeToString()
self.init()
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 02942c8b0..18527888e 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -76,7 +76,6 @@ def new_from_node(cls, node: MappingNode) -> "_CacheConfig":
#
class Context:
def __init__(self, *, use_casd: bool = True) -> None:
-
# Whether we are running as part of a test suite. This is only relevant
# for developing BuildStream itself.
self.is_running_in_test_suite: bool = "BST_TEST_SUITE" in os.environ
@@ -601,7 +600,6 @@ def initialize_remotes(
ignore_project_artifact_remotes: bool = False,
ignore_project_source_remotes: bool = False,
) -> None:
-
# Ensure all projects are fully loaded.
for project in self._projects:
project.ensure_fully_loaded()
@@ -782,7 +780,6 @@ def _resolve_specs_for_project(
override_key: str,
project_attribute: str,
) -> List[RemoteSpec]:
-
# Early return if the CLI is taking full control
if cli_override and cli_remotes:
return list(cli_remotes)
diff --git a/src/buildstream/_elementproxy.py b/src/buildstream/_elementproxy.py
index 861bc9a15..6b9ff1493 100644
--- a/src/buildstream/_elementproxy.py
+++ b/src/buildstream/_elementproxy.py
@@ -98,9 +98,8 @@ def stage_artifact(
action: str = OverlapAction.WARNING,
include: Optional[List[str]] = None,
exclude: Optional[List[str]] = None,
- orphans: bool = True
+ orphans: bool = True,
) -> FileListResult:
-
owner = cast("Element", self._owner)
element = cast("Element", self._plugin)
@@ -122,7 +121,7 @@ def stage_dependency_artifacts(
action: str = OverlapAction.WARNING,
include: Optional[List[str]] = None,
exclude: Optional[List[str]] = None,
- orphans: bool = True
+ orphans: bool = True,
) -> None:
#
# Same approach used here as in Element.dependencies()
@@ -171,7 +170,7 @@ def _stage_artifact(
include: Optional[List[str]] = None,
exclude: Optional[List[str]] = None,
orphans: bool = True,
- owner: Optional["Element"] = None
+ owner: Optional["Element"] = None,
) -> FileListResult:
owner = cast("Element", self._owner)
element = cast("Element", self._plugin)
diff --git a/src/buildstream/_elementsources.py b/src/buildstream/_elementsources.py
index 39241493a..e26a5f543 100644
--- a/src/buildstream/_elementsources.py
+++ b/src/buildstream/_elementsources.py
@@ -32,10 +32,10 @@
# pylint: enable=cyclic-import
+
# An ElementSources object represents the combined sources of an element.
class ElementSources:
def __init__(self, context: Context, project: "Project", plugin: Plugin):
-
self._context = context
self._project = project
self._plugin = plugin
@@ -396,8 +396,9 @@ def preflight(self):
from .element import ElementError # pylint: disable=cyclic-import
raise ElementError(
- "{}: {} cannot be the first source of an element "
- "as it requires access to previous sources".format(self, self._sources[0])
+ "{}: {} cannot be the first source of an element as it requires access to previous sources".format(
+ self, self._sources[0]
+ )
)
# Preflight the sources
diff --git a/src/buildstream/_exceptions.py b/src/buildstream/_exceptions.py
index 2cff0d62a..857d481ca 100644
--- a/src/buildstream/_exceptions.py
+++ b/src/buildstream/_exceptions.py
@@ -252,7 +252,6 @@ def __init__(self, message, *, detail=None, reason=None):
#
class StreamError(BstError):
def __init__(self, message=None, *, detail=None, reason=None, terminated=False):
-
# The empty string should never appear to a user,
# this only allows us to treat this internal error as
# a BstError from the frontend.
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index 577d80d4d..8cfdc9584 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -55,7 +55,6 @@
#
class App:
def __init__(self, main_options):
-
#
# Public members
#
@@ -285,7 +284,6 @@ def initialized(self, *, session_name=None):
fetch_subprojects=self.stream.fetch_subprojects,
)
except LoadError as e:
-
# If there was no project.conf at all then there was just no project found.
#
# Don't error out in this case, as Stream() supports some operations which
@@ -454,7 +452,6 @@ def init_project(
# (str): The formatted prompt to display in the shell
#
def shell_prompt(self, element):
-
element_name = element._get_full_name()
display_key = element._get_display_key()
@@ -524,7 +521,6 @@ def _message(self, message_type, message, **kwargs):
# Exception handler
#
def _global_exception_handler(self, etype, value, tb, exc=True):
-
# Print the regular BUG message
formatted = None
if exc:
@@ -586,7 +582,6 @@ def _render_status(self):
# Handle ^C SIGINT interruptions in the scheduling main loop
#
def _interrupt_handler(self):
-
# Only handle ^C interactively in interactive mode
if not self.interactive:
self._status.clear()
@@ -679,7 +674,6 @@ def _handle_failure(self, element, task, failure):
# Handle non interactive mode setting of what to do when a job fails.
if not self._interactive_failures:
-
if self.context.sched_error_action == _SchedulerErrorAction.TERMINATE:
self.stream.terminate()
elif self.context.sched_error_action == _SchedulerErrorAction.QUIT:
@@ -690,7 +684,6 @@ def _handle_failure(self, element, task, failure):
# Interactive mode for element failures
with self._interrupted():
-
summary = (
"\n{} failure on element: {}\n".format(failure.action_name, full_name)
+ "\n"
@@ -826,7 +819,6 @@ def _error_exit(self, error, prefix=None):
# Handle messages from the pipeline
#
def _message_handler(self, message, is_silenced):
-
# Drop status messages from the UI if not verbose, we'll still see
# info messages and status messages will still go to the log files.
if not self.context.log_verbose and message.message_type == MessageType.STATUS:
@@ -907,7 +899,6 @@ def _assert_element_path(self, element_path):
# element_path (str): The user selected element path
#
def _init_project_interactive(self, project_name, min_version, element_path):
-
bst_major, bst_minor = utils._get_bst_api_version()
def project_name_proc(user_input):
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index 9d7619bae..967253840 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -74,6 +74,7 @@ def __repr__(self):
# Override of click's main entry point #
##################################################################
+
# search_command()
#
# Helper function to get a command and context object
@@ -179,7 +180,6 @@ def complete_artifact(orig_args, args, incomplete):
from .._context import Context
with Context(use_casd=False) as ctx:
-
config = None
if orig_args:
for i, arg in enumerate(orig_args):
@@ -245,12 +245,10 @@ def validate_output_streams():
def override_main(self, args=None, prog_name=None, complete_var=None, standalone_mode=True, **extra):
-
# Hook for the Bash completion. This only activates if the Bash
# completion is actually enabled, otherwise this is quite a fast
# noop.
if main_bashcomplete(self, prog_name, partial(override_completions, args)):
-
# If we're running tests we cant just go calling exit()
# from the main process.
#
@@ -631,7 +629,6 @@ def show(app, elements, deps, except_, order, format_):
$'---------- %{name} ----------\\n%{vars}'
"""
with app.initialized():
-
if not format_:
format_ = app.context.log_element_format
@@ -1206,7 +1203,6 @@ def workspace_reset(app, soft, all_, elements):
# Check that the workspaces in question exist
with app.initialized():
-
if not (all_ or elements):
element = app.project.get_default_target()
if element:
@@ -1392,8 +1388,7 @@ def artifact_checkout(
sys.exit(-1)
if compression and inferred_compression != "" and inferred_compression != compression:
click.echo(
- "WARNING: File extension and compression differ."
- "File extension has been overridden by --compression",
+            "WARNING: File extension and compression differ. File extension has been overridden by --compression",
err=True,
)
if not compression:
diff --git a/src/buildstream/_frontend/complete.py b/src/buildstream/_frontend/complete.py
index 6fef9d2c7..78683a4eb 100644
--- a/src/buildstream/_frontend/complete.py
+++ b/src/buildstream/_frontend/complete.py
@@ -109,7 +109,6 @@ def entry_is_dir(entry):
return os.path.isdir(entry)
def fix_path(path):
-
# Append slashes to any entries which are directories, or
# spaces for other files since they cannot be further completed
if entry_is_dir(path) and not path.endswith(os.sep):
@@ -142,13 +141,11 @@ def fix_path(path):
# https://github.com/pallets/click/issues/780
#
def get_param_type_completion(param_type, incomplete):
-
if isinstance(param_type, click.Choice):
return [c + " " for c in param_type.choices]
elif isinstance(param_type, click.File):
return complete_path("File", incomplete)
elif isinstance(param_type, click.Path):
-
# Workaround click 8.x API break:
#
# https://github.com/pallets/click/issues/2037
diff --git a/src/buildstream/_frontend/linuxapp.py b/src/buildstream/_frontend/linuxapp.py
index aa9e05065..895b0bee1 100644
--- a/src/buildstream/_frontend/linuxapp.py
+++ b/src/buildstream/_frontend/linuxapp.py
@@ -24,11 +24,9 @@
# to the terminal.
#
def _osc_777_supported():
-
term = os.environ.get("TERM")
if term and (term.startswith("xterm") or term.startswith("vte")):
-
# Since vte version 4600, upstream silently ignores
# the OSC 777 without printing garbage to the terminal.
#
@@ -52,7 +50,6 @@ def _osc_777_supported():
#
class LinuxApp(App):
def notify(self, title, text):
-
# Currently we only try this notification method
# of sending an escape sequence to the terminal
#
diff --git a/src/buildstream/_frontend/status.py b/src/buildstream/_frontend/status.py
index 675867be6..945144a98 100644
--- a/src/buildstream/_frontend/status.py
+++ b/src/buildstream/_frontend/status.py
@@ -42,12 +42,10 @@
# stream (Stream): The Stream
#
class Status:
-
# Table of the terminal capabilities we require and use
_TERM_CAPABILITIES = {"move_up": "cuu1", "move_x": "hpa", "clear_eol": "el"}
def __init__(self, context, state, content_profile, format_profile, success_profile, error_profile, stream):
-
self._context = context
self._state = state
self._content_profile = content_profile
@@ -86,7 +84,6 @@ def __init__(self, context, state, content_profile, format_profile, success_prof
# status.render()
#
def clear(self):
-
if not self._term_caps:
return
@@ -103,7 +100,6 @@ def clear(self):
# the status area, for instance in a timeout, then it is
# not necessary to call clear().
def render(self):
-
if not self._term_caps:
return
@@ -176,7 +172,6 @@ def _job_changed(self, task_id):
# of the required capabilities.
#
def _init_terminal(self):
-
# We need both output streams to be connected to a terminal
if not (sys.stdout.isatty() and sys.stderr.isatty()):
return None
@@ -326,7 +321,6 @@ def _remove_job(self, task_id):
#
class _StatusHeader:
def __init__(self, context, state, content_profile, format_profile, success_profile, error_profile, stream):
-
#
# Public members
#
@@ -384,7 +378,6 @@ def render(self, line_length, elapsed):
# Format and calculate size for each queue progress
for index, task_group in enumerate(self._state.task_groups.values()):
-
# Add spacing
if index > 0:
size += 2
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index 09b824ff3..8e87f80ff 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -48,7 +48,6 @@ class FormattingError(Exception):
#
class Widget:
def __init__(self, context, content_profile, format_profile):
-
# The context
self.context = context
@@ -89,7 +88,6 @@ def __init__(self, context, content_profile, format_profile, output_format=False
super().__init__(context, content_profile, format_profile)
def render(self, message):
-
fields = [
self.content_profile.fmt("{:02d}".format(x))
for x in [
@@ -148,7 +146,6 @@ def render_time(self, elapsed):
# A widget for rendering the MessageType
class TypeName(Widget):
-
_action_colors = {
MessageType.DEBUG: "cyan",
MessageType.STATUS: "cyan",
@@ -208,7 +205,6 @@ def __init__(self, context, content_profile, format_profile, err_profile):
self._key_length = context.log_key_length
def render(self, message):
-
if not self._key_length:
return ""
@@ -439,7 +435,6 @@ def show_pipeline(self, dependencies, format_):
# Source Information
if "%{source-info" in format_:
-
# Get all the SourceInfo objects
#
all_source_infos = []
@@ -632,7 +627,6 @@ def format_spec(spec):
# log_file (file): An optional file handle for additional logging
#
def print_summary(self, stream, log_file):
-
# Early silent return if there are no queues, can happen
# only in the case that the stream early returned due to
# an inconsistent pipeline state.
@@ -713,7 +707,6 @@ def print_summary(self, stream, log_file):
###################################################
def render(self, message):
-
# Track logfiles for later use
element_name = message.element_name
if message.message_type in ERROR_MESSAGES and element_name is not None:
@@ -752,7 +745,6 @@ def _parse_logfile_format(self, format_string, content_profile, format_profile):
return logfile_tokens
def _render(self, message):
-
# Render the column widgets first
text = ""
for widget in self._columns:
@@ -764,7 +756,6 @@ def _render(self, message):
# Now add some custom things
if message.detail:
-
# Identify frontend messages, we never abbreviate these
frontend_message = not message.element_name
@@ -894,7 +885,6 @@ def _format_values(self, values, *, style_key=False, style_value=True, indent=1)
max_key_len = max(len(key), max_key_len)
for key, value in values.items():
-
key = str(key)
text += self._indent * indent
if style_key:
@@ -978,7 +968,6 @@ def pretty_print_element_contents(self, values, long_=False, style_value=True):
text = ""
for element_name, directory in values.items():
-
text += self.format_profile.fmt(" {}:".format(element_name))
rendered_files = []
@@ -1023,7 +1012,6 @@ def show_state_of_artifacts(self, targets):
report = ""
p = Profile()
for element in targets:
-
#
# Here we selectively show the element name or artifact name
# depending on whether we were asked about an artifact or an element.
diff --git a/src/buildstream/_includes.py b/src/buildstream/_includes.py
index 5eefd0b59..9764af33f 100644
--- a/src/buildstream/_includes.py
+++ b/src/buildstream/_includes.py
@@ -172,10 +172,10 @@ def _include_file(self, include, loader):
# guaranteed at this stage to be fully loaded.
#
if current_loader != loader:
- assert (
- current_loader.project.base_variables is not None
- ), "{}: Attempted to include file from a subproject that isn't fully loaded".format(
- include.get_provenance()
+ assert current_loader.project.base_variables is not None, (
+ "{}: Attempted to include file from a subproject that isn't fully loaded".format(
+ include.get_provenance()
+ )
)
variables_node = current_loader.project.base_variables.clone()
variables = Variables(variables_node)
diff --git a/src/buildstream/_loader/loadcontext.py b/src/buildstream/_loader/loadcontext.py
index 5f3f4eda4..351e821f0 100644
--- a/src/buildstream/_loader/loadcontext.py
+++ b/src/buildstream/_loader/loadcontext.py
@@ -25,7 +25,6 @@
#
class ProjectLoaders:
def __init__(self, project_name):
-
# The project name
self._name = project_name
@@ -153,7 +152,6 @@ def _raise_conflict(self, duplicates, internals):
# (str): A string representing how this loader was loaded
#
def _loader_description(self, loader, duplicates, internals):
-
line = "{}\n".format(loader)
# Mention projects which have marked this project as a duplicate
@@ -180,7 +178,6 @@ def _loader_description(self, loader, duplicates, internals):
#
class LoadContext:
def __init__(self, context):
-
# Keep track of global context required throughout the recursive load
self.context = context
self.rewritable = False
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index b36dfe068..3ac7ef02d 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -45,7 +45,6 @@
#
class Loader:
def __init__(self, project, *, parent=None, provenance_node=None):
-
# Ensure we have an absolute path for the base directory
basedir = project.element_path
if not os.path.isabs(basedir):
@@ -111,14 +110,14 @@ def __str__(self):
# (list): The corresponding LoadElement instances matching the `targets`
#
def load(self, targets):
-
for filename in targets:
if os.path.isabs(filename):
# XXX Should this just be an assertion ?
# Expect that the caller gives us the right thing at least ?
raise LoadError(
- "Target '{}' was not specified as a relative "
- "path to the base project directory: {}".format(filename, self._basedir),
+ "Target '{}' was not specified as a relative path to the base project directory: {}".format(
+ filename, self._basedir
+ ),
LoadErrorReason.INVALID_DATA,
)
@@ -200,7 +199,6 @@ def get_loader(self, name, provenance_node, *, load_subprojects=True):
circular_provenance_node = self._loader_search_provenances.get(name, None)
if circular_provenance_node and load_subprojects:
-
assert provenance_node
detail = None
@@ -274,7 +272,6 @@ def foreach_parent(parent):
# (LoadElement): A partially-loaded LoadElement
#
def _load_file_no_deps(self, filename, provenance_node=None):
-
self._assert_element_name(filename, provenance_node)
# Load the data and process any conditional statements therein
@@ -285,7 +282,6 @@ def _load_file_no_deps(self, filename, provenance_node=None):
)
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_FILE:
-
if self.project.junction:
message = "Could not find element '{}' in project referred to by junction element '{}'".format(
filename, self.project.junction.name
@@ -379,7 +375,6 @@ def _resolve_link(self, link_path, target_path):
# (str): The same path with any links expanded
#
def _expand_link(self, path):
-
# FIXME: This simply returns the first link, maybe
# this needs to be more iterative, or sorted by
# number of path components, or smth
@@ -404,7 +399,6 @@ def _expand_link(self, path):
# or None, if loading of the subproject is disabled.
#
def _load_one_file(self, filename, provenance_node, *, load_subprojects=True):
-
element = None
# First check the cache, the cache might contain shallow loaded
@@ -421,7 +415,6 @@ def _load_one_file(self, filename, provenance_node, *, load_subprojects=True):
return element
except KeyError:
-
# Shallow load if it's not yet loaded.
element = self._load_file_no_deps(filename, provenance_node)
@@ -472,7 +465,6 @@ def _load_one_file(self, filename, provenance_node, *, load_subprojects=True):
# (LoadElement): A loaded LoadElement or None, if loading of the subproject is disabled.
#
def _load_file(self, filename, provenance_node, *, load_subprojects=True):
-
top_element = self._load_one_file(filename, provenance_node, load_subprojects=load_subprojects)
if not top_element:
@@ -513,7 +505,6 @@ def _load_file(self, filename, provenance_node, *, load_subprojects=True):
dep_element = loader._load_file(dep.name, dep.node)
else:
-
dep_element = self._load_one_file(dep.name, dep.node, load_subprojects=load_subprojects)
if not dep_element:
@@ -524,7 +515,6 @@ def _load_file(self, filename, provenance_node, *, load_subprojects=True):
# If the loaded element is not fully loaded, queue up the dependencies to be loaded in this loop.
#
if not dep_element.fully_loaded:
-
# Mark the dep_element as fully_loaded, as we're already queueing it's deps
dep_element.mark_fully_loaded()
@@ -563,7 +553,6 @@ def _load_file(self, filename, provenance_node, *, load_subprojects=True):
#
@staticmethod
def _check_circular_deps(top_element):
-
sequence = [top_element]
sequence_indices = [0]
check_elements = set(sequence)
@@ -682,7 +671,6 @@ def _search_for_overrides(self, filename):
# (Loader): The loader to use, in case @filename was overridden, otherwise None.
#
def _search_for_override_loader(self, filename):
-
overriding_loaders = self._search_for_overrides(filename)
# If there are any overriding loaders, use the highest one in
@@ -890,7 +878,8 @@ def provenance_str():
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
message = (
- provenance_str() + "Could not find the project.conf file in the project "
+ provenance_str()
+ + "Could not find the project.conf file in the project "
"referred to by junction element '{}'.".format(element.name)
)
if element.path:
@@ -932,7 +921,6 @@ def _shallow_load_overrides(self):
# are not consequential.
#
for override_path, override_target in junction.overrides.items():
-
# Ensure that we resolve indirect links, in case that shallow loading
# an element results in loading a link, we need to discover if it's
# target is also a link.
diff --git a/src/buildstream/_loader/metasource.py b/src/buildstream/_loader/metasource.py
index 8df034227..f93e40179 100644
--- a/src/buildstream/_loader/metasource.py
+++ b/src/buildstream/_loader/metasource.py
@@ -16,7 +16,6 @@
class MetaSource:
-
# MetaSource()
#
# An abstract object holding data suitable for constructing a Source
diff --git a/src/buildstream/_message.py b/src/buildstream/_message.py
index 89f958c51..7b9325de1 100644
--- a/src/buildstream/_message.py
+++ b/src/buildstream/_message.py
@@ -61,7 +61,7 @@ def __init__(
elapsed: Optional[datetime.timedelta] = None,
logfile: Optional[str] = None,
sandbox: bool = False,
- scheduler: bool = False
+ scheduler: bool = False,
):
self.message_type: str = message_type # Message type
self.message: str = message # The message string
diff --git a/src/buildstream/_messenger.py b/src/buildstream/_messenger.py
index 23c4c063c..324e1b4df 100644
--- a/src/buildstream/_messenger.py
+++ b/src/buildstream/_messenger.py
@@ -341,7 +341,7 @@ def simple_task(
task_name: Optional[str] = None,
detail: Optional[str] = None,
silent_nested: bool = False,
- **kwargs
+ **kwargs,
) -> Iterator[Optional[Task]]:
# Bypass use of State when none exists (e.g. tests)
if not self._state:
@@ -428,10 +428,8 @@ def recorded_messages(self, filename: str, logdir: str) -> Iterator[str]:
os.makedirs(directory, exist_ok=True)
with open(self._locals.log_filename, "a", encoding="utf-8") as logfile:
-
# Write one last line to the log and flush it to disk
def flush_log():
-
# If the process currently had something happening in the I/O stack
# then trying to reenter the I/O stack will fire a runtime error.
#
@@ -522,7 +520,6 @@ def _silent_messages(self) -> bool:
# message: The message to record
#
def _record_message(self, message: Message) -> None:
-
if self._locals.log_handle is None:
return
diff --git a/src/buildstream/_options/option.py b/src/buildstream/_options/option.py
index d8b5a94e5..0020a0c75 100644
--- a/src/buildstream/_options/option.py
+++ b/src/buildstream/_options/option.py
@@ -36,7 +36,6 @@
# instances.
#
class Option:
-
# Subclasses use this to specify the type name used
# for the yaml format and error messages
OPTION_TYPE = None # type: Optional[str]
diff --git a/src/buildstream/_options/optionarch.py b/src/buildstream/_options/optionarch.py
index 3d75226ad..91377b32e 100644
--- a/src/buildstream/_options/optionarch.py
+++ b/src/buildstream/_options/optionarch.py
@@ -33,7 +33,6 @@
# specifically specified
#
class OptionArch(OptionEnum):
-
OPTION_TYPE: str = "arch"
def load(self, node):
@@ -71,7 +70,6 @@ def load_default_value(self, node):
return default_value
def resolve(self):
-
# Validate that the default machine arch reported by uname() is
# explicitly supported by the project, only if it was not
# overridden by user configuration or cli.
diff --git a/src/buildstream/_options/optionbool.py b/src/buildstream/_options/optionbool.py
index 122f803ed..8b4f85484 100644
--- a/src/buildstream/_options/optionbool.py
+++ b/src/buildstream/_options/optionbool.py
@@ -24,11 +24,9 @@
# A boolean project option
#
class OptionBool(Option):
-
OPTION_TYPE: str = "bool"
def load(self, node):
-
super().load(node)
node.validate_keys(OPTION_SYMBOLS + ["default"])
self.value = node.get_bool("default")
diff --git a/src/buildstream/_options/optioneltmask.py b/src/buildstream/_options/optioneltmask.py
index ca19a5c31..6ada3e14c 100644
--- a/src/buildstream/_options/optioneltmask.py
+++ b/src/buildstream/_options/optioneltmask.py
@@ -24,7 +24,6 @@
# names as values.
#
class OptionEltMask(OptionFlags):
-
OPTION_TYPE: str = "element-mask"
def load(self, node):
diff --git a/src/buildstream/_options/optionenum.py b/src/buildstream/_options/optionenum.py
index a287fbb03..79dea8052 100644
--- a/src/buildstream/_options/optionenum.py
+++ b/src/buildstream/_options/optionenum.py
@@ -24,7 +24,6 @@
# An enumeration project option
#
class OptionEnum(Option):
-
OPTION_TYPE: str = "enum"
def __init__(self, name, definition, pool):
diff --git a/src/buildstream/_options/optionflags.py b/src/buildstream/_options/optionflags.py
index 6b3ea89ea..85d175200 100644
--- a/src/buildstream/_options/optionflags.py
+++ b/src/buildstream/_options/optionflags.py
@@ -24,7 +24,6 @@
# A flags project option
#
class OptionFlags(Option):
-
OPTION_TYPE: str = "flags"
def __init__(self, name, definition, pool):
diff --git a/src/buildstream/_options/optionos.py b/src/buildstream/_options/optionos.py
index f51def2d9..5196aa633 100644
--- a/src/buildstream/_options/optionos.py
+++ b/src/buildstream/_options/optionos.py
@@ -21,7 +21,6 @@
# OptionOS
#
class OptionOS(OptionEnum):
-
OPTION_TYPE: str = "os"
def load(self, node):
@@ -31,7 +30,6 @@ def load_default_value(self, node):
return platform.uname().system
def resolve(self):
-
# Validate that the default OS reported by uname() is explicitly
# supported by the project, if not overridden by user config or cli.
self.validate(self.value)
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index 52ae0b35e..fe4112de7 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -70,9 +70,7 @@ def __init__(self, element_path):
# node (dict): The loaded YAML options
#
def load(self, options):
-
for option_name, option_definition in options.items():
-
# Assert that the option name is a valid symbol
_assert_symbol_name(option_name, "option name", ref_node=option_definition, allow_dashes=False)
@@ -178,7 +176,6 @@ def printable_variables(self, variables):
# root of "node",
#
def process_node(self, node, *, restricted=None):
-
# A conditional will result in composition, which can
# in turn add new conditionals to the root.
#
@@ -216,7 +213,6 @@ def process_node(self, node, *, restricted=None):
# LoadError: If the expression failed to resolve for any reason
#
def _evaluate(self, expression):
-
#
# Variables must be resolved at this point.
#
diff --git a/src/buildstream/_overlapcollector.py b/src/buildstream/_overlapcollector.py
index 79f62b156..d21a5b8e8 100644
--- a/src/buildstream/_overlapcollector.py
+++ b/src/buildstream/_overlapcollector.py
@@ -41,7 +41,6 @@
#
class OverlapCollector:
def __init__(self, element: "Element"):
-
# The Element we are staging for, on which we'll issue warnings
self._element = element # type: Element
@@ -109,7 +108,6 @@ def collect_stage_result(self, element: "Element", result: FileListResult):
#
class OverlapCollectorSession:
def __init__(self, element: "Element", action: str, location: str):
-
# The Element we are staging for, on which we'll issue warnings
self._element = element # type: Element
@@ -137,14 +135,11 @@ def __init__(self, element: "Element", action: str, location: str):
# result (FileListResult): The result of Element.stage_artifact()
#
def collect_stage_result(self, element: "Element", result: FileListResult):
-
for overwritten_file in result.overwritten:
-
overlap_list = None
try:
overlap_list = self._overlaps[overwritten_file]
except KeyError:
-
# Create a fresh list
#
self._overlaps[overwritten_file] = overlap_list = []
@@ -178,7 +173,6 @@ def collect_stage_result(self, element: "Element", result: FileListResult):
# sessions (list): List of previously completed sessions
#
def warnings(self, sessions: List["OverlapCollectorSession"]):
-
# Collect a table of filenames which overlapped something from outside of this session.
#
external_overlaps = {} # type: Dict[str, int]
@@ -190,7 +184,6 @@ def warnings(self, sessions: List["OverlapCollectorSession"]):
overlap_warning = False
detail = "Staged files overwrite existing files in staging area: {}\n".format(self._location)
for filename, element_ids in self._overlaps.items():
-
# If there is only one element in the overlap list, it means it has
# overlapped a file from a previous session.
#
diff --git a/src/buildstream/_pluginfactory/__init__.py b/src/buildstream/_pluginfactory/__init__.py
index 3ed1cf1f3..22db19acb 100644
--- a/src/buildstream/_pluginfactory/__init__.py
+++ b/src/buildstream/_pluginfactory/__init__.py
@@ -33,7 +33,6 @@
# (PluginOrigin): The newly created PluginOrigin
#
def load_plugin_origin(project, origin_node):
-
origin_type = origin_node.get_enum("origin", PluginOriginType)
if origin_type == PluginOriginType.LOCAL:
diff --git a/src/buildstream/_pluginfactory/pluginfactory.py b/src/buildstream/_pluginfactory/pluginfactory.py
index 6052684bd..c397ef22e 100644
--- a/src/buildstream/_pluginfactory/pluginfactory.py
+++ b/src/buildstream/_pluginfactory/pluginfactory.py
@@ -47,7 +47,6 @@
#
class PluginFactory:
def __init__(self, plugin_base, plugin_type):
-
# For pickling across processes, make sure this context has a unique
# identifier, which we prepend to the identifier of each PluginSource.
# This keeps plugins loaded during the first and second pass distinct
@@ -212,9 +211,7 @@ def get_plugin_paths(self, kind: str) -> Tuple[Optional[str], Optional[str], Opt
# (PluginError): In case something went wrong loading the plugin
#
def _ensure_plugin(self, kind: str, provenance_node: Node) -> Tuple[Type[Plugin], Optional[str]]:
-
if kind not in self._types:
-
# Get the directory on disk where the plugin exists, and
# the optional accompanying .yaml file for the plugin, should
# one have been provided.
@@ -222,7 +219,6 @@ def _ensure_plugin(self, kind: str, provenance_node: Node) -> Tuple[Type[Plugin]
location, defaults, display = self.get_plugin_paths(kind)
if location:
-
# Make the PluginSource object
#
source = self._plugin_base.make_plugin_source(
@@ -269,7 +265,6 @@ def _ensure_plugin(self, kind: str, provenance_node: Node) -> Tuple[Type[Plugin]
# (PluginError): In case something went wrong loading the plugin
#
def _load_plugin(self, source: PluginSource, kind: str) -> Type[Plugin]:
-
try:
plugin = source.load_plugin(kind)
@@ -308,8 +303,9 @@ def _load_plugin(self, source: PluginSource, kind: str) -> Type[Plugin]:
def _assert_plugin(self, kind: str, plugin_type: Type[Plugin]):
if kind in self._types:
raise PluginError(
- "Tried to register {} plugin for existing kind '{}' "
- "(already registered {})".format(self._plugin_type, kind, self._types[kind].__name__)
+ "Tried to register {} plugin for existing kind '{}' (already registered {})".format(
+ self._plugin_type, kind, self._types[kind].__name__
+ )
)
base_type: Type[Plugin]
@@ -350,7 +346,6 @@ def _assert_plugin(self, kind: str, plugin_type: Type[Plugin]):
# (PluginError): In case something went wrong loading the plugin
#
def _assert_min_version(self, kind, plugin_type):
-
if plugin_type.BST_MIN_VERSION is None:
raise PluginError(
"{} plugin '{}' did not specify BST_MIN_VERSION".format(self._plugin_type, kind),
diff --git a/src/buildstream/_pluginfactory/pluginorigin.py b/src/buildstream/_pluginfactory/pluginorigin.py
index fd0e42892..9f45b9826 100644
--- a/src/buildstream/_pluginfactory/pluginorigin.py
+++ b/src/buildstream/_pluginfactory/pluginorigin.py
@@ -23,7 +23,6 @@
# A type of plugin
#
class PluginType(FastEnum):
-
# A Source plugin
SOURCE = "source"
@@ -42,7 +41,6 @@ def __str__(self):
# An enumeration depicting the type of plugin origin
#
class PluginOriginType(FastEnum):
-
# A local plugin
LOCAL = "local"
@@ -69,12 +67,10 @@ def __init__(self, kind, allow_deprecated):
# Base class holding common properties of all origins.
#
class PluginOrigin:
-
# Common fields valid for all plugin origins
_COMMON_CONFIG_KEYS = ["origin", "sources", "elements", "source-mirrors", "allow-deprecated"]
def __init__(self, origin_type):
-
# Public
self.origin_type = origin_type # The PluginOriginType
self.elements = {} # A dictionary of PluginConfiguration
@@ -102,7 +98,6 @@ def __init__(self, origin_type):
# origin_node (MappingNode): The node defining this origin
#
def initialize(self, project, origin_node):
-
self.provenance_node = origin_node
self.project = project
self.load_config(origin_node)
@@ -167,9 +162,7 @@ def load_config(self, origin_node):
# dictionary (dict): The location to store the results
#
def _load_plugin_configurations(self, sequence_node, dictionary):
-
for node in sequence_node:
-
# Parse as a simple string
if type(node) is ScalarNode: # pylint: disable=unidiomatic-typecheck
kind = node.as_str()
diff --git a/src/buildstream/_pluginfactory/pluginoriginjunction.py b/src/buildstream/_pluginfactory/pluginoriginjunction.py
index ddbb95c6d..a1cc75155 100644
--- a/src/buildstream/_pluginfactory/pluginoriginjunction.py
+++ b/src/buildstream/_pluginfactory/pluginoriginjunction.py
@@ -28,7 +28,6 @@ def __init__(self):
self._junction = None
def get_plugin_paths(self, kind, plugin_type):
-
# Get access to the project indicated by the junction,
# possibly loading it as a side effect.
#
@@ -82,7 +81,6 @@ def get_plugin_paths(self, kind, plugin_type):
return location, defaults, "junction: {} ({})".format(project_path, display)
def load_config(self, origin_node):
-
origin_node.validate_keys(["junction", *PluginOrigin._COMMON_CONFIG_KEYS])
self._junction = origin_node.get_str("junction")
diff --git a/src/buildstream/_pluginfactory/pluginoriginlocal.py b/src/buildstream/_pluginfactory/pluginoriginlocal.py
index e7875dab4..97836c555 100644
--- a/src/buildstream/_pluginfactory/pluginoriginlocal.py
+++ b/src/buildstream/_pluginfactory/pluginoriginlocal.py
@@ -36,7 +36,6 @@ def get_plugin_paths(self, kind, plugin_type):
return path, defaults, "project directory: {}".format(self._path)
def load_config(self, origin_node):
-
origin_node.validate_keys(["path", *PluginOrigin._COMMON_CONFIG_KEYS])
path_node = origin_node.get_scalar("path")
diff --git a/src/buildstream/_pluginfactory/pluginoriginpip.py b/src/buildstream/_pluginfactory/pluginoriginpip.py
index 507339b7e..5a6bb8fe0 100644
--- a/src/buildstream/_pluginfactory/pluginoriginpip.py
+++ b/src/buildstream/_pluginfactory/pluginoriginpip.py
@@ -32,7 +32,6 @@ def __init__(self):
self._package_name = None
def get_plugin_paths(self, kind, plugin_type):
-
from packaging.requirements import Requirement, InvalidRequirement
if sys.version_info >= (3, 10):
@@ -105,6 +104,5 @@ def get_plugin_paths(self, kind, plugin_type):
)
def load_config(self, origin_node):
-
origin_node.validate_keys(["package-name", *PluginOrigin._COMMON_CONFIG_KEYS])
self._package_name = origin_node.get_str("package-name")
diff --git a/src/buildstream/_pluginproxy.py b/src/buildstream/_pluginproxy.py
index 048040845..596048469 100644
--- a/src/buildstream/_pluginproxy.py
+++ b/src/buildstream/_pluginproxy.py
@@ -53,7 +53,6 @@ class PluginProxyError(Exception):
#
class PluginProxy:
def __init__(self, owner: Plugin, plugin: Plugin):
-
# These members are considered internal, they are accessed by subclasses
# which extend the PluginProxy, but hidden from the client Plugin implementations
# which the proxy objects are handed off to.
diff --git a/src/buildstream/_profile.py b/src/buildstream/_profile.py
index 543c98095..fe4b96716 100644
--- a/src/buildstream/_profile.py
+++ b/src/buildstream/_profile.py
@@ -118,7 +118,6 @@ def __init__(self, settings):
@contextlib.contextmanager
def profile(self, topic, key, message=None):
-
# Check if the user enabled topics are valid
# NOTE: This is done in the first PROFILER.profile() call and
# not __init__ to ensure we handle the exception. This also means
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 334462df0..fd89f802a 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -90,7 +90,7 @@ def __init__(
parent_loader: Optional[Loader] = None,
provenance_node: Optional[ProvenanceInformation] = None,
search_for_project: bool = True,
- fetch_subprojects=None
+ fetch_subprojects=None,
):
#
# Public members
@@ -240,7 +240,6 @@ def get_alias_url(self, alias: str, *, first_pass: bool = False) -> Optional[str
# fully qualified urls based on the shorthand which is allowed
# to be specified in the YAML
def translate_url(self, url, *, source, first_pass=False):
-
if url and utils._ALIAS_SEPARATOR in url:
url_alias, url_body = url.split(utils._ALIAS_SEPARATOR, 1)
alias_url = self.get_alias_url(url_alias, first_pass=first_pass)
@@ -312,14 +311,14 @@ def get_path_from_node(self, node, *, check_is_file=False, check_is_dir=False):
if full_path.is_symlink():
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' must not point to " "symbolic links ".format(provenance, path_str),
+ "{}: Specified path '{}' must not point to symbolic links ".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
if path.parts and path.parts[0] == "..":
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' first component must " "not be '..'".format(provenance, path_str),
+ "{}: Specified path '{}' first component must not be '..'".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID,
)
@@ -338,23 +337,23 @@ def get_path_from_node(self, node, *, check_is_file=False, check_is_dir=False):
if not is_inside:
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' must not lead outside of the "
- "project directory".format(provenance, path_str),
+ "{}: Specified path '{}' must not lead outside of the project directory".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID,
)
if path.is_absolute():
provenance = node.get_provenance()
raise LoadError(
- "{}: Absolute path: '{}' invalid.\n"
- "Please specify a path relative to the project's root.".format(provenance, path),
+ "{}: Absolute path: '{}' invalid.\nPlease specify a path relative to the project's root.".format(
+ provenance, path
+ ),
LoadErrorReason.PROJ_PATH_INVALID,
)
if full_resolved_path.is_socket() or (full_resolved_path.is_fifo() or full_resolved_path.is_block_device()):
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' points to an unsupported " "file kind".format(provenance, path_str),
+ "{}: Specified path '{}' points to an unsupported file kind".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
@@ -456,7 +455,6 @@ def alias_exists(self, alias, *, source, first_pass=False):
def get_alias_uris(
self, alias: str, *, first_pass: bool = False, tracking: bool = False
) -> Sequence[Optional[AliasSubstitution]]:
-
if first_pass:
config = self.first_pass_config
else:
@@ -504,7 +502,6 @@ def get_alias_uris(
# (list): A list of loaded Element
#
def load_elements(self, targets):
-
with self._context.messenger.simple_task("Loading elements", silent_nested=True) as task:
self.load_context.set_task(task)
load_elements = self.loader.load(targets)
@@ -587,7 +584,6 @@ def get_default_target(self):
# This is for commands that accept multiple target elements.
#
def get_default_targets(self):
-
# If _invoked_from_workspace_element has a value,
# a workspace element was found before a project config
# Therefore the workspace does not contain a project
@@ -626,7 +622,6 @@ def get_default_targets(self):
# (bool): Whether the loader is specified as duplicate
#
def junction_is_duplicated(self, project_name, loader):
-
junctions = self._junction_duplicates.get(project_name, {})
# Iterate over all paths specified by this project and see
@@ -656,7 +651,6 @@ def junction_is_duplicated(self, project_name, loader):
# (bool): Whether the loader is specified as internal
#
def junction_is_internal(self, loader):
-
# Iterate over all paths specified by this project and see
# if we find a match for the specified loader.
#
@@ -760,7 +754,6 @@ def _validate_toplevel_node(self, node, *, first_pass=False):
# Raises: LoadError if there was a problem with the project.conf
#
def _validate_version(self, config_node):
-
bst_major, bst_minor = utils._get_bst_api_version()
# Use a custom error message for the absence of the required "min-version"
@@ -823,7 +816,6 @@ def _validate_version(self, config_node):
# Raises: LoadError if there was a problem with the project.conf
#
def _load(self, *, parent_loader=None, provenance_node=None):
-
# Load builtin default
projectfile = os.path.join(self.directory, _PROJECT_CONF_FILE)
self._default_config_node = _yaml.load(_site.default_project_config, shortname="projectconfig.yaml")
@@ -1017,7 +1009,6 @@ def _load_second_pass(self):
# ignore_unknown (bool) - Whether option loader shoud ignore unknown options.
#
def _load_pass(self, config, output, *, ignore_unknown=False):
-
# Load project options
options_node = config.get_mapping("options", default={})
output.options.load(options_node)
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index 931dc436d..9d951da7f 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -25,7 +25,6 @@
#
# Indicates the type of ref storage
class ProjectRefStorage:
-
# Source references are stored inline
#
INLINE = "inline"
@@ -102,11 +101,9 @@ def load(self, options):
# (node): The YAML dictionary where the ref is stored
#
def lookup_ref(self, project, element, source_index, *, write=False):
-
node = self._lookup(self._toplevel_node, project, element, source_index)
if write:
-
# If we couldnt find the orignal, create a new one.
#
if node is None:
diff --git a/src/buildstream/_remotespec.py b/src/buildstream/_remotespec.py
index 3c9aa2fa3..83b47287f 100644
--- a/src/buildstream/_remotespec.py
+++ b/src/buildstream/_remotespec.py
@@ -77,7 +77,6 @@ def __init__(
connection_config: Optional[MappingNode] = None,
spec_node: Optional[MappingNode] = None,
) -> None:
-
#
# Public members
#
@@ -394,7 +393,6 @@ def new_from_string(cls, string: str, purpose: int = RemoteSpecPurpose.ALL) -> "
)
)
elif key == "push":
-
# Provide a sensible error for `bst artifact push --remote url=http://pony.com,push=False ...`
if purpose != RemoteSpecPurpose.ALL:
raise RemoteError("The 'push' key is invalid and assumed to be {}".format(push))
@@ -467,7 +465,6 @@ def _resolve_path(cls, path: str, basedir: Optional[str]) -> str:
def _parse_auth(
cls, auth_node: MappingNode, basedir: Optional[str] = None
) -> Tuple[Optional[str], Optional[str], Optional[str], Optional[str], Optional[int]]:
-
auth_path_keys = ["server-cert", "client-key", "client-cert", "access-token"]
auth_int_keys = ["access-token-reload-interval"]
auth_values = {}
diff --git a/src/buildstream/_scheduler/jobs/elementjob.py b/src/buildstream/_scheduler/jobs/elementjob.py
index bd19d4583..4c1f97cd9 100644
--- a/src/buildstream/_scheduler/jobs/elementjob.py
+++ b/src/buildstream/_scheduler/jobs/elementjob.py
@@ -76,6 +76,5 @@ def parent_complete(self, status, result):
self._complete_cb(self, self._element, status, self._result)
def child_process(self):
-
# Run the action
return self._action_cb(self._element)
diff --git a/src/buildstream/_scheduler/jobs/job.py b/src/buildstream/_scheduler/jobs/job.py
index 3ca76aafe..655b8f024 100644
--- a/src/buildstream/_scheduler/jobs/job.py
+++ b/src/buildstream/_scheduler/jobs/job.py
@@ -79,7 +79,6 @@ class Job:
_id_generator = itertools.count(1)
def __init__(self, scheduler, action_name, logfile, *, max_retries=0):
-
#
# Public members
#
@@ -120,7 +119,6 @@ def set_name(self, name):
# Starts the job.
#
def start(self):
-
assert not self._terminated, "Attempted to start a job which was already terminated"
self._tries += 1
@@ -206,7 +204,7 @@ def message(self, message_type, message, **kwargs):
message,
element_name=self._message_element_name,
element_key=self._message_element_key,
- **kwargs
+ **kwargs,
)
self._messenger.message(message)
@@ -382,7 +380,6 @@ def child_action(self):
#
return _ReturnCode.FAIL if retry_flag else _ReturnCode.PERM_FAIL, None
except Exception: # pylint: disable=broad-except
-
# If an unhandled (not normalized to BstError) occurs, that's a bug,
# send the traceback and formatted exception back to the frontend
# and print it to the log file.
diff --git a/src/buildstream/_scheduler/queues/artifactpushqueue.py b/src/buildstream/_scheduler/queues/artifactpushqueue.py
index 97a724218..3d56047c8 100644
--- a/src/buildstream/_scheduler/queues/artifactpushqueue.py
+++ b/src/buildstream/_scheduler/queues/artifactpushqueue.py
@@ -24,7 +24,6 @@
# A queue which pushes element artifacts
#
class ArtifactPushQueue(Queue):
-
action_name = "Push"
complete_name = "Artifacts Pushed"
resources = [ResourceType.UPLOAD]
diff --git a/src/buildstream/_scheduler/queues/buildqueue.py b/src/buildstream/_scheduler/queues/buildqueue.py
index 4b3fadab6..97220b2eb 100644
--- a/src/buildstream/_scheduler/queues/buildqueue.py
+++ b/src/buildstream/_scheduler/queues/buildqueue.py
@@ -23,7 +23,6 @@
# A queue which assembles elements
#
class BuildQueue(Queue):
-
action_name = "Build"
complete_name = "Built"
resources = [ResourceType.PROCESS, ResourceType.CACHE]
@@ -41,7 +40,6 @@ def status(self, element):
return QueueStatus.READY
def done(self, job, element, result, status):
-
# Inform element in main process that assembly is done
element._assemble_done(status is JobStatus.OK)
diff --git a/src/buildstream/_scheduler/queues/cachequeryqueue.py b/src/buildstream/_scheduler/queues/cachequeryqueue.py
index 9870535c9..1b5c9a1af 100644
--- a/src/buildstream/_scheduler/queues/cachequeryqueue.py
+++ b/src/buildstream/_scheduler/queues/cachequeryqueue.py
@@ -21,7 +21,6 @@
# A queue which queries the cache for artifacts and sources
#
class CacheQueryQueue(Queue):
-
action_name = "Cache-query"
complete_name = "Cache queried"
resources = [ResourceType.PROCESS, ResourceType.CACHE]
diff --git a/src/buildstream/_scheduler/queues/fetchqueue.py b/src/buildstream/_scheduler/queues/fetchqueue.py
index b865011a1..4bb22b6c8 100644
--- a/src/buildstream/_scheduler/queues/fetchqueue.py
+++ b/src/buildstream/_scheduler/queues/fetchqueue.py
@@ -24,7 +24,6 @@
# A queue which fetches element sources
#
class FetchQueue(Queue):
-
action_name = "Fetch"
complete_name = "Sources Fetched"
resources = [ResourceType.DOWNLOAD]
@@ -59,7 +58,6 @@ def status(self, element):
return QueueStatus.READY
def done(self, _, element, result, status):
-
if status is JobStatus.FAIL:
return
diff --git a/src/buildstream/_scheduler/queues/pullqueue.py b/src/buildstream/_scheduler/queues/pullqueue.py
index 1bb3ed55c..b33073520 100644
--- a/src/buildstream/_scheduler/queues/pullqueue.py
+++ b/src/buildstream/_scheduler/queues/pullqueue.py
@@ -25,7 +25,6 @@
# A queue which pulls element artifacts
#
class PullQueue(Queue):
-
action_name = "Pull"
complete_name = "Artifacts Pulled"
resources = [ResourceType.DOWNLOAD, ResourceType.CACHE]
@@ -40,7 +39,6 @@ def status(self, element):
return QueueStatus.SKIP
def done(self, _, element, result, status):
-
if status is JobStatus.FAIL:
return
diff --git a/src/buildstream/_scheduler/queues/queue.py b/src/buildstream/_scheduler/queues/queue.py
index badb5399d..fd066a4a9 100644
--- a/src/buildstream/_scheduler/queues/queue.py
+++ b/src/buildstream/_scheduler/queues/queue.py
@@ -55,7 +55,6 @@ class QueueStatus(FastEnum):
# scheduler (Scheduler): The Scheduler
#
class Queue:
-
# These should be overridden on class data of concrete Queue implementations
action_name = None # type: Optional[str]
complete_name = None # type: Optional[str]
@@ -64,7 +63,6 @@ class Queue:
log_to_file = True
def __init__(self, scheduler, *, imperative=False):
-
#
# Private members
#
@@ -307,7 +305,6 @@ def _update_workspaces(self, element):
# See the Job object for an explanation of the call signature
#
def _job_done(self, job, element, status, result):
-
# Now release the resources we reserved
#
self._resources.release(self.resources)
@@ -322,7 +319,6 @@ def _job_done(self, job, element, status, result):
try:
self.done(job, element, result, status)
except BstError as e:
-
# Report error and mark as failed
#
self._message(element, MessageType.ERROR, "Post processing error", detail=str(e))
@@ -336,7 +332,6 @@ def _job_done(self, job, element, status, result):
set_last_task_error(e.domain, e.reason)
except Exception: # pylint: disable=broad-except
-
# Report unhandled exceptions and mark as failed
#
self._message(
diff --git a/src/buildstream/_scheduler/queues/sourcepushqueue.py b/src/buildstream/_scheduler/queues/sourcepushqueue.py
index 4b839f067..94b7424fc 100644
--- a/src/buildstream/_scheduler/queues/sourcepushqueue.py
+++ b/src/buildstream/_scheduler/queues/sourcepushqueue.py
@@ -22,7 +22,6 @@
# A queue which pushes staged sources
#
class SourcePushQueue(Queue):
-
action_name = "Src-push"
complete_name = "Sources Pushed"
resources = [ResourceType.UPLOAD]
diff --git a/src/buildstream/_scheduler/queues/trackqueue.py b/src/buildstream/_scheduler/queues/trackqueue.py
index 3375b0c98..00506bd23 100644
--- a/src/buildstream/_scheduler/queues/trackqueue.py
+++ b/src/buildstream/_scheduler/queues/trackqueue.py
@@ -27,7 +27,6 @@
# A queue which tracks sources
#
class TrackQueue(Queue):
-
action_name = "Track"
complete_name = "Sources Tracked"
resources = [ResourceType.DOWNLOAD]
@@ -38,7 +37,6 @@ def get_process_func(self):
def status(self, element):
# We can skip elements without any sources
if not any(element.sources()):
-
# But we still have to mark them as tracked
element._tracking_done()
return QueueStatus.SKIP
@@ -46,7 +44,6 @@ def status(self, element):
return QueueStatus.READY
def done(self, _, element, result, status):
-
if status is JobStatus.FAIL:
return
diff --git a/src/buildstream/_scheduler/resources.py b/src/buildstream/_scheduler/resources.py
index 736227f44..faa58c28d 100644
--- a/src/buildstream/_scheduler/resources.py
+++ b/src/buildstream/_scheduler/resources.py
@@ -81,7 +81,6 @@ def reserve(self, resources, exclusive=None, *, peek=False):
# about.
#
for resource in resources - exclusive:
-
# If our job wants this resource exclusively, we never
# check this, so we can get away with not (temporarily)
# removing it from the set.
diff --git a/src/buildstream/_scheduler/scheduler.py b/src/buildstream/_scheduler/scheduler.py
index f2ae736a2..d945ac474 100644
--- a/src/buildstream/_scheduler/scheduler.py
+++ b/src/buildstream/_scheduler/scheduler.py
@@ -81,7 +81,6 @@ def wrapper(*args, **kwargs):
#
class Scheduler:
def __init__(self, context, start_time, state, interrupt_callback, ticker_callback):
-
#
# Public members
#
@@ -134,7 +133,6 @@ def __init__(self, context, start_time, state, interrupt_callback, ticker_callba
#
@reset_signals_on_exit
def run(self, queues, casd_process_manager):
-
# Hold on to the queues to process
self.queues = queues
@@ -231,7 +229,6 @@ def clear_queues(self):
# remain blocked after Scheduler.run() returns.
#
def terminate(self):
-
# Set this right away, the frontend will check this
# attribute to decide whether or not to print status info
# etc and the following code block will trigger some callbacks.
@@ -327,7 +324,6 @@ def _abort_on_casd_failure(self, pid, returncode):
# job (Job): The job to start
#
def _start_job(self, job):
-
# From the scheduler perspective, the following
# is considered atomic; started jobs are always in the
# active_jobs list, and jobs in the active_jobs list
@@ -367,7 +363,6 @@ def _sched_queue_jobs(self):
queues_to_process = []
while process_queues:
-
# Pull elements forward through all queues, regardless of whether we're processing those
# queues. The main reason to do this is to ensure we propagate finished jobs from the
# imperative queue.
@@ -412,7 +407,6 @@ def _sched_queue_jobs(self):
#
def _sched(self):
def real_schedule():
-
# Reset the scheduling handle before queuing any jobs.
#
# We do this right away because starting jobs can result
@@ -428,7 +422,6 @@ def real_schedule():
self._sched_handle = None
if not self.terminated:
-
#
# Run as many jobs as the queues can handle for the
# available resources
@@ -477,7 +470,6 @@ def _resume_jobs(self):
# A loop registered event callback for keyboard interrupts
#
def _interrupt_event(self):
-
# The event loop receives a copy of all signals that are sent while it is running
# This means that even though we catch the SIGINT in the question to the user,
# the loop will receive it too, and thus we need to skip it here.
diff --git a/src/buildstream/_signals.py b/src/buildstream/_signals.py
index 0556945d3..a96bb7b7a 100644
--- a/src/buildstream/_signals.py
+++ b/src/buildstream/_signals.py
@@ -212,9 +212,7 @@ def suspendable(suspend_callback, resume_callback):
#
@contextmanager
def blocked(signal_list, ignore=True):
-
with ExitStack() as stack:
-
# Optionally add the ignored() context manager to this context
if ignore:
stack.enter_context(ignored(signal_list))
@@ -239,7 +237,6 @@ def blocked(signal_list, ignore=True):
#
@contextmanager
def ignored(signal_list):
-
orig_handlers = {}
for sig in signal_list:
orig_handlers[sig] = signal.signal(sig, signal.SIG_IGN)
diff --git a/src/buildstream/_state.py b/src/buildstream/_state.py
index b581642b3..f31509152 100644
--- a/src/buildstream/_state.py
+++ b/src/buildstream/_state.py
@@ -28,7 +28,6 @@
#
class TaskGroup:
def __init__(self, name: str, state: "State", complete_name: Optional[str] = None) -> None:
-
#
# Public members
#
@@ -106,7 +105,6 @@ class Task:
def __init__(
self, state: "State", task_id: str, action_name: str, full_name: str, elapsed_offset: datetime.timedelta
) -> None:
-
#
# Public members
#
@@ -203,7 +201,6 @@ def _notify_task_changed(self) -> None:
#
class State:
def __init__(self, session_start: datetime.datetime) -> None:
-
#
# Public members
#
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 61462ed2c..721e02bb8 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -67,7 +67,6 @@ class Stream:
def __init__(
self, context, session_start, *, session_start_callback=None, interrupt_callback=None, ticker_callback=None
):
-
#
# Public members
#
@@ -390,7 +389,6 @@ def build(
ignore_project_source_remotes: bool = False,
retry_failed: bool = False,
):
-
# Flag the build state
self._context.build = True
@@ -461,7 +459,6 @@ def fetch(
source_remotes: Iterable[RemoteSpec] = (),
ignore_project_source_remotes: bool = False,
):
-
if self._context.remote_cache_spec:
self._context.messenger.warn(
"Cache Storage Service is configured, fetched sources may not be available in the local cache"
@@ -495,7 +492,6 @@ def fetch(
# are rewritten inline.
#
def track(self, targets, *, selection=_PipelineSelection.REDIRECT, except_targets=None, cross_junctions=False):
-
elements = self._load_tracking(
targets, selection=selection, except_targets=except_targets, cross_junctions=cross_junctions
)
@@ -539,7 +535,6 @@ def source_push(
source_remotes: Iterable[RemoteSpec] = (),
ignore_project_source_remotes: bool = False,
):
-
elements = self._load(
targets,
selection=selection,
@@ -584,7 +579,6 @@ def pull(
artifact_remotes: Iterable[RemoteSpec] = (),
ignore_project_artifact_remotes: bool = False,
):
-
if self._context.remote_cache_spec:
self._context.messenger.warn(
"Cache Storage Service is configured, pulled artifacts may not be available in the local cache"
@@ -638,7 +632,6 @@ def push(
artifact_remotes: Iterable[RemoteSpec] = (),
ignore_project_artifact_remotes: bool = False,
):
-
elements = self._load(
targets,
selection=selection,
@@ -696,7 +689,6 @@ def checkout(
artifact_remotes: Iterable[RemoteSpec] = (),
ignore_project_artifact_remotes: bool = False,
):
-
elements = self._load(
(target,),
selection=selection,
@@ -734,7 +726,7 @@ def checkout(
self._export_artifact(tar, location, compression, element, hardlinks, virdir)
except BstError as e:
raise StreamError(
- "Error while staging dependencies into a sandbox" ": '{}'".format(e), detail=e.detail, reason=e.reason
+ "Error while staging dependencies into a sandbox: '{}'".format(e), detail=e.detail, reason=e.reason
) from e
# _export_artifact()
@@ -930,7 +922,6 @@ def source_checkout(
source_remotes: Iterable[RemoteSpec] = (),
ignore_project_source_remotes: bool = False,
):
-
self._check_location_writable(location, force=force, tar=tar)
elements = self._load(
@@ -951,9 +942,7 @@ def source_checkout(
try:
self._source_checkout(elements, location, force, deps, tar, compression, include_build_scripts)
except BstError as e:
- raise StreamError(
- "Error while writing sources" ": '{}'".format(e), detail=e.detail, reason=e.reason
- ) from e
+ raise StreamError("Error while writing sources: '{}'".format(e), detail=e.detail, reason=e.reason) from e
self._context.messenger.info("Checked out sources to '{}'".format(location))
@@ -1000,7 +989,6 @@ def workspace_open(
expanded_directories = []
# To try to be more atomic, loop through the elements and raise any errors we can early
for target in elements:
-
if not list(target.sources()):
build_depends = [x.name for x in target._dependencies(_Scope.BUILD, recurse=False)]
if not build_depends:
@@ -1372,7 +1360,6 @@ def _assert_project(self, message: str) -> None:
#
def _load_artifacts(self, artifact_names):
with self._context.messenger.simple_task("Loading artifacts") as task:
-
# Use a set here to avoid duplicates.
#
# ArtifactElement.new_from_artifact_name() will take care of ensuring
@@ -1404,7 +1391,6 @@ def _load_artifacts(self, artifact_names):
# (tuple of lists): A tuple of Element object lists, grouped corresponding to target_groups
#
def _load_elements(self, target_groups):
-
# First concatenate all the lists for the loader's sake
targets = list(itertools.chain(*target_groups))
@@ -1443,7 +1429,6 @@ def _load_elements_from_targets(
rewritable: bool = False,
valid_artifact_names: bool = False,
) -> Tuple[List[Element], List[Element], List[Element]]:
-
# First determine which of the user specified targets are artifact
# names and which are element names.
element_names, artifact_names = self._expand_and_classify_targets(
@@ -1575,7 +1560,6 @@ def _load_tracking(self, targets, *, selection=_PipelineSelection.NONE, except_t
# (list of Element): The filtered or asserted result
#
def _track_cross_junction_filter(self, project, elements, cross_junction_requested):
-
# First filter out cross junctioned elements
if not cross_junction_requested:
elements = [element for element in elements if element._get_project() is project]
@@ -1798,7 +1782,6 @@ def _enqueue_plan(self, plan, *, queue=None):
# announce_session (bool): Whether to announce the session in the frontend.
#
def _run(self, *, announce_session: bool = False):
-
# Inform the frontend of the full list of elements
# and the list of elements which will be processed in this run
#
@@ -1827,7 +1810,6 @@ def _run(self, *, announce_session: bool = False):
# announce_session (bool): Whether to announce the session in the frontend
#
def _fetch(self, elements: List[Element], *, fetch_original: bool = False, announce_session: bool = False):
-
# Assert consistency for the fetch elements
_pipeline.assert_consistent(self._context, elements)
@@ -1952,7 +1934,6 @@ def _write_build_scripts(self, location, elements):
# Write a master build script to the sandbox
def _write_master_build_script(self, directory, elements):
-
module_string = ""
for element in elements:
module_string += shlex.quote(element.normal_name) + " "
@@ -2058,7 +2039,6 @@ def _expand_and_classify_targets(
# Expand globs for elements
if element_globs:
-
# Bail out if an element glob is specified without providing a project directory
if not self._project:
raise StreamError(
diff --git a/src/buildstream/_testing/__init__.py b/src/buildstream/_testing/__init__.py
index 7fa3643a2..ffe050be6 100644
--- a/src/buildstream/_testing/__init__.py
+++ b/src/buildstream/_testing/__init__.py
@@ -39,7 +39,7 @@
import pytest
except ImportError:
module_name = globals()["__name__"]
- msg = "Could not import pytest:\n" "To use the {} module, you must have pytest installed.".format(module_name)
+ msg = "Could not import pytest:\nTo use the {} module, you must have pytest installed.".format(module_name)
raise ImportError(msg)
diff --git a/src/buildstream/_testing/_cachekeys.py b/src/buildstream/_testing/_cachekeys.py
index aa28f156a..894e2fe6d 100644
--- a/src/buildstream/_testing/_cachekeys.py
+++ b/src/buildstream/_testing/_cachekeys.py
@@ -69,7 +69,6 @@ def _element_filename(project_dir, element_name, alt_suffix=None):
# the option of changing the .bst suffix to something else
#
if alt_suffix:
-
# Just in case...
assert element_name.endswith(".bst")
diff --git a/src/buildstream/_testing/_sourcetests/utils.py b/src/buildstream/_testing/_sourcetests/utils.py
index c0b631e56..ee8553554 100644
--- a/src/buildstream/_testing/_sourcetests/utils.py
+++ b/src/buildstream/_testing/_sourcetests/utils.py
@@ -24,7 +24,7 @@
import pytest
except ImportError:
module_name = globals()["__name__"]
- msg = "Could not import pytest:\n" "To use the {} module, you must have pytest installed.".format(module_name)
+ msg = "Could not import pytest:\nTo use the {} module, you must have pytest installed.".format(module_name)
raise ImportError(msg)
from buildstream import _yaml
diff --git a/src/buildstream/_testing/_utils/site.py b/src/buildstream/_testing/_utils/site.py
index 0895e2b11..34b51d73c 100644
--- a/src/buildstream/_testing/_utils/site.py
+++ b/src/buildstream/_testing/_utils/site.py
@@ -93,7 +93,6 @@
# filesystem where @directory is located.
#
def have_subsecond_mtime(directory):
-
try:
test_file, test_filename = tempfile.mkstemp(dir=directory)
os.close(test_file)
diff --git a/src/buildstream/_testing/repo.py b/src/buildstream/_testing/repo.py
index e429cf4a7..de2f211df 100644
--- a/src/buildstream/_testing/repo.py
+++ b/src/buildstream/_testing/repo.py
@@ -17,6 +17,7 @@
"""
+
import os
import shutil
@@ -36,7 +37,6 @@ class Repo:
"""
def __init__(self, directory, subdir="repo"):
-
# The working directory for the repo object
#
self.directory = os.path.abspath(directory)
diff --git a/src/buildstream/_testing/runcli.py b/src/buildstream/_testing/runcli.py
index 0083e71f3..b6df60d72 100644
--- a/src/buildstream/_testing/runcli.py
+++ b/src/buildstream/_testing/runcli.py
@@ -24,7 +24,6 @@
"""
-
import os
import re
import sys
@@ -75,7 +74,6 @@ def __init__(self, exit_code=None, exception=None, exc_info=None, output=None, s
# in the case that the exit code reported is 0 (success).
#
if self.exit_code != 0:
-
# Check if buildstream failed to handle an
# exception, toplevel CLI exit should always
# be a SystemExit exception.
@@ -128,9 +126,7 @@ def assert_main_error(self, error_domain, error_reason, fail_message="", *, debu
Exception: {}
Domain: {}
Reason: {}
- """.format(
- self.exit_code, self.exception, self.exception.domain, self.exception.reason
- )
+ """.format(self.exit_code, self.exception, self.exception.domain, self.exception.reason)
)
assert self.exit_code == -1, fail_message
assert self.exc is not None, fail_message
@@ -156,7 +152,6 @@ def assert_main_error(self, error_domain, error_reason, fail_message="", *, debu
# (AssertionError): If any of the assertions fail
#
def assert_task_error(self, error_domain, error_reason, fail_message=""):
-
assert self.exit_code == -1, fail_message
assert self.exc is not None, fail_message
assert self.exception is not None, fail_message
@@ -307,7 +302,6 @@ def remove_artifact_from_cache(self, project, element_name, *, cache_dir=None):
# binary_capture (bool): Whether to capture the stdout/stderr as binary
#
def run(self, project=None, silent=False, env=None, cwd=None, options=None, args=None, binary_capture=False):
-
# We don't want to carry the state of one bst invocation into another
# bst invocation. Since node _FileInfo objects hold onto BuildStream
# projects, this means that they would also be carried forward. This
@@ -482,7 +476,6 @@ def get_artifact_name(self, project, project_name, element_name, cache_key=None)
class CliIntegration(Cli):
-
# run()
#
# This supports the same arguments as Cli.run(), see run_project_config().
@@ -506,7 +499,6 @@ def run(self, project=None, silent=False, env=None, cwd=None, options=None, args
# will be composited on top of the already loaded project.conf
#
def run_project_config(self, *, project_config=None, **kwargs):
-
# First load the project.conf and substitute {project_dir}
#
# Save the original project.conf, because we will run more than
@@ -527,7 +519,6 @@ def run_project_config(self, *, project_config=None, **kwargs):
config = config.format(project_dir=project_directory)
if project_config is not None:
-
# If a custom project configuration dictionary was
# specified, composite it on top of the already
# substituted base project configuration
@@ -538,7 +529,6 @@ def run_project_config(self, *, project_config=None, **kwargs):
# dictionaries need to be loaded via _yaml.load_data() first
#
with tempfile.TemporaryDirectory(dir=project_directory) as scratchdir:
-
temp_project = os.path.join(scratchdir, "project.conf")
with open(temp_project, "w", encoding="utf-8") as f:
_yaml.roundtrip_dump(project_config, f)
@@ -550,7 +540,6 @@ def run_project_config(self, *, project_config=None, **kwargs):
_yaml.roundtrip_dump(base_config, project_filename)
else:
-
# Otherwise, just dump it as is
with open(project_filename, "w", encoding="utf-8") as f:
f.write(config)
@@ -559,7 +548,6 @@ def run_project_config(self, *, project_config=None, **kwargs):
class CliRemote(CliIntegration):
-
# ensure_services():
#
# Make sure that required services are configured and that
@@ -617,7 +605,6 @@ def ensure_services(self, actions=True, execution=True, storage=True, artifacts=
class TestArtifact:
-
# remove_artifact_from_cache():
#
# Remove given element artifact from artifact cache
@@ -627,7 +614,6 @@ class TestArtifact:
# element_name (str): The name of the element artifact
#
def remove_artifact_from_cache(self, cache_dir, element_name):
-
cache_dir = os.path.join(cache_dir, "artifacts", "refs")
normal_name = element_name.replace(os.sep, "-")
@@ -654,7 +640,6 @@ def remove_artifact_from_cache(self, cache_dir, element_name):
# (bool): If the cache contains the element's artifact
#
def is_cached(self, cache_dir, element, element_key):
-
# cas = CASCache(str(cache_dir))
artifact_ref = element.get_artifact_name(element_key)
return os.path.exists(os.path.join(cache_dir, "artifacts", "refs", artifact_ref))
@@ -672,7 +657,6 @@ def is_cached(self, cache_dir, element, element_key):
# (Digest): The digest stored in the ref
#
def get_digest(self, cache_dir, element, element_key):
-
artifact_ref = element.get_artifact_name(element_key)
artifact_dir = os.path.join(cache_dir, "artifacts", "refs")
artifact_proto = artifact_pb2.Artifact()
@@ -837,7 +821,6 @@ def chdir(directory):
@contextmanager
def environment(env):
-
old_env = {}
for key, value in env.items():
old_env[key] = os.environ.get(key)
@@ -857,7 +840,6 @@ def environment(env):
@contextmanager
def configured(directory, config=None):
-
# Ensure we've at least relocated the caches to a temp directory
if not config:
config = {}
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index 7d4fc4dbe..565483752 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -274,7 +274,6 @@ def to_dict(self):
#
@classmethod
def from_dict(cls, toplevel_project, dictionary):
-
# Just pass the dictionary as kwargs
return cls(toplevel_project, **dictionary)
@@ -487,8 +486,9 @@ def _parse_workspace_config(self, workspaces):
if version < 4:
# bst 1.x workspaces do not separate source and build files.
raise LoadError(
- "Workspace configuration format version {} not supported."
- "Please recreate this workspace.".format(version),
+ "Workspace configuration format version {} not supported.Please recreate this workspace.".format(
+ version
+ ),
LoadErrorReason.INVALID_DATA,
)
diff --git a/src/buildstream/buildelement.py b/src/buildstream/buildelement.py
index 40355f5cf..20126d3c9 100644
--- a/src/buildstream/buildelement.py
+++ b/src/buildstream/buildelement.py
@@ -198,12 +198,10 @@
class BuildElement(Element):
-
#############################################################
# Abstract Method Implementations #
#############################################################
def configure(self, node):
-
self.__commands = {} # pylint: disable=attribute-defined-outside-init
# FIXME: Currently this forcefully validates configurations
@@ -217,7 +215,6 @@ def configure(self, node):
self.__commands[command_name] = node.get_str_list(command_name, [])
def configure_dependencies(self, dependencies):
-
self.__layout = {} # pylint: disable=attribute-defined-outside-init
# FIXME: Currently this forcefully validates configurations
@@ -289,7 +286,6 @@ def configure_sandbox(self, sandbox):
sandbox.set_environment(self.get_environment())
def stage(self, sandbox):
-
# First stage it all
#
sorted_locations = sorted(self.__layout)
@@ -311,9 +307,7 @@ def stage(self, sandbox):
self.stage_sources(sandbox, self.get_variable("build-root"))
def assemble(self, sandbox):
-
with sandbox.batch(root_read_only=True, label="Running commands"):
-
# We need to ensure that configure-commands are only called
# once in workspaces, because the changes will persist across
# incremental builds - not desirable, for example, in the case
diff --git a/src/buildstream/downloadablefilesource.py b/src/buildstream/downloadablefilesource.py
index ba6f1229e..8fbd7369a 100644
--- a/src/buildstream/downloadablefilesource.py
+++ b/src/buildstream/downloadablefilesource.py
@@ -142,7 +142,6 @@ def translate_url(
:ref:`built-in functionality documentation `.
"""
-
import os
import re
import urllib.request
@@ -328,7 +327,6 @@ def track(self): # pylint: disable=arguments-differ
return new_ref
def fetch(self): # pylint: disable=arguments-differ
-
# Just a defensive check, it is impossible for the
# file to be already cached because Source.fetch() will
# not be called if the source is already cached.
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index ca8080314..9996409b4 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -149,7 +149,6 @@ class DependencyConfiguration:
"""
def __init__(self, element: "Element", path: str, config: Optional["MappingNode"]):
-
self.element = element # type: Element
"""The dependency Element"""
@@ -221,7 +220,6 @@ def __init__(
*,
artifact_key: Optional[str] = None,
):
-
self.__cache_key_dict = None # Dict for cache key calculation
self.__cache_key: Optional[str] = None # Our cached cache key
@@ -848,7 +846,6 @@ def run_cleanup_commands(self, sandbox: "Sandbox") -> None:
# (Element): The dependencies in `scope`, in deterministic staging order
#
def _dependencies(self, scope, *, recurse=True, visited=None):
-
# The format of visited is (BitMap(), BitMap()), with the first BitMap
# containing element that have been visited for the `_Scope.BUILD` case
# and the second one relating to the `_Scope.RUN` case.
@@ -918,7 +915,6 @@ def visit(element, scope, visited):
# (Element): The dependency element, or None if not found.
#
def _search(self, scope, name):
-
for dep in self._dependencies(scope):
if dep.name == name:
return dep
@@ -960,7 +956,6 @@ def _stage_artifact(
orphans: bool = True,
owner: Optional["Element"] = None,
) -> FileListResult:
-
owner = owner or self
assert owner._overlap_collector is not None, "Attempted to stage artifacts outside of Element.stage()"
@@ -1034,7 +1029,6 @@ def _stage_dependency_artifacts(self, sandbox, scope, *, path=None, include=None
#
@classmethod
def _new_from_load_element(cls, load_element, task=None):
-
if not load_element.first_pass:
load_element.project.ensure_fully_loaded()
@@ -1069,13 +1063,11 @@ def _new_from_load_element(cls, load_element, task=None):
# then the assertion will be raised by the LoadElement.
#
if custom_configurations is not None:
-
# Create a proxy for the dependency
dep_proxy = cast("Element", ElementProxy(element, dependency))
# Class supports dependency configuration
if dep.config_nodes:
-
# Ensure variables are substituted first
#
for config in dep.config_nodes:
@@ -1409,7 +1401,6 @@ def _track(self):
#
@contextmanager
def _prepare_sandbox(self, scope, shell=False, integrate=True, usebuildtree=False):
-
# Assert first that we have a sandbox configuration
if not self.__sandbox_config:
raise ElementError(
@@ -1421,7 +1412,6 @@ def _prepare_sandbox(self, scope, shell=False, integrate=True, usebuildtree=Fals
# bst shell and bst artifact checkout require a local sandbox.
# pylint: disable-next=contextmanager-generator-missing-cleanup
with self.__sandbox(config=self.__sandbox_config, allow_remote=False) as sandbox:
-
# Configure always comes first, and we need it.
self.__configure_sandbox(sandbox)
@@ -1456,7 +1446,6 @@ def _prepare_sandbox(self, scope, shell=False, integrate=True, usebuildtree=Fals
# directory (str): An absolute path to stage the sources at
#
def _stage_sources_in_sandbox(self, sandbox, directory):
-
# Stage all sources that need to be copied
sandbox_vroot = sandbox.get_virtual_directory()
host_vdirectory = sandbox_vroot.open_directory(directory.lstrip(os.sep), create=True)
@@ -1470,11 +1459,9 @@ def _stage_sources_in_sandbox(self, sandbox, directory):
# vdirectory (Union[str, Directory]): A virtual directory object or local path to stage sources to.
#
def _stage_sources_at(self, vdirectory):
-
# It's advantageous to have this temporary directory on
# the same file system as the rest of our cache.
with self.timed_activity("Staging sources", silent_nested=True):
-
if not isinstance(vdirectory, Directory):
vdirectory = FileBasedDirectory(vdirectory)
if vdirectory:
@@ -1645,7 +1632,6 @@ def _assemble_done(self, successful):
# - Cache the resulting artifact
#
def _assemble(self):
-
# Only do this the first time around (i.e. __assemble_done is False)
# to allow for retrying the job
if self._cached_failure() and not self.__assemble_done:
@@ -1670,12 +1656,10 @@ def _assemble(self):
context = self._get_context()
with self._output_file() as output_file:
-
# Explicitly clean it up, keep the build dir around if exceptions are raised
os.makedirs(context.builddir, exist_ok=True)
with self.__sandbox(output_file, output_file, self.__sandbox_config) as sandbox:
-
# Ensure that the plugin does not run commands if it said that it wouldn't
#
# We only disable commands here in _assemble() instead of __sandbox() because
@@ -1712,7 +1696,6 @@ def _assemble(self):
self._cache_artifact(sandbox, collect)
def _cache_artifact(self, sandbox, collect):
-
context = self._get_context()
buildresult = self.__build_result
with self.__dynamic_public_guard:
@@ -1773,8 +1756,7 @@ def _cache_artifact(self, sandbox, collect):
if collect is not None and collectvdir is None:
raise ElementError(
- "Directory '{}' was not found inside the sandbox, "
- "unable to collect artifact contents".format(collect)
+ "Directory '{}' was not found inside the sandbox, unable to collect artifact contents".format(collect)
)
# _fetch_done()
@@ -1876,7 +1858,6 @@ def _load_artifact(self, *, pull, strict=None):
if artifact.cached() and ignore_failed_artifact:
success, _, _ = artifact.load_build_result()
if not success:
-
self.info(
"Discarded failed build",
detail="Discarded '{}'\n".format(artifact.strong_key)
@@ -1928,7 +1909,6 @@ def _load_artifact(self, *, pull, strict=None):
# equal to the resolved strict key.
#
if ignore_failed_artifact or artifact.strong_key != self.__strict_cache_key:
-
if ignore_failed_artifact:
reason = "because retrying failed builds is enabled."
else:
@@ -2035,7 +2015,6 @@ def _push(self):
#
# Returns: Exit code
def _shell(self, scope=None, *, mounts=None, isolate=False, prompt=None, command=None, usebuildtree=False):
-
with self._prepare_sandbox(scope, shell=True, usebuildtree=usebuildtree) as sandbox:
environment = self.get_environment()
environment = copy.copy(environment)
@@ -2052,7 +2031,6 @@ def _shell(self, scope=None, *, mounts=None, isolate=False, prompt=None, command
# Special configurations for non-isolated sandboxes
if not isolate:
-
# Open the network, and reuse calling uid/gid
#
flags |= _SandboxFlags.NETWORK_ENABLED | _SandboxFlags.INHERIT_UID
@@ -2677,7 +2655,6 @@ def __get_last_build_artifact(self):
# Internal method for calling public abstract configure_sandbox() method.
#
def __configure_sandbox(self, sandbox):
-
self.configure_sandbox(sandbox)
# __stage():
@@ -2685,7 +2662,6 @@ def __configure_sandbox(self, sandbox):
# Internal method for calling public abstract stage() method.
#
def __stage(self, sandbox):
-
# Enable the overlap collector during the staging process
with self.__collect_overlaps():
self.stage(sandbox)
@@ -2696,7 +2672,6 @@ def __stage(self, sandbox):
# the element and its sources.
#
def __preflight(self):
-
if self.BST_FORBID_RDEPENDS and self.BST_FORBID_BDEPENDS:
if any(self._dependencies(_Scope.RUN, recurse=False)) or any(
self._dependencies(_Scope.BUILD, recurse=False)
@@ -2769,7 +2744,6 @@ def __assert_cached(self):
#
def __get_tainted(self, recalculate=False):
if recalculate or self.__tainted is None:
-
# Whether this artifact has a workspace
workspaced = self.__artifact.get_metadata_workspaced()
@@ -2854,7 +2828,6 @@ def __sandbox(self, stdout=None, stderr=None, config=None, allow_remote=True):
# Normal element initialization procedure.
#
def __initialize_from_yaml(self, load_element: "LoadElement", plugin_conf: Optional[str]):
-
context = self._get_context()
project = self._get_project()
@@ -2923,7 +2896,6 @@ def __initialize_from_artifact_key(self, key: str):
@classmethod
def __compose_default_splits(cls, project, defaults, first_pass):
-
element_public = defaults.get_mapping(Symbol.PUBLIC, default={})
element_bst = element_public.get_mapping("bst", default={})
element_splits = element_bst.get_mapping("split-rules", default={})
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index 5190ac31e..c4049ec6e 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -160,6 +160,7 @@
_STR_BYTES_PATH = Union[str, bytes, "os.PathLike[str]", "os.PathLike[bytes]"]
_CMD = Union[_STR_BYTES_PATH, Sequence[_STR_BYTES_PATH]]
+
# _background_job_wrapper()
#
# Wrapper for running jobs in the background, transparently for users
@@ -286,7 +287,6 @@ def __init__(
type_tag: str,
unique_id: Optional[int] = None,
):
-
self.name = name
"""The plugin name
@@ -589,7 +589,7 @@ def blocking_activity(
activity_name: str,
*,
detail: Optional[str] = None,
- silent_nested: bool = False
+ silent_nested: bool = False,
) -> T1:
"""Execute a blocking activity in the background.
diff --git a/src/buildstream/plugins/elements/compose.py b/src/buildstream/plugins/elements/compose.py
index 7152528df..c865c98b4 100644
--- a/src/buildstream/plugins/elements/compose.py
+++ b/src/buildstream/plugins/elements/compose.py
@@ -83,7 +83,6 @@ def configure_sandbox(self, sandbox):
pass
def stage(self, sandbox):
-
# Stage deps in the sandbox root
with self.timed_activity("Staging dependencies", silent_nested=True):
self.stage_dependency_artifacts(sandbox)
@@ -110,7 +109,6 @@ def assemble(self, sandbox):
if self.integration:
with self.timed_activity("Integrating sandbox"):
if require_split:
-
# Make a snapshot of all the files before integration-commands are run.
snapshot = set(vbasedir.list_relative_paths())
diff --git a/src/buildstream/plugins/elements/import.py b/src/buildstream/plugins/elements/import.py
index 55bc29e08..b65e0b0fb 100644
--- a/src/buildstream/plugins/elements/import.py
+++ b/src/buildstream/plugins/elements/import.py
@@ -63,7 +63,6 @@ def stage(self, sandbox):
pass
def assemble(self, sandbox):
-
# Stage sources into the input directory
self.stage_sources(sandbox, "input")
diff --git a/src/buildstream/plugins/elements/junction.py b/src/buildstream/plugins/elements/junction.py
index 7c77abfc7..1ef1a1757 100644
--- a/src/buildstream/plugins/elements/junction.py
+++ b/src/buildstream/plugins/elements/junction.py
@@ -351,7 +351,6 @@ class JunctionElement(Element):
BST_FORBID_RDEPENDS = True
def configure(self, node):
-
node.validate_keys(["path", "options", "overrides", "aliases", "map-aliases"])
self.path = node.get_str("path", default="")
@@ -363,7 +362,6 @@ def configure(self, node):
self.overrides = {}
overrides_node = node.get_mapping("overrides", {})
for key, junction_name in overrides_node.items():
-
# Cannot override a subproject with the project itself
#
if junction_name.as_str() == self.name:
diff --git a/src/buildstream/plugins/elements/link.py b/src/buildstream/plugins/elements/link.py
index 48e38901b..8c4765a29 100644
--- a/src/buildstream/plugins/elements/link.py
+++ b/src/buildstream/plugins/elements/link.py
@@ -53,7 +53,6 @@ class LinkElement(Element):
BST_FORBID_SOURCES = True
def configure(self, node):
-
node.validate_keys(["target"])
# Hold onto the node, keep it around for provenance.
diff --git a/src/buildstream/plugins/elements/script.py b/src/buildstream/plugins/elements/script.py
index 90e4e024c..e5d670a4f 100644
--- a/src/buildstream/plugins/elements/script.py
+++ b/src/buildstream/plugins/elements/script.py
@@ -51,7 +51,6 @@ def configure(self, node):
def configure_dependencies(self, dependencies):
for dep in dependencies:
-
# Determine the location to stage each element, default is "/"
location = "/"
if dep.config:
diff --git a/src/buildstream/plugins/elements/stack.py b/src/buildstream/plugins/elements/stack.py
index 83f205b55..90d9d64a0 100644
--- a/src/buildstream/plugins/elements/stack.py
+++ b/src/buildstream/plugins/elements/stack.py
@@ -119,7 +119,6 @@ def configure(self, node):
pass
def preflight(self):
-
# Assert that all dependencies are both build and runtime dependencies.
#
all_deps = list(self._dependencies(_Scope.ALL, recurse=False))
@@ -146,7 +145,6 @@ def stage(self, sandbox):
pass
def assemble(self, sandbox):
-
# Just create a dummy empty artifact, its existence is a statement
# that all this stack's dependencies are built.
vrootdir = sandbox.get_virtual_directory()
diff --git a/src/buildstream/plugins/sources/remote.py b/src/buildstream/plugins/sources/remote.py
index ab97c73c7..bf626d2eb 100644
--- a/src/buildstream/plugins/sources/remote.py
+++ b/src/buildstream/plugins/sources/remote.py
@@ -52,6 +52,7 @@
as such, behaves as described in the :ref:`default reporting of SourceInfo `
documentation.
"""
+
import os
from buildstream import DownloadableFileSource, SourceError, utils
@@ -81,7 +82,6 @@ def stage(self, directory):
# are not write protected in the sandbox.
dest = os.path.join(directory, self.filename)
with self.timed_activity("Staging remote file to {}".format(dest)):
-
utils.safe_copy(self._get_mirror_file(), dest)
# To prevent user's umask introducing variability here, explicitly set
diff --git a/src/buildstream/plugins/sources/tar.py b/src/buildstream/plugins/sources/tar.py
index f1d939119..79be7105b 100644
--- a/src/buildstream/plugins/sources/tar.py
+++ b/src/buildstream/plugins/sources/tar.py
@@ -166,16 +166,18 @@ def _assert_safe(self, member: tarfile.TarInfo, target_dir: str):
final_path = os.path.abspath(os.path.join(target_dir, member.path))
if not final_path.startswith(target_dir):
raise SourceError(
- "{}: Tarfile attempts to extract outside the staging area: "
- "{} -> {}".format(self, member.path, final_path)
+ "{}: Tarfile attempts to extract outside the staging area: {} -> {}".format(
+ self, member.path, final_path
+ )
)
if member.islnk():
linked_path = os.path.abspath(os.path.join(target_dir, member.linkname))
if not linked_path.startswith(target_dir):
raise SourceError(
- "{}: Tarfile attempts to hardlink outside the staging area: "
- "{} -> {}".format(self, member.path, final_path)
+ "{}: Tarfile attempts to hardlink outside the staging area: {} -> {}".format(
+ self, member.path, final_path
+ )
)
# Don't need to worry about symlinks because they're just
@@ -226,16 +228,13 @@ def _extract_filter(
# is not enough because some tarballs simply do not contain the leading
# directory paths for the archived files.
def _list_tar_paths(self, tar):
-
visited = set()
for member in tar.getmembers():
-
# Remove any possible leading './', offer more consistent behavior
# across tarballs encoded with or without a leading '.'
member_name = member.name.lstrip("./")
if not member.isdir():
-
# Loop over the components of a path, for a path of a/b/c/d
# we will first visit 'a', then 'a/b' and then 'a/b/c', excluding
# the final component
diff --git a/src/buildstream/sandbox/_config.py b/src/buildstream/sandbox/_config.py
index 87e6b35fa..e76c8ae67 100644
--- a/src/buildstream/sandbox/_config.py
+++ b/src/buildstream/sandbox/_config.py
@@ -52,7 +52,7 @@ def __init__(
build_arch: str,
build_uid: Optional[int] = None,
build_gid: Optional[int] = None,
- remote_apis_socket_path: Optional[str] = None
+ remote_apis_socket_path: Optional[str] = None,
):
self.build_os = build_os
self.build_arch = build_arch
@@ -75,7 +75,6 @@ def __init__(
# A dictionary representation of this SandboxConfig
#
def to_dict(self) -> Dict[str, Union[str, int]]:
-
# Assign mandatory portions of the sandbox configuration
#
# /!\ No additional mandatory members can ever be added to
diff --git a/src/buildstream/sandbox/_sandboxbuildboxrun.py b/src/buildstream/sandbox/_sandboxbuildboxrun.py
index d4b49860a..889bd5124 100644
--- a/src/buildstream/sandbox/_sandboxbuildboxrun.py
+++ b/src/buildstream/sandbox/_sandboxbuildboxrun.py
@@ -171,7 +171,6 @@ def resume_proc():
os.killpg(group_id, signal.SIGCONT)
with ExitStack() as stack:
-
# We want to launch buildbox-run in a new session in non-interactive
# mode so that we handle the SIGTERM and SIGTSTP signals separately
# from the nested process, but in interactive mode this causes
diff --git a/src/buildstream/sandbox/_sandboxdummy.py b/src/buildstream/sandbox/_sandboxdummy.py
index 31834e81b..677bd1c86 100644
--- a/src/buildstream/sandbox/_sandboxdummy.py
+++ b/src/buildstream/sandbox/_sandboxdummy.py
@@ -22,10 +22,9 @@ def __init__(self, *args, **kwargs):
self._reason = kwargs.get("dummy_reason", "no reason given")
def _run(self, command, *, flags, cwd, env):
-
if not self._has_command(command[0], env):
raise SandboxCommandError(
- "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command"
+ "Staged artifacts do not provide command '{}'".format(command[0]), reason="missing-command"
)
raise SandboxError(
diff --git a/src/buildstream/sandbox/_sandboxreapi.py b/src/buildstream/sandbox/_sandboxreapi.py
index 262cb3f04..3bd2120ee 100644
--- a/src/buildstream/sandbox/_sandboxreapi.py
+++ b/src/buildstream/sandbox/_sandboxreapi.py
@@ -40,7 +40,7 @@ def _run(self, command, *, flags, cwd, env):
if not self._has_command(command[0], env):
raise SandboxCommandError(
- "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command"
+ "Staged artifacts do not provide command '{}'".format(command[0]), reason="missing-command"
)
# Ensure working directory exists
@@ -60,7 +60,6 @@ def _run(self, command, *, flags, cwd, env):
read_write_directories = []
mount_sources = self._get_mount_sources()
for directory in self._get_marked_directories():
-
if directory in mount_sources:
# Bind mount
mount_point = directory.lstrip(os.path.sep)
diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py
index 8abaa2b53..d895f13c6 100644
--- a/src/buildstream/sandbox/_sandboxremote.py
+++ b/src/buildstream/sandbox/_sandboxremote.py
@@ -185,8 +185,9 @@ def __run_remote_command(stub, execute_request=None, running_operation=None):
grpc.StatusCode.DEADLINE_EXCEEDED,
):
raise SandboxError(
- "Failed contacting remote execution server at {}."
- "{}: {}".format(self.exec_spec.url, status_code.name, e.details())
+ "Failed contacting remote execution server at {}.{}: {}".format(
+ self.exec_spec.url, status_code.name, e.details()
+ )
)
if running_operation and status_code == grpc.StatusCode.UNIMPLEMENTED:
@@ -199,9 +200,12 @@ def __run_remote_command(stub, execute_request=None, running_operation=None):
# Set up signal handler to trigger cancel_operation on SIGTERM
operation = None
- with self._get_context().messenger.timed_activity(
- "Waiting for the remote build to complete", element_name=self._get_element_name()
- ), _signals.terminator(self.cancel_operation):
+ with (
+ self._get_context().messenger.timed_activity(
+ "Waiting for the remote build to complete", element_name=self._get_element_name()
+ ),
+ _signals.terminator(self.cancel_operation),
+ ):
operation = __run_remote_command(stub, execute_request=request)
if operation is None:
return None
@@ -227,7 +231,7 @@ def cancel_operation(self):
pass
else:
raise SandboxError(
- "Failed trying to send CancelOperation request: " "{} ({})".format(e.details(), e.code().name)
+ "Failed trying to send CancelOperation request: {} ({})".format(e.details(), e.code().name)
)
def _fetch_missing_blobs(self, vdir):
diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py
index 505cc5c1a..18cae22b7 100644
--- a/src/buildstream/sandbox/sandbox.py
+++ b/src/buildstream/sandbox/sandbox.py
@@ -171,7 +171,7 @@ def run(
root_read_only: bool = False,
cwd: Optional[str] = None,
env: Optional[Dict[str, str]] = None,
- label: Optional[str] = None
+ label: Optional[str] = None,
) -> Optional[int]:
"""Run a command in the sandbox.
@@ -345,7 +345,7 @@ def _run_with_flags(
flags: int,
cwd: Optional[str] = None,
env: Optional[Dict[str, str]] = None,
- label: Optional[str] = None
+ label: Optional[str] = None,
) -> Optional[int]:
if not self.__allow_run:
raise _SandboxBug("Element specified BST_RUN_COMMANDS as False but called Sandbox.run()")
@@ -571,7 +571,6 @@ def _disable_run(self):
# Flags indicating how the sandbox should be run.
#
class _SandboxFlags:
-
# Use default sandbox configuration.
#
NONE = 0
diff --git a/src/buildstream/scriptelement.py b/src/buildstream/scriptelement.py
index a24ddcee8..9eb7ceed0 100644
--- a/src/buildstream/scriptelement.py
+++ b/src/buildstream/scriptelement.py
@@ -195,7 +195,6 @@ def get_unique_key(self):
}
def configure_sandbox(self, sandbox):
-
# Setup the environment and work directory
sandbox.set_work_directory(self.__cwd)
@@ -206,11 +205,9 @@ def configure_sandbox(self, sandbox):
sandbox.mark_directory(self.__install_root)
def stage(self, sandbox):
-
# If self.layout_add() was never called, do the default staging of
# everything in "/" and run the integration commands
if not self.__layout:
-
with self.timed_activity("Staging dependencies", silent_nested=True):
self.stage_dependency_artifacts(sandbox)
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index 8ba64c600..9983ef257 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -1156,7 +1156,6 @@ def translate_url(
# implicitly (the Source being constructed with an __alias_override).
#
if alias_override or self.__alias_override:
-
url_alias, url_body = url.split(utils._ALIAS_SEPARATOR, 1)
project_alias_url = project.get_alias_url(url_alias, first_pass=self.__first_pass)
@@ -1213,10 +1212,10 @@ def mark_download_url(self, url: str, *, primary: bool = True) -> None:
if primary:
expected_alias = _extract_alias(url)
- assert (
- self.__expected_alias is None or self.__expected_alias == expected_alias
- ), "Attempt to mark primary URL with {}, already marked with {}".format(
- expected_alias, self.__expected_alias
+ assert self.__expected_alias is None or self.__expected_alias == expected_alias, (
+ "Attempt to mark primary URL with {}, already marked with {}".format(
+ expected_alias, self.__expected_alias
+ )
)
self.__expected_alias = expected_alias
@@ -1237,9 +1236,9 @@ def mark_download_url(self, url: str, *, primary: bool = True) -> None:
# the case for git submodules which might be automatically
# discovered.
#
- assert url in self.__marked_urls or not _extract_alias(
- url
- ), "URL was not seen at configure time: {}".format(url)
+ assert url in self.__marked_urls or not _extract_alias(url), (
+ "URL was not seen at configure time: {}".format(url)
+ )
alias = _extract_alias(url)
@@ -1529,7 +1528,6 @@ def do_load_ref(node):
# the elements themselves.
#
if project.ref_storage == ProjectRefStorage.PROJECT_REFS:
-
# First warn if there is a ref already loaded, and reset it
redundant_ref = self.get_ref() # pylint: disable=assignment-from-no-return
if redundant_ref is not None:
@@ -1560,7 +1558,6 @@ def do_load_ref(node):
# (SourceError): In the case we encounter errors saving a file to disk
#
def _set_ref(self, new_ref, *, save):
-
context = self._get_context()
project = self._get_project()
toplevel = context.get_toplevel_project()
@@ -1871,7 +1868,6 @@ def __do_fetch(self, **kwargs):
# Use the source fetchers if they are provided
#
if source_fetchers:
-
# Use a contorted loop here, this is to allow us to
# silence the messages which can result from consuming
# the items of source_fetchers, if it happens to be a generator.
@@ -1879,7 +1875,6 @@ def __do_fetch(self, **kwargs):
source_fetchers = iter(source_fetchers)
while True:
-
with context.messenger.silence():
try:
fetcher = next(source_fetchers)
@@ -1921,7 +1916,6 @@ def __do_fetch(self, **kwargs):
last_error = None
for mirror in project.get_alias_uris(alias, first_pass=self.__first_pass, tracking=False):
-
new_source = self.__clone_for_uri(mirror)
try:
new_source.fetch(**kwargs)
diff --git a/src/buildstream/sourcemirror.py b/src/buildstream/sourcemirror.py
index 9209640a1..95d39849d 100644
--- a/src/buildstream/sourcemirror.py
+++ b/src/buildstream/sourcemirror.py
@@ -37,7 +37,7 @@
these methods are mandatory to implement.
* :func:`SourceMirror.translate_url() `
-
+
Produce an appropriate URL for the given URL and alias.
@@ -53,7 +53,6 @@
from .exceptions import ErrorDomain
if TYPE_CHECKING:
-
# pylint: disable=cyclic-import
from ._context import Context
from ._project import Project
diff --git a/src/buildstream/storage/_casbaseddirectory.py b/src/buildstream/storage/_casbaseddirectory.py
index ee6aeb650..084ec0105 100644
--- a/src/buildstream/storage/_casbaseddirectory.py
+++ b/src/buildstream/storage/_casbaseddirectory.py
@@ -47,7 +47,7 @@ def __init__(
target: Optional[str] = None,
is_executable: bool = False,
directory: Optional["CasBasedDirectory"] = None,
- mtime: Optional[timestamp_pb2.Timestamp] = None # pylint: disable=no-member
+ mtime: Optional[timestamp_pb2.Timestamp] = None, # pylint: disable=no-member
) -> None:
# The CAS cache
self.cas_cache: CASCache = cas_cache
@@ -132,7 +132,7 @@ def __init__(
*,
digest=None,
parent: Optional["CasBasedDirectory"] = None,
- filename: Optional[str] = None
+ filename: Optional[str] = None,
) -> None:
# The CAS cache
self.__cas_cache: CASCache = cas_cache
@@ -358,7 +358,7 @@ def _import_files(
filter_callback: Optional[Callable[[str], bool]] = None,
update_mtime: Optional[float] = None,
properties: Optional[List[str]] = None,
- collect_result: bool = True
+ collect_result: bool = True,
) -> Optional[FileListResult]:
result = FileListResult() if collect_result else None
@@ -762,7 +762,7 @@ def __partial_import_cas_into_cas(
*,
path_prefix: str = "",
origin: Optional["CasBasedDirectory"] = None,
- result: Optional[FileListResult]
+ result: Optional[FileListResult],
) -> None:
if origin is None:
origin = self
@@ -853,10 +853,10 @@ def __list_prefixed_relative_paths(self, prefix: str = "") -> Iterator[str]:
if prefix != "":
yield prefix
- for (k, v) in sorted(file_list):
+ for k, v in sorted(file_list):
yield os.path.join(prefix, k)
- for (k, v) in sorted(directory_list):
+ for k, v in sorted(directory_list):
subdir = v.get_directory(self)
yield from subdir.__list_prefixed_relative_paths(prefix=os.path.join(prefix, k))
diff --git a/src/buildstream/storage/_filebaseddirectory.py b/src/buildstream/storage/_filebaseddirectory.py
index 87749fda2..444d82fa5 100644
--- a/src/buildstream/storage/_filebaseddirectory.py
+++ b/src/buildstream/storage/_filebaseddirectory.py
@@ -27,6 +27,7 @@
from ..utils import BST_ARBITRARY_TIMESTAMP
from ..utils import FileListResult
+
# FileBasedDirectory intentionally doesn't call its superclass constructor,
# which is meant to be unimplemented.
# pylint: disable=super-init-not-called
@@ -205,7 +206,6 @@ def remove(self, path: str, *, recursive: bool = False) -> None:
raise DirectoryError("Error removing '{}': {}".format(newpath, e))
def rename(self, src: str, dest: str) -> None:
-
self._validate_path(src)
self._validate_path(dest)
src_paths = src.split("/")
@@ -234,9 +234,8 @@ def _import_files(
filter_callback: Optional[Callable[[str], bool]] = None,
update_mtime: Optional[float] = None,
properties: Optional[List[str]] = None,
- collect_result: bool = True
+ collect_result: bool = True,
) -> FileListResult:
-
# See if we can get a source directory to copy from
source_directory: Optional[str] = None
if isinstance(external_pathspec, str):
@@ -366,7 +365,6 @@ def __open_directory(
# Convert an os.stat_result into a FileStat
#
def __convert_filestat(self, st: os.stat_result) -> FileStat:
-
file_type: int = 0
if stat.S_ISREG(st.st_mode):
@@ -404,12 +402,10 @@ def __import_files_from_directory(
*,
path_prefix: str = "",
update_mtime: Optional[float] = None,
- result: FileListResult
+ result: FileListResult,
) -> None:
-
# Iterate over entries in the source directory
for name in source_directory:
-
# The destination filename, relative to the root where the import started
relative_pathname = os.path.join(path_prefix, name)
diff --git a/src/buildstream/storage/directory.py b/src/buildstream/storage/directory.py
index 5080d9153..bd636dc65 100644
--- a/src/buildstream/storage/directory.py
+++ b/src/buildstream/storage/directory.py
@@ -41,7 +41,6 @@
of filesystem root directories.
"""
-
from contextlib import contextmanager
from tarfile import TarFile
from typing import Callable, Optional, Union, List, IO, Iterator
@@ -96,7 +95,6 @@ class FileStat:
def __init__(
self, file_type: int, *, executable: bool = False, size: int = 0, mtime: float = BST_ARBITRARY_TIMESTAMP
) -> None:
-
self.file_type: int = file_type
"""The :class:`.FileType` of this file"""
@@ -167,7 +165,7 @@ def import_files(
external_pathspec: Union["Directory", str],
*,
filter_callback: Optional[Callable[[str], bool]] = None,
- collect_result: bool = True
+ collect_result: bool = True,
) -> Optional[FileListResult]:
"""Imports some or all files from external_path into this directory.
@@ -406,7 +404,7 @@ def _import_files_internal(
filter_callback: Optional[Callable[[str], bool]] = None,
update_mtime: Optional[float] = None,
properties: Optional[List[str]] = None,
- collect_result: bool = True
+ collect_result: bool = True,
) -> Optional[FileListResult]:
return self._import_files(
external_pathspec,
@@ -445,7 +443,7 @@ def _import_files(
filter_callback: Optional[Callable[[str], bool]] = None,
update_mtime: Optional[float] = None,
properties: Optional[List[str]] = None,
- collect_result: bool = True
+ collect_result: bool = True,
) -> Optional[FileListResult]:
raise NotImplementedError()
diff --git a/src/buildstream/types.py b/src/buildstream/types.py
index 2ee92afa7..7a5e70437 100644
--- a/src/buildstream/types.py
+++ b/src/buildstream/types.py
@@ -171,7 +171,6 @@ class OverlapAction(FastEnum):
# Element._dependencies().
#
class _Scope(FastEnum):
-
# All elements which the given element depends on, following
# all elements required for building. Including the element itself.
#
@@ -197,7 +196,6 @@ class _Scope(FastEnum):
# Strength of cache key
#
class _KeyStrength(FastEnum):
-
# Includes strong cache keys of all build dependencies and their
# runtime dependencies.
STRONG = 1
@@ -230,7 +228,6 @@ def __init__(self, full: str, brief: str, strict: bool):
# Actions the scheduler can take on error
#
class _SchedulerErrorAction(FastEnum):
-
# Continue building the rest of the tree
CONTINUE = "continue"
@@ -246,7 +243,6 @@ class _SchedulerErrorAction(FastEnum):
# When to cache build trees
#
class _CacheBuildTrees(FastEnum):
-
# Always store build trees
ALWAYS = "always"
@@ -263,7 +259,6 @@ class _CacheBuildTrees(FastEnum):
# A policy for which URIs to access when fetching and tracking
#
class _SourceUriPolicy(FastEnum):
-
# Use all URIs from default aliases and mirrors
ALL = "all"
@@ -289,7 +284,6 @@ class _SourceUriPolicy(FastEnum):
# These values correspond to the CLI `--deps` arguments for convenience.
#
class _PipelineSelection(FastEnum):
-
# Select only the target elements in the associated targets
NONE = "none"
@@ -335,7 +329,6 @@ def __init__(self, project, provenance_node, duplicates, internal):
#
class _HostMount:
def __init__(self, path: str, host_path: Optional[str] = None, optional: bool = False) -> None:
-
# Support environment variable expansion in host mounts
path = os.path.expandvars(path)
if host_path is None:
diff --git a/src/buildstream/utils.py b/src/buildstream/utils.py
index 0b9c20257..a1ae391b7 100644
--- a/src/buildstream/utils.py
+++ b/src/buildstream/utils.py
@@ -104,7 +104,6 @@ class FileListResult:
"""
def __init__(self):
-
self.overwritten = []
"""List of files which were overwritten in the target directory"""
@@ -188,8 +187,7 @@ def list_relative_paths(directory: str) -> Iterator[str]:
Yields:
Relative filenames in `directory`
"""
- for (dirpath, dirnames, filenames) in os.walk(directory):
-
+ for dirpath, dirnames, filenames in os.walk(directory):
# os.walk does not descend into symlink directories, which
# makes sense because otherwise we might have redundant
# directories, or end up descending into directories outside
@@ -440,7 +438,7 @@ def copy_files(
*,
filter_callback: Optional[Callable[[str], bool]] = None,
ignore_missing: bool = False,
- report_written: bool = False
+ report_written: bool = False,
) -> FileListResult:
"""Copy files from source to destination.
@@ -490,7 +488,7 @@ def link_files(
*,
filter_callback: Optional[Callable[[str], bool]] = None,
ignore_missing: bool = False,
- report_written: bool = False
+ report_written: bool = False,
) -> FileListResult:
"""Hardlink files from source to destination.
@@ -571,8 +569,7 @@ def get_bst_version() -> Tuple[int, int]:
if versions[0] == "0+untagged":
raise UtilError(
- "Your git repository has no tags - BuildStream can't "
- "determine its version. Please run `git fetch --tags`."
+ "Your git repository has no tags - BuildStream can't determine its version. Please run `git fetch --tags`."
)
try:
@@ -630,7 +627,7 @@ def save_file_atomic(
newline: Optional[str] = None,
closefd: bool = True,
opener: Optional[Callable[[str, int], int]] = None,
- tempdir: Optional[str] = None
+ tempdir: Optional[str] = None,
) -> Iterator[IO]:
"""Save a file with a temporary name and rename it into place when ready.
@@ -763,7 +760,6 @@ def guess_version(string: str, *, pattern: Optional[Pattern[str]] = None) -> Opt
# Iterate over non-overlapping matches, and prefer a match which is more qualified (i.e. 1.2.3 is better than 1.2)
for version_match in pattern.finditer(string):
-
if not version_match:
iter_guess = None
iter_n_groups = 0
@@ -1043,7 +1039,7 @@ def _copy_directories(srcdir, destdir, target):
os.makedirs(new_dir)
yield (new_dir, mode)
else:
- raise UtilError("Source directory tree has file where " "directory expected: {}".format(old_dir))
+ raise UtilError("Source directory tree has file where directory expected: {}".format(old_dir))
else:
if not os.access(new_dir, os.W_OK):
# If the destination directory is not writable, change permissions to make it
@@ -1102,7 +1098,6 @@ def _ensure_real_directory(root, path):
def _process_list(
srcdir, destdir, actionfunc, result, filter_callback=None, ignore_missing=False, report_written=False
):
-
# Keep track of directory permissions, since these need to be set
# *after* files have been written.
permissions = []
@@ -1228,9 +1223,9 @@ def _tempdir(*, suffix="", prefix="tmp", dir): # pylint: disable=redefined-buil
# Do not allow fallback to a global temp directory. Due to the chmod
# below, this method is not safe to be used in global temp
# directories such as /tmp.
- assert (
- dir
- ), "Creating directories in the public fallback `/tmp` is dangerous. Please use a directory with tight access controls."
+ assert dir, (
+ "Creating directories in the public fallback `/tmp` is dangerous. Please use a directory with tight access controls."
+ )
tempdir = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=dir)
@@ -1275,9 +1270,10 @@ def close_tempfile():
if temp is not None:
temp.close()
- with _signals.terminator(close_tempfile), tempfile.NamedTemporaryFile(
- mode=mode, encoding=encoding, suffix=suffix, prefix=prefix, dir=dir
- ) as temp:
+ with (
+ _signals.terminator(close_tempfile),
+ tempfile.NamedTemporaryFile(mode=mode, encoding=encoding, suffix=suffix, prefix=prefix, dir=dir) as temp,
+ ):
yield temp
@@ -1325,7 +1321,6 @@ def kill_proc(p):
# (str): The program output.
#
def _call(*popenargs, terminate=False, **kwargs):
-
kwargs["start_new_session"] = True
process = None
@@ -1335,7 +1330,6 @@ def _call(*popenargs, terminate=False, **kwargs):
# Handle termination, suspend and resume
def kill_proc():
if process:
-
# Some callers know that their subprocess can be
# gracefully terminated, make an attempt first
if terminate:
@@ -1375,9 +1369,11 @@ def resume_proc():
group_id = os.getpgid(process.pid)
os.killpg(group_id, signal.SIGCONT)
- with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc), subprocess.Popen(
- *popenargs, universal_newlines=True, **kwargs
- ) as process:
+ with (
+ _signals.suspendable(suspend_proc, resume_proc),
+ _signals.terminator(kill_proc),
+ subprocess.Popen(*popenargs, universal_newlines=True, **kwargs) as process,
+ ):
# Here, we don't use `process.communicate()` directly without a timeout
# This is because, if we were to do that, and the process would never
# output anything, the control would never be given back to the python
@@ -1589,7 +1585,6 @@ def _get_compression(tar):
# UtilError: In the case of a malformed version string
#
def _parse_version(version: str) -> Tuple[int, int]:
-
try:
versions = version.split(".")
major = int(versions[0])
@@ -1612,7 +1607,6 @@ def _parse_version(version: str) -> Tuple[int, int]:
# A 2-tuple of form (major version, minor version)
#
def _get_bst_api_version() -> Tuple[int, int]:
-
bst_major, bst_minor = get_bst_version()
if bst_major < 2:
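
The utils.py hunks above collect the formatting patterns that recur through the rest of this patch: multiple context managers grouped in a single parenthesized with statement, trailing commas on wrapped keyword-only signatures, and adjacent string literals merged into one. A minimal standalone sketch of the with-statement grouping follows, using a hypothetical helper rather than BuildStream's _signals module; note that parenthesized context managers are only officially supported from Python 3.10, so the sketch assumes at least that interpreter.

    import tempfile
    from contextlib import contextmanager


    @contextmanager
    def terminator(cleanup):
        # Hypothetical stand-in for BuildStream's signal-handling context managers.
        try:
            yield
        finally:
            cleanup()


    # Before: the wrapped call arguments carry the line break.
    with terminator(lambda: None), tempfile.NamedTemporaryFile(
        mode="w", encoding="utf-8", suffix=".tmp"
    ) as temp:
        temp.write("old wrapping\n")

    # After: one parenthesized group, one context manager per line, trailing comma.
    with (
        terminator(lambda: None),
        tempfile.NamedTemporaryFile(mode="w", encoding="utf-8", suffix=".tmp") as temp,
    ):
        temp.write("new wrapping\n")
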
diff --git a/tests/artifactcache/capabilities.py b/tests/artifactcache/capabilities.py
index 39fb18ccd..5e73e7f6e 100644
--- a/tests/artifactcache/capabilities.py
+++ b/tests/artifactcache/capabilities.py
@@ -67,6 +67,6 @@ def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafi
# Initialize remotes
context.initialize_remotes(True, True, None, None)
- assert (
- not artifactcache.has_fetch_remotes()
- ), "System didn't realize the artifact cache didn't support BuildStream"
+ assert not artifactcache.has_fetch_remotes(), (
+ "System didn't realize the artifact cache didn't support BuildStream"
+ )
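
The hunk above also shows how the formatter now wraps long assert statements: the condition stays on the assert line and only the failure message is parenthesized, instead of parenthesizing the condition itself. A small illustrative sketch with a made-up predicate (not a BuildStream API):

    def has_fetch_remotes():
        # Hypothetical predicate standing in for the cache object queried in the real tests.
        return False


    # Before: the condition is wrapped in parentheses to fit the line length.
    assert (
        not has_fetch_remotes()
    ), "System didn't realize the cache didn't support BuildStream"

    # After: the condition stays inline and the message is parenthesized instead.
    assert not has_fetch_remotes(), (
        "System didn't realize the cache didn't support BuildStream"
    )
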
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index 025d11ffa..befe1e028 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -113,9 +113,10 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
project_config_file = str(project_dir.join("project.conf"))
_yaml.roundtrip_dump(project_config, file=project_config_file)
- with runcli.configured(str(tmpdir), user_config) as user_config_file, dummy_context(
- config=user_config_file
- ) as context:
+ with (
+ runcli.configured(str(tmpdir), user_config) as user_config_file,
+ dummy_context(config=user_config_file) as context,
+ ):
project = Project(str(project_dir), context)
project.ensure_fully_loaded()
diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index 83c9e0136..47b2488c7 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -43,10 +43,10 @@ def test_push_pull(cli, tmpdir, datafiles):
project = os.path.join(str(datafiles), "parent")
base_project = os.path.join(str(project), "base")
- with create_artifact_share(os.path.join(str(tmpdir), "artifactshare-parent")) as share, create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare-base")
- ) as base_share:
-
+ with (
+ create_artifact_share(os.path.join(str(tmpdir), "artifactshare-parent")) as share,
+ create_artifact_share(os.path.join(str(tmpdir), "artifactshare-base")) as base_share,
+ ):
# First build it without the artifact cache configured
result = cli.run(project=project, args=["build", "target.bst"])
assert result.exit_code == 0
diff --git a/tests/cachekey/cachekey.py b/tests/cachekey/cachekey.py
index 15d690017..25cc81237 100644
--- a/tests/cachekey/cachekey.py
+++ b/tests/cachekey/cachekey.py
@@ -107,7 +107,6 @@ def test_cache_key(datafiles, cli):
],
)
def test_cache_key_fatal_warnings(cli, tmpdir, first_warnings, second_warnings, identical_keys):
-
    # Builds the project, runs bst show, and gathers cache keys
def run_get_cache_key(project_name, warnings):
config = {"name": "test", "min-version": "2.0", "element-path": "elements", "fatal-warnings": warnings}
diff --git a/tests/elements/filter/basic/element_plugins/dynamic.py b/tests/elements/filter/basic/element_plugins/dynamic.py
index 401c6b128..e8ea076d2 100644
--- a/tests/elements/filter/basic/element_plugins/dynamic.py
+++ b/tests/elements/filter/basic/element_plugins/dynamic.py
@@ -3,7 +3,6 @@
# Copies files from the dependent element but inserts split-rules using dynamic data
class DynamicElement(Element):
-
BST_MIN_VERSION = "2.0"
def configure(self, node):
diff --git a/tests/format/include_composition.py b/tests/format/include_composition.py
index 9f5faf3f8..c10403662 100644
--- a/tests/format/include_composition.py
+++ b/tests/format/include_composition.py
@@ -33,7 +33,6 @@ def make_includes(basedir):
def test_main_has_priority(tmpdir):
with make_includes(str(tmpdir)) as includes:
-
_yaml.roundtrip_dump({"(@)": ["a.yml"], "test": ["main"]}, str(tmpdir.join("main.yml")))
main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
@@ -47,7 +46,6 @@ def test_main_has_priority(tmpdir):
def test_include_cannot_append(tmpdir):
with make_includes(str(tmpdir)) as includes:
-
_yaml.roundtrip_dump({"(@)": ["a.yml"], "test": ["main"]}, str(tmpdir.join("main.yml")))
main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
@@ -60,7 +58,6 @@ def test_include_cannot_append(tmpdir):
def test_main_can_append(tmpdir):
with make_includes(str(tmpdir)) as includes:
-
_yaml.roundtrip_dump({"(@)": ["a.yml"], "test": {"(>)": ["main"]}}, str(tmpdir.join("main.yml")))
main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
@@ -73,7 +70,6 @@ def test_main_can_append(tmpdir):
def test_sibling_cannot_append_backward(tmpdir):
with make_includes(str(tmpdir)) as includes:
-
_yaml.roundtrip_dump({"(@)": ["a.yml", "b.yml"]}, str(tmpdir.join("main.yml")))
main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
@@ -87,7 +83,6 @@ def test_sibling_cannot_append_backward(tmpdir):
def test_sibling_can_append_forward(tmpdir):
with make_includes(str(tmpdir)) as includes:
-
_yaml.roundtrip_dump({"(@)": ["a.yml", "b.yml"]}, str(tmpdir.join("main.yml")))
main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
@@ -101,7 +96,6 @@ def test_sibling_can_append_forward(tmpdir):
def test_lastest_sibling_has_priority(tmpdir):
with make_includes(str(tmpdir)) as includes:
-
_yaml.roundtrip_dump({"(@)": ["a.yml", "b.yml"]}, str(tmpdir.join("main.yml")))
main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
@@ -115,7 +109,6 @@ def test_lastest_sibling_has_priority(tmpdir):
def test_main_keeps_keys(tmpdir):
with make_includes(str(tmpdir)) as includes:
-
_yaml.roundtrip_dump({"(@)": ["a.yml"], "something": "else"}, str(tmpdir.join("main.yml")))
main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)
@@ -129,7 +122,6 @@ def test_main_keeps_keys(tmpdir):
def test_overwrite_directive_on_later_composite(tmpdir):
with make_includes(str(tmpdir)) as includes:
-
_yaml.roundtrip_dump(
{"(@)": ["a.yml", "b.yml"], "test": {"(=)": ["Overwritten"]}}, str(tmpdir.join("main.yml"))
)
diff --git a/tests/format/link.py b/tests/format/link.py
index fee6de37b..b07ed600c 100644
--- a/tests/format/link.py
+++ b/tests/format/link.py
@@ -29,6 +29,7 @@
"link",
)
+
#
# Test links to elements, this tests both specifying the link as
# the main target, and also as a dependency of the main target.
diff --git a/tests/format/optionarch.py b/tests/format/optionarch.py
index b3ab8f0e9..c5f1613c9 100644
--- a/tests/format/optionarch.py
+++ b/tests/format/optionarch.py
@@ -63,7 +63,6 @@ def test_conditional(cli, datafiles, machine, value, expected):
@pytest.mark.datafiles(DATA_DIR)
def test_unsupported_arch(cli, datafiles):
-
with override_platform_uname(machine="x86_64"):
project = os.path.join(datafiles, "option-arch")
result = cli.run(
@@ -75,7 +74,6 @@ def test_unsupported_arch(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_alias(cli, datafiles):
-
with override_platform_uname(machine="arm"):
project = os.path.join(datafiles, "option-arch-alias")
result = cli.run(
@@ -87,7 +85,6 @@ def test_alias(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_unknown_host_arch(cli, datafiles):
-
with override_platform_uname(machine="x86_128"):
project = os.path.join(datafiles, "option-arch")
result = cli.run(
@@ -99,7 +96,6 @@ def test_unknown_host_arch(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_unknown_project_arch(cli, datafiles):
-
project = os.path.join(datafiles, "option-arch-unknown")
result = cli.run(
project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"]
diff --git a/tests/format/optionos.py b/tests/format/optionos.py
index 79316b2c3..dbea6e4e6 100644
--- a/tests/format/optionos.py
+++ b/tests/format/optionos.py
@@ -62,7 +62,6 @@ def test_conditionals(cli, datafiles, system, value, expected):
@pytest.mark.datafiles(DATA_DIR)
def test_unsupported_arch(cli, datafiles):
-
with override_platform_uname(system="ULTRIX"):
project = os.path.join(datafiles, "option-os")
result = cli.run(
diff --git a/tests/format/project/plugin-no-load-ref/plugins/noloadref.py b/tests/format/project/plugin-no-load-ref/plugins/noloadref.py
index 2b8fd0b51..e2c6e8c94 100644
--- a/tests/format/project/plugin-no-load-ref/plugins/noloadref.py
+++ b/tests/format/project/plugin-no-load-ref/plugins/noloadref.py
@@ -6,7 +6,6 @@
# Use this to test that the core behaves as expected with such plugins.
#
class NoLoadRefSource(Source):
-
BST_MIN_VERSION = "2.0"
def configure(self, node):
diff --git a/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py b/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
index a03cb64ee..0f8fcaa8e 100644
--- a/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
+++ b/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
@@ -2,14 +2,12 @@
class PreflightErrorSource(Source):
-
BST_MIN_VERSION = "2.0"
def configure(self, node):
pass
def preflight(self):
-
# Raise a preflight error unconditionally
raise SourceError("Unsatisfied requirements in preflight, raising this error", reason="the-preflight-error")
diff --git a/tests/format/variables/notparallel/plugins/custom.py b/tests/format/variables/notparallel/plugins/custom.py
index 887645a00..adb872643 100644
--- a/tests/format/variables/notparallel/plugins/custom.py
+++ b/tests/format/variables/notparallel/plugins/custom.py
@@ -3,7 +3,6 @@
# A custom build element
class CustomElement(BuildElement):
-
BST_MIN_VERSION = "2.0"
diff --git a/tests/frontend/artifact_checkout.py b/tests/frontend/artifact_checkout.py
index b238019b5..eb3344d85 100644
--- a/tests/frontend/artifact_checkout.py
+++ b/tests/frontend/artifact_checkout.py
@@ -28,6 +28,7 @@
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
+
#
# Test modes of `bst artifact checkout --pull` when given an artifact name
#
diff --git a/tests/frontend/artifact_list_contents.py b/tests/frontend/artifact_list_contents.py
index d01d63854..b31fdedb1 100644
--- a/tests/frontend/artifact_list_contents.py
+++ b/tests/frontend/artifact_list_contents.py
@@ -65,9 +65,9 @@ def test_artifact_list_exact_contents(cli, datafiles, target, with_project):
if not with_project:
os.remove(os.path.join(project, "project.conf"))
- expected_output_bin = ("{target}:\n" "\tusr\n" "\tusr/bin\n" "\tusr/bin/hello\n\n").format(target=arg_bin)
+ expected_output_bin = ("{target}:\n\tusr\n\tusr/bin\n\tusr/bin/hello\n\n").format(target=arg_bin)
expected_output_links = (
- "{target}:\n" "\tbasicfile\n" "\tbasicfolder\n" "\tbasicfolder/basicsymlink\n" "\tbasicfolder/subdir-file\n\n"
+ "{target}:\n\tbasicfile\n\tbasicfolder\n\tbasicfolder/basicsymlink\n\tbasicfolder/subdir-file\n\n"
).format(target=arg_links)
for arg, expected_output in [(arg_bin, expected_output_bin), (arg_links, expected_output_links)]:
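
A related pattern appears wherever long messages were previously split with implicit string concatenation: once the pieces fit within the configured line length, the adjacent literals are merged into a single string, as in the hunk above. A minimal sketch (the variable names are illustrative only):

    target = "target.bst"

    # Before: adjacent literals concatenated implicitly on one logical line.
    expected = ("{target}:\n" "\tusr\n" "\tusr/bin\n").format(target=target)

    # After: the literals are merged because the result fits the line limit.
    expected = "{target}:\n\tusr\n\tusr/bin\n".format(target=target)

    assert expected == "target.bst:\n\tusr\n\tusr/bin\n"
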
diff --git a/tests/frontend/artifact_pull.py b/tests/frontend/artifact_pull.py
index b33dd538f..4adcead15 100644
--- a/tests/frontend/artifact_pull.py
+++ b/tests/frontend/artifact_pull.py
@@ -28,6 +28,7 @@
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
+
#
# Test modes of `bst artifact pull` when given an artifact
#
@@ -51,7 +52,6 @@ def test_pull(cli, tmpdir, datafiles, deps, expect_cached, with_project):
project = str(datafiles)
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
# Build the element to push it to cache, and explicitly configure local cache so we can check it
local_cache = os.path.join(str(tmpdir), "cache")
cli.configure({"cachedir": local_cache, "artifacts": {"servers": [{"url": share.repo, "push": True}]}})
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index f5e41db63..106f118d0 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -146,7 +146,6 @@ def test_non_strict_pull_build_strict_checkout(datafiles, cli, tmpdir):
element_name = "target.bst"
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
cli.configure({"artifacts": {"servers": [{"url": share.repo}]}})
# First build it in non-strict mode with an artifact server configured.
@@ -181,7 +180,6 @@ def test_non_strict_checkout_uncached(datafiles, cli, tmpdir):
element_name = "target.bst"
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
cli.configure({"artifacts": {"servers": [{"url": share.repo}]}})
# Attempt to checkout an uncached artifact with remote artifact server
@@ -1076,7 +1074,6 @@ def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
_yaml.roundtrip_dump(input_config, input_file)
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
result = cli.run(project=project, args=["source", "track", input_name])
@@ -1111,7 +1108,6 @@ def test_partial_checkout_fail(tmpdir, datafiles, cli):
checkout_dir = os.path.join(str(tmpdir), "checkout")
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
res = cli.run(project=project, args=["artifact", "checkout", build_elt, "--directory", checkout_dir])
diff --git a/tests/frontend/configuredwarning/plugins/corewarn.py b/tests/frontend/configuredwarning/plugins/corewarn.py
index bcd40753c..d5dfbe3f4 100644
--- a/tests/frontend/configuredwarning/plugins/corewarn.py
+++ b/tests/frontend/configuredwarning/plugins/corewarn.py
@@ -3,7 +3,6 @@
class CoreWarn(Element):
-
BST_MIN_VERSION = "2.0"
def configure(self, node):
diff --git a/tests/frontend/configuredwarning/plugins/warninga.py b/tests/frontend/configuredwarning/plugins/warninga.py
index 4ad0f3d20..a3e2aa0f2 100644
--- a/tests/frontend/configuredwarning/plugins/warninga.py
+++ b/tests/frontend/configuredwarning/plugins/warninga.py
@@ -4,7 +4,6 @@
class WarningA(Element):
-
BST_MIN_VERSION = "2.0"
def configure(self, node):
diff --git a/tests/frontend/configuredwarning/plugins/warningb.py b/tests/frontend/configuredwarning/plugins/warningb.py
index c7a995cf8..962714870 100644
--- a/tests/frontend/configuredwarning/plugins/warningb.py
+++ b/tests/frontend/configuredwarning/plugins/warningb.py
@@ -4,7 +4,6 @@
class WarningB(Element):
-
BST_MIN_VERSION = "2.0"
def configure(self, node):
diff --git a/tests/frontend/consistencyerror/plugins/consistencybug.py b/tests/frontend/consistencyerror/plugins/consistencybug.py
index ea1ab2b00..a98cd8814 100644
--- a/tests/frontend/consistencyerror/plugins/consistencybug.py
+++ b/tests/frontend/consistencyerror/plugins/consistencybug.py
@@ -6,7 +6,6 @@ class CustomError(Exception):
class ConsistencyBugSource(Source):
-
BST_MIN_VERSION = "2.0"
def configure(self, node):
@@ -22,7 +21,6 @@ def is_resolved(self):
return True
def is_cached(self):
-
# Raise an unhandled exception (not a BstError)
raise CustomError("Something went terribly wrong")
diff --git a/tests/frontend/consistencyerror/plugins/consistencyerror.py b/tests/frontend/consistencyerror/plugins/consistencyerror.py
index 34af45782..1b510f8b1 100644
--- a/tests/frontend/consistencyerror/plugins/consistencyerror.py
+++ b/tests/frontend/consistencyerror/plugins/consistencyerror.py
@@ -2,7 +2,6 @@
class ConsistencyErrorSource(Source):
-
BST_MIN_VERSION = "2.0"
def configure(self, node):
@@ -18,7 +17,6 @@ def is_resolved(self):
return True
def is_cached(self):
-
# Raise an error unconditionally
raise SourceError("Something went terribly wrong", reason="the-consistency-error")
diff --git a/tests/frontend/logging.py b/tests/frontend/logging.py
index 0ab0fc5e0..a910b489e 100644
--- a/tests/frontend/logging.py
+++ b/tests/frontend/logging.py
@@ -143,7 +143,6 @@ def test_failed_build_listing(cli, datafiles):
#
@pytest.mark.datafiles(os.path.join(DATA_DIR, "logging"))
def test_log_line_element_names(cli, datafiles):
-
project = str(datafiles)
# First discover the cache keys, this will give us a dictionary
diff --git a/tests/frontend/overlaps/plugins/overlap.py b/tests/frontend/overlaps/plugins/overlap.py
index 51b6e45fb..5a395844b 100644
--- a/tests/frontend/overlaps/plugins/overlap.py
+++ b/tests/frontend/overlaps/plugins/overlap.py
@@ -4,7 +4,6 @@
# A testing element to test the behavior of staging overlapping files
#
class OverlapElement(Element):
-
BST_MIN_VERSION = "2.0"
def configure(self, node):
diff --git a/tests/frontend/project/plugins/randomelement.py b/tests/frontend/project/plugins/randomelement.py
index f3a1d9e5e..406d1820b 100644
--- a/tests/frontend/project/plugins/randomelement.py
+++ b/tests/frontend/project/plugins/randomelement.py
@@ -4,7 +4,6 @@
class RandomElement(Element):
-
BST_MIN_VERSION = "2.0"
def configure(self, node):
diff --git a/tests/frontend/project/sourcemirrors/mirror.py b/tests/frontend/project/sourcemirrors/mirror.py
index fc737031a..8ed8e9508 100644
--- a/tests/frontend/project/sourcemirrors/mirror.py
+++ b/tests/frontend/project/sourcemirrors/mirror.py
@@ -32,5 +32,4 @@ def translate_url(
# Plugin entry point
def setup():
-
return Sample
diff --git a/tests/frontend/project/sources/fetch_source.py b/tests/frontend/project/sources/fetch_source.py
index 6078e5e5f..908f927cc 100644
--- a/tests/frontend/project/sources/fetch_source.py
+++ b/tests/frontend/project/sources/fetch_source.py
@@ -32,7 +32,6 @@ def fetch(self, alias_override=None):
class FetchSource(Source):
-
BST_MIN_VERSION = "2.0"
# Read config to know which URLs to fetch
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 87cc2de67..89d4a0b6c 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -61,7 +61,6 @@ def test_push_pull_deps(cli, tmpdir, datafiles, deps, expected_states):
all_elements = [target, build_dep, runtime_dep]
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
# First build the target element and push to the remote.
cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
result = cli.run(project=project, args=["build", target])
@@ -102,10 +101,10 @@ def test_push_pull_deps(cli, tmpdir, datafiles, deps, expected_states):
def test_pull_secondary_cache(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare2")
- ) as share2:
-
+ with (
+ create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1,
+ create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2,
+ ):
# Build the target and push it to share2 only.
cli.configure(
{
@@ -150,10 +149,10 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
def test_push_pull_specific_remote(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(os.path.join(str(tmpdir), "goodartifactshare")) as good_share, create_artifact_share(
- os.path.join(str(tmpdir), "badartifactshare")
- ) as bad_share:
-
+ with (
+ create_artifact_share(os.path.join(str(tmpdir), "goodartifactshare")) as good_share,
+ create_artifact_share(os.path.join(str(tmpdir), "badartifactshare")) as bad_share,
+ ):
# Build the target so we have it cached locally only.
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
@@ -383,7 +382,6 @@ def test_pull_missing_local_blob(cli, tmpdir, datafiles):
_yaml.roundtrip_dump(depends_config, depends_file)
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
# First build the import-bin element and push to the remote.
cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
@@ -415,7 +413,6 @@ def test_pull_missing_notifies_user(caplog, cli, tmpdir, datafiles):
caplog.set_level(1)
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
cli.configure({"artifacts": {"servers": [{"url": share.repo}]}})
result = cli.run(project=project, args=["build", "target.bst"])
@@ -431,10 +428,10 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
project = str(datafiles)
caplog.set_level(1)
- with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser, create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare2")
- ) as sharecli:
-
+ with (
+ create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser,
+ create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as sharecli,
+ ):
# Configure shareuser remote in user conf
cli.configure({"artifacts": {"servers": [{"url": shareuser.repo, "push": True}]}})
@@ -490,7 +487,6 @@ def test_pull_access_rights(cli, tmpdir, datafiles):
f.write(buf)
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
result = cli.run(project=project, args=["build", "compose-all.bst"])
result.assert_success()
@@ -551,7 +547,6 @@ def test_pull_artifact(cli, tmpdir, datafiles):
cli.configure({"cachedir": local_cache})
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
# First build the target element and push to the remote.
cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
@@ -589,7 +584,6 @@ def test_dynamic_build_plan(cli, tmpdir, datafiles):
all_elements = [target, build_dep, runtime_dep]
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
# First build the target element and push to the remote.
cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
result = cli.run(project=project, args=["build", target])
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 1a24771d4..bfb3211d0 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -61,9 +61,7 @@ def test_push(cli, tmpdir, datafiles):
# Set up two artifact shares.
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1:
-
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:
-
# Try pushing with no remotes configured. This should fail.
result = cli.run(project=project, args=["artifact", "push", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
@@ -120,7 +118,6 @@ def test_push_artifact(cli, tmpdir, datafiles):
cli.configure({"cachedir": local_cache})
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
# First build it without the artifact cache configured
result = cli.run(project=project, args=["build", element])
result.assert_success()
@@ -174,7 +171,6 @@ def test_push_artifact_glob(cli, tmpdir, datafiles):
cli.configure({"cachedir": local_cache})
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
# First build it without the artifact cache configured
result = cli.run(project=project, args=["build", element])
result.assert_success()
@@ -243,7 +239,6 @@ def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
# Set up the share
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
# First build the target (and its deps)
result = cli.run(project=project, args=["build", "target.bst"])
assert cli.get_element_state(project, "target.bst") == "cached"
@@ -303,7 +298,6 @@ def test_push_deps(cli, tmpdir, datafiles, deps, expected_states):
runtime_dep = "import-bin.bst"
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
# First build it without the artifact cache configured
result = cli.run(project=project, args=["build", target])
result.assert_success()
@@ -354,7 +348,6 @@ def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
cli.configure({"cachedir": local_cache})
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
# First build it without the artifact cache configured
result = cli.run(project=project, args=["build", element])
result.assert_success()
@@ -400,10 +393,10 @@ def test_push_after_pull(cli, tmpdir, datafiles):
project = str(datafiles)
# Set up two artifact shares.
- with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare2")
- ) as share2:
-
+ with (
+ create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1,
+ create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2,
+ ):
# Set the scene: share1 has the artifact, share2 does not.
#
cli.configure(
@@ -465,7 +458,6 @@ def test_artifact_expires(cli, datafiles, tmpdir):
# Create an artifact share (remote artifact cache) in the tmpdir/artifactshare
# Set a 22 MB quota
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
-
# Configure bst to push to the cache
cli.configure(
{
@@ -523,7 +515,6 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
# Create an artifact share (remote cache) in tmpdir/artifactshare
# Mock a file system with 5 MB total space
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(5e6)) as share:
-
# Configure bst to push to the remote cache
cli.configure(
{
@@ -580,7 +571,6 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
# Create an artifact share (remote cache) in tmpdir/artifactshare
# Set a 22 MB quota
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
-
# Configure bst to push to the cache
cli.configure(
{
@@ -669,7 +659,6 @@ def test_push_already_cached(caplog, cli, tmpdir, datafiles):
caplog.set_level(1)
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
result = cli.run(project=project, args=["build", "target.bst"])
@@ -691,10 +680,11 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles, use_remote, ignore_
project = str(datafiles)
caplog.set_level(1)
- with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser, create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare2")
- ) as shareproject, create_artifact_share(os.path.join(str(tmpdir), "artifactshare3")) as sharecli:
-
+ with (
+ create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser,
+ create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as shareproject,
+ create_artifact_share(os.path.join(str(tmpdir), "artifactshare3")) as sharecli,
+ ):
# Add shareproject repo url to project.conf
with open(os.path.join(project, "project.conf"), "a", encoding="utf-8") as projconf:
projconf.write("artifacts:\n- url: {}\n push: True".format(shareproject.repo))
diff --git a/tests/frontend/show_artifact_cas_digest.py b/tests/frontend/show_artifact_cas_digest.py
index dc89e4447..e44579c7c 100644
--- a/tests/frontend/show_artifact_cas_digest.py
+++ b/tests/frontend/show_artifact_cas_digest.py
@@ -54,8 +54,7 @@ def test_show_artifact_cas_digest_uncached(cli, tmpdir, datafiles, target):
    # Check the target has not been built locally and does not exist in the remote cache
assert (
# May be "buildable" or "waiting" but shouldn't be "cached"
- cli.get_element_state(project, target)
- != "cached"
+ cli.get_element_state(project, target) != "cached"
)
# Check the target has no artifact digest
@@ -161,7 +160,6 @@ def test_show_artifact_cas_digest_remote(cli, tmpdir, datafiles):
cli.configure({"cachedir": local_cache})
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-
cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
        # Test that a target cached neither locally nor remotely has no digest
diff --git a/tests/frontend/track.py b/tests/frontend/track.py
index 0ed8a7ea9..bb2dda6f7 100644
--- a/tests/frontend/track.py
+++ b/tests/frontend/track.py
@@ -248,7 +248,6 @@ def get_subproject_element_state():
result = cli.run(project=project, args=args)
if ref_storage == "inline":
-
if cross_junction == "cross":
#
# Cross junction tracking is not allowed when the toplevel project
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 8372a17cd..523cced49 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -96,7 +96,6 @@ def create_workspace_element(self, kind, suffix="", workspace_dir=None, element_
return element_name, element_path, workspace_dir
def create_workspace_elements(self, kinds, suffixs=None, workspace_dir_usr=None, element_attrs=None):
-
element_tuples = []
if suffixs is None:
@@ -119,7 +118,6 @@ def create_workspace_elements(self, kinds, suffixs=None, workspace_dir_usr=None,
return element_tuples
def open_workspaces(self, kinds, suffixs=None, workspace_dir=None, element_attrs=None, no_checkout=False):
-
element_tuples = self.create_workspace_elements(kinds, suffixs, workspace_dir, element_attrs)
os.makedirs(self.workspace_cmd, exist_ok=True)
diff --git a/tests/integration/artifact.py b/tests/integration/artifact.py
index 09c713610..2c47698b1 100644
--- a/tests/integration/artifact.py
+++ b/tests/integration/artifact.py
@@ -42,6 +42,7 @@
# behaviour, which by default is to include the buildtree
# content of an element on caching.
+
# Does this really need a sandbox?
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@@ -51,9 +52,11 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
cwd = str(tmpdir)
# Create artifact shares for pull & push testing
- with create_artifact_share(os.path.join(str(tmpdir), "share1")) as share1, create_artifact_share(
- os.path.join(str(tmpdir), "share2")
- ) as share2, create_artifact_share(os.path.join(str(tmpdir), "share3")) as share3:
+ with (
+ create_artifact_share(os.path.join(str(tmpdir), "share1")) as share1,
+ create_artifact_share(os.path.join(str(tmpdir), "share2")) as share2,
+ create_artifact_share(os.path.join(str(tmpdir), "share3")) as share3,
+ ):
cli.configure({"artifacts": {"servers": [{"url": share1.repo, "push": True}]}, "cachedir": str(tmpdir)})
# Build autotools element with the default behavior of caching buildtrees
diff --git a/tests/integration/cachedfail.py b/tests/integration/cachedfail.py
index ea7708adf..132c7fe1e 100644
--- a/tests/integration/cachedfail.py
+++ b/tests/integration/cachedfail.py
@@ -197,7 +197,6 @@ def test_retry_failed(cli, tmpdir, datafiles, use_share, retry, strict):
cli.configure({"cachedir": cli.directory, "projects": {"test": {"strict": strict}}})
with ExitStack() as stack:
-
if use_share:
share = stack.enter_context(create_artifact_share(os.path.join(str(tmpdir), "artifactshare")))
cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
@@ -291,7 +290,6 @@ def generate_target():
}
with ExitStack() as stack:
-
if use_share:
share = stack.enter_context(create_artifact_share(os.path.join(str(tmpdir), "artifactshare")))
cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
@@ -472,7 +470,6 @@ def test_push_but_stop_building_after_failed(cli, tmpdir, datafiles):
cli.remove_artifact_from_cache(project, "base-also-fail.bst")
with create_artifact_share(os.path.join(str(tmpdir), "remote")) as share:
-
# Set only 1 builder, and explicitly configure `--on-error quit`
cli.configure(
{
diff --git a/tests/integration/pullbuildtrees.py b/tests/integration/pullbuildtrees.py
index a5fe18c7b..97c2e4ca7 100644
--- a/tests/integration/pullbuildtrees.py
+++ b/tests/integration/pullbuildtrees.py
@@ -59,9 +59,11 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
cwd = str(tmpdir)
# Create artifact shares for pull & push testing
- with create_artifact_share(os.path.join(str(tmpdir), "share1")) as share1, create_artifact_share(
- os.path.join(str(tmpdir), "share2")
- ) as share2, create_artifact_share(os.path.join(str(tmpdir), "share3")) as share3:
+ with (
+ create_artifact_share(os.path.join(str(tmpdir), "share1")) as share1,
+ create_artifact_share(os.path.join(str(tmpdir), "share2")) as share2,
+ create_artifact_share(os.path.join(str(tmpdir), "share3")) as share3,
+ ):
cli2.configure(
{
"artifacts": {"servers": [{"url": share1.repo, "push": True}]},
diff --git a/tests/integration/shell.py b/tests/integration/shell.py
index 4af301c6c..8f9202545 100644
--- a/tests/integration/shell.py
+++ b/tests/integration/shell.py
@@ -367,7 +367,6 @@ def test_integration_external_workspace(cli, tmpdir_factory, datafiles, build_sh
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_integration_partial_artifact(cli, datafiles, tmpdir, integration_cache):
-
project = str(datafiles)
element_name = "autotools/amhello.bst"
diff --git a/tests/integration/shellbuildtrees.py b/tests/integration/shellbuildtrees.py
index c4f1d35bc..bf362d2f0 100644
--- a/tests/integration/shellbuildtrees.py
+++ b/tests/integration/shellbuildtrees.py
@@ -219,7 +219,6 @@ def share_without_buildtrees(tmp_path_factory, integration_cache):
# pull_buildtree (bool): Whether to also pull buildtrees
#
def maybe_pull_deps(cli, project, element_name, pull_deps, pull_buildtree):
-
# Optionally pull the buildtree along with `bst artifact pull`
if pull_deps:
args = []
diff --git a/tests/internals/loader.py b/tests/internals/loader.py
index 1cf65045f..07fadd0e4 100644
--- a/tests/internals/loader.py
+++ b/tests/internals/loader.py
@@ -41,7 +41,6 @@ def make_loader(basedir):
##############################################################
@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_one_file(datafiles):
-
basedir = str(datafiles)
with make_loader(basedir) as loader:
element = loader.load(["elements/onefile.bst"])[0]
@@ -52,7 +51,6 @@ def test_one_file(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_missing_file(datafiles):
-
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
loader.load(["elements/missing.bst"])
@@ -62,7 +60,6 @@ def test_missing_file(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_invalid_reference(datafiles):
-
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
loader.load(["elements/badreference.bst"])
@@ -72,7 +69,6 @@ def test_invalid_reference(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_invalid_yaml(datafiles):
-
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
loader.load(["elements/badfile.bst"])
@@ -82,7 +78,6 @@ def test_invalid_yaml(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_fail_fullpath_target(datafiles):
-
basedir = str(datafiles)
fullpath = os.path.join(basedir, "elements", "onefile.bst")
@@ -94,7 +89,6 @@ def test_fail_fullpath_target(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_invalid_key(datafiles):
-
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
loader.load(["elements/invalidkey.bst"])
@@ -104,7 +98,6 @@ def test_invalid_key(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_invalid_directory_load(datafiles):
-
basedir = str(datafiles)
os.makedirs(os.path.join(basedir, "element.bst"))
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
diff --git a/tests/internals/storage.py b/tests/internals/storage.py
index f3b4031cb..f005ea839 100644
--- a/tests/internals/storage.py
+++ b/tests/internals/storage.py
@@ -73,9 +73,11 @@ def test_merge_same_casdirs(tmpdir, datafiles, directories):
clear_gitkeeps(directory)
utime_recursively(directory, (100, 100))
- with setup_backend(CasBasedDirectory, str(tmpdir)) as c, setup_backend(
- CasBasedDirectory, str(tmpdir)
- ) as a, setup_backend(CasBasedDirectory, str(tmpdir)) as b:
+ with (
+ setup_backend(CasBasedDirectory, str(tmpdir)) as c,
+ setup_backend(CasBasedDirectory, str(tmpdir)) as a,
+ setup_backend(CasBasedDirectory, str(tmpdir)) as b,
+ ):
a.import_files(before)
b.import_files(after)
c.import_files(buildtree)
@@ -150,9 +152,12 @@ def test_merge_casdir_properties(tmpdir, datafiles, modification):
def _test_merge_dirs(
before: str, after: str, buildtree: str, tmpdir: str, properties: Optional[List[str]] = None
) -> bool:
- with setup_backend(CasBasedDirectory, tmpdir) as c, setup_backend(
- CasBasedDirectory, tmpdir
- ) as copy, setup_backend(CasBasedDirectory, tmpdir) as a, setup_backend(CasBasedDirectory, tmpdir) as b:
+ with (
+ setup_backend(CasBasedDirectory, tmpdir) as c,
+ setup_backend(CasBasedDirectory, tmpdir) as copy,
+ setup_backend(CasBasedDirectory, tmpdir) as a,
+ setup_backend(CasBasedDirectory, tmpdir) as b,
+ ):
a._import_files_internal(before, properties=properties)
b._import_files_internal(after, properties=properties)
c._import_files_internal(buildtree, properties=properties)
diff --git a/tests/internals/storage_vdir_import.py b/tests/internals/storage_vdir_import.py
index e3bf84ab7..d687c34ec 100644
--- a/tests/internals/storage_vdir_import.py
+++ b/tests/internals/storage_vdir_import.py
@@ -61,7 +61,7 @@ def generate_import_roots(rootno, directory):
def generate_import_root(rootdir, filelist):
if os.path.exists(rootdir):
return
- for (path, typesymbol, content) in filelist:
+ for path, typesymbol, content in filelist:
if typesymbol == "F":
(dirnames, filename) = os.path.split(path)
os.makedirs(os.path.join(rootdir, dirnames), exist_ok=True)
@@ -77,7 +77,7 @@ def generate_import_root(rootdir, filelist):
os.makedirs(os.path.join(rootdir, dirnames), exist_ok=True)
os.symlink(content, os.path.join(rootdir, path))
# Set deterministic mtime for all directories
- for (dirpath, _, _) in os.walk(rootdir):
+ for dirpath, _, _ in os.walk(rootdir):
_set_file_mtime(dirpath, MTIME)
@@ -120,7 +120,7 @@ def generate_random_root(rootno, directory):
os.symlink(relative_link, target)
things.append(os.path.join(location, thingname))
# Set deterministic mtime for all directories
- for (dirpath, _, _) in os.walk(rootdir):
+ for dirpath, _, _ in os.walk(rootdir):
_set_file_mtime(dirpath, MTIME)
@@ -190,7 +190,6 @@ def directory_not_empty(path):
def _import_test(tmpdir, original, overlay, generator_function, verify_contents=False):
-
# Skip this test if we do not have support for subsecond precision mtimes
#
if not have_subsecond_mtime(str(tmpdir)):
diff --git a/tests/internals/utils_move_atomic.py b/tests/internals/utils_move_atomic.py
index c9a0cfef2..1e3d1ceb3 100644
--- a/tests/internals/utils_move_atomic.py
+++ b/tests/internals/utils_move_atomic.py
@@ -112,7 +112,6 @@ def test_move_to_existing_non_empty_dir(src, tmp_path):
def test_move_to_empty_dir_set_mtime(src, tmp_path):
-
# Skip this test if we do not have support for subsecond precision mtimes
#
if not have_subsecond_mtime(str(tmp_path)):
diff --git a/tests/internals/yaml.py b/tests/internals/yaml.py
index 4a187005f..838f3a5ba 100644
--- a/tests/internals/yaml.py
+++ b/tests/internals/yaml.py
@@ -29,7 +29,6 @@
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_load_yaml(datafiles):
-
filename = os.path.join(datafiles, "basics.yaml")
loaded = _yaml.load(filename, shortname=None)
@@ -48,7 +47,6 @@ def assert_provenance(filename, line, col, node):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_basic_provenance(datafiles):
-
filename = os.path.join(datafiles, "basics.yaml")
loaded = _yaml.load(filename, shortname=None)
@@ -59,7 +57,6 @@ def test_basic_provenance(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_member_provenance(datafiles):
-
filename = os.path.join(datafiles, "basics.yaml")
loaded = _yaml.load(filename, shortname=None)
@@ -69,7 +66,6 @@ def test_member_provenance(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_element_provenance(datafiles):
-
filename = os.path.join(datafiles, "basics.yaml")
loaded = _yaml.load(filename, shortname=None)
@@ -79,7 +75,6 @@ def test_element_provenance(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_mapping_validate_keys(datafiles):
-
valid = os.path.join(datafiles, "basics.yaml")
invalid = os.path.join(datafiles, "invalid.yaml")
@@ -97,7 +92,6 @@ def test_mapping_validate_keys(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_node_get(datafiles):
-
filename = os.path.join(datafiles, "basics.yaml")
base = _yaml.load(filename, shortname=None)
@@ -119,7 +113,6 @@ def test_node_get(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_node_set(datafiles):
-
filename = os.path.join(datafiles, "basics.yaml")
base = _yaml.load(filename, shortname=None)
@@ -131,7 +124,6 @@ def test_node_set(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_node_set_overwrite(datafiles):
-
filename = os.path.join(datafiles, "basics.yaml")
base = _yaml.load(filename, shortname=None)
@@ -149,7 +141,6 @@ def test_node_set_overwrite(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_node_set_list_element(datafiles):
-
filename = os.path.join(datafiles, "basics.yaml")
base = _yaml.load(filename, shortname=None)
@@ -166,7 +157,6 @@ def test_node_set_list_element(datafiles):
#
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_composite_preserve_originals(datafiles):
-
filename = os.path.join(datafiles, "basics.yaml")
overlayfile = os.path.join(datafiles, "composite.yaml")
diff --git a/tests/plugins/loading/plugins/elements/incompatiblemajor/incompatiblemajor.py b/tests/plugins/loading/plugins/elements/incompatiblemajor/incompatiblemajor.py
index a0ea59309..7a9ac4555 100644
--- a/tests/plugins/loading/plugins/elements/incompatiblemajor/incompatiblemajor.py
+++ b/tests/plugins/loading/plugins/elements/incompatiblemajor/incompatiblemajor.py
@@ -2,7 +2,6 @@
class IncompatibleMajor(Element):
-
BST_MIN_VERSION = "1.0"
diff --git a/tests/plugins/loading/plugins/elements/incompatibleminor/incompatibleminor.py b/tests/plugins/loading/plugins/elements/incompatibleminor/incompatibleminor.py
index d9967caaa..cd3f30195 100644
--- a/tests/plugins/loading/plugins/elements/incompatibleminor/incompatibleminor.py
+++ b/tests/plugins/loading/plugins/elements/incompatibleminor/incompatibleminor.py
@@ -2,7 +2,6 @@
class IncompatibleMinor(Element):
-
BST_MIN_VERSION = "2.1000"
diff --git a/tests/plugins/loading/plugins/elements/malformedminversion/badstring.py b/tests/plugins/loading/plugins/elements/malformedminversion/badstring.py
index 5f96c1897..c73b54b91 100644
--- a/tests/plugins/loading/plugins/elements/malformedminversion/badstring.py
+++ b/tests/plugins/loading/plugins/elements/malformedminversion/badstring.py
@@ -3,7 +3,6 @@
class MalformedMinVersion(Element):
-
BST_MIN_VERSION = "1.pony"
diff --git a/tests/plugins/loading/plugins/elements/malformedminversion/dict.py b/tests/plugins/loading/plugins/elements/malformedminversion/dict.py
index 1ee2e52f4..d17bc289a 100644
--- a/tests/plugins/loading/plugins/elements/malformedminversion/dict.py
+++ b/tests/plugins/loading/plugins/elements/malformedminversion/dict.py
@@ -3,7 +3,6 @@
class MalformedMinVersion(Element):
-
BST_MIN_VERSION = {"major": 2, "minor": 0}
diff --git a/tests/plugins/loading/plugins/elements/malformedminversion/list.py b/tests/plugins/loading/plugins/elements/malformedminversion/list.py
index c92224969..b8eb45c17 100644
--- a/tests/plugins/loading/plugins/elements/malformedminversion/list.py
+++ b/tests/plugins/loading/plugins/elements/malformedminversion/list.py
@@ -3,7 +3,6 @@
class MalformedMinVersion(Element):
-
BST_MIN_VERSION = [2, 0]
diff --git a/tests/plugins/loading/plugins/elements/malformedminversion/number.py b/tests/plugins/loading/plugins/elements/malformedminversion/number.py
index 154f16d33..6437fa35b 100644
--- a/tests/plugins/loading/plugins/elements/malformedminversion/number.py
+++ b/tests/plugins/loading/plugins/elements/malformedminversion/number.py
@@ -3,7 +3,6 @@
class MalformedMinVersion(Element):
-
BST_MIN_VERSION = 2.0
diff --git a/tests/plugins/loading/plugins/sources/deprecated/deprecated.py b/tests/plugins/loading/plugins/sources/deprecated/deprecated.py
index 6203eb2fa..9903d1a25 100644
--- a/tests/plugins/loading/plugins/sources/deprecated/deprecated.py
+++ b/tests/plugins/loading/plugins/sources/deprecated/deprecated.py
@@ -30,5 +30,4 @@ def is_cached(self):
# Plugin entry point
def setup():
-
return Deprecated
diff --git a/tests/plugins/loading/plugins/sources/found/found.py b/tests/plugins/loading/plugins/sources/found/found.py
index 4ab40f005..ae87090e3 100644
--- a/tests/plugins/loading/plugins/sources/found/found.py
+++ b/tests/plugins/loading/plugins/sources/found/found.py
@@ -28,5 +28,4 @@ def is_cached(self):
# Plugin entry point
def setup():
-
return Found
diff --git a/tests/plugins/loading/plugins/sources/incompatiblemajor/incompatiblemajor.py b/tests/plugins/loading/plugins/sources/incompatiblemajor/incompatiblemajor.py
index f2d088b77..a1edd928b 100644
--- a/tests/plugins/loading/plugins/sources/incompatiblemajor/incompatiblemajor.py
+++ b/tests/plugins/loading/plugins/sources/incompatiblemajor/incompatiblemajor.py
@@ -2,7 +2,6 @@
class IncompatibleMajor(Source):
-
BST_MIN_VERSION = "1.0"
diff --git a/tests/plugins/loading/plugins/sources/incompatibleminor/incompatibleminor.py b/tests/plugins/loading/plugins/sources/incompatibleminor/incompatibleminor.py
index 2282534ae..9cd98a1e1 100644
--- a/tests/plugins/loading/plugins/sources/incompatibleminor/incompatibleminor.py
+++ b/tests/plugins/loading/plugins/sources/incompatibleminor/incompatibleminor.py
@@ -2,7 +2,6 @@
class IncompatibleMinor(Source):
-
BST_MIN_VERSION = "2.1000"
diff --git a/tests/plugins/loading/plugins/sources/malformedminversion/badstring.py b/tests/plugins/loading/plugins/sources/malformedminversion/badstring.py
index 90474597c..1c8fc2b5e 100644
--- a/tests/plugins/loading/plugins/sources/malformedminversion/badstring.py
+++ b/tests/plugins/loading/plugins/sources/malformedminversion/badstring.py
@@ -3,7 +3,6 @@
class MalformedMinVersion(Source):
-
BST_MIN_VERSION = "1.pony"
diff --git a/tests/plugins/loading/plugins/sources/malformedminversion/dict.py b/tests/plugins/loading/plugins/sources/malformedminversion/dict.py
index 4df4d5297..122579063 100644
--- a/tests/plugins/loading/plugins/sources/malformedminversion/dict.py
+++ b/tests/plugins/loading/plugins/sources/malformedminversion/dict.py
@@ -3,7 +3,6 @@
class MalformedMinVersion(Source):
-
BST_MIN_VERSION = {"major": 2, "minor": 0}
diff --git a/tests/plugins/loading/plugins/sources/malformedminversion/list.py b/tests/plugins/loading/plugins/sources/malformedminversion/list.py
index a3bcf1928..489861a0b 100644
--- a/tests/plugins/loading/plugins/sources/malformedminversion/list.py
+++ b/tests/plugins/loading/plugins/sources/malformedminversion/list.py
@@ -3,7 +3,6 @@
class MalformedMinVersion(Source):
-
BST_MIN_VERSION = [2, 0]
diff --git a/tests/plugins/loading/plugins/sources/malformedminversion/number.py b/tests/plugins/loading/plugins/sources/malformedminversion/number.py
index 1d1517945..660cddcfa 100644
--- a/tests/plugins/loading/plugins/sources/malformedminversion/number.py
+++ b/tests/plugins/loading/plugins/sources/malformedminversion/number.py
@@ -3,7 +3,6 @@
class MalformedMinVersion(Source):
-
BST_MIN_VERSION = 2.0
diff --git a/tests/plugins/sample-plugins/src/sample_plugins/sourcemirrors/mirror.py b/tests/plugins/sample-plugins/src/sample_plugins/sourcemirrors/mirror.py
index fc737031a..8ed8e9508 100644
--- a/tests/plugins/sample-plugins/src/sample_plugins/sourcemirrors/mirror.py
+++ b/tests/plugins/sample-plugins/src/sample_plugins/sourcemirrors/mirror.py
@@ -32,5 +32,4 @@ def translate_url(
# Plugin entry point
def setup():
-
return Sample
diff --git a/tests/plugins/sample-plugins/src/sample_plugins/sources/git.py b/tests/plugins/sample-plugins/src/sample_plugins/sources/git.py
index 4c7bace85..fe233a6aa 100644
--- a/tests/plugins/sample-plugins/src/sample_plugins/sources/git.py
+++ b/tests/plugins/sample-plugins/src/sample_plugins/sources/git.py
@@ -155,7 +155,6 @@
The provided ref was not found in the provided track in the element's git repository.
"""
-
import os
import re
import shutil
@@ -204,7 +203,6 @@ def _strip_tag(rev):
#
class GitMirror(SourceFetcher):
def __init__(self, source, path, url, ref, *, primary=False, tags=None):
-
super().__init__()
self.source = source
self.path = path
@@ -579,7 +577,6 @@ def _rebuild_git(self, fullpath):
included = set()
shallow = set()
for _, commit_ref, _ in self.tags:
-
if commit_ref == self.ref:
# rev-list does not work in case of same rev
shallow.add(self.ref)
@@ -832,7 +829,6 @@ def set_ref(self, ref, node):
del node["tags"]
def track(self): # pylint: disable=arguments-differ
-
# If self.tracking is not specified it's not an error, just silently return
if not self.tracking:
# Is there a better way to check if a ref is given.
diff --git a/tests/plugins/sample-plugins/src/sample_plugins/sources/sample.py b/tests/plugins/sample-plugins/src/sample_plugins/sources/sample.py
index 968a0e342..18c6cb378 100644
--- a/tests/plugins/sample-plugins/src/sample_plugins/sources/sample.py
+++ b/tests/plugins/sample-plugins/src/sample_plugins/sources/sample.py
@@ -28,5 +28,4 @@ def is_cached(self):
# Plugin entry point
def setup():
-
return Sample
diff --git a/tests/remotecache/simple.py b/tests/remotecache/simple.py
index 9a5443d24..1f4197ba4 100644
--- a/tests/remotecache/simple.py
+++ b/tests/remotecache/simple.py
@@ -30,6 +30,7 @@
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
+
# Test building an executable with a remote cache:
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif("not pip_sample_packages()", reason=SAMPLE_PACKAGES_SKIP_REASON)
diff --git a/tests/sourcecache/capabilities.py b/tests/sourcecache/capabilities.py
index a22bbf4c5..139c39d0d 100644
--- a/tests/sourcecache/capabilities.py
+++ b/tests/sourcecache/capabilities.py
@@ -66,6 +66,6 @@ def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafi
    # Create a local source cache handle
sourcecache = context.sourcecache
- assert (
- not sourcecache.has_fetch_remotes()
- ), "System didn't realize the source cache didn't support BuildStream"
+ assert not sourcecache.has_fetch_remotes(), (
+ "System didn't realize the source cache didn't support BuildStream"
+ )
diff --git a/tests/sourcecache/project/plugins/elements/always_fail.py b/tests/sourcecache/project/plugins/elements/always_fail.py
index 8885d413a..1ff5d936e 100644
--- a/tests/sourcecache/project/plugins/elements/always_fail.py
+++ b/tests/sourcecache/project/plugins/elements/always_fail.py
@@ -20,7 +20,6 @@
class AlwaysFail(BuildElement):
-
BST_MIN_VERSION = "2.0"
def assemble(self, sandbox):
diff --git a/tests/sourcecache/project/plugins/sources/patch.py b/tests/sourcecache/project/plugins/sources/patch.py
index 00e824925..73ab3632e 100644
--- a/tests/sourcecache/project/plugins/sources/patch.py
+++ b/tests/sourcecache/project/plugins/sources/patch.py
@@ -88,7 +88,6 @@ def fetch(self): # pylint: disable=arguments-differ
def stage(self, directory):
with self.timed_activity("Applying local patch: {}".format(self.path)):
-
# Bail out with a comprehensive message if the target directory is empty
if not os.listdir(directory):
raise SourceError(
diff --git a/tests/sources/local.py b/tests/sources/local.py
index d1d197ec3..61fc291bd 100644
--- a/tests/sources/local.py
+++ b/tests/sources/local.py
@@ -115,7 +115,6 @@ def test_stage_directory(cli, tmpdir, datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "symlink"))
def test_stage_symlink(cli, tmpdir, datafiles):
-
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
diff --git a/tests/sources/tar/fetch/sourcemirrors/bearermirror.py b/tests/sources/tar/fetch/sourcemirrors/bearermirror.py
index f8a6c7143..a02c58035 100644
--- a/tests/sources/tar/fetch/sourcemirrors/bearermirror.py
+++ b/tests/sources/tar/fetch/sourcemirrors/bearermirror.py
@@ -23,7 +23,6 @@ def translate_url(
source_url: str,
extra_data: Optional[Dict[str, Any]],
) -> str:
-
if extra_data is not None:
extra_data["http-auth"] = "bearer"
@@ -32,5 +31,4 @@ def translate_url(
# Plugin entry point
def setup():
-
return Sample
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index fcdbcb597..5333b9f69 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -122,7 +122,6 @@ def _create_server(self):
#
class ArtifactShare(BaseArtifactShare):
def __init__(self, directory, *, quota=None, index_only=False):
-
# The working directory for the artifact share (in case it
# needs to do something outside of its backend's storage folder).
#
@@ -162,7 +161,6 @@ def _create_server(self):
# Returns:
# (bool): True if the object exists in the share, otherwise false.
def has_object(self, digest):
-
assert isinstance(digest, remote_execution_pb2.Digest)
object_path = self.cas.objpath(digest)
@@ -220,7 +218,6 @@ def get_source_proto(self, source_name):
channel.close()
def get_cas_files(self, artifact_proto_digest):
-
reachable = set()
def reachable_dir(digest):
diff --git a/tests/testutils/element_generators.py b/tests/testutils/element_generators.py
index 2547665ff..e7cf3256e 100644
--- a/tests/testutils/element_generators.py
+++ b/tests/testutils/element_generators.py
@@ -44,7 +44,6 @@ def create_element_size(name, project_dir, elements_path, dependencies, size):
repo = create_repo("tar", repodir, subdir=name)
with utils._tempdir(dir=project_dir) as tmp:
-
# We use a data/ subdir in the git repo we create,
# and we set the import element to only extract that
# part; this ensures we never include a .git/ directory
diff --git a/tox.ini b/tox.ini
index 2967e9bc8..90f78d052 100644
--- a/tox.ini
+++ b/tox.ini
@@ -140,9 +140,9 @@ setenv =
[testenv:format]
skip_install = True
deps =
- black==22.3.0
+ ruff==0.12.0
commands =
- black {posargs: src tests doc/source/conf.py setup.py}
+ ruff format {posargs: src tests doc/source/conf.py setup.py}
#
# Code format checkers
@@ -150,9 +150,9 @@ commands =
[testenv:format-check]
skip_install = True
deps =
- black==22.3.0
+ ruff==0.12.0
commands =
- black --check --diff {posargs: src tests doc/source/conf.py setup.py}
+ ruff format --check --diff {posargs: src tests doc/source/conf.py setup.py}
#
# Running linters