diff --git a/changelog/265.fixed.md b/changelog/265.fixed.md new file mode 100644 index 00000000..4e3c43a9 --- /dev/null +++ b/changelog/265.fixed.md @@ -0,0 +1 @@ +Allow SDK tracking feature to continue after encountering delete errors due to impacted nodes having already been deleted by cascade delete. diff --git a/infrahub_sdk/batch.py b/infrahub_sdk/batch.py index c86d7c77..6e9cc1cb 100644 --- a/infrahub_sdk/batch.py +++ b/infrahub_sdk/batch.py @@ -71,16 +71,14 @@ def add(self, *args: Any, task: Callable, node: Any | None = None, **kwargs: Any self._tasks.append(BatchTask(task=task, node=node, args=args, kwargs=kwargs)) async def execute(self) -> AsyncGenerator: - tasks = [] - - for batch_task in self._tasks: - tasks.append( - asyncio.create_task( - execute_batch_task_in_pool( - task=batch_task, semaphore=self.semaphore, return_exceptions=self.return_exceptions - ) + tasks = [ + asyncio.create_task( + execute_batch_task_in_pool( + task=batch_task, semaphore=self.semaphore, return_exceptions=self.return_exceptions ) ) + for batch_task in self._tasks + ] for completed_task in asyncio.as_completed(tasks): node, result = await completed_task diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index 33487ebf..92cebdea 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -1200,12 +1200,11 @@ async def query_gql_query( url_params["update_group"] = str(update_group).lower() if url_params: - url_params_str = [] + url_params_str: list[tuple[str, str]] = [] url_params_dict = {} for key, value in url_params.items(): if isinstance(value, (list)): - for item in value: - url_params_str.append((key, item)) + url_params_str.extend((key, item) for item in value) else: url_params_dict[key] = value @@ -2512,12 +2511,11 @@ def query_gql_query( url_params["update_group"] = str(update_group).lower() if url_params: - url_params_str = [] + url_params_str: list[tuple[str, str]] = [] url_params_dict = {} for key, value in url_params.items(): if isinstance(value, 
(list)): - for item in value: - url_params_str.append((key, item)) + url_params_str.extend((key, item) for item in value) else: url_params_dict[key] = value diff --git a/infrahub_sdk/ctl/check.py b/infrahub_sdk/ctl/check.py index dfed8dd0..c74e08a7 100644 --- a/infrahub_sdk/ctl/check.py +++ b/infrahub_sdk/ctl/check.py @@ -55,8 +55,8 @@ def run( """Locate and execute all checks under the defined path.""" log_level = "DEBUG" if debug else "INFO" - FORMAT = "%(message)s" - logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()]) + format_str = "%(message)s" + logging.basicConfig(level=log_level, format=format_str, datefmt="[%X]", handlers=[RichHandler()]) repository_config = get_repository_config(find_repository_config_file()) diff --git a/infrahub_sdk/ctl/cli_commands.py b/infrahub_sdk/ctl/cli_commands.py index e76225e4..2b571723 100644 --- a/infrahub_sdk/ctl/cli_commands.py +++ b/infrahub_sdk/ctl/cli_commands.py @@ -152,8 +152,8 @@ async def run( logging.getLogger("httpcore").setLevel(logging.ERROR) log_level = "DEBUG" if debug else "INFO" - FORMAT = "%(message)s" - logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()]) + format_str = "%(message)s" + logging.basicConfig(level=log_level, format=format_str, datefmt="[%X]", handlers=[RichHandler()]) log = logging.getLogger("infrahubctl") variables_dict = parse_cli_vars(variables) @@ -239,7 +239,7 @@ async def _run_transform( elif isinstance(error, str) and "Branch:" in error: console.print(f"[yellow] - {error}") console.print("[yellow] you can specify a different branch with --branch") - raise typer.Abort() + raise typer.Abort if inspect.iscoroutinefunction(transform_func): output = await transform_func(response) diff --git a/infrahub_sdk/ctl/config.py b/infrahub_sdk/ctl/config.py index 38b527d1..b3d2a404 100644 --- a/infrahub_sdk/ctl/config.py +++ b/infrahub_sdk/ctl/config.py @@ -44,7 +44,7 @@ def active(self) -> Settings: return 
self._settings print("Configuration not properly loaded") - raise typer.Abort() + raise typer.Abort def load(self, config_file: str | Path = "infrahubctl.toml", config_data: dict | None = None) -> None: """Load configuration. @@ -90,7 +90,7 @@ def load_and_exit(self, config_file: str | Path = "infrahubctl.toml", config_dat for error in exc.errors(): loc_str = [str(item) for item in error["loc"]] print(f" {'/'.join(loc_str)} | {error['msg']} ({error['type']})") - raise typer.Abort() + raise typer.Abort SETTINGS = ConfiguredSettings() diff --git a/infrahub_sdk/ctl/repository.py b/infrahub_sdk/ctl/repository.py index ec03136a..1e4ca6f4 100644 --- a/infrahub_sdk/ctl/repository.py +++ b/infrahub_sdk/ctl/repository.py @@ -150,8 +150,8 @@ async def add( await client.execute_graphql(query=query.render(), tracker="mutation-repository-create") -@app.command() -async def list( +@app.command(name="list") +async def list_repositories( branch: str | None = typer.Option(None, help="Branch on which to list repositories."), debug: bool = False, _: str = CONFIG_PARAM, diff --git a/infrahub_sdk/ctl/utils.py b/infrahub_sdk/ctl/utils.py index f87a81a1..968f6093 100644 --- a/infrahub_sdk/ctl/utils.py +++ b/infrahub_sdk/ctl/utils.py @@ -45,8 +45,8 @@ def init_logging(debug: bool = False) -> None: logging.getLogger("httpcore").setLevel(logging.ERROR) log_level = "DEBUG" if debug else "INFO" - FORMAT = "%(message)s" - logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler(show_path=debug)]) + format_str = "%(message)s" + logging.basicConfig(level=log_level, format=format_str, datefmt="[%X]", handlers=[RichHandler(show_path=debug)]) logging.getLogger("infrahubctl") diff --git a/infrahub_sdk/diff.py b/infrahub_sdk/diff.py index 8e5513e2..fd4d9308 100644 --- a/infrahub_sdk/diff.py +++ b/infrahub_sdk/diff.py @@ -117,18 +117,17 @@ def diff_tree_node_to_node_diff(node_dict: dict[str, Any], branch_name: str) -> }, ) if not is_cardinality_one and "elements" in 
relationship_dict: - peer_diffs = [] - for element_dict in relationship_dict["elements"]: - peer_diffs.append( - NodeDiffPeer( - action=str(element_dict.get("status")), - summary={ - "added": int(element_dict.get("num_added") or 0), - "removed": int(element_dict.get("num_removed") or 0), - "updated": int(element_dict.get("num_updated") or 0), - }, - ) + peer_diffs = [ + NodeDiffPeer( + action=str(element_dict.get("status")), + summary={ + "added": int(element_dict.get("num_added") or 0), + "removed": int(element_dict.get("num_removed") or 0), + "updated": int(element_dict.get("num_updated") or 0), + }, ) + for element_dict in relationship_dict["elements"] + ] relationship_diff["peers"] = peer_diffs element_diffs.append(relationship_diff) return NodeDiff( diff --git a/infrahub_sdk/generator.py b/infrahub_sdk/generator.py index 20b7dc88..0b1efc9c 100644 --- a/infrahub_sdk/generator.py +++ b/infrahub_sdk/generator.py @@ -44,7 +44,7 @@ def __init__( self.params = params or {} self.generator_instance = generator_instance self._client: InfrahubClient | None = None - self.logger = logger if logger else logging.getLogger("infrahub.tasks") + self.logger = logger or logging.getLogger("infrahub.tasks") self.request_context = request_context self.execute_in_proposed_change = execute_in_proposed_change self.execute_after_merge = execute_after_merge diff --git a/infrahub_sdk/graphql/renderers.py b/infrahub_sdk/graphql/renderers.py index 91b77526..b0d2ab28 100644 --- a/infrahub_sdk/graphql/renderers.py +++ b/infrahub_sdk/graphql/renderers.py @@ -134,30 +134,30 @@ def render_query_block(data: dict, offset: int = 4, indentation: int = 4, conver >>> render_query_block(data) [' u: user(id: 123) {', ' name', ' }'] """ - FILTERS_KEY = "@filters" - ALIAS_KEY = "@alias" - KEYWORDS_TO_SKIP = [FILTERS_KEY, ALIAS_KEY] + filters_key = "@filters" + alias_key = "@alias" + keywords_to_skip = [filters_key, alias_key] offset_str = " " * offset lines = [] for key, value in data.items(): - if key 
in KEYWORDS_TO_SKIP: + if key in keywords_to_skip: continue if value is None: lines.append(f"{offset_str}{key}") - elif isinstance(value, dict) and len(value) == 1 and ALIAS_KEY in value and value[ALIAS_KEY]: - lines.append(f"{offset_str}{value[ALIAS_KEY]}: {key}") + elif isinstance(value, dict) and len(value) == 1 and alias_key in value and value[alias_key]: + lines.append(f"{offset_str}{value[alias_key]}: {key}") elif isinstance(value, dict): - if value.get(ALIAS_KEY): - key_str = f"{value[ALIAS_KEY]}: {key}" + if value.get(alias_key): + key_str = f"{value[alias_key]}: {key}" else: key_str = key - if value.get(FILTERS_KEY): + if value.get(filters_key): filters_str = ", ".join( [ f"{key2}: {convert_to_graphql_as_string(value=value2, convert_enum=convert_enum)}" - for key2, value2 in value[FILTERS_KEY].items() + for key2, value2 in value[filters_key].items() ] ) lines.append(f"{offset_str}{key_str}({filters_str}) " + "{") diff --git a/infrahub_sdk/protocols_generator/generator.py b/infrahub_sdk/protocols_generator/generator.py index ee80732f..e70e221c 100644 --- a/infrahub_sdk/protocols_generator/generator.py +++ b/infrahub_sdk/protocols_generator/generator.py @@ -69,7 +69,12 @@ def __init__(self, schema: dict[str, MainSchemaTypesAll]) -> None: ) def render(self, sync: bool = True) -> str: - jinja2_env = jinja2.Environment(loader=jinja2.BaseLoader(), trim_blocks=True, lstrip_blocks=True) + jinja2_env = jinja2.Environment( + loader=jinja2.BaseLoader(), + trim_blocks=True, + lstrip_blocks=True, + autoescape=False, # noqa: S701 + ) jinja2_env.filters["render_attribute"] = self._jinja2_filter_render_attribute jinja2_env.filters["render_relationship"] = self._jinja2_filter_render_relationship jinja2_env.filters["syncify"] = self._jinja2_filter_syncify diff --git a/infrahub_sdk/query_groups.py b/infrahub_sdk/query_groups.py index 696aa260..78525087 100644 --- a/infrahub_sdk/query_groups.py +++ b/infrahub_sdk/query_groups.py @@ -4,7 +4,7 @@ from typing import 
TYPE_CHECKING, Any from .constants import InfrahubClientMode -from .exceptions import NodeNotFoundError +from .exceptions import GraphQLError, NodeNotFoundError from .utils import dict_hash if TYPE_CHECKING: @@ -109,7 +109,13 @@ async def delete_unused(self) -> None: if self.previous_members and self.unused_member_ids: for member in self.previous_members: if member.id in self.unused_member_ids and member.typename: - await self.client.delete(kind=member.typename, id=member.id) + try: + await self.client.delete(kind=member.typename, id=member.id) + except GraphQLError as exc: + if not exc.message or "Unable to find the node" not in exc.message: + # Re-raise any other failure; a "Unable to find the node" error means the node + # was already removed (e.g. by a cascade delete of a related node), so it is skipped + raise async def add_related_nodes(self, ids: list[str], update_group_context: bool | None = None) -> None: """ diff --git a/infrahub_sdk/recorder.py b/infrahub_sdk/recorder.py index 40c45dd3..e2038be7 100644 --- a/infrahub_sdk/recorder.py +++ b/infrahub_sdk/recorder.py @@ -34,6 +34,9 @@ def default(cls) -> NoRecorder: def __eq__(self, other: object) -> bool: return isinstance(other, NoRecorder) + def __hash__(self) -> int: + return hash(self.__class__) + class JSONRecorder(BaseSettings): model_config = SettingsConfigDict(env_prefix="INFRAHUB_JSON_RECORDER_") diff --git a/infrahub_sdk/schema/repository.py b/infrahub_sdk/schema/repository.py index f079baac..defea238 100644 --- a/infrahub_sdk/schema/repository.py +++ b/infrahub_sdk/schema/repository.py @@ -216,10 +216,7 @@ def unique_items(cls, v: list[Any]) -> list[Any]: return v def _has_resource(self, resource_id: str, resource_type: type[ResourceClass], resource_field: str = "name") -> bool: - for item in getattr(self, RESOURCE_MAP[resource_type]): - if getattr(item, resource_field) == resource_id: - return True - return False + return any(getattr(item, resource_field) == resource_id for item in getattr(self, RESOURCE_MAP[resource_type])) def 
_get_resource( self, resource_id: str, resource_type: type[ResourceClass], resource_field: str = "name" diff --git a/infrahub_sdk/spec/object.py b/infrahub_sdk/spec/object.py index e21c8e12..cf7a6fc3 100644 --- a/infrahub_sdk/spec/object.py +++ b/infrahub_sdk/spec/object.py @@ -264,9 +264,11 @@ async def validate_object( context = context.copy() if context else {} # First validate if all mandatory fields are present - for element in schema.mandatory_input_names: - if not any([element in data, element in context]): - errors.append(ObjectValidationError(position=position + [element], message=f"{element} is mandatory")) + errors.extend( + ObjectValidationError(position=position + [element], message=f"{element} is mandatory") + for element in schema.mandatory_input_names + if not any([element in data, element in context]) + ) # Validate if all attributes are valid for key, value in data.items(): diff --git a/infrahub_sdk/utils.py b/infrahub_sdk/utils.py index f62513d3..6168664b 100644 --- a/infrahub_sdk/utils.py +++ b/infrahub_sdk/utils.py @@ -173,7 +173,7 @@ def str_to_bool(value: str | bool | int) -> bool: if not isinstance(value, str): raise TypeError(f"{value} must be a string") - MAP = { + str_to_bool_map = { "y": True, "yes": True, "t": True, @@ -188,7 +188,7 @@ def str_to_bool(value: str | bool | int) -> bool: "0": False, } try: - return MAP[value.lower()] + return str_to_bool_map[value.lower()] except KeyError as exc: raise ValueError(f"{value} can not be converted into a boolean") from exc @@ -338,13 +338,12 @@ def get_user_permissions(data: list[dict]) -> dict: groups = {} for group in data: group_name = group["node"]["display_label"] - permissions = [] + permissions: list[str] = [] roles = group["node"].get("roles", {}).get("edges", []) for role in roles: role_permissions = role["node"].get("permissions", {}).get("edges", []) - for permission in role_permissions: - permissions.append(permission["node"]["identifier"]["value"]) + 
permissions.extend(permission["node"]["identifier"]["value"] for permission in role_permissions) groups[group_name] = permissions diff --git a/infrahub_sdk/yaml.py b/infrahub_sdk/yaml.py index 6b764081..dd5a0754 100644 --- a/infrahub_sdk/yaml.py +++ b/infrahub_sdk/yaml.py @@ -92,10 +92,10 @@ def load_file_from_disk(cls, path: Path) -> list[Self]: has_multiple_document = bool(file_content.count("---") > 1) if has_multiple_document: - for content in yaml.safe_load_all(file_content): - yaml_files.append( - cls.init(location=path, multiple_documents=has_multiple_document, content=content) - ) + yaml_files.extend( + cls.init(location=path, multiple_documents=has_multiple_document, content=content) + for content in yaml.safe_load_all(file_content) + ) else: yaml_files.append( diff --git a/pyproject.toml b/pyproject.toml index 78a7a943..4277c5ea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -219,7 +219,6 @@ ignore = [ ################################################################################################## # Rules below needs to be Investigated # ################################################################################################## - "PT", # flake8-pytest-style "ERA", # eradicate commented-out code "SLF001", # flake8-self "EM", # flake8-errmsg @@ -229,7 +228,6 @@ ignore = [ "TID", # flake8-tidy-imports "FBT", # flake8-boolean-trap "G", # flake8-logging-format - "RSE", # flake8-raise "BLE", # flake8-blind-except (BLE) ################################################################################################## @@ -239,25 +237,16 @@ ignore = [ ################################################################################################## "B008", # Do not perform function call `typer.Option` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable "B904", # Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... 
from None` to distinguish them from errors in exception handling - "FURB110", # Replace ternary `if` expression with `or` operator - "INP001", # File declares a package, but is nested under an implicit namespace package. "N802", # Function name should be lowercase - "N806", # Variable in function should be lowercase "PERF203", # `try`-`except` within a loop incurs performance overhead - "PERF401", # Use a list comprehension to create a transformed list - "PLR0912", # Too many branches "PLR0913", # Too many arguments in function definition "PLR0917", # Too many positional arguments "PLR2004", # Magic value used in comparison "PLR6301", # Method could be a function, class method, or static method - "PLW0603", # Using the global statement to update `SETTINGS` is discouraged - "PLW1641", # Object does not implement `__hash__` method "RUF005", # Consider `[*path, str(key)]` instead of concatenation "RUF029", # Function is declared `async`, but doesn't `await` or use `async` features. "S311", # Standard pseudo-random generators are not suitable for cryptographic purposes - "S701", # By default, jinja2 sets `autoescape` to `False`. 
Consider using `autoescape=True` "SIM108", # Use ternary operator `key_str = f"{value[ALIAS_KEY]}: {key}" if ALIAS_KEY in value and value[ALIAS_KEY] else key` instead of `if`-`else`-block - "SIM110", # Use `return any(getattr(item, resource_field) == resource_id for item in getattr(self, RESOURCE_MAP[resource_type]))` instead of `for` loop "TC003", # Move standard library import `collections.abc.Iterable` into a type-checking block "UP031", # Use format specifiers instead of percent format ] @@ -277,7 +266,6 @@ ignorelist = [ # Review and update builtin shadowing below this line "filter", "format", - "list", "property", ] @@ -301,6 +289,7 @@ max-complexity = 17 "ANN202", # Missing return type annotation for private function "ANN401", # Dynamically typed expressions (typing.Any) are disallowed "ASYNC240", # Async functions should not use pathlib.Path methods, use trio.Path or anyio.path + "PT013", # Incorrect import of `pytest`; use `import pytest` instead ] "infrahub_sdk/client.py" = [ @@ -310,10 +299,45 @@ max-complexity = 17 "PLR0904", # Too many public methods ] +"infrahub_sdk/node/node.py" = [ + ################################################################################################## + # Review and change the below later # + ################################################################################################## + "PLR0912", # Too many branches +] + +"infrahub_sdk/node/related_node.py" = [ + ################################################################################################## + # Review and change the below later # + ################################################################################################## + "PLR0912", # Too many branches +] + "infrahub_sdk/pytest_plugin/models.py" = [ "S105", # 'PASS' is not a password but a state ] +"infrahub_sdk/template/__init__.py" = [ + ################################################################################################## + # Review and change the below later # + 
################################################################################################## + "S701", # By default, jinja2 sets `autoescape` to `False`. Consider using `autoescape=True` or the `select_autoescape` function to mitigate XSS vulnerabilities. +] + +"infrahub_sdk/spec/object.py" = [ + ################################################################################################## + # Review and change the below later # + ################################################################################################## + "PLR0912", # Too many branches +] + +"infrahub_sdk/transfer/importer/json.py" = [ + ################################################################################################## + # Review and change the below later # + ################################################################################################## + "PLR0912", # Too many branches +] + "tests/**/*.py" = [ "PLR2004", # Magic value used in comparison "S101", # Use of assert detected @@ -321,6 +345,10 @@ max-complexity = 17 "S106", # Possible hardcoded password assigned to argument "ARG001", # Unused function argument "ARG002", # Unused method argument + "PT006", # Wrong type passed to first argument of `pytest.mark.parametrize`; expected `tuple` + "PT011", # `pytest.raises(ValueError)` is too broad, set the `match` parameter or use a more specific exception + "PT012", # `pytest.raises()` block should contain a single simple statement + "PT013", # Incorrect import of `pytest`; use `import pytest` instead ] # tests/integration/ diff --git a/tasks.py b/tasks.py index e47bffce..c4c82004 100644 --- a/tasks.py +++ b/tasks.py @@ -100,7 +100,7 @@ def _generate_infrahub_sdk_configuration_documentation() -> None: template_text = template_file.read_text(encoding="utf-8") - environment = jinja2.Environment(trim_blocks=True) + environment = jinja2.Environment(trim_blocks=True, autoescape=jinja2.select_autoescape(default_for_string=False)) template = 
environment.from_string(template_text) rendered_file = template.render(properties=properties) diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/integration/__init__.py b/tests/fixtures/integration/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/integration/test_infrahubctl/__init__.py b/tests/fixtures/integration/test_infrahubctl/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/nested_spec_objects/0_folder/4_subfolder/__init__.py b/tests/fixtures/nested_spec_objects/0_folder/4_subfolder/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/nested_spec_objects/0_folder/__init__.py b/tests/fixtures/nested_spec_objects/0_folder/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/nested_spec_objects/__init__.py b/tests/fixtures/nested_spec_objects/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/repos/__init__.py b/tests/fixtures/repos/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/repos/ctl_integration/__init__.py b/tests/fixtures/repos/ctl_integration/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/repos/ctl_integration/generators/__init__.py b/tests/fixtures/repos/ctl_integration/generators/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/repos/ctl_integration/transforms/__init__.py b/tests/fixtures/repos/ctl_integration/transforms/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/sdk/graphql/__init__.py b/tests/unit/sdk/graphql/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/sdk/spec/__init__.py b/tests/unit/sdk/spec/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/sdk/test_node.py 
b/tests/unit/sdk/test_node.py index 74434a92..8dc18c9b 100644 --- a/tests/unit/sdk/test_node.py +++ b/tests/unit/sdk/test_node.py @@ -106,7 +106,7 @@ async def test_validate_method_signature( replace_async_return_annotation: Callable[[str], str], replace_sync_return_annotation: Callable[[str], str], ) -> None: - EXCLUDE_PARAMETERS = ["client"] + exclude_parameters = ["client"] async_method = getattr(InfrahubNode, method) sync_method = getattr(InfrahubNodeSync, method) async_sig = inspect.signature(async_method) @@ -115,8 +115,8 @@ async def test_validate_method_signature( # Extract names of parameters and exclude some from the comparaison like client async_params_name = async_sig.parameters.keys() sync_params_name = sync_sig.parameters.keys() - async_params = {key: value for key, value in async_sig.parameters.items() if key not in EXCLUDE_PARAMETERS} - sync_params = {key: value for key, value in sync_sig.parameters.items() if key not in EXCLUDE_PARAMETERS} + async_params = {key: value for key, value in async_sig.parameters.items() if key not in exclude_parameters} + sync_params = {key: value for key, value in sync_sig.parameters.items() if key not in exclude_parameters} assert async_params_name == sync_params_name assert replace_sync_parameter_annotations(async_params) == replace_sync_parameter_annotations(sync_params) diff --git a/tests/unit/sdk/test_timestamp.py b/tests/unit/sdk/test_timestamp.py index a4e9bc79..713f2f72 100644 --- a/tests/unit/sdk/test_timestamp.py +++ b/tests/unit/sdk/test_timestamp.py @@ -28,9 +28,9 @@ def test_init_timestamp() -> None: def test_parse_string() -> None: - REF = "2022-01-01T10:00:00.000000Z" + ref = "2022-01-01T10:00:00.000000Z" - assert Timestamp._parse_string(REF).to_instant() == Instant.parse_iso(REF) + assert Timestamp._parse_string(ref).to_instant() == Instant.parse_iso(ref) assert Timestamp._parse_string("5m") assert Timestamp._parse_string("10min") assert Timestamp._parse_string("2h") diff --git a/uv.lock b/uv.lock index 
a4855b8b..77334b35 100644 --- a/uv.lock +++ b/uv.lock @@ -571,11 +571,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.20.0" +version = "3.20.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, + { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, ] [[package]] @@ -2943,11 +2943,11 @@ wheels = [ [[package]] name = "urllib3" -version = "2.6.0" +version = "2.6.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/43/554c2569b62f49350597348fc3ac70f786e3c32e7f19d266e19817812dd3/urllib3-2.6.0.tar.gz", hash = "sha256:cb9bcef5a4b345d5da5d145dc3e30834f58e8018828cbc724d30b4cb7d4d49f1", size = 432585, upload-time = "2025-12-05T15:08:47.885Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = 
"sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/1a/9ffe814d317c5224166b23e7c47f606d6e473712a2fad0f704ea9b99f246/urllib3-2.6.0-py3-none-any.whl", hash = "sha256:c90f7a39f716c572c4e3e58509581ebd83f9b59cced005b7db7ad2d22b0db99f", size = 131083, upload-time = "2025-12-05T15:08:45.983Z" }, + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] [[package]] @@ -2966,7 +2966,7 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.35.3" +version = "20.36.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, @@ -2974,9 +2974,9 @@ dependencies = [ { name = "platformdirs" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a4/d5/b0ccd381d55c8f45d46f77df6ae59fbc23d19e901e2d523395598e5f4c93/virtualenv-20.35.3.tar.gz", hash = "sha256:4f1a845d131133bdff10590489610c98c168ff99dc75d6c96853801f7f67af44", size = 6002907, upload-time = "2025-10-10T21:23:33.178Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/73/d9a94da0e9d470a543c1b9d3ccbceb0f59455983088e727b8a1824ed90fb/virtualenv-20.35.3-py3-none-any.whl", hash = "sha256:63d106565078d8c8d0b206d48080f938a8b25361e19432d2c9db40d2899c810a", size = 5981061, upload-time = "2025-10-10T21:23:30.433Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, ] [[package]]