Merged
Changes from all commits
Commits
28 commits
c9d8342
Prefer variables in lowercase
ogenstad Jan 27, 2026
044ee84
Replace ternary `if` expression with `or` operator
ogenstad Jan 27, 2026
100866a
Merge pull request #772 from opsmill/pog-FURB110
ogenstad Jan 27, 2026
2f06924
Merge pull request #771 from opsmill/pog-N806
ogenstad Jan 27, 2026
5e15523
Simplify for-loop with any()
ogenstad Jan 27, 2026
6673875
Merge pull request #777 from opsmill/pog-SIM110
ogenstad Jan 27, 2026
fda5fcf
Prevent cascade delete from causing SDK tracking delete to fail
ogenstad Jan 27, 2026
dee576f
Use a list comprehension to create a transformed list
ogenstad Jan 27, 2026
8852d21
Merge pull request #776 from opsmill/pog-early-error-return-IHS-98
ogenstad Jan 27, 2026
10e642b
Merge pull request #775 from opsmill/stable
ogenstad Jan 27, 2026
8f9606a
Merge pull request #774 from opsmill/pog-PERF401
ogenstad Jan 27, 2026
184f421
Add missing __init__.py files
ogenstad Jan 27, 2026
2b01488
Add __hash__ method to object that already has __eq__
ogenstad Jan 27, 2026
42d7871
Fix ruff Jinja2 violations and move the remaining warning
ogenstad Jan 27, 2026
06f6f76
Merge pull request #778 from opsmill/pog-INP001
ogenstad Jan 28, 2026
296c34a
Merge pull request #781 from opsmill/pog-S701
ogenstad Jan 28, 2026
aeba8d8
Merge pull request #780 from opsmill/pog-PLW1641
ogenstad Jan 28, 2026
5aa7e40
Move rules for too many branches
ogenstad Jan 27, 2026
be07068
Merge pull request #779 from opsmill/pog-too-many-branches
ogenstad Jan 28, 2026
33e6410
Update packages: urllib3, filelock & virtualenv
ogenstad Jan 28, 2026
3fbbcf3
Merge pull request #783 from opsmill/pog-update-packages
ogenstad Jan 28, 2026
e58e329
Fix flake8 raise linting violations
ogenstad Jan 28, 2026
8f9641c
Split apart pytest rule violations
ogenstad Jan 28, 2026
d6defe3
Avoid shadowing builtin `list`
ogenstad Jan 28, 2026
60681d2
Merge pull request #784 from opsmill/pog-flake8-raise
ogenstad Jan 28, 2026
2c5f582
Merge pull request #785 from opsmill/pog-split-pytest-rule-violations
ogenstad Jan 28, 2026
09db4fa
Merge pull request #786 from opsmill/pog-shadow-list
ogenstad Jan 28, 2026
4843e01
Merge pull request #782 from opsmill/stable
ogenstad Jan 28, 2026
1 change: 1 addition & 0 deletions changelog/265.fixed.md
@@ -0,0 +1 @@
+Allow SDK tracking feature to continue after encountering delete errors due to impacted nodes having already been deleted by cascade delete.
14 changes: 6 additions & 8 deletions infrahub_sdk/batch.py
@@ -71,16 +71,14 @@ def add(self, *args: Any, task: Callable, node: Any | None = None, **kwargs: Any
         self._tasks.append(BatchTask(task=task, node=node, args=args, kwargs=kwargs))

     async def execute(self) -> AsyncGenerator:
-        tasks = []
-
-        for batch_task in self._tasks:
-            tasks.append(
-                asyncio.create_task(
-                    execute_batch_task_in_pool(
-                        task=batch_task, semaphore=self.semaphore, return_exceptions=self.return_exceptions
-                    )
+        tasks = [
+            asyncio.create_task(
+                execute_batch_task_in_pool(
+                    task=batch_task, semaphore=self.semaphore, return_exceptions=self.return_exceptions
                 )
             )
+            for batch_task in self._tasks
+        ]

         for completed_task in asyncio.as_completed(tasks):
             node, result = await completed_task
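Note: the refactor above relies on asyncio.create_task() scheduling each coroutine immediately, with the semaphore (not the task list) bounding concurrency. A standalone sketch of the same pattern, with hypothetical names in place of the SDK's BatchTask plumbing:

import asyncio

async def run_in_pool(coro, semaphore: asyncio.Semaphore):
    # The semaphore caps how many coroutines run at once,
    # even though every task is created up front.
    async with semaphore:
        return await coro

async def main() -> None:
    semaphore = asyncio.Semaphore(5)  # assumed pool size
    tasks = [
        asyncio.create_task(run_in_pool(asyncio.sleep(0.01, result=i), semaphore))
        for i in range(10)
    ]
    # Consume results in completion order, matching execute() above.
    for completed_task in asyncio.as_completed(tasks):
        print(await completed_task)

asyncio.run(main())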
10 changes: 4 additions & 6 deletions infrahub_sdk/client.py
@@ -1200,12 +1200,11 @@ async def query_gql_query(
             url_params["update_group"] = str(update_group).lower()

         if url_params:
-            url_params_str = []
+            url_params_str: list[tuple[str, str]] = []
             url_params_dict = {}
             for key, value in url_params.items():
                 if isinstance(value, (list)):
-                    for item in value:
-                        url_params_str.append((key, item))
+                    url_params_str.extend((key, item) for item in value)
                 else:
                     url_params_dict[key] = value

@@ -2512,12 +2511,11 @@ def query_gql_query(
             url_params["update_group"] = str(update_group).lower()

         if url_params:
-            url_params_str = []
+            url_params_str: list[tuple[str, str]] = []
             url_params_dict = {}
             for key, value in url_params.items():
                 if isinstance(value, (list)):
-                    for item in value:
-                        url_params_str.append((key, item))
+                    url_params_str.extend((key, item) for item in value)
                 else:
                     url_params_dict[key] = value

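Note: the list.extend()-with-a-generator form is behaviorally identical to the removed inner append loop (this is ruff's PERF401). A small illustration with made-up query parameters:

url_params = {"branch": "main", "rebase": ["a", "b"]}  # hypothetical values

url_params_str: list[tuple[str, str]] = []
url_params_dict: dict[str, str] = {}
for key, value in url_params.items():
    if isinstance(value, list):
        # One extend() call replaces the nested append loop
        url_params_str.extend((key, item) for item in value)
    else:
        url_params_dict[key] = value

print(url_params_str)   # [('rebase', 'a'), ('rebase', 'b')]
print(url_params_dict)  # {'branch': 'main'}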
4 changes: 2 additions & 2 deletions infrahub_sdk/ctl/check.py
@@ -55,8 +55,8 @@ def run(
     """Locate and execute all checks under the defined path."""

     log_level = "DEBUG" if debug else "INFO"
-    FORMAT = "%(message)s"
-    logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
+    format_str = "%(message)s"
+    logging.basicConfig(level=log_level, format=format_str, datefmt="[%X]", handlers=[RichHandler()])

     repository_config = get_repository_config(find_repository_config_file())

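Note: the FORMAT → format_str renames in this and the following files address ruff's N806, which reserves UPPER_CASE names for module-level constants; a variable assigned inside a function body should be lowercase. A minimal before/after sketch:

LOG_FORMAT = "%(message)s"  # module level: UPPER_CASE is fine

def configure() -> str:
    # N806 would flag `FORMAT = ...` here; lowercase satisfies the rule
    format_str = "%(message)s"
    return format_str

assert configure() == LOG_FORMAT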
6 changes: 3 additions & 3 deletions infrahub_sdk/ctl/cli_commands.py
@@ -152,8 +152,8 @@ async def run(
     logging.getLogger("httpcore").setLevel(logging.ERROR)

     log_level = "DEBUG" if debug else "INFO"
-    FORMAT = "%(message)s"
-    logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
+    format_str = "%(message)s"
+    logging.basicConfig(level=log_level, format=format_str, datefmt="[%X]", handlers=[RichHandler()])
     log = logging.getLogger("infrahubctl")

     variables_dict = parse_cli_vars(variables)
@@ -239,7 +239,7 @@ async def _run_transform(
         elif isinstance(error, str) and "Branch:" in error:
             console.print(f"[yellow] - {error}")
             console.print("[yellow] you can specify a different branch with --branch")
-        raise typer.Abort()
+        raise typer.Abort

     if inspect.iscoroutinefunction(transform_func):
         output = await transform_func(response)
4 changes: 2 additions & 2 deletions infrahub_sdk/ctl/config.py
@@ -44,7 +44,7 @@ def active(self) -> Settings:
             return self._settings

         print("Configuration not properly loaded")
-        raise typer.Abort()
+        raise typer.Abort

     def load(self, config_file: str | Path = "infrahubctl.toml", config_data: dict | None = None) -> None:
         """Load configuration.
@@ -90,7 +90,7 @@ def load_and_exit(self, config_file: str | Path = "infrahubctl.toml", config_dat
             for error in exc.errors():
                 loc_str = [str(item) for item in error["loc"]]
                 print(f" {'/'.join(loc_str)} | {error['msg']} ({error['type']})")
-            raise typer.Abort()
+            raise typer.Abort


 SETTINGS = ConfiguredSettings()
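Note: the `raise typer.Abort()` → `raise typer.Abort` changes are flake8-raise (RSE102) fixes. When an exception class is raised with no arguments, Python instantiates it automatically, so the explicit call is redundant. Illustrated with a stand-in class rather than typer itself:

class Abort(Exception):  # stand-in for typer.Abort
    pass

try:
    raise Abort  # equivalent to `raise Abort()`
except Abort as exc:
    print(type(exc).__name__)  # Abort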
4 changes: 2 additions & 2 deletions infrahub_sdk/ctl/repository.py
@@ -150,8 +150,8 @@ async def add(
     await client.execute_graphql(query=query.render(), tracker="mutation-repository-create")


-@app.command()
-async def list(
+@app.command(name="list")
+async def list_repositories(
     branch: str | None = typer.Option(None, help="Branch on which to list repositories."),
     debug: bool = False,
     _: str = CONFIG_PARAM,
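Note: the rename keeps the CLI subcommand spelled `list` while freeing the Python identifier, so the function no longer shadows the builtin. A minimal sketch with a hypothetical app:

import typer

app = typer.Typer()

@app.command(name="list")  # users still type `mycli list`
def list_repositories() -> None:
    typer.echo("repo-a\nrepo-b")

if __name__ == "__main__":
    app()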
4 changes: 2 additions & 2 deletions infrahub_sdk/ctl/utils.py
@@ -45,8 +45,8 @@ def init_logging(debug: bool = False) -> None:
     logging.getLogger("httpcore").setLevel(logging.ERROR)

     log_level = "DEBUG" if debug else "INFO"
-    FORMAT = "%(message)s"
-    logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler(show_path=debug)])
+    format_str = "%(message)s"
+    logging.basicConfig(level=log_level, format=format_str, datefmt="[%X]", handlers=[RichHandler(show_path=debug)])
     logging.getLogger("infrahubctl")


21 changes: 10 additions & 11 deletions infrahub_sdk/diff.py
@@ -117,18 +117,17 @@ def diff_tree_node_to_node_diff(node_dict: dict[str, Any], branch_name: str) ->
             },
         )
         if not is_cardinality_one and "elements" in relationship_dict:
-            peer_diffs = []
-            for element_dict in relationship_dict["elements"]:
-                peer_diffs.append(
-                    NodeDiffPeer(
-                        action=str(element_dict.get("status")),
-                        summary={
-                            "added": int(element_dict.get("num_added") or 0),
-                            "removed": int(element_dict.get("num_removed") or 0),
-                            "updated": int(element_dict.get("num_updated") or 0),
-                        },
-                    )
+            peer_diffs = [
+                NodeDiffPeer(
+                    action=str(element_dict.get("status")),
+                    summary={
+                        "added": int(element_dict.get("num_added") or 0),
+                        "removed": int(element_dict.get("num_removed") or 0),
+                        "updated": int(element_dict.get("num_updated") or 0),
+                    },
                 )
+                for element_dict in relationship_dict["elements"]
+            ]
             relationship_diff["peers"] = peer_diffs
         element_diffs.append(relationship_diff)
     return NodeDiff(
2 changes: 1 addition & 1 deletion infrahub_sdk/generator.py
@@ -44,7 +44,7 @@ def __init__(
         self.params = params or {}
         self.generator_instance = generator_instance
         self._client: InfrahubClient | None = None
-        self.logger = logger if logger else logging.getLogger("infrahub.tasks")
+        self.logger = logger or logging.getLogger("infrahub.tasks")
         self.request_context = request_context
         self.execute_in_proposed_change = execute_in_proposed_change
         self.execute_after_merge = execute_after_merge
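Note: `logger or default` (ruff's FURB110) is equivalent to the removed ternary because both fall back whenever the left side is falsy. That is safe here: a logging.Logger instance is always truthy, so only logger=None triggers the fallback. A quick check:

import logging

logger = None
resolved = logger or logging.getLogger("infrahub.tasks")
print(resolved.name)  # infrahub.tasks

logger = logging.getLogger("custom")
resolved = logger or logging.getLogger("infrahub.tasks")
print(resolved.name)  # custom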
20 changes: 10 additions & 10 deletions infrahub_sdk/graphql/renderers.py
@@ -134,30 +134,30 @@ def render_query_block(data: dict, offset: int = 4, indentation: int = 4, conver
     >>> render_query_block(data)
     ['    u: user(id: 123) {', '        name', '    }']
     """
-    FILTERS_KEY = "@filters"
-    ALIAS_KEY = "@alias"
-    KEYWORDS_TO_SKIP = [FILTERS_KEY, ALIAS_KEY]
+    filters_key = "@filters"
+    alias_key = "@alias"
+    keywords_to_skip = [filters_key, alias_key]

     offset_str = " " * offset
     lines = []
     for key, value in data.items():
-        if key in KEYWORDS_TO_SKIP:
+        if key in keywords_to_skip:
             continue
         if value is None:
             lines.append(f"{offset_str}{key}")
-        elif isinstance(value, dict) and len(value) == 1 and ALIAS_KEY in value and value[ALIAS_KEY]:
-            lines.append(f"{offset_str}{value[ALIAS_KEY]}: {key}")
+        elif isinstance(value, dict) and len(value) == 1 and alias_key in value and value[alias_key]:
+            lines.append(f"{offset_str}{value[alias_key]}: {key}")
         elif isinstance(value, dict):
-            if value.get(ALIAS_KEY):
-                key_str = f"{value[ALIAS_KEY]}: {key}"
+            if value.get(alias_key):
+                key_str = f"{value[alias_key]}: {key}"
             else:
                 key_str = key

-            if value.get(FILTERS_KEY):
+            if value.get(filters_key):
                 filters_str = ", ".join(
                     [
                         f"{key2}: {convert_to_graphql_as_string(value=value2, convert_enum=convert_enum)}"
-                        for key2, value2 in value[FILTERS_KEY].items()
+                        for key2, value2 in value[filters_key].items()
                     ]
                 )
                 lines.append(f"{offset_str}{key_str}({filters_str}) " + "{")
7 changes: 6 additions & 1 deletion infrahub_sdk/protocols_generator/generator.py
@@ -69,7 +69,12 @@ def __init__(self, schema: dict[str, MainSchemaTypesAll]) -> None:
         )

     def render(self, sync: bool = True) -> str:
-        jinja2_env = jinja2.Environment(loader=jinja2.BaseLoader(), trim_blocks=True, lstrip_blocks=True)
+        jinja2_env = jinja2.Environment(
+            loader=jinja2.BaseLoader(),
+            trim_blocks=True,
+            lstrip_blocks=True,
+            autoescape=False,  # noqa: S701
+        )
         jinja2_env.filters["render_attribute"] = self._jinja2_filter_render_attribute
         jinja2_env.filters["render_relationship"] = self._jinja2_filter_render_relationship
         jinja2_env.filters["syncify"] = self._jinja2_filter_syncify
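Note: ruff's S701 flags Jinja2 environments created without autoescape because the default (no escaping) is unsafe for HTML output. This generator renders Python source rather than HTML, so spelling out autoescape=False with a noqa records that the default is intentional. A sketch of the same setup:

import jinja2

env = jinja2.Environment(
    loader=jinja2.BaseLoader(),
    trim_blocks=True,
    lstrip_blocks=True,
    autoescape=False,  # noqa: S701 - output is Python code, not HTML
)
# hypothetical template; the real generator renders protocol classes
template = env.from_string("class {{ name }}(Protocol): ...")
print(template.render(name="CoreNode"))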
10 changes: 8 additions & 2 deletions infrahub_sdk/query_groups.py
@@ -4,7 +4,7 @@
 from typing import TYPE_CHECKING, Any

 from .constants import InfrahubClientMode
-from .exceptions import NodeNotFoundError
+from .exceptions import GraphQLError, NodeNotFoundError
 from .utils import dict_hash

 if TYPE_CHECKING:
@@ -109,7 +109,13 @@ async def delete_unused(self) -> None:
         if self.previous_members and self.unused_member_ids:
             for member in self.previous_members:
                 if member.id in self.unused_member_ids and member.typename:
-                    await self.client.delete(kind=member.typename, id=member.id)
+                    try:
+                        await self.client.delete(kind=member.typename, id=member.id)
+                    except GraphQLError as exc:
+                        if not exc.message or "Unable to find the node" not in exc.message:
+                            # If the node has already been deleted, skip the error, as it would have been
+                            # deleted by the cascade delete of another node
+                            raise

     async def add_related_nodes(self, ids: list[str], update_group_context: bool | None = None) -> None:
         """
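Note: this is the fix behind changelog/265.fixed.md. When cascade delete removes node B as a side effect of deleting node A, the tracking cleanup's own delete of B fails with a "not found" error, which is now safe to swallow; any other GraphQL error still propagates. A condensed sketch with simplified stand-ins for the SDK client and its GraphQLError (the error message below is hypothetical):

class GraphQLError(Exception):  # simplified stand-in
    def __init__(self, message: str | None = None) -> None:
        super().__init__(message)
        self.message = message

def delete(node_id: str) -> None:
    # Pretend every node was already removed by a cascade delete
    raise GraphQLError(f"Unable to find the node in the database. (id: {node_id})")

for member_id in ("a", "b"):
    try:
        delete(member_id)
    except GraphQLError as exc:
        if not exc.message or "Unable to find the node" not in exc.message:
            raise  # unrelated failure: surface it
        # node already gone via cascade delete; keep going

print("cleanup finished")  # reached despite both deletes "failing"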
3 changes: 3 additions & 0 deletions infrahub_sdk/recorder.py
@@ -34,6 +34,9 @@ def default(cls) -> NoRecorder:
     def __eq__(self, other: object) -> bool:
         return isinstance(other, NoRecorder)

+    def __hash__(self) -> int:
+        return hash(self.__class__)
+

 class JSONRecorder(BaseSettings):
     model_config = SettingsConfigDict(env_prefix="INFRAHUB_JSON_RECORDER_")
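Note: context for ruff's PLW1641. A class that defines __eq__ without __hash__ gets __hash__ set to None, making instances unhashable and unusable in sets or as dict keys. Since every NoRecorder compares equal, hashing by class preserves the contract that equal objects hash equal. A minimal illustration:

class WithoutHash:
    def __eq__(self, other: object) -> bool:
        return isinstance(other, WithoutHash)

class WithHash:
    def __eq__(self, other: object) -> bool:
        return isinstance(other, WithHash)

    def __hash__(self) -> int:
        return hash(self.__class__)

try:
    {WithoutHash()}
except TypeError as exc:
    print(exc)  # unhashable type: 'WithoutHash'

print(len({WithHash(), WithHash()}))  # 1: equal objects, equal hashes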
5 changes: 1 addition & 4 deletions infrahub_sdk/schema/repository.py
@@ -216,10 +216,7 @@ def unique_items(cls, v: list[Any]) -> list[Any]:
         return v

     def _has_resource(self, resource_id: str, resource_type: type[ResourceClass], resource_field: str = "name") -> bool:
-        for item in getattr(self, RESOURCE_MAP[resource_type]):
-            if getattr(item, resource_field) == resource_id:
-                return True
-        return False
+        return any(getattr(item, resource_field) == resource_id for item in getattr(self, RESOURCE_MAP[resource_type]))

     def _get_resource(
         self, resource_id: str, resource_type: type[ResourceClass], resource_field: str = "name"
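Note: the any() form (ruff's SIM110) short-circuits on the first match exactly like the replaced loop. A small demonstration with made-up resources:

from types import SimpleNamespace

resources = [SimpleNamespace(name="alpha"), SimpleNamespace(name="beta")]

print(any(item.name == "beta" for item in resources))   # True
print(any(item.name == "gamma" for item in resources))  # False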
8 changes: 5 additions & 3 deletions infrahub_sdk/spec/object.py
@@ -264,9 +264,11 @@ async def validate_object(
         context = context.copy() if context else {}

         # First validate if all mandatory fields are present
-        for element in schema.mandatory_input_names:
-            if not any([element in data, element in context]):
-                errors.append(ObjectValidationError(position=position + [element], message=f"{element} is mandatory"))
+        errors.extend(
+            ObjectValidationError(position=position + [element], message=f"{element} is mandatory")
+            for element in schema.mandatory_input_names
+            if not any([element in data, element in context])
+        )

         # Validate if all attributes are valid
         for key, value in data.items():
9 changes: 4 additions & 5 deletions infrahub_sdk/utils.py
@@ -173,7 +173,7 @@ def str_to_bool(value: str | bool | int) -> bool:
     if not isinstance(value, str):
         raise TypeError(f"{value} must be a string")

-    MAP = {
+    str_to_bool_map = {
         "y": True,
         "yes": True,
         "t": True,
@@ -188,7 +188,7 @@ def str_to_bool(value: str | bool | int) -> bool:
         "0": False,
     }
     try:
-        return MAP[value.lower()]
+        return str_to_bool_map[value.lower()]
     except KeyError as exc:
         raise ValueError(f"{value} can not be converted into a boolean") from exc

@@ -338,13 +338,12 @@ def get_user_permissions(data: list[dict]) -> dict:
     groups = {}
     for group in data:
         group_name = group["node"]["display_label"]
-        permissions = []
+        permissions: list[str] = []

         roles = group["node"].get("roles", {}).get("edges", [])
         for role in roles:
             role_permissions = role["node"].get("permissions", {}).get("edges", [])
-            for permission in role_permissions:
-                permissions.append(permission["node"]["identifier"]["value"])
+            permissions.extend(permission["node"]["identifier"]["value"] for permission in role_permissions)

         groups[group_name] = permissions

8 changes: 4 additions & 4 deletions infrahub_sdk/yaml.py
@@ -92,10 +92,10 @@ def load_file_from_disk(cls, path: Path) -> list[Self]:
             has_multiple_document = bool(file_content.count("---") > 1)

             if has_multiple_document:
-                for content in yaml.safe_load_all(file_content):
-                    yaml_files.append(
-                        cls.init(location=path, multiple_documents=has_multiple_document, content=content)
-                    )
+                yaml_files.extend(
+                    cls.init(location=path, multiple_documents=has_multiple_document, content=content)
+                    for content in yaml.safe_load_all(file_content)
+                )

             else:
                 yaml_files.append(
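Note: yaml.safe_load_all() yields one object per "---"-separated document, which is what lets the extend()-with-generator form replace the append loop. A minimal sketch:

import yaml

file_content = "a: 1\n---\nb: 2\n"
docs = list(yaml.safe_load_all(file_content))
print(docs)  # [{'a': 1}, {'b': 2}]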